From ead4c3da2a15fef5744264d2278be813b5b392fd Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 6 Nov 2023 10:12:42 -0500 Subject: [PATCH 001/544] move to rel_2_0 Change-Id: Ic52aea08e17e67ab19afce6ccb8fe74ca59ecd4c --- .gitreview | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitreview b/.gitreview index 01d8b1770f7..3e5e2b50dac 100644 --- a/.gitreview +++ b/.gitreview @@ -1,4 +1,4 @@ [gerrit] host=gerrit.sqlalchemy.org project=sqlalchemy/sqlalchemy -defaultbranch=main +defaultbranch=rel_2_0 From 64a97ab900e5876b8348d8d658bcbc90c31da9c1 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 6 Nov 2023 17:46:14 -0500 Subject: [PATCH 002/544] always derive type from element in annotated case Fixed issue where use of :func:`_orm.foreign` annotation on a non-initialized :func:`_orm.mapped_column` construct would produce an expression without a type, which was then not updated at initialization time of the actual column, leading to issues such as relationships not determining ``use_get`` appropriately. Fixes: #10597 Change-Id: I8339ba715ec6bd1f50888f8a424c3ac156e2364f (cherry picked from commit 432eb350a4b81ba557f14d49ebd37cf5899d5423) --- doc/build/changelog/unreleased_20/10597.rst | 10 +++++ lib/sqlalchemy/sql/elements.py | 29 ++++++++++++++ lib/sqlalchemy/sql/schema.py | 2 + .../test_tm_future_annotations_sync.py | 39 +++++++++++++++++++ test/orm/declarative/test_typed_mapping.py | 39 +++++++++++++++++++ test/sql/test_selectable.py | 32 +++++++++++++++ 6 files changed, 151 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/10597.rst diff --git a/doc/build/changelog/unreleased_20/10597.rst b/doc/build/changelog/unreleased_20/10597.rst new file mode 100644 index 00000000000..97645188296 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10597.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, orm + :tickets: 10597 + + Fixed issue where use of :func:`_orm.foreign` annotation on a + non-initialized :func:`_orm.mapped_column` construct would produce an + expression without a type, which was then not updated at initialization + time of the actual column, leading to issues such as relationships not + determining ``use_get`` appropriately. + diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 90ee100aae0..48dfd25829a 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -5223,6 +5223,20 @@ def _corresponding_column_or_error(fromclause, column, require_embedded=False): return c +class _memoized_property_but_not_nulltype( + util.memoized_property["TypeEngine[_T]"] +): + """memoized property, but dont memoize NullType""" + + def __get__(self, obj, cls): + if obj is None: + return self + result = self.fget(obj) + if not result._isnull: + obj.__dict__[self.__name__] = result + return result + + class AnnotatedColumnElement(Annotated): _Annotated__element: ColumnElement[Any] @@ -5234,6 +5248,7 @@ def __init__(self, element, values): "_tq_key_label", "_tq_label", "_non_anon_label", + "type", ): self.__dict__.pop(attr, None) for attr in ("name", "key", "table"): @@ -5250,6 +5265,20 @@ def name(self): """pull 'name' from parent, if not present""" return self._Annotated__element.name + @_memoized_property_but_not_nulltype + def type(self): + """pull 'type' from parent and don't cache if null. + + type is routinely changed on existing columns within the + mapped_column() initialization process, and "type" is also consulted + during the creation of SQL expressions. 
Therefore it can change after + it was already retrieved. At the same time we don't want annotated + objects having overhead when expressions are produced, so continue + to memoize, but only when we have a non-null type. + + """ + return self._Annotated__element.type + @util.memoized_property def table(self): """pull 'table' from parent, if not present""" diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index c464d7eb0ea..d4e3f4cff51 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -2204,6 +2204,8 @@ def __init__( identity: Optional[Identity] def _set_type(self, type_: TypeEngine[Any]) -> None: + assert self.type._isnull or type_ is self.type + self.type = type_ if isinstance(self.type, SchemaEventTarget): self.type._set_parent_with_dispatch(self) diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index ec5f5e82097..e61900418e2 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -62,10 +62,12 @@ from sqlalchemy.orm import declared_attr from sqlalchemy.orm import deferred from sqlalchemy.orm import DynamicMapped +from sqlalchemy.orm import foreign from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.orm import MappedAsDataclass from sqlalchemy.orm import relationship +from sqlalchemy.orm import remote from sqlalchemy.orm import Session from sqlalchemy.orm import undefer from sqlalchemy.orm import WriteOnlyMapped @@ -177,6 +179,43 @@ class MyClass(Base): is_(MyClass.__table__.c.data.type, typ) is_true(MyClass.__table__.c.id.primary_key) + @testing.variation("style", ["none", "lambda_", "string", "direct"]) + def test_foreign_annotation_propagates_correctly(self, decl_base, style): + """test #10597""" + + class Parent(decl_base): + __tablename__ = "parent" + id: Mapped[int] = mapped_column(primary_key=True) + + class Child(decl_base): + __tablename__ = "child" + + name: Mapped[str] = mapped_column(primary_key=True) + + if style.none: + parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id")) + else: + parent_id: Mapped[int] = mapped_column() + + if style.lambda_: + parent: Mapped[Parent] = relationship( + primaryjoin=lambda: remote(Parent.id) + == foreign(Child.parent_id), + ) + elif style.string: + parent: Mapped[Parent] = relationship( + primaryjoin="remote(Parent.id) == " + "foreign(Child.parent_id)", + ) + elif style.direct: + parent: Mapped[Parent] = relationship( + primaryjoin=remote(Parent.id) == foreign(parent_id), + ) + elif style.none: + parent: Mapped[Parent] = relationship() + + assert Child.__mapper__.attrs.parent.strategy.use_get + @testing.combinations( (BIGINT(),), (BIGINT,), diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 6b8becf9c02..8da83ccb9d6 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -53,10 +53,12 @@ from sqlalchemy.orm import declared_attr from sqlalchemy.orm import deferred from sqlalchemy.orm import DynamicMapped +from sqlalchemy.orm import foreign from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.orm import MappedAsDataclass from sqlalchemy.orm import relationship +from sqlalchemy.orm import remote from sqlalchemy.orm import Session from sqlalchemy.orm import undefer from sqlalchemy.orm import WriteOnlyMapped @@ -168,6 +170,43 @@ class MyClass(Base): 
is_(MyClass.__table__.c.data.type, typ) is_true(MyClass.__table__.c.id.primary_key) + @testing.variation("style", ["none", "lambda_", "string", "direct"]) + def test_foreign_annotation_propagates_correctly(self, decl_base, style): + """test #10597""" + + class Parent(decl_base): + __tablename__ = "parent" + id: Mapped[int] = mapped_column(primary_key=True) + + class Child(decl_base): + __tablename__ = "child" + + name: Mapped[str] = mapped_column(primary_key=True) + + if style.none: + parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id")) + else: + parent_id: Mapped[int] = mapped_column() + + if style.lambda_: + parent: Mapped[Parent] = relationship( + primaryjoin=lambda: remote(Parent.id) + == foreign(Child.parent_id), + ) + elif style.string: + parent: Mapped[Parent] = relationship( + primaryjoin="remote(Parent.id) == " + "foreign(Child.parent_id)", + ) + elif style.direct: + parent: Mapped[Parent] = relationship( + primaryjoin=remote(Parent.id) == foreign(parent_id), + ) + elif style.none: + parent: Mapped[Parent] = relationship() + + assert Child.__mapper__.attrs.parent.strategy.use_get + @testing.combinations( (BIGINT(),), (BIGINT,), diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py index a146a94c600..d3b7b47841f 100644 --- a/test/sql/test_selectable.py +++ b/test/sql/test_selectable.py @@ -41,6 +41,7 @@ from sqlalchemy.sql import LABEL_STYLE_DISAMBIGUATE_ONLY from sqlalchemy.sql import LABEL_STYLE_TABLENAME_PLUS_COL from sqlalchemy.sql import operators +from sqlalchemy.sql import sqltypes from sqlalchemy.sql import table from sqlalchemy.sql import util as sql_util from sqlalchemy.sql import visitors @@ -3023,6 +3024,37 @@ def test_replacement_traverse_preserve(self): eq_(whereclause.left._annotations, {"foo": "bar"}) eq_(whereclause.right._annotations, {"foo": "bar"}) + @testing.variation("use_col_ahead_of_time", [True, False]) + def test_set_type_on_column(self, use_col_ahead_of_time): + """test related to #10597""" + + col = Column() + + col_anno = col._annotate({"foo": "bar"}) + + if use_col_ahead_of_time: + expr = col_anno == bindparam("foo") + + # this could only be fixed if we put some kind of a container + # that receives the type directly rather than using NullType; + # like a PendingType or something + + is_(expr.right.type._type_affinity, sqltypes.NullType) + + assert "type" not in col_anno.__dict__ + + col.name = "name" + col._set_type(Integer()) + + eq_(col_anno.name, "name") + is_(col_anno.type._type_affinity, Integer) + + expr = col_anno == bindparam("foo") + + is_(expr.right.type._type_affinity, Integer) + + assert "type" in col_anno.__dict__ + @testing.combinations(True, False, None) def test_setup_inherit_cache(self, inherit_cache_value): if inherit_cache_value is None: From 389703691211d61f9e59183baebac7002d003c96 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 8 Nov 2023 15:20:24 -0500 Subject: [PATCH 003/544] remove . in sys.path this should not be needed and is causing problems in python 3.12 due to the presence of the "changelog" directory (cherry picked from commit 8faa17d4316772340295a677c54eccf647a221c9) --- doc/build/conf.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/build/conf.py b/doc/build/conf.py index 7abecb59cdc..ad53e75dd9d 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -20,7 +20,9 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. 
sys.path.insert(0, os.path.abspath("../../lib")) sys.path.insert(0, os.path.abspath("../..")) # examples -sys.path.insert(0, os.path.abspath(".")) + +# was never needed, does not work as of python 3.12 due to conflicts +#sys.path.insert(0, os.path.abspath(".")) os.environ["DISABLE_SQLALCHEMY_CEXT_RUNTIME"] = "true" From ead2c9471485f80d9e63a37ae9399f8b66e669d2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebasti=C3=A1n=20Ram=C3=ADrez?= Date: Tue, 7 Nov 2023 00:01:51 +0400 Subject: [PATCH 004/544] Fix syntax typo in doc/build/tutorial/data_select.rst (#10590) (cherry picked from commit e00215cfcdab5186ab07889dda4a2280b5ad5d44) --- doc/build/tutorial/data_select.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/tutorial/data_select.rst b/doc/build/tutorial/data_select.rst index ffeb9dfdb65..c3732d5aa31 100644 --- a/doc/build/tutorial/data_select.rst +++ b/doc/build/tutorial/data_select.rst @@ -130,7 +130,7 @@ for a :func:`_sql.select` by using a tuple of string names:: FROM user_account .. versionadded:: 2.0 Added tuple-accessor capability to the - :attr`.FromClause.c` collection + :attr:`.FromClause.c` collection .. _tutorial_selecting_orm_entities: From f60834d25e11200408ab731f1b22f40d7646ec58 Mon Sep 17 00:00:00 2001 From: Jack McIvor Date: Tue, 7 Nov 2023 17:55:18 +0000 Subject: [PATCH 005/544] Add trove classifier for 3.12 (#10599) (cherry picked from commit 8a583ad7bf65d5d9bf05568279723bc516de3ae3) --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index b797af4afc5..c0be17a7b76 100644 --- a/setup.cfg +++ b/setup.cfg @@ -21,6 +21,7 @@ classifiers = Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: Implementation :: PyPy Topic :: Database :: Front-Ends From 8e372eeb3084a08394cdf19c7df20c768e14215d Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 8 Nov 2023 21:49:06 +0100 Subject: [PATCH 006/544] Fix lint error Change-Id: Ifb53e125fc9fd759938908710b2474656dbf1ef9 --- doc/build/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/conf.py b/doc/build/conf.py index ad53e75dd9d..d047d6daef8 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -22,7 +22,7 @@ sys.path.insert(0, os.path.abspath("../..")) # examples # was never needed, does not work as of python 3.12 due to conflicts -#sys.path.insert(0, os.path.abspath(".")) +# sys.path.insert(0, os.path.abspath(".")) os.environ["DISABLE_SQLALCHEMY_CEXT_RUNTIME"] = "true" From b6326ca06660c6954f781889af8963551b16a31c Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 4 Nov 2023 21:32:16 +0100 Subject: [PATCH 007/544] Various minor fixes Fix typo in exported class in init. #10578 Improve warning for loaderes. #10579 Properly document ARRAY.contains. #10587 Mention how to set a schema to the automatically generated enums. 
#10583 Improve type of cache key dispatcher Change-Id: I86e4f01f5d897b257246fe5f970b78e3444aca3e (cherry picked from commit 1bb9c4b94483a25057bad3d78cf9956e8f292330) --- doc/build/changelog/changelog_13.rst | 2 +- doc/build/orm/declarative_tables.rst | 17 ++++++++++++++++- lib/sqlalchemy/__init__.py | 17 +++++++++++++---- lib/sqlalchemy/orm/context.py | 6 +++--- lib/sqlalchemy/sql/cache_key.py | 17 +++++++++++++++-- lib/sqlalchemy/sql/sqltypes.py | 7 +++++++ 6 files changed, 55 insertions(+), 11 deletions(-) diff --git a/doc/build/changelog/changelog_13.rst b/doc/build/changelog/changelog_13.rst index 462511f3fdf..74fc0c202da 100644 --- a/doc/build/changelog/changelog_13.rst +++ b/doc/build/changelog/changelog_13.rst @@ -3337,7 +3337,7 @@ :tags: change, orm :tickets: 4412 - Added a new function :func:`.close_all_sessions` which takes + Added a new function :func:`_orm.close_all_sessions` which takes over the task of the :meth:`.Session.close_all` method, which is now deprecated as this is confusing as a classmethod. Pull request courtesy Augustin Trancart. diff --git a/doc/build/orm/declarative_tables.rst b/doc/build/orm/declarative_tables.rst index 711fa11bbee..4a1cbd0da3d 100644 --- a/doc/build/orm/declarative_tables.rst +++ b/doc/build/orm/declarative_tables.rst @@ -856,8 +856,23 @@ datatype:: Status: sqlalchemy.Enum(Status, length=50, native_enum=False) } +By default :class:`_sqltypes.Enum` that are automatically generated are not +associated with the :class:`_sql.MetaData` instance used by the ``Base``, so if +the metadata defines a schema it will not be automatically associated with the +enum. To automatically associate the enum with the schema in the metadata or +table they belong to the :paramref:`_sqltypes.Enum.inherit_schema` can be set:: + + from enum import Enum + import sqlalchemy as sa + from sqlalchemy.orm import DeclarativeBase + + + class Base(DeclarativeBase): + metadata = sa.MetaData(schema="my_schema") + type_annotation_map = {Enum: sa.Enum(Enum, inherit_schema=True)} + Linking Specific ``enum.Enum`` or ``typing.Literal`` to other datatypes -++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ The above examples feature the use of an :class:`_sqltypes.Enum` that is automatically configuring itself to the arguments / attributes present on diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 472f01ad063..871e403a77d 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -55,7 +55,7 @@ from .pool import PoolProxiedConnection as PoolProxiedConnection from .pool import PoolResetState as PoolResetState from .pool import QueuePool as QueuePool -from .pool import SingletonThreadPool as SingleonThreadPool +from .pool import SingletonThreadPool as SingletonThreadPool from .pool import StaticPool as StaticPool from .schema import BaseDDLElement as BaseDDLElement from .schema import BLANK_SCHEMA as BLANK_SCHEMA @@ -273,9 +273,7 @@ def __go(lcls: Any) -> None: - from . import util as _sa_util - - _sa_util.preloaded.import_prefix("sqlalchemy") + _util.preloaded.import_prefix("sqlalchemy") from . import exc @@ -283,3 +281,14 @@ def __go(lcls: Any) -> None: __go(locals()) + + +def __getattr__(name: str) -> Any: + if name == "SingleonThreadPool": + _util.warn_deprecated( + "SingleonThreadPool was a typo in the v2 series. 
" + "Please use the correct SingletonThreadPool name.", + "2.0.24", + ) + return SingletonThreadPool + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index 79b43f5fe7d..2f5e4ce8b7b 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -519,9 +519,9 @@ def orm_pre_session_exec( ): util.warn( "Loader depth for query is excessively deep; caching will " - "be disabled for additional loaders. Consider using the " - "recursion_depth feature for deeply nested recursive eager " - "loaders. Use the compiled_cache=None execution option to " + "be disabled for additional loaders. For recursive eager " + "loaders consider using the recursion_depth feature. " + "Use the compiled_cache=None execution option to " "skip this warning." ) execution_options = execution_options.union( diff --git a/lib/sqlalchemy/sql/cache_key.py b/lib/sqlalchemy/sql/cache_key.py index 500e3e4dd72..831b90809b2 100644 --- a/lib/sqlalchemy/sql/cache_key.py +++ b/lib/sqlalchemy/sql/cache_key.py @@ -11,6 +11,7 @@ from itertools import zip_longest import typing from typing import Any +from typing import Callable from typing import Dict from typing import Iterable from typing import Iterator @@ -43,7 +44,7 @@ class _CacheKeyTraversalDispatchType(Protocol): def __call__( s, self: HasCacheKey, visitor: _CacheKeyTraversal - ) -> CacheKey: + ) -> _CacheKeyTraversalDispatchTypeReturn: ... @@ -75,6 +76,18 @@ class CacheTraverseTarget(enum.Enum): ANON_NAME, ) = tuple(CacheTraverseTarget) +_CacheKeyTraversalDispatchTypeReturn = Sequence[ + Tuple[ + str, + Any, + Union[ + Callable[..., Tuple[Any, ...]], + CacheTraverseTarget, + InternalTraversal, + ], + ] +] + class HasCacheKey: """Mixin for objects which can produce a cache key. @@ -324,7 +337,7 @@ def _gen_cache_key( ), ) else: - result += meth( + result += meth( # type: ignore attrname, obj, self, anon_map, bindparams ) return result diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index ddee7767bc3..7e866cc032d 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -2908,6 +2908,13 @@ def _setup_getitem(self, index): return operators.getitem, index, return_type def contains(self, *arg, **kw): + """``ARRAY.contains()`` not implemented for the base ARRAY type. + Use the dialect-specific ARRAY type. + + .. seealso:: + + :class:`_postgresql.ARRAY` - PostgreSQL specific version. 
+ """ raise NotImplementedError( "ARRAY.contains() not implemented for the base " "ARRAY type; please use the dialect-specific ARRAY type" From 32876c199812ab59d48d778634b34aeba11a4d5e Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 11 Nov 2023 16:26:57 +0100 Subject: [PATCH 008/544] Typing updates to fix errors found by mypy 1.7 Change-Id: I02046a72df88a82c8bc6e40b41f9d5b0d01a163e (cherry picked from commit e45b9ffd421cd2a36208a763e20ff6c4e86aad90) --- lib/sqlalchemy/engine/cursor.py | 5 +++-- lib/sqlalchemy/engine/result.py | 4 ++-- lib/sqlalchemy/engine/row.py | 8 ++++---- lib/sqlalchemy/orm/decl_base.py | 4 ++-- lib/sqlalchemy/orm/interfaces.py | 6 +++--- lib/sqlalchemy/util/_py_collections.py | 8 ++++++-- lib/sqlalchemy/util/compat.py | 2 +- 7 files changed, 21 insertions(+), 16 deletions(-) diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index 45af49afccb..ff6e311a743 100644 --- a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ -120,7 +120,7 @@ List[Any], # MD_OBJECTS str, # MD_LOOKUP_KEY str, # MD_RENDERED_NAME - Optional["_ResultProcessorType"], # MD_PROCESSOR + Optional["_ResultProcessorType[Any]"], # MD_PROCESSOR Optional[str], # MD_UNTRANSLATED ] @@ -134,7 +134,7 @@ List[Any], str, str, - Optional["_ResultProcessorType"], + Optional["_ResultProcessorType[Any]"], str, ] @@ -1438,6 +1438,7 @@ def __init__( metadata = self._init_metadata(context, cursor_description) + _make_row: Any _make_row = functools.partial( Row, metadata, diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 132ae88b660..acbe6f09236 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -64,7 +64,7 @@ _KeyMapType = Mapping[_KeyType, _KeyMapRecType] -_RowData = Union[Row, RowMapping, Any] +_RowData = Union[Row[Any], RowMapping, Any] """A generic form of "row" that accommodates for the different kinds of "rows" that different result objects return, including row, row mapping, and scalar values""" @@ -82,7 +82,7 @@ """ -_InterimSupportsScalarsRowType = Union[Row, Any] +_InterimSupportsScalarsRowType = Union[Row[Any], Any] _ProcessorsType = Sequence[Optional["_ResultProcessorType[Any]"]] _TupleGetterType = Callable[[Sequence[Any]], Sequence[Any]] diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py index 9017537ab09..d2bb2e4c9a6 100644 --- a/lib/sqlalchemy/engine/row.py +++ b/lib/sqlalchemy/engine/row.py @@ -296,8 +296,8 @@ class ROMappingView(ABC): def __init__( self, mapping: Mapping["_KeyType", Any], items: Sequence[Any] ): - self._mapping = mapping - self._items = items + self._mapping = mapping # type: ignore[misc] + self._items = items # type: ignore[misc] def __len__(self) -> int: return len(self._items) @@ -321,11 +321,11 @@ def __ne__(self, other: Any) -> bool: class ROMappingKeysValuesView( ROMappingView, typing.KeysView["_KeyType"], typing.ValuesView[Any] ): - __slots__ = ("_items",) + __slots__ = ("_items",) # mapping slot is provided by KeysView class ROMappingItemsView(ROMappingView, typing.ItemsView["_KeyType", Any]): - __slots__ = ("_items",) + __slots__ = ("_items",) # mapping slot is provided by ItemsView class RowMapping(BaseRow, typing.Mapping["_KeyType", Any]): diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index d5ef3db470a..0037379bd5f 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -1130,9 +1130,9 @@ def _allow_dataclass_field( defaults = {} for item in field_list: if 
len(item) == 2: - name, tp = item # type: ignore + name, tp = item elif len(item) == 3: - name, tp, spec = item # type: ignore + name, tp, spec = item defaults[name] = spec else: assert False diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index a118b2aa854..fed07334fb5 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -754,7 +754,7 @@ def __init__( self._adapt_to_entity = adapt_to_entity @util.non_memoized_property - def property(self) -> MapperProperty[_T]: + def property(self) -> MapperProperty[_T_co]: """Return the :class:`.MapperProperty` associated with this :class:`.PropComparator`. @@ -784,7 +784,7 @@ def _bulk_update_tuples( def adapt_to_entity( self, adapt_to_entity: AliasedInsp[Any] - ) -> PropComparator[_T]: + ) -> PropComparator[_T_co]: """Return a copy of this PropComparator which will use the given :class:`.AliasedInsp` to produce corresponding expressions. """ @@ -846,7 +846,7 @@ def reverse_operate( ) -> ColumnElement[Any]: ... - def of_type(self, class_: _EntityType[Any]) -> PropComparator[_T]: + def of_type(self, class_: _EntityType[Any]) -> PropComparator[_T_co]: r"""Redefine this object in terms of a polymorphic subclass, :func:`_orm.with_polymorphic` construct, or :func:`_orm.aliased` construct. diff --git a/lib/sqlalchemy/util/_py_collections.py b/lib/sqlalchemy/util/_py_collections.py index 4f52d3bce67..7dba5092bcf 100644 --- a/lib/sqlalchemy/util/_py_collections.py +++ b/lib/sqlalchemy/util/_py_collections.py @@ -148,12 +148,16 @@ def __ior__(self, __value: Any) -> NoReturn: # type: ignore def __or__( # type: ignore[override] self, __value: Mapping[_KT, _VT] ) -> immutabledict[_KT, _VT]: - return immutabledict(super().__or__(__value)) + return immutabledict( + super().__or__(__value), # type: ignore[call-overload] + ) def __ror__( # type: ignore[override] self, __value: Mapping[_KT, _VT] ) -> immutabledict[_KT, _VT]: - return immutabledict(super().__ror__(__value)) + return immutabledict( + super().__ror__(__value), # type: ignore[call-overload] + ) class OrderedSet(Set[_T]): diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py index 98a0b65ec95..5bbb58f4af5 100644 --- a/lib/sqlalchemy/util/compat.py +++ b/lib/sqlalchemy/util/compat.py @@ -173,7 +173,7 @@ async def anext_(async_iterator, default=_NOT_PROVIDED): def importlib_metadata_get(group): ep = importlib_metadata.entry_points() - if not typing.TYPE_CHECKING and hasattr(ep, "select"): + if hasattr(ep, "select"): return ep.select(group=group) else: return ep.get(group, ()) From 3c53f2ca57a81060a508deca92bcc00cbcf170c0 Mon Sep 17 00:00:00 2001 From: Aleksandr Kiliushin Date: Sun, 12 Nov 2023 13:32:05 +0400 Subject: [PATCH 009/544] Fix a typo (#10620) (cherry picked from commit fc6c2b19fd7f5cb89e0c405f5aa3b3360e4c4a93) --- doc/build/intro.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/intro.rst b/doc/build/intro.rst index cac103ed831..709d56b7b87 100644 --- a/doc/build/intro.rst +++ b/doc/build/intro.rst @@ -42,7 +42,7 @@ augmented by ORM-specific automations and object-centric querying capabilities. 
Whereas working with Core and the SQL Expression language presents a schema-centric view of the database, along with a programming paradigm that is oriented around immutability, the ORM builds on top of this a domain-centric -view of the database with a programming paradigm that is more explcitly +view of the database with a programming paradigm that is more explicitly object-oriented and reliant upon mutability. Since a relational database is itself a mutable service, the difference is that Core/SQL Expression language is command oriented whereas the ORM is state oriented. From d4ccb276e81bb353f8bdaa8ab03d72183c5f1e7c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 15 Nov 2023 13:50:19 -0500 Subject: [PATCH 010/544] clarify connection pool release for session.commit/ rollback Change-Id: I71adf3ed9eb9f7c0abb50ecc9efe7c2df3c98705 (cherry picked from commit 3b4a97972131bbcbe53120400270faa4fce87594) --- doc/build/orm/session_basics.rst | 59 ++++++++++++++++++++++++++------ 1 file changed, 48 insertions(+), 11 deletions(-) diff --git a/doc/build/orm/session_basics.rst b/doc/build/orm/session_basics.rst index 0fcbf7900b1..4b47be43bfc 100644 --- a/doc/build/orm/session_basics.rst +++ b/doc/build/orm/session_basics.rst @@ -15,12 +15,15 @@ ORM-mapped objects. The ORM objects themselves are maintained inside the structure that maintains unique copies of each object, where "unique" means "only one object with a particular primary key". -The :class:`.Session` begins in a mostly stateless form. Once queries are -issued or other objects are persisted with it, it requests a connection -resource from an :class:`_engine.Engine` that is associated with the -:class:`.Session`, and then establishes a transaction on that connection. This -transaction remains in effect until the :class:`.Session` is instructed to -commit or roll back the transaction. +The :class:`.Session` in its most common pattern of use begins in a mostly +stateless form. Once queries are issued or other objects are persisted with it, +it requests a connection resource from an :class:`_engine.Engine` that is +associated with the :class:`.Session`, and then establishes a transaction on +that connection. This transaction remains in effect until the :class:`.Session` +is instructed to commit or roll back the transaction. When the transaction +ends, the connection resource associated with the :class:`_engine.Engine` +is :term:`released` to the connection pool managed by the engine. A new +transaction then starts with a new connection checkout. The ORM objects maintained by a :class:`_orm.Session` are :term:`instrumented` such that whenever an attribute or a collection is modified in the Python @@ -643,8 +646,26 @@ connections. If no pending changes are detected, then no SQL is emitted to the database. This behavior is not configurable and is not affected by the :paramref:`.Session.autoflush` parameter. -Subsequent to that, :meth:`_orm.Session.commit` will then COMMIT the actual -database transaction or transactions, if any, that are in place. +Subsequent to that, assuming the :class:`_orm.Session` is bound to an +:class:`_engine.Engine`, :meth:`_orm.Session.commit` will then COMMIT the +actual database transaction that is in place, if one was started. After the +commit, the :class:`_engine.Connection` object associated with that transaction +is closed, causing its underlying DBAPI connection to be :term:`released` back +to the connection pool associated with the :class:`_engine.Engine` to which the +:class:`_orm.Session` is bound. 
+ +For a :class:`_orm.Session` that's bound to multiple engines (e.g. as described +at :ref:`Partitioning Strategies `), the same COMMIT +steps will proceed for each :class:`_engine.Engine` / +:class:`_engine.Connection` that is in play within the "logical" transaction +being committed. These database transactions are uncoordinated with each other +unless :ref:`two-phase features ` are enabled. + +Other connection-interaction patterns are available as well, by binding the +:class:`_orm.Session` to a :class:`_engine.Connection` directly; in this case, +it's assumed that an externally-managed transaction is present, and a real +COMMIT will not be emitted automatically in this case; see the section +:ref:`session_external_transaction` for background on this pattern. Finally, all objects within the :class:`_orm.Session` are :term:`expired` as the transaction is closed out. This is so that when the instances are next @@ -671,9 +692,25 @@ been begun either via :ref:`autobegin ` or by calling the :meth:`_orm.Session.begin` method explicitly, is as follows: - * All transactions are rolled back and all connections returned to the - connection pool, unless the Session was bound directly to a Connection, in - which case the connection is still maintained (but still rolled back). + * Database transactions are rolled back. For a :class:`_orm.Session` + bound to a single :class:`_engine.Engine`, this means ROLLBACK is emitted + for at most a single :class:`_engine.Connection` that's currently in use. + For :class:`_orm.Session` objects bound to multiple :class:`_engine.Engine` + objects, ROLLBACK is emitted for all :class:`_engine.Connection` objects + that were checked out. + * Database connections are :term:`released`. This follows the same connection-related + behavior noted in :ref:`session_committing`, where + :class:`_engine.Connection` objects obtained from :class:`_engine.Engine` + objects are closed, causing the DBAPI connections to be :term:`released` to + the connection pool within the :class:`_engine.Engine`. New connections + are checked out from the :class:`_engine.Engine` if and when a new + transaction begins. + * For a :class:`_orm.Session` + that's bound directly to a :class:`_engine.Connection` as described + at :ref:`session_external_transaction`, rollback behavior on this + :class:`_engine.Connection` would follow the behavior specified by the + :paramref:`_orm.Session.join_transaction_mode` parameter, which could + involve rolling back savepoints or emitting a real ROLLBACK. * Objects which were initially in the :term:`pending` state when they were added to the :class:`~sqlalchemy.orm.session.Session` within the lifespan of the transaction are expunged, corresponding to their INSERT statement being From fc80cee2d581699c0a8795f40d9f1bc3d1c03e77 Mon Sep 17 00:00:00 2001 From: William Henry Hakim Date: Thu, 16 Nov 2023 09:11:26 -0500 Subject: [PATCH 011/544] Update type annotation for DBAPI Cursor's executemany() ### Description As per https://groups.google.com/g/sqlalchemy/c/DkyffAgDmwM, fixes an issue with the DBAPI cursor's executemany() type signature. ### Checklist This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. 
- Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #10644 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10644 Pull-request-sha: 0a1fe849b54e35cd5e417da79556dc94be3a0abc Change-Id: I3af344f052a2f306876a528c528b1bf9cc0bdaa6 (cherry picked from commit 55bf2bf971cda82c2e3f58e9c211c74c3a6394a8) --- lib/sqlalchemy/engine/interfaces.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index ea1f27d0629..4bf0d3e9e7d 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -194,7 +194,7 @@ def execute( def executemany( self, operation: Any, - parameters: Sequence[_DBAPIMultiExecuteParams], + parameters: _DBAPIMultiExecuteParams, ) -> Any: ... From bc890c9f1042a7ca60b11c53963d329358d3923b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 18 Nov 2023 10:22:19 -0500 Subject: [PATCH 012/544] update all errors / warnings in schema to use f strings Fixed issue where error reporting for unexpected schema item when creating objects like :class:`_schema.Table` would incorrectly handle an argument that was itself passed as a tuple, leading to a formatting error. The error message has been modernized to use f-strings. this change necessitated an update to flake8 as version 5 was mis-interpreting f-strings that had semicolons in them. Black is also unable to format some of these f-strings which had to be broken out, unclear if there is a newer Black available. Fixes: #10654 Change-Id: I703e94282c27ccf06f4aa315e8a11bd97b719170 (cherry picked from commit 2458ceee94e7bd6e5bf8d9d7270be8819bbe772c) --- .pre-commit-config.yaml | 2 +- doc/build/changelog/unreleased_20/10654.rst | 8 ++ lib/sqlalchemy/sql/schema.py | 110 ++++++++++---------- test/sql/test_metadata.py | 12 +++ 4 files changed, 77 insertions(+), 55 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10654.rst diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ab722e4f309..f169100aa60 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,7 +12,7 @@ repos: - id: zimports - repo: https://github.com/pycqa/flake8 - rev: 5.0.0 + rev: 6.1.0 hooks: - id: flake8 additional_dependencies: diff --git a/doc/build/changelog/unreleased_20/10654.rst b/doc/build/changelog/unreleased_20/10654.rst new file mode 100644 index 00000000000..bb9b25e04d0 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10654.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, schema + :tickets: 10654 + + Fixed issue where error reporting for unexpected schema item when creating + objects like :class:`_schema.Table` would incorrectly handle an argument + that was itself passed as a tuple, leading to a formatting error. The + error message has been modernized to use f-strings. 
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index d4e3f4cff51..556e6c81534 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -225,7 +225,7 @@ def _init_items(self, *args: SchemaItem, **kw: Any) -> None: except AttributeError as err: raise exc.ArgumentError( "'SchemaItem' object, such as a 'Column' or a " - "'Constraint' expected, got %r" % item + f"'Constraint' expected, got {item!r}" ) from err else: spwd(self, **kw) @@ -466,11 +466,11 @@ def _new(cls, *args: Any, **kw: Any) -> Any: if key in metadata.tables: if not keep_existing and not extend_existing and bool(args): raise exc.InvalidRequestError( - "Table '%s' is already defined for this MetaData " + f"Table '{key}' is already defined for this MetaData " "instance. Specify 'extend_existing=True' " "to redefine " "options and columns on an " - "existing Table object." % key + "existing Table object." ) table = metadata.tables[key] if extend_existing: @@ -478,7 +478,7 @@ def _new(cls, *args: Any, **kw: Any) -> Any: return table else: if must_exist: - raise exc.InvalidRequestError("Table '%s' not defined" % (key)) + raise exc.InvalidRequestError(f"Table '{key}' not defined") table = object.__new__(cls) table.dispatch.before_parent_attach(table, metadata) metadata._add_table(name, schema, table) @@ -955,8 +955,8 @@ def _init_existing(self, *args: Any, **kwargs: Any) -> None: if schema and schema != self.schema: raise exc.ArgumentError( - "Can't change schema of existing table from '%s' to '%s'", - (self.schema, schema), + f"Can't change schema of existing table " + f"from '{self.schema}' to '{schema}'", ) include_columns = kwargs.pop("include_columns", None) @@ -1436,8 +1436,8 @@ def referred_schema_fn(table, to_schema, key = _get_table_key(name, actual_schema) if key in metadata.tables: util.warn( - "Table '%s' already exists within the given " - "MetaData - not copying." % self.description + f"Table '{self.description}' already exists within the given " + "MetaData - not copying." 
) return metadata.tables[key] @@ -2317,8 +2317,8 @@ def _set_parent( # type: ignore[override] existing = getattr(self, "table", None) if existing is not None and existing is not table: raise exc.ArgumentError( - "Column object '%s' already assigned to Table '%s'" - % (self.key, existing.description) + f"Column object '{self.key}' already " + f"assigned to Table '{existing.description}'" ) extra_remove = None @@ -2378,9 +2378,8 @@ def _set_parent( # type: ignore[override] table.primary_key._replace(self) elif self.key in table.primary_key: raise exc.ArgumentError( - "Trying to redefine primary-key column '%s' as a " - "non-primary-key column on table '%s'" - % (self.key, table.fullname) + f"Trying to redefine primary-key column '{self.key}' as a " + f"non-primary-key column on table '{table.fullname}'" ) if self.index: @@ -3030,7 +3029,7 @@ def _column_tokens(self) -> Tuple[Optional[str], str, Optional[str]]: m = self._get_colspec().split(".") if m is None: raise exc.ArgumentError( - "Invalid foreign key column specification: %s" % self._colspec + f"Invalid foreign key column specification: {self._colspec}" ) if len(m) == 1: tname = m.pop() @@ -3121,9 +3120,9 @@ def _link_to_col_by_colstring( if _column is None: raise exc.NoReferencedColumnError( "Could not initialize target column " - "for ForeignKey '%s' on table '%s': " - "table '%s' has no column named '%s'" - % (self._colspec, parenttable.name, table.name, key), + f"for ForeignKey '{self._colspec}' " + f"on table '{parenttable.name}': " + f"table '{table.name}' has no column named '{key}'", table.name, key, ) @@ -3182,18 +3181,18 @@ def _resolve_column( if not raiseerr: return None raise exc.NoReferencedTableError( - "Foreign key associated with column '%s' could not find " - "table '%s' with which to generate a " - "foreign key to target column '%s'" - % (self.parent, tablekey, colname), + f"Foreign key associated with column " + f"'{self.parent}' could not find " + f"table '{tablekey}' with which to generate a " + f"foreign key to target column '{colname}'", tablekey, ) elif parenttable.key not in parenttable.metadata: if not raiseerr: return None raise exc.InvalidRequestError( - "Table %s is no longer associated with its " - "parent MetaData" % parenttable + f"Table {parenttable} is no longer associated with its " + "parent MetaData" ) else: table = parenttable.metadata.tables[tablekey] @@ -3940,10 +3939,10 @@ def drop(self, bind: _CreateDropBind, checkfirst: bool = True) -> None: def _not_a_column_expr(self) -> NoReturn: raise exc.InvalidRequestError( - "This %s cannot be used directly " + f"This {self.__class__.__name__} cannot be used directly " "as a column expression. Use func.next_value(sequence) " "to produce a 'next value' function that's usable " - "as a column element." % self.__class__.__name__ + "as a column element." ) @@ -4267,12 +4266,11 @@ def _col_attached(column: Column[Any], table: Table) -> None: table = columns[0].table others = [c for c in columns[1:] if c.table is not table] if others: + # black could not format this inline + other_str = ", ".join("'%s'" % c for c in others) raise exc.ArgumentError( - "Column(s) %s are not part of table '%s'." - % ( - ", ".join("'%s'" % c for c in others), - table.description, - ) + f"Column(s) {other_str} " + f"are not part of table '{table.description}'." 
) @util.ro_memoized_property @@ -4757,9 +4755,9 @@ def _validate_dest_table(self, table: Table) -> None: if None not in table_keys and len(table_keys) > 1: elem0, elem1 = sorted(table_keys)[0:2] raise exc.ArgumentError( - "ForeignKeyConstraint on %s(%s) refers to " - "multiple remote tables: %s and %s" - % (table.fullname, self._col_description, elem0, elem1) + f"ForeignKeyConstraint on " + f"{table.fullname}({self._col_description}) refers to " + f"multiple remote tables: {elem0} and {elem1}" ) @property @@ -4945,17 +4943,20 @@ def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None: and table_pks and set(table_pks) != set(self._columns) ): + # black could not format these inline + table_pk_str = ", ".join("'%s'" % c.name for c in table_pks) + col_str = ", ".join("'%s'" % c.name for c in self._columns) + util.warn( - "Table '%s' specifies columns %s as primary_key=True, " - "not matching locally specified columns %s; setting the " - "current primary key columns to %s. This warning " - "may become an exception in a future release" - % ( - table.name, - ", ".join("'%s'" % c.name for c in table_pks), - ", ".join("'%s'" % c.name for c in self._columns), - ", ".join("'%s'" % c.name for c in self._columns), - ) + f"Table '{table.name}' specifies columns " + f"{table_pk_str} as " + f"primary_key=True, " + f"not matching locally specified columns {col_str}; " + f"setting the " + f"current primary key columns to " + f"{col_str}. " + f"This warning " + f"may become an exception in a future release" ) table_pks[:] = [] @@ -5022,8 +5023,8 @@ def _validate_autoinc(col: Column[Any], autoinc_true: bool) -> bool: ): if autoinc_true: raise exc.ArgumentError( - "Column type %s on column '%s' is not " - "compatible with autoincrement=True" % (col.type, col) + f"Column type {col.type} on column '{col}' is not " + f"compatible with autoincrement=True" ) else: return False @@ -5066,9 +5067,9 @@ def _validate_autoinc(col: Column[Any], autoinc_true: bool) -> bool: _validate_autoinc(col, True) if autoinc is not None: raise exc.ArgumentError( - "Only one Column may be marked " - "autoincrement=True, found both %s and %s." - % (col.name, autoinc.name) + f"Only one Column may be marked " + f"autoincrement=True, found both " + f"{col.name} and {autoinc.name}." ) else: autoinc = col @@ -5239,9 +5240,9 @@ def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None: if self.table is not None and table is not self.table: raise exc.ArgumentError( - "Index '%s' is against table '%s', and " - "cannot be associated with table '%s'." - % (self.name, self.table.description, table.description) + f"Index '{self.name}' is against table " + f"'{self.table.description}', and " + f"cannot be associated with table '{table.description}'." 
) self.table = table table.indexes.add(self) @@ -5776,9 +5777,10 @@ def reflect( missing = [name for name in only if name not in available] if missing: s = schema and (" schema '%s'" % schema) or "" + missing_str = ", ".join(missing) raise exc.InvalidRequestError( - "Could not reflect: requested table(s) not available " - "in %r%s: (%s)" % (bind.engine, s, ", ".join(missing)) + f"Could not reflect: requested table(s) not available " + f"in {bind.engine!r}{s}: ({missing_str})" ) load = [ name @@ -5801,7 +5803,7 @@ def reflect( try: Table(name, self, **reflect_opts) except exc.UnreflectableTableError as uerr: - util.warn("Skipping table %s: %s" % (name, uerr)) + util.warn(f"Skipping table {name}: {uerr}") def create_all( self, diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index 0b35adc1ccc..aa3cec3dad3 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -1777,6 +1777,18 @@ def test_invalid_objects(self): 12, ) + assert_raises_message( + tsa.exc.ArgumentError, + "'SchemaItem' object, such as a 'Column' or a " + "'Constraint' expected, got " + r"\(Column\('q', Integer\(\), table=None\), " + r"Column\('p', Integer\(\), table=None\)\)", + Table, + "asdf", + MetaData(), + (Column("q", Integer), Column("p", Integer)), + ) + def test_reset_exported_passes(self): m = MetaData() From 6e8362d2b9f7aa331ccebbb882d7234edf22a080 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 22 Nov 2023 10:58:00 -0500 Subject: [PATCH 013/544] re-add cascade caveats these got removed and never restored in 2.0 References: https://github.com/sqlalchemy/sqlalchemy/discussions/10672 Change-Id: Ibbd9a8ab04556ebd581f9287b54fe1ffdaacee79 (cherry picked from commit 4279e715e0b4af819a0434daa5bf9e0c18789bec) --- doc/build/orm/cascades.rst | 8 ++++++ doc/build/orm/queryguide/dml.rst | 46 ++++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+) diff --git a/doc/build/orm/cascades.rst b/doc/build/orm/cascades.rst index 02d68669eee..efb997560a8 100644 --- a/doc/build/orm/cascades.rst +++ b/doc/build/orm/cascades.rst @@ -301,6 +301,14 @@ The feature by default works completely independently of database-configured In order to integrate more efficiently with this configuration, additional directives described at :ref:`passive_deletes` should be used. +.. warning:: Note that the ORM's "delete" and "delete-cascade" behavior applies + **only** to the use of the :meth:`_orm.Session.delete` method to mark + individual ORM instances for deletion within the :term:`unit-of-work` process. + It does **not** apply to "bulk" deletes, which would be emitted using + the :func:`_sql.delete` construct as illustrated at + :ref:`orm_queryguide_update_delete_where`. See + :ref:`orm_queryguide_update_delete_caveats` for additional background. + .. seealso:: :ref:`passive_deletes` diff --git a/doc/build/orm/queryguide/dml.rst b/doc/build/orm/queryguide/dml.rst index 967397f1ae9..67614ac92c5 100644 --- a/doc/build/orm/queryguide/dml.rst +++ b/doc/build/orm/queryguide/dml.rst @@ -993,6 +993,52 @@ For a DELETE, an example of deleting rows based on criteria:: >>> session.connection() BEGIN (implicit)... +.. warning:: Please read the following section :ref:`orm_queryguide_update_delete_caveats` + for important notes regarding how the functionality of ORM-Enabled UPDATE and DELETE + diverges from that of ORM :term:`unit-of-work` features, such + as using the :meth:`_orm.Session.delete` method to delete individual objects. + + +.. 
_orm_queryguide_update_delete_caveats: + +Important Notes and Caveats for ORM-Enabled Update and Delete +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The ORM-enabled UPDATE and DELETE features bypass ORM :term:`unit-of-work` +automation in favor being able to emit a single UPDATE or DELETE statement +that matches multiple rows at once without complexity. + +* The operations do not offer in-Python cascading of relationships - it is + assumed that ON UPDATE CASCADE and/or ON DELETE CASCADE is configured for any + foreign key references which require it, otherwise the database may emit an + integrity violation if foreign key references are being enforced. See the + notes at :ref:`passive_deletes` for some examples. + +* After the UPDATE or DELETE, dependent objects in the :class:`.Session` which + were impacted by an ON UPDATE CASCADE or ON DELETE CASCADE on related tables, + particularly objects that refer to rows that have now been deleted, may still + reference those objects. This issue is resolved once the :class:`.Session` + is expired, which normally occurs upon :meth:`.Session.commit` or can be + forced by using :meth:`.Session.expire_all`. + +* ORM-enabled UPDATEs and DELETEs do not handle joined table inheritance + automatically. See the section :ref:`orm_queryguide_update_delete_joined_inh` + for notes on how to work with joined-inheritance mappings. + +* The WHERE criteria needed in order to limit the polymorphic identity to + specific subclasses for single-table-inheritance mappings **is included + automatically** . This only applies to a subclass mapper that has no table of + its own. + +* The :func:`_orm.with_loader_criteria` option **is supported** by ORM + update and delete operations; criteria here will be added to that of the UPDATE + or DELETE statement being emitted, as well as taken into account during the + "synchronize" process. + +* In order to intercept ORM-enabled UPDATE and DELETE operations with event + handlers, use the :meth:`_orm.SessionEvents.do_orm_execute` event. + + .. _orm_queryguide_update_delete_sync: From 67e478e213c05c35c7ea8b31ab88a0d9490d44f1 Mon Sep 17 00:00:00 2001 From: Jan Vollmer Date: Wed, 22 Nov 2023 13:16:27 -0500 Subject: [PATCH 014/544] add local column to dependency rule error message Improved the error message produced when the unit of work process sets the value of a primary key column to NULL due to a related object with a dependency rule on that column being deleted, to include not just the destination object and column name but also the source column from which the NULL value is originating. Pull request courtesy Jan Vollmer. Fixes: #10668 Closes: #10669 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10669 Pull-request-sha: d7f9952b81394d585e55dad6d6f355aaa54f599e Change-Id: I210317f8777ff07d9eec674b79f2140523af38d0 (cherry picked from commit 87929a9c55163ebcd7edd031d222c7f115922842) --- doc/build/changelog/unreleased_20/10668.rst | 9 +++++++++ lib/sqlalchemy/orm/sync.py | 5 +++-- test/orm/test_relationships.py | 7 ++++--- test/orm/test_sync.py | 2 +- 4 files changed, 17 insertions(+), 6 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10668.rst diff --git a/doc/build/changelog/unreleased_20/10668.rst b/doc/build/changelog/unreleased_20/10668.rst new file mode 100644 index 00000000000..560aac85e9a --- /dev/null +++ b/doc/build/changelog/unreleased_20/10668.rst @@ -0,0 +1,9 @@ +.. 
change:: + :tags: bug, orm + :tickets: 10668 + + Improved the error message produced when the unit of work process sets the + value of a primary key column to NULL due to a related object with a + dependency rule on that column being deleted, to include not just the + destination object and column name but also the source column from which + the NULL value is originating. Pull request courtesy Jan Vollmer. diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py index 036c26dd6be..dbe8fb7a251 100644 --- a/lib/sqlalchemy/orm/sync.py +++ b/lib/sqlalchemy/orm/sync.py @@ -86,8 +86,9 @@ def clear(dest, dest_mapper, synchronize_pairs): not in orm_util._none_set ): raise AssertionError( - "Dependency rule tried to blank-out primary key " - "column '%s' on instance '%s'" % (r, orm_util.state_str(dest)) + f"Dependency rule on column '{l}' " + "tried to blank-out primary key " + f"column '{r}' on instance '{orm_util.state_str(dest)}'" ) try: dest_mapper._set_state_attr_by_column(dest, dest.dict, r, None) diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py index d6b886be151..969196ad8ca 100644 --- a/test/orm/test_relationships.py +++ b/test/orm/test_relationships.py @@ -1334,7 +1334,8 @@ def _test_no_overwrite(self, sess, expect_failure): # this happens assert_raises_message( AssertionError, - "Dependency rule tried to blank-out primary key column " + "Dependency rule on column 'employee_t.company_id' " + "tried to blank-out primary key column " "'employee_t.company_id'", sess.flush, ) @@ -1669,7 +1670,7 @@ def test_no_delete_PK_AtoB(self): assert_raises_message( AssertionError, - "Dependency rule tried to blank-out " + "Dependency rule on column 'tableA.id' tried to blank-out " "primary key column 'tableB.id' on instance ", sess.flush, ) @@ -1696,7 +1697,7 @@ def test_no_delete_PK_BtoA(self): b1.a = None assert_raises_message( AssertionError, - "Dependency rule tried to blank-out " + "Dependency rule on column 'tableA.id' tried to blank-out " "primary key column 'tableB.id' on instance ", sess.flush, ) diff --git a/test/orm/test_sync.py b/test/orm/test_sync.py index c8f511f447a..10d73cb8d64 100644 --- a/test/orm/test_sync.py +++ b/test/orm/test_sync.py @@ -145,7 +145,7 @@ def test_clear_pk(self): eq_(b1.obj().__dict__["id"], 8) assert_raises_message( AssertionError, - "Dependency rule tried to blank-out primary key " + "Dependency rule on column 't1.id' tried to blank-out primary key " "column 't2.id' on instance ' Date: Wed, 22 Nov 2023 22:04:03 +0100 Subject: [PATCH 015/544] Fix pre-ping regression in old PyMySQL versions. Fixed regression introduced by the fix in ticket :ticket:`10492` when using pool pre-ping with PyMySQL version older than 1.0. Fixes: #10650 Change-Id: Ic0744c8b6f91cc39868e31c3bfddb8df20c7dfbb --- doc/build/changelog/unreleased_14/10650.rst | 7 +++++++ lib/sqlalchemy/dialects/mysql/pymysql.py | 4 +++- 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_14/10650.rst diff --git a/doc/build/changelog/unreleased_14/10650.rst b/doc/build/changelog/unreleased_14/10650.rst new file mode 100644 index 00000000000..dce6b4c75a5 --- /dev/null +++ b/doc/build/changelog/unreleased_14/10650.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, mysql + :tickets: 10650 + :versions: 2.0.24 + + Fixed regression introduced by the fix in ticket :ticket:`10492` when using + pool pre-ping with PyMySQL version older than 1.0. 
diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py index 6567202a45e..ddb99542f8d 100644 --- a/lib/sqlalchemy/dialects/mysql/pymysql.py +++ b/lib/sqlalchemy/dialects/mysql/pymysql.py @@ -86,7 +86,9 @@ def _send_false_to_ping(self): """ # noqa: E501 try: - Connection = __import__("pymysql.connections").Connection + Connection = __import__( + "pymysql.connections" + ).connections.Connection except (ImportError, AttributeError): return True else: From a61338679c5cf95bef476b826e867b54fbf1f322 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois=20Voron?= Date: Thu, 23 Nov 2023 10:53:07 -0500 Subject: [PATCH 016/544] =?UTF-8?q?docs:=20fix=20type=20annotation=20in=20?= =?UTF-8?q?Self-Referential=20Many-to-Many=20Relationsh=E2=80=A6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit A type annotation was wrong in the [Self-Referential Many-to-Many Relationship](https://docs.sqlalchemy.org/en/20/orm/join_conditions.html#self-referential-many-to-many-relationship) code example. ### Description The type annotation was `right_nodes: Mapped[List["None"]]`. I changed it to `Node` since we refer to the ORM class we're looking at. ### Checklist This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #10686 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10686 Pull-request-sha: 7671898116f1b8850e5d8f3ff0f940450a7c1bf4 Change-Id: Iab1535c3d00747eb8c9e9a17aea50606febedbf9 (cherry picked from commit 7157d16e3ba521c119958a727af51790ebdf3f34) --- doc/build/orm/join_conditions.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/orm/join_conditions.rst b/doc/build/orm/join_conditions.rst index ef6d74e6676..2e6d2d936b3 100644 --- a/doc/build/orm/join_conditions.rst +++ b/doc/build/orm/join_conditions.rst @@ -564,14 +564,14 @@ is when establishing a many-to-many relationship from a class to itself, as show __tablename__ = "node" id: Mapped[int] = mapped_column(primary_key=True) label: Mapped[str] - right_nodes: Mapped[List["None"]] = relationship( + right_nodes: Mapped[List["Node"]] = relationship( "Node", secondary=node_to_node, primaryjoin=id == node_to_node.c.left_node_id, secondaryjoin=id == node_to_node.c.right_node_id, back_populates="left_nodes", ) - left_nodes: Mapped[List["None"]] = relationship( + left_nodes: Mapped[List["Node"]] = relationship( "Node", secondary=node_to_node, primaryjoin=id == node_to_node.c.right_node_id, From 7b09fda88609afb68426bbcf6f6d8a8f238a1b78 Mon Sep 17 00:00:00 2001 From: Gerald Venzl Date: Thu, 23 Nov 2023 14:42:52 -0500 Subject: [PATCH 017/544] Upgrade to Oracle Database 23c Free ### Description This PR updates the unittest ReadMe and `setup.cfg` to use Oracle Database Free, the successor to Oracle Database XE. 
### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #10658 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10658 Pull-request-sha: 1cfb2d3a7f688a4b887182d2485b7d60f12eb013 Change-Id: Id37f6607572c1fdb12d507cd9336800d4d8b0cf3 (cherry picked from commit 44771ab0c32730d1ecefac21af13f113b1b19726) --- README.unittests.rst | 12 ++++++------ setup.cfg | 8 ++++---- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/README.unittests.rst b/README.unittests.rst index 9cf309d2d7e..57c6d42dad6 100644 --- a/README.unittests.rst +++ b/README.unittests.rst @@ -307,11 +307,11 @@ be used with pytest by using ``--db docker_mssql``. **Oracle configuration**:: # create the container with the proper configuration for sqlalchemy - docker run --rm --name oracle -p 127.0.0.1:1521:1521 -d -e ORACLE_PASSWORD=tiger -e ORACLE_DATABASE=test -e APP_USER=scott -e APP_USER_PASSWORD=tiger gvenzl/oracle-xe:21-slim + docker run --rm --name oracle -p 127.0.0.1:1521:1521 -d -e ORACLE_PASSWORD=tiger -e ORACLE_DATABASE=test -e APP_USER=scott -e APP_USER_PASSWORD=tiger gvenzl/oracle-free:23-slim # enter the database container and run the command docker exec -ti oracle bash - >> sqlplus system/tiger@//localhost/XEPDB1 <> sqlplus system/tiger@//localhost/FREEPDB1 < Date: Fri, 24 Nov 2023 08:55:44 -0500 Subject: [PATCH 018/544] backport importlib mypy fix this is the fix that was part of f5f08c28fb, this is otherwise failing for some environments with mypy 6 Change-Id: Ic8b410c4fa1858c98e9299e062f1ff040c2d3576 --- lib/sqlalchemy/util/compat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py index 5bbb58f4af5..a4464324cd5 100644 --- a/lib/sqlalchemy/util/compat.py +++ b/lib/sqlalchemy/util/compat.py @@ -173,7 +173,7 @@ async def anext_(async_iterator, default=_NOT_PROVIDED): def importlib_metadata_get(group): ep = importlib_metadata.entry_points() - if hasattr(ep, "select"): + if typing.TYPE_CHECKING or hasattr(ep, "select"): return ep.select(group=group) else: return ep.get(group, ()) From f6fafbe623d9031f6899d9b303bb6e99290693b2 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 24 Nov 2023 16:27:03 -0500 Subject: [PATCH 019/544] fully update to flake8 6.1.0 I updated flake8 in pre-commit a few days ago but forgot to do it in tox. this flake seems to be picking inside of f-strings, which black does not fix, so fix these manually. 
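A hedged illustration (not part of the patch) of the kind of complaint being addressed: the newer flake8 run appears to apply whitespace checks to the expression portion of f-strings as well, so spellings like the first assignment below get flagged while the second passes, which is what the diffs that follow adjust:

    i = 3
    label = f"c{i+1}"    # the form now being flagged (operator spacing inside the f-string)
    label = f"c{i + 1}"  # the adjusted form used in the diffs below
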
Change-Id: I9a641a99e280fbba9d893a6f1f051b5039d5d4eb (cherry picked from commit 7bc33ca55144d2716b175c733c0d6865572787aa) --- lib/sqlalchemy/dialects/mssql/base.py | 4 ++-- lib/sqlalchemy/dialects/postgresql/base.py | 8 ++++---- lib/sqlalchemy/dialects/postgresql/pg8000.py | 4 ++-- lib/sqlalchemy/engine/base.py | 19 ++++++++++--------- lib/sqlalchemy/engine/reflection.py | 2 +- lib/sqlalchemy/orm/loading.py | 8 +++++--- lib/sqlalchemy/orm/persistence.py | 3 ++- lib/sqlalchemy/orm/strategy_options.py | 7 ++++--- test/dialect/mssql/test_reflection.py | 2 +- test/dialect/oracle/test_reflection.py | 2 +- test/engine/test_pool.py | 6 ++++-- test/orm/test_query.py | 6 ++++-- test/perf/many_table_reflection.py | 2 +- test/sql/test_insert_exec.py | 4 ++-- tox.ini | 2 +- 15 files changed, 44 insertions(+), 35 deletions(-) diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 687de04e4d3..80734d60619 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -1551,8 +1551,8 @@ def process(value): def process(value): return f"""'{ - value.replace("-", "").replace("'", "''") - }'""" + value.replace("-", "").replace("'", "''") + }'""" return process diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index b9fd8c8baba..0aec40ea97f 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1706,10 +1706,10 @@ def render_bind_cast(self, type_, dbapi_type, sqltext): # see #9511 dbapi_type = sqltypes.STRINGTYPE return f"""{sqltext}::{ - self.dialect.type_compiler_instance.process( - dbapi_type, identifier_preparer=self.preparer - ) - }""" + self.dialect.type_compiler_instance.process( + dbapi_type, identifier_preparer=self.preparer + ) + }""" def visit_array(self, element, **kw): return "ARRAY[%s]" % self.visit_clauselist(element, **kw) diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py index 71ee4ebd63e..c9829ac6813 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg8000.py +++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py @@ -584,8 +584,8 @@ def _set_client_encoding(self, dbapi_connection, client_encoding): cursor = dbapi_connection.cursor() cursor.execute( f"""SET CLIENT_ENCODING TO '{ - client_encoding.replace("'", "''") - }'""" + client_encoding.replace("'", "''") + }'""" ) cursor.execute("COMMIT") cursor.close() diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 0000e28103d..bcf6ca2280f 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -2067,15 +2067,16 @@ def _exec_insertmany_context( if self._echo: self._log_info(sql_util._long_statement(sub_stmt)) - imv_stats = f""" { - imv_batch.batchnum}/{imv_batch.total_batches} ({ - 'ordered' - if imv_batch.rows_sorted else 'unordered' - }{ - '; batch not supported' - if imv_batch.is_downgraded - else '' - })""" + imv_stats = f""" {imv_batch.batchnum}/{ + imv_batch.total_batches + } ({ + 'ordered' + if imv_batch.rows_sorted else 'unordered' + }{ + '; batch not supported' + if imv_batch.is_downgraded + else '' + })""" if imv_batch.batchnum == 1: stats += imv_stats diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 6d2a8a29fd8..66e94429cb1 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -1843,7 +1843,7 @@ def _reflect_indexes( if not expressions: util.warn( f"Skipping {flavor} 
{name!r} because key " - f"{index+1} reflected as None but no " + f"{index + 1} reflected as None but no " "'expressions' were returned" ) break diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py index cae6f0be21c..624e8c199ab 100644 --- a/lib/sqlalchemy/orm/loading.py +++ b/lib/sqlalchemy/orm/loading.py @@ -149,9 +149,11 @@ def go(obj): raise sa_exc.InvalidRequestError( "Can't apply uniqueness to row tuple containing value of " - f"""type {datatype!r}; {'the values returned appear to be' - if uncertain else 'this datatype produces'} """ - "non-hashable values" + f"""type {datatype!r}; { + 'the values returned appear to be' + if uncertain + else 'this datatype produces' + } non-hashable values""" ) return go diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index 6729b479f90..3f537fb7616 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -559,7 +559,8 @@ def _collect_update_commands( f"No primary key value supplied for column(s) " f"""{ ', '.join( - str(c) for c in pks if pk_params[c._label] is None) + str(c) for c in pks if pk_params[c._label] is None + ) }; """ "per-row ORM Bulk UPDATE by Primary Key requires that " "records contain primary key values", diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index 6c81e8fe737..e090d5b258c 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -1611,9 +1611,10 @@ def _raise_for_no_match(self, parent_loader, mapper_entities): f"Mapped class {path[0]} does not apply to any of the " f"root entities in this query, e.g. " f"""{ - ", ".join(str(x.entity_zero) - for x in mapper_entities if x.entity_zero - )}. Please """ + ", ".join( + str(x.entity_zero) + for x in mapper_entities if x.entity_zero + )}. Please """ "specify the full path " "from one of the root entities to the target " "attribute. 
" diff --git a/test/dialect/mssql/test_reflection.py b/test/dialect/mssql/test_reflection.py index b6a1d411a25..ae2b7662ef5 100644 --- a/test/dialect/mssql/test_reflection.py +++ b/test/dialect/mssql/test_reflection.py @@ -389,7 +389,7 @@ def test_global_temp_different_collation( ): """test #8035""" - tname = f"##foo{random.randint(1,1000000)}" + tname = f"##foo{random.randint(1, 1000000)}" with temp_db_alt_collation_fixture.connect() as conn: conn.exec_driver_sql(f"CREATE TABLE {tname} (id int primary key)") diff --git a/test/dialect/oracle/test_reflection.py b/test/dialect/oracle/test_reflection.py index 2a82c25d9fd..00d83637201 100644 --- a/test/dialect/oracle/test_reflection.py +++ b/test/dialect/oracle/test_reflection.py @@ -1227,7 +1227,7 @@ def _run_test(self, metadata, connection, specs, attributes): for attr in attributes: r_attr = getattr(reflected_type, attr) e_attr = getattr(expected_spec, attr) - col = f"c{i+1}" + col = f"c{i + 1}" eq_( r_attr, e_attr, diff --git a/test/engine/test_pool.py b/test/engine/test_pool.py index 44c494bad4a..49736df9b65 100644 --- a/test/engine/test_pool.py +++ b/test/engine/test_pool.py @@ -460,8 +460,10 @@ def _checkin_event_fixture(self, _is_asyncio=False, _has_terminate=False): @event.listens_for(p, "reset") def reset(conn, rec, state): canary.append( - f"""reset_{'rollback_ok' - if state.asyncio_safe else 'no_rollback'}""" + f"""reset_{ + 'rollback_ok' + if state.asyncio_safe else 'no_rollback' + }""" ) @event.listens_for(p, "checkin") diff --git a/test/orm/test_query.py b/test/orm/test_query.py index 3057087e43b..a06406c1154 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -697,8 +697,10 @@ def process_result_value(self, value, dialect): sa_exc.InvalidRequestError, r"Can't apply uniqueness to row tuple " r"containing value of type MyType\(\); " - rf"""{'the values returned appear to be' - if uncertain else 'this datatype produces'} """ + rf"""{ + 'the values returned appear to be' + if uncertain else 'this datatype produces' + } """ r"non-hashable values", ): result = s.execute(q).unique().all() diff --git a/test/perf/many_table_reflection.py b/test/perf/many_table_reflection.py index b9b941b688f..d65c272430a 100644 --- a/test/perf/many_table_reflection.py +++ b/test/perf/many_table_reflection.py @@ -38,7 +38,7 @@ def generate_table(meta: sa.MetaData, min_cols, max_cols, dialect_name): args.append(sa.ForeignKey(f"table_{target}.table_{target}_col_1")) cols.append( sa.Column( - f"table_{table_num}_col_{i+1}", + f"table_{table_num}_col_{i + 1}", *args, primary_key=i == 0, comment=f"primary key of table_{table_num}" diff --git a/test/sql/test_insert_exec.py b/test/sql/test_insert_exec.py index 29484696da8..e9eda0e5bd2 100644 --- a/test/sql/test_insert_exec.py +++ b/test/sql/test_insert_exec.py @@ -2060,7 +2060,7 @@ def test_sentinel_on_non_autoinc_primary_key( collection_cls(r), collection_cls( [ - (uuids[i], f"d{i+1}", "some_server_default") + (uuids[i], f"d{i + 1}", "some_server_default") for i in range(5) ] ), @@ -2072,7 +2072,7 @@ def test_sentinel_on_non_autoinc_primary_key( collection_cls( [ ( - f"d{i+1}", + f"d{i + 1}", "some_server_default", ) for i in range(5) diff --git a/tox.ini b/tox.ini index 5b557338883..e90baf54229 100644 --- a/tox.ini +++ b/tox.ini @@ -210,7 +210,7 @@ setenv= [testenv:lint] basepython = python3 deps= - flake8==6.0.0 + flake8==6.1.0 flake8-import-order flake8-builtins flake8-future-annotations>=0.0.5 From 972220878c0177531ad6f584fde2717f8e0a4315 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 
24 Nov 2023 15:20:31 -0500 Subject: [PATCH 020/544] fully type functions.py Completed pep-484 typing for the ``sqlalchemy.sql.functions`` module. :func:`_sql.select` constructs made against ``func`` elements should now have filled-in return types. References: #6810 Change-Id: I5121583c9c5b6f7151f811348c7a281c446cf0b8 (cherry picked from commit 045732a738a10891b85be8e286eab3e5b756a445) --- .../unreleased_20/sql_func_typing.rst | 7 + lib/sqlalchemy/sql/_elements_constructors.py | 16 +- lib/sqlalchemy/sql/_typing.py | 7 + lib/sqlalchemy/sql/elements.py | 19 +- lib/sqlalchemy/sql/functions.py | 320 ++++++++++++++---- lib/sqlalchemy/sql/util.py | 2 +- test/typing/plain_files/sql/functions.py | 93 +++-- tools/generate_sql_functions.py | 121 ++++++- 8 files changed, 451 insertions(+), 134 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/sql_func_typing.rst diff --git a/doc/build/changelog/unreleased_20/sql_func_typing.rst b/doc/build/changelog/unreleased_20/sql_func_typing.rst new file mode 100644 index 00000000000..f4ea6f40c33 --- /dev/null +++ b/doc/build/changelog/unreleased_20/sql_func_typing.rst @@ -0,0 +1,7 @@ + .. change:: + :tags: bug, typing + :tickets: 6810 + + Completed pep-484 typing for the ``sqlalchemy.sql.functions`` module. + :func:`_sql.select` constructs made against ``func`` elements should now + have filled-in return types. diff --git a/lib/sqlalchemy/sql/_elements_constructors.py b/lib/sqlalchemy/sql/_elements_constructors.py index 27197375d2d..23e275ed5d7 100644 --- a/lib/sqlalchemy/sql/_elements_constructors.py +++ b/lib/sqlalchemy/sql/_elements_constructors.py @@ -10,7 +10,6 @@ import typing from typing import Any from typing import Callable -from typing import Iterable from typing import Mapping from typing import Optional from typing import overload @@ -49,6 +48,7 @@ from ..util.typing import Literal if typing.TYPE_CHECKING: + from ._typing import _ByArgument from ._typing import _ColumnExpressionArgument from ._typing import _ColumnExpressionOrLiteralArgument from ._typing import _ColumnExpressionOrStrLabelArgument @@ -1483,18 +1483,8 @@ def or_(*clauses): # noqa: F811 def over( element: FunctionElement[_T], - partition_by: Optional[ - Union[ - Iterable[_ColumnExpressionArgument[Any]], - _ColumnExpressionArgument[Any], - ] - ] = None, - order_by: Optional[ - Union[ - Iterable[_ColumnExpressionArgument[Any]], - _ColumnExpressionArgument[Any], - ] - ] = None, + partition_by: Optional[_ByArgument] = None, + order_by: Optional[_ByArgument] = None, range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, ) -> Over[_T]: diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index c9e183058e6..0793fbb3db1 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -11,6 +11,7 @@ from typing import Any from typing import Callable from typing import Dict +from typing import Iterable from typing import Mapping from typing import NoReturn from typing import Optional @@ -198,6 +199,12 @@ def __call__(self, obj: _CE) -> _CE: _ColumnExpressionOrStrLabelArgument = Union[str, _ColumnExpressionArgument[_T]] +_ByArgument = Union[ + Iterable[_ColumnExpressionOrStrLabelArgument[Any]], + _ColumnExpressionOrStrLabelArgument[Any], +] +"""Used for keyword-based ``order_by`` and ``partition_by`` parameters.""" + _InfoType = Dict[Any, Any] """the .info dictionary accepted and used throughout Core /ORM""" diff --git a/lib/sqlalchemy/sql/elements.py 
b/lib/sqlalchemy/sql/elements.py index 48dfd25829a..cafd291eee2 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -80,6 +80,7 @@ from ..util.typing import Self if typing.TYPE_CHECKING: + from ._typing import _ByArgument from ._typing import _ColumnExpressionArgument from ._typing import _ColumnExpressionOrStrLabelArgument from ._typing import _InfoType @@ -4189,18 +4190,8 @@ class Over(ColumnElement[_T]): def __init__( self, element: ColumnElement[_T], - partition_by: Optional[ - Union[ - Iterable[_ColumnExpressionArgument[Any]], - _ColumnExpressionArgument[Any], - ] - ] = None, - order_by: Optional[ - Union[ - Iterable[_ColumnExpressionArgument[Any]], - _ColumnExpressionArgument[Any], - ] - ] = None, + partition_by: Optional[_ByArgument] = None, + order_by: Optional[_ByArgument] = None, range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, ): @@ -5202,12 +5193,12 @@ def _find_columns(clause: ClauseElement) -> Set[ColumnClause[Any]]: return cols -def _type_from_args(args): +def _type_from_args(args: Sequence[ColumnElement[_T]]) -> TypeEngine[_T]: for a in args: if not a.type._isnull: return a.type else: - return type_api.NULLTYPE + return type_api.NULLTYPE # type: ignore def _corresponding_column_or_error(fromclause, column, require_embedded=False): diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index fc23e9d2156..c5eb6b28115 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -4,7 +4,7 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: allow-untyped-defs, allow-untyped-calls + """SQL function API, factories, and built-in functions. @@ -17,13 +17,16 @@ from typing import Any from typing import cast from typing import Dict +from typing import List from typing import Mapping from typing import Optional from typing import overload +from typing import Sequence from typing import Tuple from typing import Type from typing import TYPE_CHECKING from typing import TypeVar +from typing import Union from . import annotation from . import coercions @@ -59,23 +62,35 @@ from .type_api import TypeEngine from .visitors import InternalTraversal from .. import util +from ..util.typing import Self if TYPE_CHECKING: + from ._typing import _ByArgument + from ._typing import _ColumnExpressionArgument + from ._typing import _ColumnExpressionOrLiteralArgument from ._typing import _TypeEngineArgument + from .base import _EntityNamespace + from .elements import ClauseElement + from .elements import KeyedColumnElement + from .elements import TableValuedColumn + from .operators import OperatorType from ..engine.base import Connection from ..engine.cursor import CursorResult from ..engine.interfaces import _CoreMultiExecuteParams from ..engine.interfaces import CoreExecuteOptionsParameter _T = TypeVar("_T", bound=Any) +_S = TypeVar("_S", bound=Any) _registry: util.defaultdict[ str, Dict[str, Type[Function[Any]]] ] = util.defaultdict(dict) -def register_function(identifier, fn, package="_default"): +def register_function( + identifier: str, fn: Type[Function[Any]], package: str = "_default" +) -> None: """Associate a callable with a particular func. name. 
This is normally called by GenericFunction, but is also @@ -138,7 +153,7 @@ class FunctionElement(Executable, ColumnElement[_T], FromClause, Generative): clause_expr: Grouping[Any] - def __init__(self, *clauses: Any): + def __init__(self, *clauses: _ColumnExpressionOrLiteralArgument[Any]): r"""Construct a :class:`.FunctionElement`. :param \*clauses: list of column expressions that form the arguments @@ -154,7 +169,7 @@ def __init__(self, *clauses: Any): :class:`.Function` """ - args = [ + args: Sequence[_ColumnExpressionArgument[Any]] = [ coercions.expect( roles.ExpressionElementRole, c, @@ -171,7 +186,7 @@ def __init__(self, *clauses: Any): _non_anon_label = None @property - def _proxy_key(self): + def _proxy_key(self) -> Any: return super()._proxy_key or getattr(self, "name", None) def _execute_on_connection( @@ -184,7 +199,9 @@ def _execute_on_connection( self, distilled_params, execution_options ) - def scalar_table_valued(self, name, type_=None): + def scalar_table_valued( + self, name: str, type_: Optional[_TypeEngineArgument[_T]] = None + ) -> ScalarFunctionColumn[_T]: """Return a column expression that's against this :class:`_functions.FunctionElement` as a scalar table-valued expression. @@ -217,7 +234,9 @@ def scalar_table_valued(self, name, type_=None): return ScalarFunctionColumn(self, name, type_) - def table_valued(self, *expr, **kw): + def table_valued( + self, *expr: _ColumnExpressionArgument[Any], **kw: Any + ) -> TableValuedAlias: r"""Return a :class:`_sql.TableValuedAlias` representation of this :class:`_functions.FunctionElement` with table-valued expressions added. @@ -303,7 +322,9 @@ def table_valued(self, *expr, **kw): return new_func.alias(name=name, joins_implicitly=joins_implicitly) - def column_valued(self, name=None, joins_implicitly=False): + def column_valued( + self, name: Optional[str] = None, joins_implicitly: bool = False + ) -> TableValuedColumn[_T]: """Return this :class:`_functions.FunctionElement` as a column expression that selects from itself as a FROM clause. @@ -345,7 +366,7 @@ def column_valued(self, name=None, joins_implicitly=False): return self.alias(name=name, joins_implicitly=joins_implicitly).column @util.ro_non_memoized_property - def columns(self): + def columns(self) -> ColumnCollection[str, KeyedColumnElement[Any]]: # type: ignore[override] # noqa: E501 r"""The set of columns exported by this :class:`.FunctionElement`. 
This is a placeholder collection that allows the function to be @@ -371,7 +392,7 @@ def columns(self): return self.c @util.ro_memoized_property - def c(self): + def c(self) -> ColumnCollection[str, KeyedColumnElement[Any]]: # type: ignore[override] # noqa: E501 """synonym for :attr:`.FunctionElement.columns`.""" return ColumnCollection( @@ -379,16 +400,21 @@ def c(self): ) @property - def _all_selected_columns(self): + def _all_selected_columns(self) -> Sequence[KeyedColumnElement[Any]]: if is_table_value_type(self.type): - cols = self.type._elements + # TODO: this might not be fully accurate + cols = cast( + "Sequence[KeyedColumnElement[Any]]", self.type._elements + ) else: cols = [self.label(None)] return cols @property - def exported_columns(self): + def exported_columns( # type: ignore[override] + self, + ) -> ColumnCollection[str, KeyedColumnElement[Any]]: return self.columns @HasMemoized.memoized_attribute @@ -399,7 +425,14 @@ def clauses(self) -> ClauseList: """ return cast(ClauseList, self.clause_expr.element) - def over(self, partition_by=None, order_by=None, rows=None, range_=None): + def over( + self, + *, + partition_by: Optional[_ByArgument] = None, + order_by: Optional[_ByArgument] = None, + rows: Optional[Tuple[Optional[int], Optional[int]]] = None, + range_: Optional[Tuple[Optional[int], Optional[int]]] = None, + ) -> Over[_T]: """Produce an OVER clause against this function. Used against aggregate or so-called "window" functions, @@ -431,7 +464,9 @@ def over(self, partition_by=None, order_by=None, rows=None, range_=None): range_=range_, ) - def within_group(self, *order_by): + def within_group( + self, *order_by: _ColumnExpressionArgument[Any] + ) -> WithinGroup[_T]: """Produce a WITHIN GROUP (ORDER BY expr) clause against this function. Used against so-called "ordered set aggregate" and "hypothetical @@ -449,7 +484,9 @@ def within_group(self, *order_by): """ return WithinGroup(self, *order_by) - def filter(self, *criterion): + def filter( + self, *criterion: _ColumnExpressionArgument[bool] + ) -> Union[Self, FunctionFilter[_T]]: """Produce a FILTER clause against this function. Used against aggregate and window functions, @@ -479,7 +516,9 @@ def filter(self, *criterion): return self return FunctionFilter(self, *criterion) - def as_comparison(self, left_index, right_index): + def as_comparison( + self, left_index: int, right_index: int + ) -> FunctionAsBinary: """Interpret this expression as a boolean comparison between two values. @@ -554,10 +593,12 @@ class Venue(Base): return FunctionAsBinary(self, left_index, right_index) @property - def _from_objects(self): + def _from_objects(self) -> Any: return self.clauses._from_objects - def within_group_type(self, within_group): + def within_group_type( + self, within_group: WithinGroup[_S] + ) -> Optional[TypeEngine[_S]]: """For types that define their return type as based on the criteria within a WITHIN GROUP (ORDER BY) expression, called by the :class:`.WithinGroup` construct. @@ -569,7 +610,9 @@ def within_group_type(self, within_group): return None - def alias(self, name=None, joins_implicitly=False): + def alias( + self, name: Optional[str] = None, joins_implicitly: bool = False + ) -> TableValuedAlias: r"""Produce a :class:`_expression.Alias` construct against this :class:`.FunctionElement`. 
@@ -647,7 +690,7 @@ def alias(self, name=None, joins_implicitly=False): joins_implicitly=joins_implicitly, ) - def select(self) -> Select[Any]: + def select(self) -> Select[Tuple[_T]]: """Produce a :func:`_expression.select` construct against this :class:`.FunctionElement`. @@ -661,7 +704,14 @@ def select(self) -> Select[Any]: s = s.execution_options(**self._execution_options) return s - def _bind_param(self, operator, obj, type_=None, **kw): + def _bind_param( + self, + operator: OperatorType, + obj: Any, + type_: Optional[TypeEngine[_T]] = None, + expanding: bool = False, + **kw: Any, + ) -> BindParameter[_T]: return BindParameter( None, obj, @@ -669,10 +719,11 @@ def _bind_param(self, operator, obj, type_=None, **kw): _compared_to_type=self.type, unique=True, type_=type_, + expanding=expanding, **kw, ) - def self_group(self, against=None): + def self_group(self, against: Optional[OperatorType] = None) -> ClauseElement: # type: ignore[override] # noqa E501 # for the moment, we are parenthesizing all array-returning # expressions against getitem. This may need to be made # more portable if in the future we support other DBs @@ -685,7 +736,7 @@ def self_group(self, against=None): return super().self_group(against=against) @property - def entity_namespace(self): + def entity_namespace(self) -> _EntityNamespace: """overrides FromClause.entity_namespace as functions are generally column expressions and not FromClauses. @@ -707,7 +758,7 @@ class FunctionAsBinary(BinaryExpression[Any]): left_index: int right_index: int - def _gen_cache_key(self, anon_map, bindparams): + def _gen_cache_key(self, anon_map: Any, bindparams: Any) -> Any: return ColumnElement._gen_cache_key(self, anon_map, bindparams) def __init__( @@ -860,8 +911,8 @@ class _FunctionGenerator: """ # noqa - def __init__(self, **opts): - self.__names = [] + def __init__(self, **opts: Any): + self.__names: List[str] = [] self.opts = opts def __getattr__(self, name: str) -> _FunctionGenerator: @@ -936,8 +987,33 @@ def cast(self) -> Type[Cast[Any]]: def char_length(self) -> Type[char_length]: ... - @property - def coalesce(self) -> Type[coalesce[Any]]: + # appease mypy which seems to not want to accept _T from + # _ColumnExpressionArgument, as it includes non-generic types + + @overload + def coalesce( + self, + col: ColumnElement[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> coalesce[_T]: + ... + + @overload + def coalesce( + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> coalesce[_T]: + ... + + def coalesce( + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> coalesce[_T]: ... @property @@ -992,12 +1068,62 @@ def localtime(self) -> Type[localtime]: def localtimestamp(self) -> Type[localtimestamp]: ... - @property - def max(self) -> Type[max[Any]]: # noqa: A001 + # appease mypy which seems to not want to accept _T from + # _ColumnExpressionArgument, as it includes non-generic types + + @overload + def max( # noqa: A001 + self, + col: ColumnElement[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> max[_T]: ... - @property - def min(self) -> Type[min[Any]]: # noqa: A001 + @overload + def max( # noqa: A001 + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> max[_T]: + ... 
+ + def max( # noqa: A001 + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> max[_T]: + ... + + # appease mypy which seems to not want to accept _T from + # _ColumnExpressionArgument, as it includes non-generic types + + @overload + def min( # noqa: A001 + self, + col: ColumnElement[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> min[_T]: + ... + + @overload + def min( # noqa: A001 + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> min[_T]: + ... + + def min( # noqa: A001 + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> min[_T]: ... @property @@ -1036,10 +1162,6 @@ def random(self) -> Type[random]: def rank(self) -> Type[rank]: ... - @property - def returntypefromargs(self) -> Type[ReturnTypeFromArgs[Any]]: - ... - @property def rollup(self) -> Type[rollup[Any]]: ... @@ -1048,8 +1170,33 @@ def rollup(self) -> Type[rollup[Any]]: def session_user(self) -> Type[session_user]: ... - @property - def sum(self) -> Type[sum[Any]]: # noqa: A001 + # appease mypy which seems to not want to accept _T from + # _ColumnExpressionArgument, as it includes non-generic types + + @overload + def sum( # noqa: A001 + self, + col: ColumnElement[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> sum[_T]: + ... + + @overload + def sum( # noqa: A001 + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> sum[_T]: + ... + + def sum( # noqa: A001 + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ) -> sum[_T]: ... @property @@ -1131,10 +1278,30 @@ class Function(FunctionElement[_T]): """ + @overload + def __init__( + self, + name: str, + *clauses: _ColumnExpressionOrLiteralArgument[_T], + type_: None = ..., + packagenames: Optional[Tuple[str, ...]] = ..., + ): + ... + + @overload + def __init__( + self, + name: str, + *clauses: _ColumnExpressionOrLiteralArgument[Any], + type_: _TypeEngineArgument[_T] = ..., + packagenames: Optional[Tuple[str, ...]] = ..., + ): + ... 
+ def __init__( self, name: str, - *clauses: Any, + *clauses: _ColumnExpressionOrLiteralArgument[Any], type_: Optional[_TypeEngineArgument[_T]] = None, packagenames: Optional[Tuple[str, ...]] = None, ): @@ -1153,7 +1320,14 @@ def __init__( FunctionElement.__init__(self, *clauses) - def _bind_param(self, operator, obj, type_=None, **kw): + def _bind_param( + self, + operator: OperatorType, + obj: Any, + type_: Optional[TypeEngine[_T]] = None, + expanding: bool = False, + **kw: Any, + ) -> BindParameter[_T]: return BindParameter( self.name, obj, @@ -1161,6 +1335,7 @@ def _bind_param(self, operator, obj, type_=None, **kw): _compared_to_type=self.type, type_=type_, unique=True, + expanding=expanding, **kw, ) @@ -1306,7 +1481,9 @@ def _register_generic_function( # Set _register to True to register child classes by default cls._register = True - def __init__(self, *args, **kwargs): + def __init__( + self, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any + ): parsed_args = kwargs.pop("_parsed_args", None) if parsed_args is None: parsed_args = [ @@ -1332,8 +1509,8 @@ def __init__(self, *args, **kwargs): ) -register_function("cast", Cast) -register_function("extract", Extract) +register_function("cast", Cast) # type: ignore +register_function("extract", Extract) # type: ignore class next_value(GenericFunction[int]): @@ -1353,7 +1530,7 @@ class next_value(GenericFunction[int]): ("sequence", InternalTraversal.dp_named_ddl_element) ] - def __init__(self, seq, **kw): + def __init__(self, seq: schema.Sequence, **kw: Any): assert isinstance( seq, schema.Sequence ), "next_value() accepts a Sequence object as input." @@ -1362,14 +1539,14 @@ def __init__(self, seq, **kw): seq.data_type or getattr(self, "type", None) ) - def compare(self, other, **kw): + def compare(self, other: Any, **kw: Any) -> bool: return ( isinstance(other, next_value) and self.sequence.name == other.sequence.name ) @property - def _from_objects(self): + def _from_objects(self) -> Any: return [] @@ -1378,7 +1555,7 @@ class AnsiFunction(GenericFunction[_T]): inherit_cache = True - def __init__(self, *args, **kwargs): + def __init__(self, *args: _ColumnExpressionArgument[Any], **kwargs: Any): GenericFunction.__init__(self, *args, **kwargs) @@ -1387,8 +1564,29 @@ class ReturnTypeFromArgs(GenericFunction[_T]): inherit_cache = True - def __init__(self, *args, **kwargs): - fn_args = [ + # appease mypy which seems to not want to accept _T from + # _ColumnExpressionArgument, as it includes non-generic types + + @overload + def __init__( + self, + col: ColumnElement[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ): + ... + + @overload + def __init__( + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, + ): + ... 
+ + def __init__(self, *args: _ColumnExpressionArgument[Any], **kwargs: Any): + fn_args: Sequence[ColumnElement[Any]] = [ coercions.expect( roles.ExpressionElementRole, c, @@ -1469,7 +1667,7 @@ class char_length(GenericFunction[int]): type = sqltypes.Integer() inherit_cache = True - def __init__(self, arg, **kw): + def __init__(self, arg: _ColumnExpressionArgument[str], **kw: Any): # slight hack to limit to just one positional argument # not sure why this one function has this special treatment super().__init__(arg, **kw) @@ -1506,7 +1704,11 @@ class count(GenericFunction[int]): type = sqltypes.Integer() inherit_cache = True - def __init__(self, expression=None, **kwargs): + def __init__( + self, + expression: Optional[_ColumnExpressionArgument[Any]] = None, + **kwargs: Any, + ): if expression is None: expression = literal_column("*") super().__init__(expression, **kwargs) @@ -1595,8 +1797,8 @@ class array_agg(GenericFunction[_T]): inherit_cache = True - def __init__(self, *args, **kwargs): - fn_args = [ + def __init__(self, *args: _ColumnExpressionArgument[Any], **kwargs: Any): + fn_args: Sequence[ColumnElement[Any]] = [ coercions.expect( roles.ExpressionElementRole, c, apply_propagate_attrs=self ) @@ -1624,9 +1826,13 @@ class OrderedSetAgg(GenericFunction[_T]): array_for_multi_clause = False inherit_cache = True - def within_group_type(self, within_group): + def within_group_type( + self, within_group: WithinGroup[Any] + ) -> TypeEngine[Any]: func_clauses = cast(ClauseList, self.clause_expr.element) - order_by = sqlutil.unwrap_order_by(within_group.order_by) + order_by: Sequence[ColumnElement[Any]] = sqlutil.unwrap_order_by( + within_group.order_by + ) if self.array_for_multi_clause and len(func_clauses.clauses) > 1: return sqltypes.ARRAY(order_by[0].type) else: @@ -1824,5 +2030,5 @@ class aggregate_strings(GenericFunction[str]): _has_args = True inherit_cache = True - def __init__(self, clause, separator): + def __init__(self, clause: _ColumnExpressionArgument[Any], separator: str): super().__init__(clause, separator) diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index 28480a5d437..19551831fe3 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -367,7 +367,7 @@ def visit_column(column): return tables -def unwrap_order_by(clause): +def unwrap_order_by(clause: Any) -> Any: """Break up an 'order by' expression into individual column-expressions, without DESC/ASC/NULLS FIRST/NULLS LAST""" diff --git a/test/typing/plain_files/sql/functions.py b/test/typing/plain_files/sql/functions.py index e66e554cff7..6a345fcf6ec 100644 --- a/test/typing/plain_files/sql/functions.py +++ b/test/typing/plain_files/sql/functions.py @@ -2,14 +2,17 @@ from sqlalchemy import column from sqlalchemy import func +from sqlalchemy import Integer from sqlalchemy import select +from sqlalchemy import Sequence +from sqlalchemy import String # START GENERATED FUNCTION TYPING TESTS # code within this block is **programmatically, # statically generated** by tools/generate_sql_functions.py -stmt1 = select(func.aggregate_strings(column("x"), column("x"))) +stmt1 = select(func.aggregate_strings(column("x", String), ",")) # EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] reveal_type(stmt1) @@ -21,105 +24,129 @@ reveal_type(stmt2) -stmt3 = select(func.concat()) +stmt3 = select(func.coalesce(column("x", Integer))) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt3) -stmt4 = select(func.count(column("x"))) +stmt4 = 
select(func.concat()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] reveal_type(stmt4) -stmt5 = select(func.cume_dist()) +stmt5 = select(func.count(column("x"))) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*Decimal\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt5) -stmt6 = select(func.current_date()) +stmt6 = select(func.cume_dist()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*date\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*Decimal\]\] reveal_type(stmt6) -stmt7 = select(func.current_time()) +stmt7 = select(func.current_date()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*time\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*date\]\] reveal_type(stmt7) -stmt8 = select(func.current_timestamp()) +stmt8 = select(func.current_time()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*time\]\] reveal_type(stmt8) -stmt9 = select(func.current_user()) +stmt9 = select(func.current_timestamp()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] reveal_type(stmt9) -stmt10 = select(func.dense_rank()) +stmt10 = select(func.current_user()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] reveal_type(stmt10) -stmt11 = select(func.localtime()) +stmt11 = select(func.dense_rank()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt11) -stmt12 = select(func.localtimestamp()) +stmt12 = select(func.localtime()) # EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] reveal_type(stmt12) -stmt13 = select(func.next_value(column("x"))) +stmt13 = select(func.localtimestamp()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] reveal_type(stmt13) -stmt14 = select(func.now()) +stmt14 = select(func.max(column("x", Integer))) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt14) -stmt15 = select(func.percent_rank()) +stmt15 = select(func.min(column("x", Integer))) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*Decimal\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt15) -stmt16 = select(func.rank()) +stmt16 = select(func.next_value(Sequence("x_seq"))) # EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt16) -stmt17 = select(func.session_user()) +stmt17 = select(func.now()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] reveal_type(stmt17) -stmt18 = select(func.sysdate()) +stmt18 = select(func.percent_rank()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*Decimal\]\] reveal_type(stmt18) -stmt19 = select(func.user()) +stmt19 = select(func.rank()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt19) + +stmt20 = select(func.session_user()) + +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +reveal_type(stmt20) + + +stmt21 = select(func.sum(column("x", Integer))) + +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +reveal_type(stmt21) + + +stmt22 = select(func.sysdate()) + +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] +reveal_type(stmt22) + + +stmt23 = select(func.user()) + +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +reveal_type(stmt23) + # END GENERATED FUNCTION TYPING TESTS diff --git a/tools/generate_sql_functions.py b/tools/generate_sql_functions.py index 
848a9272250..348b3344845 100644 --- a/tools/generate_sql_functions.py +++ b/tools/generate_sql_functions.py @@ -11,6 +11,7 @@ import textwrap from sqlalchemy.sql.functions import _registry +from sqlalchemy.sql.functions import ReturnTypeFromArgs from sqlalchemy.types import TypeEngine from sqlalchemy.util.tool_support import code_writer_cmd @@ -18,7 +19,10 @@ def _fns_in_deterministic_order(): reg = _registry["_default"] for key in sorted(reg): - yield key, reg[key] + cls = reg[key] + if cls is ReturnTypeFromArgs: + continue + yield key, cls def process_functions(filename: str, cmd: code_writer_cmd) -> str: @@ -53,23 +57,75 @@ def process_functions(filename: str, cmd: code_writer_cmd) -> str: for key, fn_class in _fns_in_deterministic_order(): is_reserved_word = key in builtins - guess_its_generic = bool(fn_class.__parameters__) + if issubclass(fn_class, ReturnTypeFromArgs): + buf.write( + textwrap.indent( + f""" + +# appease mypy which seems to not want to accept _T from +# _ColumnExpressionArgument, as it includes non-generic types + +@overload +def {key}( {' # noqa: A001' if is_reserved_word else ''} + self, + col: ColumnElement[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, +) -> {fn_class.__name__}[_T]: + ... - buf.write( - textwrap.indent( - f""" +@overload +def {key}( {' # noqa: A001' if is_reserved_word else ''} + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, +) -> {fn_class.__name__}[_T]: + ... + +def {key}( {' # noqa: A001' if is_reserved_word else ''} + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionArgument[Any], + **kwargs: Any, +) -> {fn_class.__name__}[_T]: + ... + + """, + indent, + ) + ) + else: + guess_its_generic = bool(fn_class.__parameters__) + + # the latest flake8 is quite broken here: + # 1. it insists on linting f-strings, no option + # to turn it off + # 2. the f-string indentation rules are either broken + # or completely impossible to figure out + # 3. there's no way to E501 a too-long f-string, + # so I can't even put the expressions all one line + # to get around the indentation errors + # 4. Therefore here I have to concat part of the + # string outside of the f-string + _type = fn_class.__name__ + _type += "[Any]" if guess_its_generic else "" + _reserved_word = ( + " # noqa: A001" if is_reserved_word else "" + ) + + # now the f-string + buf.write( + textwrap.indent( + f""" @property -def {key}(self) -> Type[{fn_class.__name__}{ - '[Any]' if guess_its_generic else '' -}]:{ - ' # noqa: A001' if is_reserved_word else '' -} +def {key}(self) -> Type[{_type}]:{_reserved_word} ... 
""", - indent, + indent, + ) ) - ) m = re.match( r"^( *)# START GENERATED FUNCTION TYPING TESTS", @@ -92,15 +148,48 @@ def {key}(self) -> Type[{fn_class.__name__}{ count = 0 for key, fn_class in _fns_in_deterministic_order(): - if hasattr(fn_class, "type") and isinstance( + if issubclass(fn_class, ReturnTypeFromArgs): + count += 1 + + buf.write( + textwrap.indent( + rf""" +stmt{count} = select(func.{key}(column('x', Integer))) + +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +reveal_type(stmt{count}) + +""", + indent, + ) + ) + elif fn_class.__name__ == "aggregate_strings": + count += 1 + buf.write( + textwrap.indent( + rf""" +stmt{count} = select(func.{key}(column('x', String), ',')) + +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +reveal_type(stmt{count}) + +""", + indent, + ) + ) + + elif hasattr(fn_class, "type") and isinstance( fn_class.type, TypeEngine ): python_type = fn_class.type.python_type python_expr = rf"Tuple\[.*{python_type.__name__}\]" argspec = inspect.getfullargspec(fn_class) - args = ", ".join( - 'column("x")' for elem in argspec.args[1:] - ) + if fn_class.__name__ == "next_value": + args = "Sequence('x_seq')" + else: + args = ", ".join( + 'column("x")' for elem in argspec.args[1:] + ) count += 1 buf.write( From c28bcf4b490c6e45c38bde56332b6c68bd1f4ea4 Mon Sep 17 00:00:00 2001 From: Martijn Pieters Date: Sat, 18 Nov 2023 16:36:08 -0500 Subject: [PATCH 021/544] Add type annotations for Function.filter This includes all methods / properties on the returned FunctionFilter object. This contributes towards #6810 This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. 
Closes: #10643 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10643 Pull-request-sha: 6137b7b995b6ea0bd4e4195c5693d2312fa26639 Change-Id: I2af1af7617d0cd3fd30b262d36ff982464bac011 (cherry picked from commit 52452ec39d18567126673eeef4cf0dd12039043b) --- lib/sqlalchemy/sql/elements.py | 62 ++++++++++++------- lib/sqlalchemy/sql/functions.py | 14 ++++- lib/sqlalchemy/sql/operators.py | 6 +- .../typing/plain_files/sql/functions_again.py | 6 ++ 4 files changed, 62 insertions(+), 26 deletions(-) diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index cafd291eee2..531be31555e 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -2694,9 +2694,11 @@ class Null(SingletonConstant, roles.ConstExprRole[None], ColumnElement[None]): _traverse_internals: _TraverseInternalsType = [] _singleton: Null - @util.memoized_property - def type(self): - return type_api.NULLTYPE + if not TYPE_CHECKING: + + @util.memoized_property + def type(self) -> TypeEngine[_T]: # noqa: A001 + return type_api.NULLTYPE @classmethod def _instance(cls) -> Null: @@ -2722,9 +2724,11 @@ class False_( _traverse_internals: _TraverseInternalsType = [] _singleton: False_ - @util.memoized_property - def type(self): - return type_api.BOOLEANTYPE + if not TYPE_CHECKING: + + @util.memoized_property + def type(self) -> TypeEngine[_T]: # noqa: A001 + return type_api.BOOLEANTYPE def _negate(self) -> True_: return True_._singleton @@ -2750,9 +2754,11 @@ class True_(SingletonConstant, roles.ConstExprRole[bool], ColumnElement[bool]): _traverse_internals: _TraverseInternalsType = [] _singleton: True_ - @util.memoized_property - def type(self): - return type_api.BOOLEANTYPE + if not TYPE_CHECKING: + + @util.memoized_property + def type(self) -> TypeEngine[_T]: # noqa: A001 + return type_api.BOOLEANTYPE def _negate(self) -> False_: return False_._singleton @@ -4266,9 +4272,11 @@ def _interpret_range( return lower, upper - @util.memoized_property - def type(self): - return self.element.type + if not TYPE_CHECKING: + + @util.memoized_property + def type(self) -> TypeEngine[_T]: # noqa: A001 + return self.element.type @util.ro_non_memoized_property def _from_objects(self) -> List[FromClause]: @@ -4341,13 +4349,15 @@ def over(self, partition_by=None, order_by=None, range_=None, rows=None): rows=rows, ) - @util.memoized_property - def type(self): - wgt = self.element.within_group_type(self) - if wgt is not None: - return wgt - else: - return self.element.type + if not TYPE_CHECKING: + + @util.memoized_property + def type(self) -> TypeEngine[_T]: # noqa: A001 + wgt = self.element.within_group_type(self) + if wgt is not None: + return wgt + else: + return self.element.type @util.ro_non_memoized_property def _from_objects(self) -> List[FromClause]: @@ -4397,7 +4407,7 @@ def __init__( self.func = func self.filter(*criterion) - def filter(self, *criterion): + def filter(self, *criterion: _ColumnExpressionArgument[bool]) -> Self: """Produce an additional FILTER against the function. 
This method adds additional criteria to the initial criteria @@ -4461,15 +4471,19 @@ def over( rows=rows, ) - def self_group(self, against=None): + def self_group( + self, against: Optional[OperatorType] = None + ) -> Union[Self, Grouping[_T]]: if operators.is_precedent(operators.filter_op, against): return Grouping(self) else: return self - @util.memoized_property - def type(self): - return self.func.type + if not TYPE_CHECKING: + + @util.memoized_property + def type(self) -> TypeEngine[_T]: # noqa: A001 + return self.func.type @util.ro_non_memoized_property def _from_objects(self) -> List[FromClause]: diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index c5eb6b28115..5b54f46ab73 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -62,7 +62,6 @@ from .type_api import TypeEngine from .visitors import InternalTraversal from .. import util -from ..util.typing import Self if TYPE_CHECKING: @@ -79,6 +78,7 @@ from ..engine.cursor import CursorResult from ..engine.interfaces import _CoreMultiExecuteParams from ..engine.interfaces import CoreExecuteOptionsParameter + from ..util.typing import Self _T = TypeVar("_T", bound=Any) _S = TypeVar("_S", bound=Any) @@ -484,6 +484,18 @@ def within_group( """ return WithinGroup(self, *order_by) + @overload + def filter(self) -> Self: + ... + + @overload + def filter( + self, + __criterion0: _ColumnExpressionArgument[bool], + *criterion: _ColumnExpressionArgument[bool], + ) -> FunctionFilter[_T]: + ... + def filter( self, *criterion: _ColumnExpressionArgument[bool] ) -> Union[Self, FunctionFilter[_T]]: diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index 6402d0fd1b2..1d3f2f483f6 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -2582,9 +2582,13 @@ class _OpLimit(IntEnum): } -def is_precedent(operator: OperatorType, against: OperatorType) -> bool: +def is_precedent( + operator: OperatorType, against: Optional[OperatorType] +) -> bool: if operator is against and is_natural_self_precedent(operator): return False + elif against is None: + return True else: return bool( _PRECEDENCE.get( diff --git a/test/typing/plain_files/sql/functions_again.py b/test/typing/plain_files/sql/functions_again.py index edfbd6bb2b1..5173d1fe082 100644 --- a/test/typing/plain_files/sql/functions_again.py +++ b/test/typing/plain_files/sql/functions_again.py @@ -21,3 +21,9 @@ class Foo(Base): func.row_number().over(partition_by=[Foo.a.desc(), Foo.b.desc()]) func.row_number().over(order_by="a", partition_by=("a", "b")) func.row_number().over(partition_by="a", order_by=("a", "b")) + + +# EXPECTED_TYPE: Function[Any] +reveal_type(func.row_number().filter()) +# EXPECTED_TYPE: FunctionFilter[Any] +reveal_type(func.row_number().filter(Foo.a > 0)) From ece55ff29d3385ad7aec6556c12a2181cc9ec513 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 26 Nov 2023 10:02:47 -0500 Subject: [PATCH 022/544] add MARS connection for aioodbc on jenkins main we are getting a lot of connection busy with other results, which we assume is due to the thread-pool based approach of aioodbc not being very solid. MARS is described at: https://stackoverflow.com/questions/9017264/why-only-some-users-get-the-error-connection-is-busy-with-results-for-another https://learn.microsoft.com/en-us/sql/relational-databases/native-client/features/using-multiple-active-result-sets-mars?view=sql-server-ver16 not clear why the name of the parameter is different in those two articles. 
using a totally made up parameter doesn't raise any error, so it's not clear if this works at all. Change-Id: I8e437e9f46c1c070c5102a24d7d82a912e8b5145 --- lib/sqlalchemy/dialects/mssql/provision.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/sqlalchemy/dialects/mssql/provision.py b/lib/sqlalchemy/dialects/mssql/provision.py index 096ae03fa56..2db3ee44f01 100644 --- a/lib/sqlalchemy/dialects/mssql/provision.py +++ b/lib/sqlalchemy/dialects/mssql/provision.py @@ -29,6 +29,9 @@ def generate_driver_url(url, driver, query_str): if driver not in ("pyodbc", "aioodbc"): new_url = new_url.set(query="") + if driver == "aioodbc": + new_url = new_url.update_query_dict({"MARS_Connection": "Yes"}) + if query_str: new_url = new_url.update_query_string(query_str) @@ -37,6 +40,7 @@ def generate_driver_url(url, driver, query_str): except exc.NoSuchModuleError: return None else: + print(f"NEW URL!!!!! {new_url}") return new_url From b4223aa0052afbe6c6f6d9491f460c0b8ba17bc5 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 26 Nov 2023 15:16:08 -0500 Subject: [PATCH 023/544] remove errant print statement Change-Id: I9cb1571995f078c359a9c2793670a017effe4be2 --- lib/sqlalchemy/dialects/mssql/provision.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/sqlalchemy/dialects/mssql/provision.py b/lib/sqlalchemy/dialects/mssql/provision.py index 2db3ee44f01..75e15ce4dc4 100644 --- a/lib/sqlalchemy/dialects/mssql/provision.py +++ b/lib/sqlalchemy/dialects/mssql/provision.py @@ -40,7 +40,6 @@ def generate_driver_url(url, driver, query_str): except exc.NoSuchModuleError: return None else: - print(f"NEW URL!!!!! {new_url}") return new_url From e5b07fa0d0733d58f2ccae76fa5e4b2209129c95 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 27 Nov 2023 21:35:41 -0500 Subject: [PATCH 024/544] freshen up callcounts for py311 move the oracle tests to use oracledb (because i dont feel like installing OCI on my laptops anymore) Change-Id: I8ca7ceb5083dbf2510ec02dc40f202a8e0eaf3dc (cherry picked from commit 82690b1cfc1e76e5deb622a9afefbcf3be299962) --- regen_callcounts.tox.ini | 23 +- test/profiles.txt | 564 ++++++++++++++++++--------------------- 2 files changed, 270 insertions(+), 317 deletions(-) diff --git a/regen_callcounts.tox.ini b/regen_callcounts.tox.ini index 5f9c2aa99bc..9a98ce8efa7 100644 --- a/regen_callcounts.tox.ini +++ b/regen_callcounts.tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py{310}-sqla_{cext,nocext}-db_{sqlite,postgresql,mysql,oracle,mssql} +envlist = py{311}-sqla_{cext,nocext}-db_{sqlite,postgresql,mysql,oracle,mssql} [testenv] deps=pytest @@ -7,8 +7,7 @@ deps=pytest mock db_postgresql: .[postgresql] db_mysql: .[mysql] - db_mysql: .[pymysql] - db_oracle: .[oracle] + db_oracle: .[oracle_oracledb] db_mssql: .[mssql] @@ -22,13 +21,13 @@ commands= db_{mssql}: {env:BASECOMMAND} {env:MSSQL:} {posargs} passenv= - ORACLE_HOME - NLS_LANG - TOX_POSTGRESQL - TOX_MYSQL - TOX_ORACLE - TOX_MSSQL - TOX_SQLITE + ORACLE_HOME + NLS_LANG + TOX_POSTGRESQL + TOX_MYSQL + TOX_ORACLE + TOX_MSSQL + TOX_SQLITE TOX_WORKERS # -E : ignore PYTHON* environment variables (such as PYTHONPATH) @@ -41,8 +40,8 @@ setenv= sqla_cext: REQUIRE_SQLALCHEMY_CEXT=1 db_sqlite: SQLITE={env:TOX_SQLITE:--db sqlite} db_postgresql: POSTGRESQL={env:TOX_POSTGRESQL:--db postgresql} - db_mysql: MYSQL={env:TOX_MYSQL:--db mysql --db pymysql} - db_oracle: ORACLE={env:TOX_ORACLE:--db oracle} + db_mysql: MYSQL={env:TOX_MYSQL:--db mysql} + db_oracle: ORACLE={env:TOX_ORACLE:--db oracledb} db_mssql: MSSQL={env:TOX_MSSQL:--db mssql} diff --git 
a/test/profiles.txt b/test/profiles.txt index 7db24e2ff56..d943f418ff6 100644 --- a/test/profiles.txt +++ b/test/profiles.txt @@ -1,4 +1,4 @@ -# /mnt/photon_home/classic/dev/sqlalchemy/test/profiles.txt +# /home/classic/dev/sqlalchemy/test/profiles.txt # This file is written out on a per-environment basis. # For each test in aaa_profiling, the corresponding function and # environment is located within this file. If it doesn't exist, @@ -13,487 +13,441 @@ # TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert -test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 77 -test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 77 -test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 75 +test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 78 +test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 78 +test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 78 +test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 78 +test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 78 +test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 78 +test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 78 +test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 78 +test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 78 +test.aaa_profiling.test_compiler.CompileTest.test_insert x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 78 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_select -test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 195 -test.aaa_profiling.test_compiler.CompileTest.test_select 
x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 195 -test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 195 -test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 195 -test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 195 -test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 195 -test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 195 -test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 219 -test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 219 -test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 193 -test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 193 +test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 221 +test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 221 +test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 221 +test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 221 +test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 221 +test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 221 +test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 221 +test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 221 +test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 221 +test.aaa_profiling.test_compiler.CompileTest.test_select x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 221 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels -test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 219 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 219 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 219 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 219 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 219 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 219 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 
x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 219 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 243 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 243 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 217 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 217 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 245 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 245 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 245 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 245 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 245 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 245 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 245 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 245 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 245 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 245 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_update -test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 81 -test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 81 -test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 81 -test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 81 -test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 81 -test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 81 -test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 81 -test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 86 -test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 86 -test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 79 -test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 79 +test.aaa_profiling.test_compiler.CompileTest.test_update 
x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 87 +test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 87 +test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 87 +test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 87 +test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 87 +test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 87 +test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 87 +test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 87 +test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 87 +test.aaa_profiling.test_compiler.CompileTest.test_update x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 87 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 180 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 180 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 180 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 180 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 180 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 180 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 180 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 184 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 187 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 180 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 180 - -# TEST: test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_cached - -test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_cached x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 44 - -# TEST: test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_cached[no_embedded] - -test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_cached[no_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 44 - -# TEST: test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_cached[require_embedded] - 
-test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_cached[require_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 70 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 186 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 189 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 186 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 189 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 186 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 189 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 186 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 189 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 186 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 189 # TEST: test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_isolated[no_embedded] -test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_isolated[no_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 11 -test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_isolated[no_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 13 +test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_isolated[no_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 11 +test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_isolated[no_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 13 # TEST: test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_isolated[require_embedded] -test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_isolated[require_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 13 -test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_isolated[require_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 15 +test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_isolated[require_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 13 +test.aaa_profiling.test_misc.CCLookupTest.test_corresponding_column_isolated[require_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 15 # TEST: test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select[no_embedded] -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select[no_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 13336 -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select[no_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 13354 
+test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select[no_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 13347 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select[no_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 13650 # TEST: test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select[require_embedded] -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select[require_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 13336 -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select[require_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 13354 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select[require_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 13347 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select[require_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 13650 # TEST: test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select_cols[no_embedded] -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select_cols[no_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 29839 -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select_cols[no_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 35374 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select_cols[no_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 28449 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select_cols[no_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 35632 # TEST: test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select_cols[require_embedded] -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select_cols[require_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 29923 -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select_cols[require_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 35374 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select_cols[require_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 28449 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_aliased_class_select_cols[require_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 35876 # TEST: test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_many_corresponding_column[no_embedded] -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_many_corresponding_column[no_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 1239 -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_many_corresponding_column[no_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 1392 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_many_corresponding_column[no_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1261 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_many_corresponding_column[no_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1437 # TEST: 
test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_many_corresponding_column[require_embedded] -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_many_corresponding_column[require_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 1257 -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_many_corresponding_column[require_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 1410 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_many_corresponding_column[require_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1279 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_many_corresponding_column[require_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1455 # TEST: test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_single_corresponding_column[no_embedded] -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_single_corresponding_column[no_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 1258 -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_single_corresponding_column[no_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 1395 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_single_corresponding_column[no_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1280 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_single_corresponding_column[no_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1440 # TEST: test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_single_corresponding_column[require_embedded] -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_single_corresponding_column[require_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 1260 -test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_single_corresponding_column[require_embedded] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 1397 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_single_corresponding_column[require_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1282 +test.aaa_profiling.test_misc.CCLookupTest.test_gen_subq_to_table_single_corresponding_column[require_embedded] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1442 # TEST: test.aaa_profiling.test_misc.CacheKeyTest.test_statement_key_is_cached -test.aaa_profiling.test_misc.CacheKeyTest.test_statement_key_is_cached x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 303 -test.aaa_profiling.test_misc.CacheKeyTest.test_statement_key_is_cached x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 303 +test.aaa_profiling.test_misc.CacheKeyTest.test_statement_key_is_cached x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 303 +test.aaa_profiling.test_misc.CacheKeyTest.test_statement_key_is_cached x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 303 # TEST: test.aaa_profiling.test_misc.CacheKeyTest.test_statement_key_is_not_cached -test.aaa_profiling.test_misc.CacheKeyTest.test_statement_key_is_not_cached x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 4403 -test.aaa_profiling.test_misc.CacheKeyTest.test_statement_key_is_not_cached 
x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 6103 +test.aaa_profiling.test_misc.CacheKeyTest.test_statement_key_is_not_cached x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 4003 +test.aaa_profiling.test_misc.CacheKeyTest.test_statement_key_is_not_cached x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 6103 # TEST: test.aaa_profiling.test_misc.EnumTest.test_create_enum_from_pep_435_w_expensive_members -test.aaa_profiling.test_misc.EnumTest.test_create_enum_from_pep_435_w_expensive_members x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 924 -test.aaa_profiling.test_misc.EnumTest.test_create_enum_from_pep_435_w_expensive_members x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 924 +test.aaa_profiling.test_misc.EnumTest.test_create_enum_from_pep_435_w_expensive_members x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 924 +test.aaa_profiling.test_misc.EnumTest.test_create_enum_from_pep_435_w_expensive_members x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 924 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 55030 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 65340 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 55930 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 65740 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 53330 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 63640 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 54230 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 64040 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 57930 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 66340 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 58530 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 66440 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 57030 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 
65440 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 57530 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 65440 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 48730 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 52040 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 49130 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 51940 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 52230 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 60040 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 52830 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 60140 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 51330 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 59140 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 51830 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 59140 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 37005 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 40205 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 37705 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 40805 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 36105 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 39305 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations 
x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 36705 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 39805 # TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set -test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 3599 -test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 3599 +test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 3599 +test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 3599 # TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove -test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 5527 -test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 5527 +test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 5527 +test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 5527 # TEST: test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching -test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 128 -test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 128 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 128 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 128 # TEST: test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching -test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 128 -test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 128 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 128 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 128 # TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 15341 -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 26360 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 15359 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 
x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 24383 # TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols -test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 21419 -test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 26438 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 21437 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 24461 # TEST: test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 10704 -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 11054 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 10654 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 11054 # TEST: test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased_select_join -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased_select_join x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 1154 -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased_select_join x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 1154 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased_select_join x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1154 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased_select_join x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1154 # TEST: test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_plain -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_plain x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 4354 -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_plain x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 4604 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_plain x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 4304 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_plain x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 4604 # TEST: test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 98682 -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 109932 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 96282 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 109782 # TEST: test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 96132 -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased 
x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 107582 - -# TEST: test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results - -test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 440705 -test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 458805 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 93732 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 107432 # TEST: test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results_integrated -test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results_integrated x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 26832,1031,97853 -test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results_integrated x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 27722,1217,116453 +test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results_integrated x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 26339,1019,96653 +test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results_integrated x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 27438,1228,117553 # TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 23981 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 23981 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 23981 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 23981 # TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 112466 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 120723 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 113158 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 123916 # TEST: test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks -test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 20730 -test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 22152 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 21189 
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 22709 # TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_load -test.aaa_profiling.test_orm.MergeTest.test_merge_load x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 1453 -test.aaa_profiling.test_orm.MergeTest.test_merge_load x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 1542 +test.aaa_profiling.test_orm.MergeTest.test_merge_load x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1480 +test.aaa_profiling.test_orm.MergeTest.test_merge_load x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1583 # TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_no_load -test.aaa_profiling.test_orm.MergeTest.test_merge_no_load x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 110,20 -test.aaa_profiling.test_orm.MergeTest.test_merge_no_load x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 110,20 +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 108,20 +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 108,20 # TEST: test.aaa_profiling.test_orm.QueryTest.test_query_cols -test.aaa_profiling.test_orm.QueryTest.test_query_cols x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 6586 -test.aaa_profiling.test_orm.QueryTest.test_query_cols x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 7406 +test.aaa_profiling.test_orm.QueryTest.test_query_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 6696 +test.aaa_profiling.test_orm.QueryTest.test_query_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 7456 # TEST: test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results -test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 275705 -test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 297105 +test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 277405 +test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 298505 # TEST: test.aaa_profiling.test_orm.SessionTest.test_expire_lots -test.aaa_profiling.test_orm.SessionTest.test_expire_lots x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 1212 -test.aaa_profiling.test_orm.SessionTest.test_expire_lots x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 1212 +test.aaa_profiling.test_orm.SessionTest.test_expire_lots x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1212 +test.aaa_profiling.test_orm.SessionTest.test_expire_lots x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1212 # TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect -test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 75 -test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 75 
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 75 +test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 75 # TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect -test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 24 -test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 24 +test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 24 +test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 24 # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 53 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 53 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 53 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 53 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 53 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 53 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 53 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 53 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 55 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 55 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 53 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 53 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 53 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 55 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 53 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 55 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 53 
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 55 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 53 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 55 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 53 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 55 # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 106 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 106 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 106 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 106 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 106 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 106 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 106 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 106 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 110 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 110 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 105 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 105 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 108 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 110 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 108 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 110 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 108 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 110 
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 108 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 110 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 108 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 110 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 8 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 9 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 8 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 9 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 8 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 9 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 8 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 9 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 8 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 9 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 8 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 9 +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 8 +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 9 +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 8 +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 9 +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 8 +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 9 +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 8 
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 9 +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 8 +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 9 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 2604 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 15608 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 89344 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 102348 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 2597 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 15601 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 2637 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 15641 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 2651 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 14655 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 2539 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 14614 +test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 2664 +test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 14671 +test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 2669 +test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 14676 +test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 3815 +test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 15822 +test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 2649 +test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 14656 
+test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 2614 +test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 14621 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 22 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 22 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 19 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 19 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 18 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 18 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 19 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 19 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0] 
x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 14 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 22 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 24 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 19 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 21 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 16 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 16 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 15 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 15 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 18 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 19 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 15 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 19 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 20 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 15 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 15 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] 
x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 22 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 24 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 19 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 21 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 16 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 16 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 15 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 14 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 15 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 18 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 19 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 15 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 19 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 20 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 15 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 14 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 15 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 27 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 29 
-test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 24 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 26 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 17 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 19 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 17 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 19 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 17 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 18 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 17 -test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 18 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 23 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 24 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 17 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 18 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 25 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 26 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 17 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 18 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 17 +test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 18 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 301 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 6301 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 87041 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 93041 
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 269 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 6269 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 361 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 6361 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 301 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 5301 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 257 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 5277 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 305 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 5307 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 279 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 5281 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 1504 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 6506 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 299 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 5301 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 272 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 5274 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 301 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 6301 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 87041 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 93041 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 269 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 6269 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 361 
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 6361 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 301 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 5301 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 257 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 5277 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 305 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 5307 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 279 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 5281 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 1504 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 6506 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 299 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 5301 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 272 +test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 5274 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 597 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 6601 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 87337 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 93341 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 590 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 6594 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 630 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 6634 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 642 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 5646 
-test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 532 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 5605 +test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 655 +test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 5662 +test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 660 +test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 5667 +test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 1806 +test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 6813 +test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 640 +test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 5647 +test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 605 +test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 5612 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 597 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 6601 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 87337 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 93341 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 590 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 6594 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 630 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 6634 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 642 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 5646 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 532 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 5605 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_cextensions 655 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 
x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 5662
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 660
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 5667
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 1806
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 6813
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 640
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 5647
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 605
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 5612

From 3aaa69118c0b90ae3892cb507ad97677573466c5 Mon Sep 17 00:00:00 2001
From: Mike Bayer
Date: Thu, 30 Nov 2023 09:11:25 -0500
Subject: [PATCH 025/544] try to gracefully close even in terminate

Adjusted the asyncpg dialect such that when the ``terminate()`` method is
used to discard an invalidated connection, the dialect will first attempt
to gracefully close the connection using ``.close()`` with a timeout, if
the operation is proceeding within an async event loop context only. This
allows the asyncpg driver to attend to finalizing a ``TimeoutError``
including being able to close a long-running query server side, which
otherwise can keep running after the program has exited.

Fixes: #10717
Change-Id: Iaba0aeb67873a7a2b3981d43f4eb663005057309
(cherry picked from commit e70a0b0a0e52945e5b588b5cffec619a3f3e78a1)
---
 doc/build/changelog/unreleased_20/10717.rst   | 11 +++++++++++
 lib/sqlalchemy/dialects/postgresql/asyncpg.py | 18 +++++++++++++++++-
 lib/sqlalchemy/util/_concurrency_py3k.py      |  5 +++++
 lib/sqlalchemy/util/concurrency.py            |  4 ++++
 4 files changed, 37 insertions(+), 1 deletion(-)
 create mode 100644 doc/build/changelog/unreleased_20/10717.rst

diff --git a/doc/build/changelog/unreleased_20/10717.rst b/doc/build/changelog/unreleased_20/10717.rst
new file mode 100644
index 00000000000..2cd93034554
--- /dev/null
+++ b/doc/build/changelog/unreleased_20/10717.rst
@@ -0,0 +1,11 @@
+.. change::
+    :tags: bug, postgresql
+    :tickets: 10717
+
+    Adjusted the asyncpg dialect such that when the ``terminate()`` method is
+    used to discard an invalidated connection, the dialect will first attempt
+    to gracefully close the connection using ``.close()`` with a timeout, if
+    the operation is proceeding within an async event loop context only. This
+    allows the asyncpg driver to attend to finalizing a ``TimeoutError``
+    including being able to close a long-running query server side, which
+    otherwise can keep running after the program has exited.
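The behavior described above reduces to a simple pattern: attempt a graceful
close bounded by a timeout, and fall back to a hard terminate only if the
close does not complete in time. The following is an illustrative sketch only,
not part of the patch; the FakeConnection class, the discard() helper and the
two-second timeout are assumptions made for the example. asyncpg's own
``close()`` accepts a ``timeout`` parameter directly, as the hunk below shows,
whereas the sketch uses ``asyncio.wait_for()`` so that it stays self-contained:

    import asyncio


    class FakeConnection:
        """Stand-in for a driver connection whose close() may hang."""

        def __init__(self, hang_on_close=False):
            self._hang_on_close = hang_on_close
            self.closed = False

        async def close(self):
            if self._hang_on_close:
                # simulate an unresponsive server that never acknowledges
                # the graceful close
                await asyncio.sleep(3600)
            self.closed = True

        def terminate(self):
            # immediate, non-graceful teardown
            self.closed = True


    async def discard(conn, timeout=2.0):
        # prefer the graceful close so the server side can cancel any
        # long-running query tied to this connection
        try:
            await asyncio.wait_for(conn.close(), timeout=timeout)
        except asyncio.TimeoutError:
            # the connection is unresponsive; fall back to the hard terminate
            conn.terminate()


    asyncio.run(discard(FakeConnection(hang_on_close=True)))

Running the sketch waits the full two seconds for the unresponsive close and
then terminates, which mirrors how the dialect in the hunk below now discards
an invalidated connection while inside the event loop.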
diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index ca35bf96075..a116a470a7c 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -894,7 +894,23 @@ def close(self): self.await_(self._connection.close()) def terminate(self): - self._connection.terminate() + if util.concurrency.in_greenlet(): + # in a greenlet; this is the connection was invalidated + # case. + try: + # try to gracefully close; see #10717 + # timeout added in asyncpg 0.14.0 December 2017 + self.await_(self._connection.close(timeout=2)) + except asyncio.TimeoutError: + # in the case where we are recycling an old connection + # that may have already been disconnected, close() will + # fail with the above timeout. in this case, terminate + # the connection without any further waiting. + # see issue #8419 + self._connection.terminate() + else: + # not in a greenlet; this is the gc cleanup case + self._connection.terminate() self._started = False @staticmethod diff --git a/lib/sqlalchemy/util/_concurrency_py3k.py b/lib/sqlalchemy/util/_concurrency_py3k.py index 71d10a68579..83201dd95c7 100644 --- a/lib/sqlalchemy/util/_concurrency_py3k.py +++ b/lib/sqlalchemy/util/_concurrency_py3k.py @@ -99,6 +99,11 @@ def _safe_cancel_awaitable(awaitable: Awaitable[Any]) -> None: awaitable.close() +def in_greenlet() -> bool: + current = getcurrent() + return isinstance(current, _AsyncIoGreenlet) + + def await_only(awaitable: Awaitable[_T]) -> _T: """Awaits an async function in a sync method. diff --git a/lib/sqlalchemy/util/concurrency.py b/lib/sqlalchemy/util/concurrency.py index 53a70070b76..1141cbc165a 100644 --- a/lib/sqlalchemy/util/concurrency.py +++ b/lib/sqlalchemy/util/concurrency.py @@ -22,6 +22,7 @@ have_greenlet = True from ._concurrency_py3k import await_only as await_only from ._concurrency_py3k import await_fallback as await_fallback + from ._concurrency_py3k import in_greenlet as in_greenlet from ._concurrency_py3k import greenlet_spawn as greenlet_spawn from ._concurrency_py3k import is_exit_exception as is_exit_exception from ._concurrency_py3k import AsyncAdaptedLock as AsyncAdaptedLock @@ -56,6 +57,9 @@ def await_only(thing): # type: ignore # noqa: F811 def await_fallback(thing): # type: ignore # noqa: F811 return thing + def in_greenlet(): # type: ignore # noqa: F811 + _not_implemented() + def greenlet_spawn(fn, *args, **kw): # type: ignore # noqa: F811 _not_implemented() From 2a84b73eb570cdcede742084b7f15deca17307cc Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 30 Nov 2023 22:16:11 +0100 Subject: [PATCH 026/544] Normalize all file headers to have consistent format Change-Id: Idfa5f699280990aed3f6e46225d4202539d9e900 --- lib/sqlalchemy/__init__.py | 2 +- lib/sqlalchemy/cyextension/__init__.py | 6 ++ lib/sqlalchemy/cyextension/collections.pyx | 6 ++ lib/sqlalchemy/cyextension/immutabledict.pxd | 6 ++ lib/sqlalchemy/cyextension/immutabledict.pyx | 6 ++ lib/sqlalchemy/cyextension/processors.pyx | 6 ++ lib/sqlalchemy/cyextension/resultproxy.pyx | 6 ++ lib/sqlalchemy/cyextension/util.pyx | 6 ++ lib/sqlalchemy/dialects/_typing.py | 6 ++ lib/sqlalchemy/dialects/mssql/__init__.py | 2 +- lib/sqlalchemy/dialects/mssql/aioodbc.py | 2 +- lib/sqlalchemy/dialects/mssql/base.py | 2 +- .../dialects/mssql/information_schema.py | 2 +- lib/sqlalchemy/dialects/mssql/json.py | 6 ++ lib/sqlalchemy/dialects/mssql/provision.py | 6 ++ lib/sqlalchemy/dialects/mssql/pymssql.py | 2 +- 
lib/sqlalchemy/dialects/mssql/pyodbc.py | 2 +- lib/sqlalchemy/dialects/mysql/__init__.py | 2 +- lib/sqlalchemy/dialects/mysql/aiomysql.py | 2 +- lib/sqlalchemy/dialects/mysql/asyncmy.py | 2 +- lib/sqlalchemy/dialects/mysql/base.py | 2 +- lib/sqlalchemy/dialects/mysql/cymysql.py | 2 +- lib/sqlalchemy/dialects/mysql/dml.py | 2 +- lib/sqlalchemy/dialects/mysql/enumerated.py | 2 +- lib/sqlalchemy/dialects/mysql/expression.py | 1 + lib/sqlalchemy/dialects/mysql/json.py | 2 +- lib/sqlalchemy/dialects/mysql/mariadb.py | 2 +- .../dialects/mysql/mariadbconnector.py | 2 +- .../dialects/mysql/mysqlconnector.py | 2 +- lib/sqlalchemy/dialects/mysql/mysqldb.py | 2 +- lib/sqlalchemy/dialects/mysql/provision.py | 6 ++ lib/sqlalchemy/dialects/mysql/pymysql.py | 2 +- lib/sqlalchemy/dialects/mysql/pyodbc.py | 2 +- lib/sqlalchemy/dialects/mysql/reflection.py | 2 +- .../dialects/mysql/reserved_words.py | 2 +- lib/sqlalchemy/dialects/mysql/types.py | 2 +- lib/sqlalchemy/dialects/oracle/__init__.py | 2 +- lib/sqlalchemy/dialects/oracle/base.py | 2 +- lib/sqlalchemy/dialects/oracle/cx_oracle.py | 1 + lib/sqlalchemy/dialects/oracle/dictionary.py | 1 + lib/sqlalchemy/dialects/oracle/oracledb.py | 1 + lib/sqlalchemy/dialects/oracle/provision.py | 6 ++ lib/sqlalchemy/dialects/oracle/types.py | 1 + .../dialects/postgresql/__init__.py | 2 +- .../dialects/postgresql/_psycopg_common.py | 1 + lib/sqlalchemy/dialects/postgresql/array.py | 2 +- lib/sqlalchemy/dialects/postgresql/asyncpg.py | 2 +- lib/sqlalchemy/dialects/postgresql/base.py | 2 +- lib/sqlalchemy/dialects/postgresql/dml.py | 2 +- lib/sqlalchemy/dialects/postgresql/ext.py | 2 +- lib/sqlalchemy/dialects/postgresql/hstore.py | 2 +- lib/sqlalchemy/dialects/postgresql/json.py | 2 +- .../dialects/postgresql/named_types.py | 2 +- .../dialects/postgresql/operators.py | 2 +- lib/sqlalchemy/dialects/postgresql/pg8000.py | 2 +- .../dialects/postgresql/pg_catalog.py | 4 +- .../dialects/postgresql/provision.py | 6 ++ lib/sqlalchemy/dialects/postgresql/psycopg.py | 2 +- .../dialects/postgresql/psycopg2.py | 2 +- .../dialects/postgresql/psycopg2cffi.py | 2 +- lib/sqlalchemy/dialects/postgresql/ranges.py | 1 + lib/sqlalchemy/dialects/postgresql/types.py | 1 + lib/sqlalchemy/dialects/sqlite/__init__.py | 2 +- lib/sqlalchemy/dialects/sqlite/aiosqlite.py | 2 +- lib/sqlalchemy/dialects/sqlite/base.py | 2 +- lib/sqlalchemy/dialects/sqlite/dml.py | 2 +- lib/sqlalchemy/dialects/sqlite/json.py | 6 ++ lib/sqlalchemy/dialects/sqlite/provision.py | 6 ++ lib/sqlalchemy/dialects/sqlite/pysqlcipher.py | 2 +- lib/sqlalchemy/dialects/sqlite/pysqlite.py | 2 +- lib/sqlalchemy/engine/_py_processors.py | 2 +- lib/sqlalchemy/engine/_py_row.py | 6 ++ lib/sqlalchemy/engine/_py_util.py | 6 ++ lib/sqlalchemy/engine/characteristics.py | 6 ++ lib/sqlalchemy/engine/events.py | 2 +- lib/sqlalchemy/engine/processors.py | 2 +- lib/sqlalchemy/events.py | 2 +- lib/sqlalchemy/exc.py | 2 +- lib/sqlalchemy/ext/baked.py | 2 +- lib/sqlalchemy/ext/indexable.py | 2 +- lib/sqlalchemy/ext/mypy/__init__.py | 6 ++ lib/sqlalchemy/ext/mypy/apply.py | 2 +- lib/sqlalchemy/ext/mypy/decl_class.py | 2 +- lib/sqlalchemy/ext/mypy/infer.py | 2 +- lib/sqlalchemy/ext/mypy/names.py | 2 +- lib/sqlalchemy/future/__init__.py | 2 +- lib/sqlalchemy/future/engine.py | 2 +- lib/sqlalchemy/inspection.py | 2 +- lib/sqlalchemy/log.py | 2 +- lib/sqlalchemy/orm/_typing.py | 2 +- lib/sqlalchemy/orm/clsregistry.py | 2 +- lib/sqlalchemy/orm/decl_api.py | 2 +- lib/sqlalchemy/orm/decl_base.py | 2 +- lib/sqlalchemy/orm/mapped_collection.py | 2 +- 
lib/sqlalchemy/orm/strategy_options.py | 1 + lib/sqlalchemy/pool/__init__.py | 2 +- lib/sqlalchemy/pool/base.py | 2 +- lib/sqlalchemy/pool/events.py | 2 +- lib/sqlalchemy/pool/impl.py | 2 +- lib/sqlalchemy/sql/_orm_types.py | 2 +- lib/sqlalchemy/sql/_typing.py | 2 +- lib/sqlalchemy/sql/events.py | 2 +- lib/sqlalchemy/sql/naming.py | 2 +- lib/sqlalchemy/sql/type_api.py | 2 +- lib/sqlalchemy/testing/plugin/__init__.py | 6 ++ lib/sqlalchemy/testing/plugin/bootstrap.py | 6 ++ lib/sqlalchemy/testing/plugin/plugin_base.py | 2 +- lib/sqlalchemy/testing/plugin/pytestplugin.py | 6 ++ lib/sqlalchemy/testing/provision.py | 6 ++ lib/sqlalchemy/testing/suite/__init__.py | 6 ++ lib/sqlalchemy/testing/suite/test_cte.py | 6 ++ lib/sqlalchemy/testing/suite/test_ddl.py | 6 ++ .../testing/suite/test_deprecations.py | 6 ++ lib/sqlalchemy/testing/suite/test_dialect.py | 6 ++ lib/sqlalchemy/testing/suite/test_insert.py | 6 ++ .../testing/suite/test_reflection.py | 6 ++ lib/sqlalchemy/testing/suite/test_results.py | 6 ++ lib/sqlalchemy/testing/suite/test_rowcount.py | 6 ++ lib/sqlalchemy/testing/suite/test_select.py | 6 ++ lib/sqlalchemy/testing/suite/test_sequence.py | 6 ++ lib/sqlalchemy/testing/suite/test_types.py | 6 ++ .../testing/suite/test_unicode_ddl.py | 6 ++ .../testing/suite/test_update_delete.py | 6 ++ lib/sqlalchemy/util/_has_cy.py | 1 + lib/sqlalchemy/util/preloaded.py | 2 +- lib/sqlalchemy/util/tool_support.py | 15 ++-- lib/sqlalchemy/util/typing.py | 2 +- tools/normalize_file_headers.py | 69 +++++++++++++++++++ tox.ini | 1 + 129 files changed, 391 insertions(+), 86 deletions(-) create mode 100644 tools/normalize_file_headers.py diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 871e403a77d..88ff36dcc73 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -1,4 +1,4 @@ -# sqlalchemy/__init__.py +# __init__.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/cyextension/__init__.py b/lib/sqlalchemy/cyextension/__init__.py index e69de29bb2d..67aa690e02f 100644 --- a/lib/sqlalchemy/cyextension/__init__.py +++ b/lib/sqlalchemy/cyextension/__init__.py @@ -0,0 +1,6 @@ +# cyextension/__init__.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php diff --git a/lib/sqlalchemy/cyextension/collections.pyx b/lib/sqlalchemy/cyextension/collections.pyx index 4d134ccf302..a45b5d90433 100644 --- a/lib/sqlalchemy/cyextension/collections.pyx +++ b/lib/sqlalchemy/cyextension/collections.pyx @@ -1,3 +1,9 @@ +# cyextension/collections.pyx +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php cimport cython from cpython.long cimport PyLong_FromLongLong from cpython.set cimport PySet_Add diff --git a/lib/sqlalchemy/cyextension/immutabledict.pxd b/lib/sqlalchemy/cyextension/immutabledict.pxd index fe7ad6a81a8..d733d48affd 100644 --- a/lib/sqlalchemy/cyextension/immutabledict.pxd +++ b/lib/sqlalchemy/cyextension/immutabledict.pxd @@ -1,2 +1,8 @@ +# cyextension/immutabledict.pxd +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php cdef class immutabledict(dict): pass diff --git 
a/lib/sqlalchemy/cyextension/immutabledict.pyx b/lib/sqlalchemy/cyextension/immutabledict.pyx index 100287b380d..d43d465febe 100644 --- a/lib/sqlalchemy/cyextension/immutabledict.pyx +++ b/lib/sqlalchemy/cyextension/immutabledict.pyx @@ -1,3 +1,9 @@ +# cyextension/immutabledict.pyx +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php from cpython.dict cimport PyDict_New, PyDict_Update, PyDict_Size diff --git a/lib/sqlalchemy/cyextension/processors.pyx b/lib/sqlalchemy/cyextension/processors.pyx index b0ad865c54a..03d8411c336 100644 --- a/lib/sqlalchemy/cyextension/processors.pyx +++ b/lib/sqlalchemy/cyextension/processors.pyx @@ -1,3 +1,9 @@ +# cyextension/processors.pyx +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php import datetime from datetime import datetime as datetime_cls from datetime import time as time_cls diff --git a/lib/sqlalchemy/cyextension/resultproxy.pyx b/lib/sqlalchemy/cyextension/resultproxy.pyx index 0d7eeece93c..e81df51f38d 100644 --- a/lib/sqlalchemy/cyextension/resultproxy.pyx +++ b/lib/sqlalchemy/cyextension/resultproxy.pyx @@ -1,3 +1,9 @@ +# cyextension/resultproxy.pyx +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php import operator cdef class BaseRow: diff --git a/lib/sqlalchemy/cyextension/util.pyx b/lib/sqlalchemy/cyextension/util.pyx index 92e91a6edc1..63daddf4640 100644 --- a/lib/sqlalchemy/cyextension/util.pyx +++ b/lib/sqlalchemy/cyextension/util.pyx @@ -1,3 +1,9 @@ +# cyextension/util.pyx +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php from collections.abc import Mapping from sqlalchemy import exc diff --git a/lib/sqlalchemy/dialects/_typing.py b/lib/sqlalchemy/dialects/_typing.py index 932742bd045..9d2500e48e8 100644 --- a/lib/sqlalchemy/dialects/_typing.py +++ b/lib/sqlalchemy/dialects/_typing.py @@ -1,3 +1,9 @@ +# dialects/_typing.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php from __future__ import annotations from typing import Any diff --git a/lib/sqlalchemy/dialects/mssql/__init__.py b/lib/sqlalchemy/dialects/mssql/__init__.py index 6bbb934157a..c601cba1f30 100644 --- a/lib/sqlalchemy/dialects/mssql/__init__.py +++ b/lib/sqlalchemy/dialects/mssql/__init__.py @@ -1,4 +1,4 @@ -# mssql/__init__.py +# dialects/mssql/__init__.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mssql/aioodbc.py b/lib/sqlalchemy/dialects/mssql/aioodbc.py index 23c2790f29d..e9d22155a74 100644 --- a/lib/sqlalchemy/dialects/mssql/aioodbc.py +++ b/lib/sqlalchemy/dialects/mssql/aioodbc.py @@ -1,4 +1,4 @@ -# mssql/aioodbc.py +# dialects/mssql/aioodbc.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 80734d60619..952a7a1f690 100644 --- 
a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -1,4 +1,4 @@ -# mssql/base.py +# dialects/mssql/base.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py index e770313f937..2c30c55b6e0 100644 --- a/lib/sqlalchemy/dialects/mssql/information_schema.py +++ b/lib/sqlalchemy/dialects/mssql/information_schema.py @@ -1,4 +1,4 @@ -# mssql/information_schema.py +# dialects/mssql/information_schema.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mssql/json.py b/lib/sqlalchemy/dialects/mssql/json.py index 815b5d2ff86..f79d6e3ed5e 100644 --- a/lib/sqlalchemy/dialects/mssql/json.py +++ b/lib/sqlalchemy/dialects/mssql/json.py @@ -1,3 +1,9 @@ +# dialects/mssql/json.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from ... import types as sqltypes diff --git a/lib/sqlalchemy/dialects/mssql/provision.py b/lib/sqlalchemy/dialects/mssql/provision.py index 75e15ce4dc4..1913c95717a 100644 --- a/lib/sqlalchemy/dialects/mssql/provision.py +++ b/lib/sqlalchemy/dialects/mssql/provision.py @@ -1,3 +1,9 @@ +# dialects/mssql/provision.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from sqlalchemy import inspect diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py index 3823db91b3a..5351be1131e 100644 --- a/lib/sqlalchemy/dialects/mssql/pymssql.py +++ b/lib/sqlalchemy/dialects/mssql/pymssql.py @@ -1,4 +1,4 @@ -# mssql/pymssql.py +# dialects/mssql/pymssql.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py index a8f12fd984c..17c4e4c830d 100644 --- a/lib/sqlalchemy/dialects/mssql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py @@ -1,4 +1,4 @@ -# mssql/pyodbc.py +# dialects/mssql/pyodbc.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py index b6af683b5e0..49d859b418d 100644 --- a/lib/sqlalchemy/dialects/mysql/__init__.py +++ b/lib/sqlalchemy/dialects/mysql/__init__.py @@ -1,4 +1,4 @@ -# mysql/__init__.py +# dialects/mysql/__init__.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/aiomysql.py b/lib/sqlalchemy/dialects/mysql/aiomysql.py index 2a0c6ba7832..c8a0a36abcd 100644 --- a/lib/sqlalchemy/dialects/mysql/aiomysql.py +++ b/lib/sqlalchemy/dialects/mysql/aiomysql.py @@ -1,4 +1,4 @@ -# mysql/aiomysql.py +# dialects/mysql/aiomysql.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # diff --git a/lib/sqlalchemy/dialects/mysql/asyncmy.py b/lib/sqlalchemy/dialects/mysql/asyncmy.py index 92058d60dd3..d55129bed81 100644 --- a/lib/sqlalchemy/dialects/mysql/asyncmy.py +++ b/lib/sqlalchemy/dialects/mysql/asyncmy.py @@ -1,4 +1,4 @@ -# mysql/asyncmy.py +# dialects/mysql/asyncmy.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # diff --git 
a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 92f90774fbe..58d7235e017 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1,4 +1,4 @@ -# mysql/base.py +# dialects/mysql/base.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py index ed3c60694aa..a96a71eb4c3 100644 --- a/lib/sqlalchemy/dialects/mysql/cymysql.py +++ b/lib/sqlalchemy/dialects/mysql/cymysql.py @@ -1,4 +1,4 @@ -# mysql/cymysql.py +# dialects/mysql/cymysql.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/dml.py b/lib/sqlalchemy/dialects/mysql/dml.py index dfa39f6e086..aba60103f7f 100644 --- a/lib/sqlalchemy/dialects/mysql/dml.py +++ b/lib/sqlalchemy/dialects/mysql/dml.py @@ -1,4 +1,4 @@ -# mysql/dml.py +# dialects/mysql/dml.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/enumerated.py b/lib/sqlalchemy/dialects/mysql/enumerated.py index 2e1d3c3da9f..a70d499e436 100644 --- a/lib/sqlalchemy/dialects/mysql/enumerated.py +++ b/lib/sqlalchemy/dialects/mysql/enumerated.py @@ -1,4 +1,4 @@ -# mysql/enumerated.py +# dialects/mysql/enumerated.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/expression.py b/lib/sqlalchemy/dialects/mysql/expression.py index c5bd0be02b0..0c41aeb727b 100644 --- a/lib/sqlalchemy/dialects/mysql/expression.py +++ b/lib/sqlalchemy/dialects/mysql/expression.py @@ -1,3 +1,4 @@ +# dialects/mysql/expression.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/json.py b/lib/sqlalchemy/dialects/mysql/json.py index 66fcb714d54..8359e4d36ad 100644 --- a/lib/sqlalchemy/dialects/mysql/json.py +++ b/lib/sqlalchemy/dialects/mysql/json.py @@ -1,4 +1,4 @@ -# mysql/json.py +# dialects/mysql/json.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/mariadb.py b/lib/sqlalchemy/dialects/mysql/mariadb.py index a6ee5dfac93..17f858184fc 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadb.py +++ b/lib/sqlalchemy/dialects/mysql/mariadb.py @@ -1,4 +1,4 @@ -# mysql/mariadb.py +# dialects/mysql/mariadb.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py index 9730c9b4da3..3ee9c1e0053 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py @@ -1,4 +1,4 @@ -# mysql/mariadbconnector.py +# dialects/mysql/mariadbconnector.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index fc90c65d2ad..73254530164 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -1,4 +1,4 @@ -# mysql/mysqlconnector.py +# dialects/mysql/mysqlconnector.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py index d1cf835c54e..d42cdc9b0fd 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py @@ -1,4 
+1,4 @@ -# mysql/mysqldb.py +# dialects/mysql/mysqldb.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/provision.py b/lib/sqlalchemy/dialects/mysql/provision.py index b7faf771214..b3584ee5c7e 100644 --- a/lib/sqlalchemy/dialects/mysql/provision.py +++ b/lib/sqlalchemy/dialects/mysql/provision.py @@ -1,3 +1,9 @@ +# dialects/mysql/provision.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from ... import exc diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py index ddb99542f8d..6e87173be97 100644 --- a/lib/sqlalchemy/dialects/mysql/pymysql.py +++ b/lib/sqlalchemy/dialects/mysql/pymysql.py @@ -1,4 +1,4 @@ -# mysql/pymysql.py +# dialects/mysql/pymysql.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py index e4b11778afc..87be2827b50 100644 --- a/lib/sqlalchemy/dialects/mysql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py @@ -1,4 +1,4 @@ -# mysql/pyodbc.py +# dialects/mysql/pyodbc.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/reflection.py b/lib/sqlalchemy/dialects/mysql/reflection.py index c4909fe319e..d678bc9f4a6 100644 --- a/lib/sqlalchemy/dialects/mysql/reflection.py +++ b/lib/sqlalchemy/dialects/mysql/reflection.py @@ -1,4 +1,4 @@ -# mysql/reflection.py +# dialects/mysql/reflection.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/reserved_words.py b/lib/sqlalchemy/dialects/mysql/reserved_words.py index 9f3436e6379..b092428cf32 100644 --- a/lib/sqlalchemy/dialects/mysql/reserved_words.py +++ b/lib/sqlalchemy/dialects/mysql/reserved_words.py @@ -1,4 +1,4 @@ -# mysql/reserved_words.py +# dialects/mysql/reserved_words.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/mysql/types.py b/lib/sqlalchemy/dialects/mysql/types.py index aa1de1b6992..3fc96e61076 100644 --- a/lib/sqlalchemy/dialects/mysql/types.py +++ b/lib/sqlalchemy/dialects/mysql/types.py @@ -1,4 +1,4 @@ -# mysql/types.py +# dialects/mysql/types.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py index 46a5d0a2051..49464d6de71 100644 --- a/lib/sqlalchemy/dialects/oracle/__init__.py +++ b/lib/sqlalchemy/dialects/oracle/__init__.py @@ -1,4 +1,4 @@ -# oracle/__init__.py +# dialects/oracle/__init__.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index d993ef26927..10dd69e99df 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -1,4 +1,4 @@ -# oracle/base.py +# dialects/oracle/base.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index c595b56c562..95b7abe3b87 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -1,3 +1,4 @@ +# dialects/oracle/cx_oracle.py # Copyright (C) 
2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/oracle/dictionary.py b/lib/sqlalchemy/dialects/oracle/dictionary.py index fdf47ef31ed..5d4056ad2af 100644 --- a/lib/sqlalchemy/dialects/oracle/dictionary.py +++ b/lib/sqlalchemy/dialects/oracle/dictionary.py @@ -1,3 +1,4 @@ +# dialects/oracle/dictionary.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/oracle/oracledb.py b/lib/sqlalchemy/dialects/oracle/oracledb.py index 7defbc9f064..c4e2b1ffffd 100644 --- a/lib/sqlalchemy/dialects/oracle/oracledb.py +++ b/lib/sqlalchemy/dialects/oracle/oracledb.py @@ -1,3 +1,4 @@ +# dialects/oracle/oracledb.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/oracle/provision.py b/lib/sqlalchemy/dialects/oracle/provision.py index c8599e8e225..c9100192e17 100644 --- a/lib/sqlalchemy/dialects/oracle/provision.py +++ b/lib/sqlalchemy/dialects/oracle/provision.py @@ -1,3 +1,9 @@ +# dialects/oracle/provision.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from ... import create_engine diff --git a/lib/sqlalchemy/dialects/oracle/types.py b/lib/sqlalchemy/dialects/oracle/types.py index 4f82c43c699..bc9e563ff75 100644 --- a/lib/sqlalchemy/dialects/oracle/types.py +++ b/lib/sqlalchemy/dialects/oracle/types.py @@ -1,3 +1,4 @@ +# dialects/oracle/types.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py index c3ed7c1fc00..5e327a6eefe 100644 --- a/lib/sqlalchemy/dialects/postgresql/__init__.py +++ b/lib/sqlalchemy/dialects/postgresql/__init__.py @@ -1,4 +1,4 @@ -# postgresql/__init__.py +# dialects/postgresql/__init__.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py b/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py index dfb25a56890..95f549dc68f 100644 --- a/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py +++ b/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py @@ -1,3 +1,4 @@ +# dialects/postgresql/_psycopg_common.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index 3496ed6b636..5c677059b75 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -1,4 +1,4 @@ -# postgresql/array.py +# dialects/postgresql/array.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index ca35bf96075..e07338df7b6 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -1,4 +1,4 @@ -# postgresql/asyncpg.py +# dialects/postgresql/asyncpg.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 0aec40ea97f..ea7ac156fe1 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1,4 +1,4 @@ -# postgresql/base.py +# dialects/postgresql/base.py # 
Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/dml.py b/lib/sqlalchemy/dialects/postgresql/dml.py index dee7af3311e..26300c27de3 100644 --- a/lib/sqlalchemy/dialects/postgresql/dml.py +++ b/lib/sqlalchemy/dialects/postgresql/dml.py @@ -1,4 +1,4 @@ -# postgresql/dml.py +# dialects/postgresql/dml.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/ext.py b/lib/sqlalchemy/dialects/postgresql/ext.py index ad1267750bb..22815d9fd64 100644 --- a/lib/sqlalchemy/dialects/postgresql/ext.py +++ b/lib/sqlalchemy/dialects/postgresql/ext.py @@ -1,4 +1,4 @@ -# postgresql/ext.py +# dialects/postgresql/ext.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py index 83c4932a6ea..0ef548e7948 100644 --- a/lib/sqlalchemy/dialects/postgresql/hstore.py +++ b/lib/sqlalchemy/dialects/postgresql/hstore.py @@ -1,4 +1,4 @@ -# postgresql/hstore.py +# dialects/postgresql/hstore.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index ee56a745048..a0f1814a7a8 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -1,4 +1,4 @@ -# postgresql/json.py +# dialects/postgresql/json.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/named_types.py b/lib/sqlalchemy/dialects/postgresql/named_types.py index 19994d4b99f..26d690ccd30 100644 --- a/lib/sqlalchemy/dialects/postgresql/named_types.py +++ b/lib/sqlalchemy/dialects/postgresql/named_types.py @@ -1,4 +1,4 @@ -# postgresql/named_types.py +# dialects/postgresql/named_types.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/operators.py b/lib/sqlalchemy/dialects/postgresql/operators.py index f393451c6e1..a712022bcb7 100644 --- a/lib/sqlalchemy/dialects/postgresql/operators.py +++ b/lib/sqlalchemy/dialects/postgresql/operators.py @@ -1,4 +1,4 @@ -# postgresql/operators.py +# dialects/postgresql/operators.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py index c9829ac6813..d0de5cd8947 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg8000.py +++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py @@ -1,4 +1,4 @@ -# postgresql/pg8000.py +# dialects/postgresql/pg8000.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # diff --git a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py index fa4b30f03f4..25bd6bb99d9 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py +++ b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py @@ -1,5 +1,5 @@ -# postgresql/pg_catalog.py -# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors +# dialects/postgresql/pg_catalog.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/provision.py b/lib/sqlalchemy/dialects/postgresql/provision.py index 87f1c9a4cea..9fafaed9baa 100644 --- a/lib/sqlalchemy/dialects/postgresql/provision.py +++ 
b/lib/sqlalchemy/dialects/postgresql/provision.py @@ -1,3 +1,9 @@ +# dialects/postgresql/provision.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors import time diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg.py b/lib/sqlalchemy/dialects/postgresql/psycopg.py index dcd69ce6631..2c7fd592dea 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg.py @@ -1,4 +1,4 @@ -# postgresql/psycopg2.py +# dialects/postgresql/psycopg.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index 2719f3dc5e5..ef960c297d0 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -1,4 +1,4 @@ -# postgresql/psycopg2.py +# dialects/postgresql/psycopg2.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py index 211432c6dc7..df8675bf864 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py @@ -1,4 +1,4 @@ -# testing/engines.py +# dialects/postgresql/psycopg2cffi.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py index f1c29897d01..ede52634fe8 100644 --- a/lib/sqlalchemy/dialects/postgresql/ranges.py +++ b/lib/sqlalchemy/dialects/postgresql/ranges.py @@ -1,3 +1,4 @@ +# dialects/postgresql/ranges.py # Copyright (C) 2013-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/postgresql/types.py b/lib/sqlalchemy/dialects/postgresql/types.py index 2cac5d816dd..75abab8384a 100644 --- a/lib/sqlalchemy/dialects/postgresql/types.py +++ b/lib/sqlalchemy/dialects/postgresql/types.py @@ -1,3 +1,4 @@ +# dialects/postgresql/types.py # Copyright (C) 2013-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py index 56bca47faeb..18edf67f109 100644 --- a/lib/sqlalchemy/dialects/sqlite/__init__.py +++ b/lib/sqlalchemy/dialects/sqlite/__init__.py @@ -1,4 +1,4 @@ -# sqlite/__init__.py +# dialects/sqlite/__init__.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py index d9438d1880e..e553d4700fb 100644 --- a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py @@ -1,4 +1,4 @@ -# sqlite/aiosqlite.py +# dialects/sqlite/aiosqlite.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index d4eb3bca41b..1052c3d4d3d 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -1,4 +1,4 @@ -# sqlite/base.py +# dialects/sqlite/base.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/sqlite/dml.py b/lib/sqlalchemy/dialects/sqlite/dml.py index ec428f5b172..007502e9153 100644 --- 
a/lib/sqlalchemy/dialects/sqlite/dml.py +++ b/lib/sqlalchemy/dialects/sqlite/dml.py @@ -1,4 +1,4 @@ -# sqlite/dml.py +# dialects/sqlite/dml.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/sqlite/json.py b/lib/sqlalchemy/dialects/sqlite/json.py index 69df3171c22..6a8f374f944 100644 --- a/lib/sqlalchemy/dialects/sqlite/json.py +++ b/lib/sqlalchemy/dialects/sqlite/json.py @@ -1,3 +1,9 @@ +# dialects/sqlite/json.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from ... import types as sqltypes diff --git a/lib/sqlalchemy/dialects/sqlite/provision.py b/lib/sqlalchemy/dialects/sqlite/provision.py index 2ed8253ab47..397ef10088f 100644 --- a/lib/sqlalchemy/dialects/sqlite/provision.py +++ b/lib/sqlalchemy/dialects/sqlite/provision.py @@ -1,3 +1,9 @@ +# dialects/sqlite/provision.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors import os diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py index 28b900ea53d..df8d7c5d83e 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py @@ -1,4 +1,4 @@ -# sqlite/pysqlcipher.py +# dialects/sqlite/pysqlcipher.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index 3cd6e5f231a..d22cbd6f110 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -1,4 +1,4 @@ -# sqlite/pysqlite.py +# dialects/sqlite/pysqlite.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/engine/_py_processors.py b/lib/sqlalchemy/engine/_py_processors.py index 1cc5e8dea40..bedfaeedfba 100644 --- a/lib/sqlalchemy/engine/_py_processors.py +++ b/lib/sqlalchemy/engine/_py_processors.py @@ -1,4 +1,4 @@ -# sqlalchemy/processors.py +# engine/_py_processors.py # Copyright (C) 2010-2023 the SQLAlchemy authors and contributors # # Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com diff --git a/lib/sqlalchemy/engine/_py_row.py b/lib/sqlalchemy/engine/_py_row.py index 3358abd7848..50705a76550 100644 --- a/lib/sqlalchemy/engine/_py_row.py +++ b/lib/sqlalchemy/engine/_py_row.py @@ -1,3 +1,9 @@ +# engine/_py_row.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php from __future__ import annotations import operator diff --git a/lib/sqlalchemy/engine/_py_util.py b/lib/sqlalchemy/engine/_py_util.py index 538c075a2b5..2ef9d03ffd8 100644 --- a/lib/sqlalchemy/engine/_py_util.py +++ b/lib/sqlalchemy/engine/_py_util.py @@ -1,3 +1,9 @@ +# engine/_py_util.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php from __future__ import annotations import typing diff --git a/lib/sqlalchemy/engine/characteristics.py b/lib/sqlalchemy/engine/characteristics.py index 
c0feb000be1..aed2fd6b385 100644 --- a/lib/sqlalchemy/engine/characteristics.py +++ b/lib/sqlalchemy/engine/characteristics.py @@ -1,3 +1,9 @@ +# engine/characteristics.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php from __future__ import annotations import abc diff --git a/lib/sqlalchemy/engine/events.py b/lib/sqlalchemy/engine/events.py index aac756d18a2..4f6353080b7 100644 --- a/lib/sqlalchemy/engine/events.py +++ b/lib/sqlalchemy/engine/events.py @@ -1,4 +1,4 @@ -# sqlalchemy/engine/events.py +# engine/events.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/engine/processors.py b/lib/sqlalchemy/engine/processors.py index c01d3b74064..bdca5351c52 100644 --- a/lib/sqlalchemy/engine/processors.py +++ b/lib/sqlalchemy/engine/processors.py @@ -1,4 +1,4 @@ -# sqlalchemy/processors.py +# engine/processors.py # Copyright (C) 2010-2023 the SQLAlchemy authors and contributors # # Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py index 2f7b23db4e3..0124d14dd5f 100644 --- a/lib/sqlalchemy/events.py +++ b/lib/sqlalchemy/events.py @@ -1,4 +1,4 @@ -# sqlalchemy/events.py +# events.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index a5a66de877f..0e90c60e565 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -1,4 +1,4 @@ -# sqlalchemy/exc.py +# exc.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py index 64c9ce6ec26..82db494e411 100644 --- a/lib/sqlalchemy/ext/baked.py +++ b/lib/sqlalchemy/ext/baked.py @@ -1,4 +1,4 @@ -# sqlalchemy/ext/baked.py +# ext/baked.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/ext/indexable.py b/lib/sqlalchemy/ext/indexable.py index dbaad3c4077..b3d90a6e926 100644 --- a/lib/sqlalchemy/ext/indexable.py +++ b/lib/sqlalchemy/ext/indexable.py @@ -1,4 +1,4 @@ -# ext/index.py +# ext/indexable.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/ext/mypy/__init__.py b/lib/sqlalchemy/ext/mypy/__init__.py index e69de29bb2d..8a2e38098e3 100644 --- a/lib/sqlalchemy/ext/mypy/__init__.py +++ b/lib/sqlalchemy/ext/mypy/__init__.py @@ -0,0 +1,6 @@ +# ext/mypy/__init__.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php diff --git a/lib/sqlalchemy/ext/mypy/apply.py b/lib/sqlalchemy/ext/mypy/apply.py index 1bfaf1d7b0b..e18cd08a3fe 100644 --- a/lib/sqlalchemy/ext/mypy/apply.py +++ b/lib/sqlalchemy/ext/mypy/apply.py @@ -1,5 +1,5 @@ # ext/mypy/apply.py -# Copyright (C) 2021 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2023 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/decl_class.py b/lib/sqlalchemy/ext/mypy/decl_class.py index 9c7b44b7586..9e2dcbb9aba 100644 --- a/lib/sqlalchemy/ext/mypy/decl_class.py +++ b/lib/sqlalchemy/ext/mypy/decl_class.py @@ -1,5 +1,5 @@ # ext/mypy/decl_class.py -# Copyright (C) 2021 the SQLAlchemy authors and contributors +# Copyright (C) 
2021-2023 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/infer.py b/lib/sqlalchemy/ext/mypy/infer.py index e8345d09ae3..f7626bdf6b1 100644 --- a/lib/sqlalchemy/ext/mypy/infer.py +++ b/lib/sqlalchemy/ext/mypy/infer.py @@ -1,5 +1,5 @@ # ext/mypy/infer.py -# Copyright (C) 2021 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2023 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/names.py b/lib/sqlalchemy/ext/mypy/names.py index ae55ca47b01..256e0be636a 100644 --- a/lib/sqlalchemy/ext/mypy/names.py +++ b/lib/sqlalchemy/ext/mypy/names.py @@ -1,5 +1,5 @@ # ext/mypy/names.py -# Copyright (C) 2021 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2023 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/future/__init__.py b/lib/sqlalchemy/future/__init__.py index bfc31d42676..c76360fcfff 100644 --- a/lib/sqlalchemy/future/__init__.py +++ b/lib/sqlalchemy/future/__init__.py @@ -1,4 +1,4 @@ -# sql/future/__init__.py +# future/__init__.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/future/engine.py b/lib/sqlalchemy/future/engine.py index 1984f34ca75..bc43f4601c4 100644 --- a/lib/sqlalchemy/future/engine.py +++ b/lib/sqlalchemy/future/engine.py @@ -1,4 +1,4 @@ -# sql/future/engine.py +# future/engine.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/inspection.py b/lib/sqlalchemy/inspection.py index 7d8479b5ecf..1fe37d925f2 100644 --- a/lib/sqlalchemy/inspection.py +++ b/lib/sqlalchemy/inspection.py @@ -1,4 +1,4 @@ -# sqlalchemy/inspect.py +# inspection.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/log.py b/lib/sqlalchemy/log.py index 8de6d188cee..55c1a3eb44f 100644 --- a/lib/sqlalchemy/log.py +++ b/lib/sqlalchemy/log.py @@ -1,4 +1,4 @@ -# sqlalchemy/log.py +# log.py # Copyright (C) 2006-2023 the SQLAlchemy authors and contributors # # Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk diff --git a/lib/sqlalchemy/orm/_typing.py b/lib/sqlalchemy/orm/_typing.py index 3085351ba3b..07f5e61a0ff 100644 --- a/lib/sqlalchemy/orm/_typing.py +++ b/lib/sqlalchemy/orm/_typing.py @@ -1,5 +1,5 @@ # orm/_typing.py -# Copyright (C) 2022 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2023 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/clsregistry.py b/lib/sqlalchemy/orm/clsregistry.py index 10f1db03b65..4f4dab895e4 100644 --- a/lib/sqlalchemy/orm/clsregistry.py +++ b/lib/sqlalchemy/orm/clsregistry.py @@ -1,4 +1,4 @@ -# ext/declarative/clsregistry.py +# orm/clsregistry.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index 80c85f13ad3..bd3e1836af1 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -1,4 +1,4 @@ -# orm/declarative/api.py +# orm/decl_api.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index 0037379bd5f..6e8578863ed 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -1,4 +1,4 @@ -# 
ext/declarative/base.py +# orm/decl_base.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/orm/mapped_collection.py b/lib/sqlalchemy/orm/mapped_collection.py index 9e479d0d308..6a0ee22b3b6 100644 --- a/lib/sqlalchemy/orm/mapped_collection.py +++ b/lib/sqlalchemy/orm/mapped_collection.py @@ -1,4 +1,4 @@ -# orm/collections.py +# orm/mapped_collection.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index e090d5b258c..c62851e1b3b 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -1,3 +1,4 @@ +# orm/strategy_options.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/pool/__init__.py b/lib/sqlalchemy/pool/__init__.py index 7929b6e4bed..c25a8f85d87 100644 --- a/lib/sqlalchemy/pool/__init__.py +++ b/lib/sqlalchemy/pool/__init__.py @@ -1,4 +1,4 @@ -# sqlalchemy/pool/__init__.py +# pool/__init__.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/pool/base.py b/lib/sqlalchemy/pool/base.py index 90ed32ec27b..90ad1d4764c 100644 --- a/lib/sqlalchemy/pool/base.py +++ b/lib/sqlalchemy/pool/base.py @@ -1,4 +1,4 @@ -# sqlalchemy/pool.py +# pool/base.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/pool/events.py b/lib/sqlalchemy/pool/events.py index 762418b14f2..8e06fdbd2be 100644 --- a/lib/sqlalchemy/pool/events.py +++ b/lib/sqlalchemy/pool/events.py @@ -1,4 +1,4 @@ -# sqlalchemy/pool/events.py +# pool/events.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/pool/impl.py b/lib/sqlalchemy/pool/impl.py index af4f788e27d..ced015088cb 100644 --- a/lib/sqlalchemy/pool/impl.py +++ b/lib/sqlalchemy/pool/impl.py @@ -1,4 +1,4 @@ -# sqlalchemy/pool.py +# pool/impl.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/sql/_orm_types.py b/lib/sqlalchemy/sql/_orm_types.py index 90986ec0ccb..26e289c779f 100644 --- a/lib/sqlalchemy/sql/_orm_types.py +++ b/lib/sqlalchemy/sql/_orm_types.py @@ -1,5 +1,5 @@ # sql/_orm_types.py -# Copyright (C) 2022 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2023 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index 0793fbb3db1..003cc51245a 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -1,5 +1,5 @@ # sql/_typing.py -# Copyright (C) 2022 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2023 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/events.py b/lib/sqlalchemy/sql/events.py index b34d0741209..43102ecc2ae 100644 --- a/lib/sqlalchemy/sql/events.py +++ b/lib/sqlalchemy/sql/events.py @@ -1,4 +1,4 @@ -# sqlalchemy/sql/events.py +# sql/events.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/sql/naming.py b/lib/sqlalchemy/sql/naming.py index 03c9aab67ba..a0daa2ca860 100644 --- a/lib/sqlalchemy/sql/naming.py +++ b/lib/sqlalchemy/sql/naming.py @@ -1,4 +1,4 @@ -# sqlalchemy/naming.py +# sql/naming.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git 
a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 9cf4872d023..9226b01e61a 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -1,4 +1,4 @@ -# sql/types_api.py +# sql/type_api.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/testing/plugin/__init__.py b/lib/sqlalchemy/testing/plugin/__init__.py index e69de29bb2d..16031a9824b 100644 --- a/lib/sqlalchemy/testing/plugin/__init__.py +++ b/lib/sqlalchemy/testing/plugin/__init__.py @@ -0,0 +1,6 @@ +# testing/plugin/__init__.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php diff --git a/lib/sqlalchemy/testing/plugin/bootstrap.py b/lib/sqlalchemy/testing/plugin/bootstrap.py index f93b8d3e629..e331224b210 100644 --- a/lib/sqlalchemy/testing/plugin/bootstrap.py +++ b/lib/sqlalchemy/testing/plugin/bootstrap.py @@ -1,3 +1,9 @@ +# testing/plugin/bootstrap.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors """ diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py index f6a7f152b79..1f17fc595f6 100644 --- a/lib/sqlalchemy/testing/plugin/plugin_base.py +++ b/lib/sqlalchemy/testing/plugin/plugin_base.py @@ -1,4 +1,4 @@ -# plugin/plugin_base.py +# testing/plugin/plugin_base.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py index a676e7e28d0..47644e3d28b 100644 --- a/lib/sqlalchemy/testing/plugin/pytestplugin.py +++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py @@ -1,3 +1,9 @@ +# testing/plugin/pytestplugin.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from __future__ import annotations diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py index 0ff564e2455..dcea52d3ba1 100644 --- a/lib/sqlalchemy/testing/provision.py +++ b/lib/sqlalchemy/testing/provision.py @@ -1,3 +1,9 @@ +# testing/provision.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from __future__ import annotations diff --git a/lib/sqlalchemy/testing/suite/__init__.py b/lib/sqlalchemy/testing/suite/__init__.py index 30817e1e445..08f31c6c06d 100644 --- a/lib/sqlalchemy/testing/suite/__init__.py +++ b/lib/sqlalchemy/testing/suite/__init__.py @@ -1,3 +1,9 @@ +# testing/suite/__init__.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php from .test_cte import * # noqa from .test_ddl import * # noqa from .test_deprecations import * # noqa diff --git a/lib/sqlalchemy/testing/suite/test_cte.py b/lib/sqlalchemy/testing/suite/test_cte.py index fb767e46354..f73a5a6a781 100644 --- a/lib/sqlalchemy/testing/suite/test_cte.py +++ 
b/lib/sqlalchemy/testing/suite/test_cte.py @@ -1,3 +1,9 @@ +# testing/suite/test_cte.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from .. import fixtures diff --git a/lib/sqlalchemy/testing/suite/test_ddl.py b/lib/sqlalchemy/testing/suite/test_ddl.py index 35651170d12..2256a03163e 100644 --- a/lib/sqlalchemy/testing/suite/test_ddl.py +++ b/lib/sqlalchemy/testing/suite/test_ddl.py @@ -1,3 +1,9 @@ +# testing/suite/test_ddl.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors import random diff --git a/lib/sqlalchemy/testing/suite/test_deprecations.py b/lib/sqlalchemy/testing/suite/test_deprecations.py index c453cbfed92..793b401ba85 100644 --- a/lib/sqlalchemy/testing/suite/test_deprecations.py +++ b/lib/sqlalchemy/testing/suite/test_deprecations.py @@ -1,3 +1,9 @@ +# testing/suite/test_deprecations.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from .. import fixtures diff --git a/lib/sqlalchemy/testing/suite/test_dialect.py b/lib/sqlalchemy/testing/suite/test_dialect.py index 6edf93ffdc3..68ae800330a 100644 --- a/lib/sqlalchemy/testing/suite/test_dialect.py +++ b/lib/sqlalchemy/testing/suite/test_dialect.py @@ -1,3 +1,9 @@ +# testing/suite/test_dialect.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index 09f24d356da..e03d4c6430c 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -1,3 +1,9 @@ +# testing/suite/test_insert.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from decimal import Decimal diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index f2ecf1cae95..26839ab8777 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -1,3 +1,9 @@ +# testing/suite/test_reflection.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors import operator diff --git a/lib/sqlalchemy/testing/suite/test_results.py b/lib/sqlalchemy/testing/suite/test_results.py index e439d6ca6d9..3e688c7cebc 100644 --- a/lib/sqlalchemy/testing/suite/test_results.py +++ b/lib/sqlalchemy/testing/suite/test_results.py @@ -1,3 +1,9 @@ +# testing/suite/test_results.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors 
import datetime diff --git a/lib/sqlalchemy/testing/suite/test_rowcount.py b/lib/sqlalchemy/testing/suite/test_rowcount.py index 58295a5c531..651e746d46b 100644 --- a/lib/sqlalchemy/testing/suite/test_rowcount.py +++ b/lib/sqlalchemy/testing/suite/test_rowcount.py @@ -1,3 +1,9 @@ +# testing/suite/test_rowcount.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from sqlalchemy import bindparam diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py index a0aa147f9c0..4825c53a396 100644 --- a/lib/sqlalchemy/testing/suite/test_select.py +++ b/lib/sqlalchemy/testing/suite/test_select.py @@ -1,3 +1,9 @@ +# testing/suite/test_select.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors import collections.abc as collections_abc diff --git a/lib/sqlalchemy/testing/suite/test_sequence.py b/lib/sqlalchemy/testing/suite/test_sequence.py index 43e2d066bba..b3f63076ae4 100644 --- a/lib/sqlalchemy/testing/suite/test_sequence.py +++ b/lib/sqlalchemy/testing/suite/test_sequence.py @@ -1,3 +1,9 @@ +# testing/suite/test_sequence.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from .. import config diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py index 5debb450f60..c9a5d6c2601 100644 --- a/lib/sqlalchemy/testing/suite/test_types.py +++ b/lib/sqlalchemy/testing/suite/test_types.py @@ -1,3 +1,9 @@ +# testing/suite/test_types.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors diff --git a/lib/sqlalchemy/testing/suite/test_unicode_ddl.py b/lib/sqlalchemy/testing/suite/test_unicode_ddl.py index 01597893727..cd7f6309bd4 100644 --- a/lib/sqlalchemy/testing/suite/test_unicode_ddl.py +++ b/lib/sqlalchemy/testing/suite/test_unicode_ddl.py @@ -1,3 +1,9 @@ +# testing/suite/test_unicode_ddl.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors diff --git a/lib/sqlalchemy/testing/suite/test_update_delete.py b/lib/sqlalchemy/testing/suite/test_update_delete.py index 2d13bda34ae..17238a0205f 100644 --- a/lib/sqlalchemy/testing/suite/test_update_delete.py +++ b/lib/sqlalchemy/testing/suite/test_update_delete.py @@ -1,3 +1,9 @@ +# testing/suite/test_update_delete.py +# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors from .. 
import fixtures diff --git a/lib/sqlalchemy/util/_has_cy.py b/lib/sqlalchemy/util/_has_cy.py index 37f716ad3b9..37e0c4e891c 100644 --- a/lib/sqlalchemy/util/_has_cy.py +++ b/lib/sqlalchemy/util/_has_cy.py @@ -1,3 +1,4 @@ +# util/_has_cy.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/util/preloaded.py b/lib/sqlalchemy/util/preloaded.py index f3609c8e472..c5b4a0fabb8 100644 --- a/lib/sqlalchemy/util/preloaded.py +++ b/lib/sqlalchemy/util/preloaded.py @@ -1,4 +1,4 @@ -# util/_preloaded.py +# util/preloaded.py # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # diff --git a/lib/sqlalchemy/util/tool_support.py b/lib/sqlalchemy/util/tool_support.py index 5a2fc3ba051..4a9f9473de5 100644 --- a/lib/sqlalchemy/util/tool_support.py +++ b/lib/sqlalchemy/util/tool_support.py @@ -27,6 +27,7 @@ from typing import Dict from typing import Iterator from typing import Optional +from typing import Union from . import compat @@ -121,7 +122,7 @@ def write_status(self, *text: str) -> None: sys.stderr.write(" ".join(text)) def write_output_file_from_text( - self, text: str, destination_path: str + self, text: str, destination_path: Union[str, Path] ) -> None: if self.args.check: self._run_diff(destination_path, source=text) @@ -129,7 +130,9 @@ def write_output_file_from_text( print(text) else: self.write_status(f"Writing {destination_path}...") - Path(destination_path).write_text(text) + Path(destination_path).write_text( + text, encoding="utf-8", newline="\n" + ) self.write_status("done\n") def write_output_file_from_tempfile( @@ -149,24 +152,24 @@ def write_output_file_from_tempfile( def _run_diff( self, - destination_path: str, + destination_path: Union[str, Path], *, source: Optional[str] = None, source_file: Optional[str] = None, ) -> None: if source_file: - with open(source_file) as tf: + with open(source_file, encoding="utf-8") as tf: source_lines = list(tf) elif source is not None: source_lines = source.splitlines(keepends=True) else: assert False, "source or source_file is required" - with open(destination_path) as dp: + with open(destination_path, encoding="utf-8") as dp: d = difflib.unified_diff( list(dp), source_lines, - fromfile=destination_path, + fromfile=Path(destination_path).as_posix(), tofile="", n=3, lineterm="\n", diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index 3d15d43db76..aad5709451d 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -1,5 +1,5 @@ # util/typing.py -# Copyright (C) 2022 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2023 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/tools/normalize_file_headers.py b/tools/normalize_file_headers.py new file mode 100644 index 00000000000..8d82f849558 --- /dev/null +++ b/tools/normalize_file_headers.py @@ -0,0 +1,69 @@ +from datetime import date +from pathlib import Path +import re + +from sqlalchemy.util.tool_support import code_writer_cmd + +sa_path = Path(__file__).parent.parent / "lib/sqlalchemy" + + +file_re = re.compile(r"^# [\w+/]+.(?:pyx?|pxd)$", re.MULTILINE) +license_re = re.compile( + r"Copyright .C. 
(\d+)-\d+ the SQLAlchemy authors and contributors" +) + +this_year = date.today().year +license_ = f""" +# Copyright (C) 2005-{this_year} the SQLAlchemy authors and \ +contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +""" + + +def run_file(cmd: code_writer_cmd, file: Path, update_year: bool): + content = file.read_text("utf-8") + path = str(file.relative_to(sa_path)).replace("\\", "/") # handle windows + path_comment = f"# {path}" + has_license = bool(license_re.search(content)) + if file_re.match(content.strip()): + if has_license: + to_sub = path_comment + else: + to_sub = path_comment + license_ + content = file_re.sub(to_sub, content, count=1) + else: + content = path_comment + ("\n" if has_license else license_) + content + + if has_license and update_year: + content = license_re.sub( + rf"Copyright (C) \1-{this_year} the SQLAlchemy " + "authors and contributors", + content, + 1, + ) + cmd.write_output_file_from_text(content, file) + + +def run(cmd: code_writer_cmd, update_year: bool): + i = 0 + for ext in ('py', 'pyx', 'pxd'): + for file in sa_path.glob(f"**/*.{ext}"): + run_file(cmd, file, update_year) + i += 1 + cmd.write_status(f"\nDone. Processed {i} files.") + + +if __name__ == "__main__": + cmd = code_writer_cmd(__file__) + with cmd.add_arguments() as parser: + parser.add_argument( + "--update-year", + action="store_true", + help="Update the year in the license files", + ) + + with cmd.run_program(): + run(cmd, cmd.args.update_year) diff --git a/tox.ini b/tox.ini index e90baf54229..616e47f7be3 100644 --- a/tox.ini +++ b/tox.ini @@ -244,6 +244,7 @@ commands = python ./tools/generate_proxy_methods.py --check python ./tools/sync_test_files.py --check python ./tools/generate_sql_functions.py --check + python ./tools/normalize_file_headers.py --check python ./tools/walk_packages.py From f7c8ee80020e9ca7482fbe10086cce26f93b04e5 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 27 Nov 2023 21:39:31 +0100 Subject: [PATCH 027/544] Need to escape # in tox v4 See https://tox.wiki/en/latest/upgrading.html#changed-ini-rules Change-Id: I3022538e3f919f5bc977411042d82c62260645a1 (cherry picked from commit aa7145caa1927d8c70f6c5029c3c04528b86c7b0) --- tox.ini | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/tox.ini b/tox.ini index 616e47f7be3..6acbe66366c 100644 --- a/tox.ini +++ b/tox.ini @@ -47,24 +47,24 @@ deps= py312: greenlet>=3.0.0a1 - dbapimain-sqlite: git+https://github.com/omnilib/aiosqlite.git#egg=aiosqlite - dbapimain-sqlite: git+https://github.com/coleifer/sqlcipher3.git#egg=sqlcipher3 + dbapimain-sqlite: git+https://github.com/omnilib/aiosqlite.git\#egg=aiosqlite + dbapimain-sqlite: git+https://github.com/coleifer/sqlcipher3.git\#egg=sqlcipher3 - dbapimain-postgresql: git+https://github.com/psycopg/psycopg2.git#egg=psycopg2 - dbapimain-postgresql: git+https://github.com/MagicStack/asyncpg.git#egg=asyncpg - dbapimain-postgresql: git+https://github.com/tlocke/pg8000.git#egg=pg8000 - dbapimain-postgresql: git+https://github.com/psycopg/psycopg.git#egg=psycopg&subdirectory=psycopg - # dbapimain-postgresql: git+https://github.com/psycopg/psycopg.git#egg=psycopg-c&subdirectory=psycopg_c + dbapimain-postgresql: git+https://github.com/psycopg/psycopg2.git\#egg=psycopg2 + dbapimain-postgresql: git+https://github.com/MagicStack/asyncpg.git\#egg=asyncpg + dbapimain-postgresql: git+https://github.com/tlocke/pg8000.git\#egg=pg8000 + 
dbapimain-postgresql: git+https://github.com/psycopg/psycopg.git\#egg=psycopg&subdirectory=psycopg + # dbapimain-postgresql: git+https://github.com/psycopg/psycopg.git\#egg=psycopg-c&subdirectory=psycopg_c - dbapimain-mysql: git+https://github.com/PyMySQL/mysqlclient-python.git#egg=mysqlclient - dbapimain-mysql: git+https://github.com/PyMySQL/PyMySQL.git#egg=pymysql + dbapimain-mysql: git+https://github.com/PyMySQL/mysqlclient-python.git\#egg=mysqlclient + dbapimain-mysql: git+https://github.com/PyMySQL/PyMySQL.git\#egg=pymysql -# dbapimain-mysql: git+https://github.com/mariadb-corporation/mariadb-connector-python#egg=mariadb +# dbapimain-mysql: git+https://github.com/mariadb-corporation/mariadb-connector-python\#egg=mariadb - dbapimain-oracle: git+https://github.com/oracle/python-cx_Oracle.git#egg=cx_Oracle + dbapimain-oracle: git+https://github.com/oracle/python-cx_Oracle.git\#egg=cx_Oracle - py312-mssql: git+https://github.com/mkleehammer/pyodbc.git#egg=pyodbc - dbapimain-mssql: git+https://github.com/mkleehammer/pyodbc.git#egg=pyodbc + py312-mssql: git+https://github.com/mkleehammer/pyodbc.git\#egg=pyodbc + dbapimain-mssql: git+https://github.com/mkleehammer/pyodbc.git\#egg=pyodbc cov: pytest-cov From efac23ed0f451490b32330d6425f9291af282eb8 Mon Sep 17 00:00:00 2001 From: Kai Date: Mon, 4 Dec 2023 19:23:43 +0100 Subject: [PATCH 028/544] Spelling dml.rst (#10730) Really a very minor spelling correction. (cherry picked from commit f43b428d2baf6f6fc01c8e3028743cd96c05986e) --- doc/build/orm/queryguide/dml.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/orm/queryguide/dml.rst b/doc/build/orm/queryguide/dml.rst index 67614ac92c5..ec09c61dfd3 100644 --- a/doc/build/orm/queryguide/dml.rst +++ b/doc/build/orm/queryguide/dml.rst @@ -1005,7 +1005,7 @@ Important Notes and Caveats for ORM-Enabled Update and Delete ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ORM-enabled UPDATE and DELETE features bypass ORM :term:`unit-of-work` -automation in favor being able to emit a single UPDATE or DELETE statement +automation in favor of being able to emit a single UPDATE or DELETE statement that matches multiple rows at once without complexity. * The operations do not offer in-Python cascading of relationships - it is From ce1b9655b066d426702fd391579c677b3af731ce Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 4 Dec 2023 21:14:45 +0100 Subject: [PATCH 029/544] Document limitation in dataclass mapping styles Document that using default and init=False on a dataclass field mapped imperatively or using imperative table will not work. Change-Id: Id2e27e4f7f0cafc60be3f97b7945983360c0a7d2 References: #9879 (cherry picked from commit 06c5c8e955402af6c09b8a3c55cbdee3cd0e0393) --- doc/build/orm/cascades.rst | 2 +- doc/build/orm/dataclasses.rst | 12 ++++++++++++ doc/build/orm/queryguide/dml.rst | 4 ++-- 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/doc/build/orm/cascades.rst b/doc/build/orm/cascades.rst index efb997560a8..4c1e365ef7d 100644 --- a/doc/build/orm/cascades.rst +++ b/doc/build/orm/cascades.rst @@ -303,7 +303,7 @@ directives described at :ref:`passive_deletes` should be used. .. warning:: Note that the ORM's "delete" and "delete-cascade" behavior applies **only** to the use of the :meth:`_orm.Session.delete` method to mark - individual ORM instances for deletion within the :term:`unit-of-work` process. + individual ORM instances for deletion within the :term:`unit of work` process. 
It does **not** apply to "bulk" deletes, which would be emitted using the :func:`_sql.delete` construct as illustrated at :ref:`orm_queryguide_update_delete_where`. See diff --git a/doc/build/orm/dataclasses.rst b/doc/build/orm/dataclasses.rst index b7d0bee4313..19fabe9f835 100644 --- a/doc/build/orm/dataclasses.rst +++ b/doc/build/orm/dataclasses.rst @@ -705,6 +705,15 @@ which itself is specified within the ``__mapper_args__`` dictionary, so that it is passed to the constructor for :class:`_orm.Mapper`. An alternative to this approach is in the next example. + +.. warning:: + Declaring a dataclass ``field()`` setting a ``default`` together with ``init=False`` + will not work as would be expected with a totally plain dataclass, + since the SQLAlchemy class instrumentation will replace + the default value set on the class by the dataclass creation process. + Use ``default_factory`` instead. This adaptation is done automatically when + making use of :ref:`orm_declarative_native_dataclasses`. + .. _orm_declarative_dataclasses_declarative_table: Mapping pre-existing dataclasses using Declarative-style fields @@ -909,6 +918,9 @@ variables:: mapper_registry.map_imperatively(Address, address) +The same warning mentioned in :ref:`orm_declarative_dataclasses_imperative_table` +applies when using this mapping style. + .. _orm_declarative_attrs_imperative_table: Applying ORM mappings to an existing attrs class diff --git a/doc/build/orm/queryguide/dml.rst b/doc/build/orm/queryguide/dml.rst index ec09c61dfd3..a2c10c1bb34 100644 --- a/doc/build/orm/queryguide/dml.rst +++ b/doc/build/orm/queryguide/dml.rst @@ -995,7 +995,7 @@ For a DELETE, an example of deleting rows based on criteria:: .. warning:: Please read the following section :ref:`orm_queryguide_update_delete_caveats` for important notes regarding how the functionality of ORM-Enabled UPDATE and DELETE - diverges from that of ORM :term:`unit-of-work` features, such + diverges from that of ORM :term:`unit of work` features, such as using the :meth:`_orm.Session.delete` method to delete individual objects. @@ -1004,7 +1004,7 @@ For a DELETE, an example of deleting rows based on criteria:: Important Notes and Caveats for ORM-Enabled Update and Delete ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The ORM-enabled UPDATE and DELETE features bypass ORM :term:`unit-of-work` +The ORM-enabled UPDATE and DELETE features bypass ORM :term:`unit of work` automation in favor of being able to emit a single UPDATE or DELETE statement that matches multiple rows at once without complexity. From 933a3eca9600d3654f459274aa35df7aec77c83d Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 30 Nov 2023 22:40:43 +0100 Subject: [PATCH 030/544] Improve session after_begin even documentation Change-Id: Ie2a1e6bdf5960208921dc76e372fe51d3b280f1a References: #10687 (cherry picked from commit 842b3ebb4b9e40ce3f6aa4257bd5e585c42e51d2) --- lib/sqlalchemy/orm/events.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index e7e3e32a7ff..1a54dfd49a5 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -2035,7 +2035,14 @@ def after_begin( transaction: SessionTransaction, connection: Connection, ) -> None: - """Execute after a transaction is begun on a connection + """Execute after a transaction is begun on a connection. + + .. note:: This event is called within the process of the + :class:`_orm.Session` modifying its own internal state. 
+ To invoke SQL operations within this hook, use the + :class:`_engine.Connection` provided to the event; + do not run SQL operations using the :class:`_orm.Session` + directly. :param session: The target :class:`.Session`. :param transaction: The :class:`.SessionTransaction`. From 2ad09db79d093ad866f48831ea8c641b79c7a464 Mon Sep 17 00:00:00 2001 From: Michael Oliver Date: Tue, 5 Dec 2023 17:24:17 -0500 Subject: [PATCH 031/544] Forward `**kw` in `__init_subclass__()` to super Modified the ``__init_subclass__()`` method used by :class:`_orm.MappedAsDataclass`, :class:`_orm.DeclarativeBase`` and :class:`_orm.DeclarativeBaseNoMeta` to accept arbitrary ``**kw`` and to propagate them to the ``super()`` call, allowing greater flexibility in arranging custom superclasses and mixins which make use of ``__init_subclass__()`` keyword arguments. Pull request courtesy Michael Oliver. Fixes: #10732 Closes: #10733 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10733 Pull-request-sha: 7fdeec1f3224f48213c9c9af5f3e7e5d0904dafa Change-Id: I955a735d4e23502b5a6b22ac093e391b378edc87 (cherry picked from commit ceeaaecd2401d2407b60c22708f58a8ae0898d85) --- doc/build/changelog/unreleased_20/10732.rst | 12 +++++++ lib/sqlalchemy/dialects/mysql/base.py | 2 +- lib/sqlalchemy/orm/decl_api.py | 10 +++--- test/orm/declarative/test_basic.py | 37 +++++++++++++++++++++ 4 files changed, 56 insertions(+), 5 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10732.rst diff --git a/doc/build/changelog/unreleased_20/10732.rst b/doc/build/changelog/unreleased_20/10732.rst new file mode 100644 index 00000000000..0961b05d739 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10732.rst @@ -0,0 +1,12 @@ +.. change:: + :tags: bug, orm + :tickets: 10668 + + Modified the ``__init_subclass__()`` method used by + :class:`_orm.MappedAsDataclass`, :class:`_orm.DeclarativeBase`` and + :class:`_orm.DeclarativeBaseNoMeta` to accept arbitrary ``**kw`` and to + propagate them to the ``super()`` call, allowing greater flexibility in + arranging custom superclasses and mixins which make use of + ``__init_subclass__()`` keyword arguments. Pull request courtesy Michael + Oliver. + diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 58d7235e017..749d42ea120 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1571,7 +1571,7 @@ def visit_false(self, element, **kw): def get_select_precolumns(self, select, **kw): """Add special MySQL keywords in place of DISTINCT. - .. deprecated 1.4:: this usage is deprecated. + .. deprecated:: 1.4 This usage is deprecated. :meth:`_expression.Select.prefix_with` should be used for special keywords at the start of a SELECT. diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index bd3e1836af1..60bd2ae4901 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -594,6 +594,7 @@ def __init_subclass__( dataclass_callable: Union[ _NoArg, Callable[..., Type[Any]] ] = _NoArg.NO_ARG, + **kw: Any, ) -> None: apply_dc_transforms: _DataclassArguments = { "init": init, @@ -622,7 +623,7 @@ def __init_subclass__( current_transforms ) = apply_dc_transforms - super().__init_subclass__() + super().__init_subclass__(**kw) if not _is_mapped_class(cls): new_anno = ( @@ -839,13 +840,13 @@ def _sa_inspect_instance(self) -> InstanceState[Self]: def __init__(self, **kw: Any): ... 
- def __init_subclass__(cls) -> None: + def __init_subclass__(cls, **kw: Any) -> None: if DeclarativeBase in cls.__bases__: _check_not_declarative(cls, DeclarativeBase) _setup_declarative_base(cls) else: _as_declarative(cls._sa_registry, cls, cls.__dict__) - super().__init_subclass__() + super().__init_subclass__(**kw) def _check_not_declarative(cls: Type[Any], base: Type[Any]) -> None: @@ -964,12 +965,13 @@ def _sa_inspect_instance(self) -> InstanceState[Self]: def __init__(self, **kw: Any): ... - def __init_subclass__(cls) -> None: + def __init_subclass__(cls, **kw: Any) -> None: if DeclarativeBaseNoMeta in cls.__bases__: _check_not_declarative(cls, DeclarativeBaseNoMeta) _setup_declarative_base(cls) else: _as_declarative(cls._sa_registry, cls, cls.__dict__) + super().__init_subclass__(**kw) def add_mapped_attribute( diff --git a/test/orm/declarative/test_basic.py b/test/orm/declarative/test_basic.py index 7085b2af9f6..37a1b643c1d 100644 --- a/test/orm/declarative/test_basic.py +++ b/test/orm/declarative/test_basic.py @@ -35,6 +35,7 @@ from sqlalchemy.orm import joinedload from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column +from sqlalchemy.orm import MappedAsDataclass from sqlalchemy.orm import MappedColumn from sqlalchemy.orm import Mapper from sqlalchemy.orm import registry @@ -930,6 +931,42 @@ class User(BaseUser): # Check to see if __init_subclass__ works in supported versions eq_(UserType._set_random_keyword_used_here, True) + @testing.variation( + "basetype", + ["DeclarativeBase", "DeclarativeBaseNoMeta", "MappedAsDataclass"], + ) + def test_kw_support_in_declarative_base(self, basetype): + """test #10732""" + + if basetype.DeclarativeBase: + + class Base(DeclarativeBase): + pass + + elif basetype.DeclarativeBaseNoMeta: + + class Base(DeclarativeBaseNoMeta): + pass + + elif basetype.MappedAsDataclass: + + class Base(MappedAsDataclass): + pass + + else: + basetype.fail() + + class Mixin: + def __init_subclass__(cls, random_keyword: bool, **kw) -> None: + super().__init_subclass__(**kw) + cls._set_random_keyword_used_here = random_keyword + + class User(Base, Mixin, random_keyword=True): + __tablename__ = "user" + id_ = Column(Integer, primary_key=True) + + eq_(User._set_random_keyword_used_here, True) + def test_declarative_base_bad_registry(self): with assertions.expect_raises_message( exc.InvalidRequestError, From 64a5a2fa37815508ddb54ebf34740c141cb65226 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 6 Dec 2023 14:10:28 -0500 Subject: [PATCH 032/544] Replace custom URL-encoding method with quote Fixed URL-encoding of the username and password components of :class:`.engine.URL` objects when converting them to string using the :meth:`_engine.URL.render_as_string` method, by using Python standard library ``urllib.parse.quote`` while allowing for plus signs and spaces to remain unchanged as supported by SQLAlchemy's non-standard URL parsing, rather than the legacy home-grown routine from many years ago. Pull request courtesy of Xavier NUNN. 
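[Editor's illustration, not text from the original commit message: a minimal round-trip sketch of the behavior the revised test below asserts, assuming SQLAlchemy 2.0 with this fix applied; the driver name and credential values are invented for the example.]

    from sqlalchemy.engine import URL, make_url

    # Credentials containing characters that need URL-encoding, plus literal
    # spaces and "+" signs, which SQLAlchemy's URL parsing accepts as-is and
    # which quote(..., safe=" +") therefore leaves unchanged when rendering.
    u = URL.create(
        "postgresql+psycopg2",
        username="user %&|",
        password="pass with space+and+plus",
        host="localhost",
        port=5432,
        database="test",
    )

    rendered = u.render_as_string(hide_password=False)

    # Parsing the rendered string yields an equivalent URL object again.
    assert make_url(rendered) == u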
Fixes: #10662 Closes: #10726 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10726 Pull-request-sha: 82219041b8f73d8c932cc40e87c002b3b853e02e Change-Id: Iedca4929579d4d26ef8cce083252dcd1e476286b (cherry picked from commit 4438883c9703affa3f441be9a230a5f751905a05) --- doc/build/changelog/unreleased_20/10662.rst | 11 +++++++++ lib/sqlalchemy/engine/url.py | 20 ++++++--------- test/engine/test_parseconnect.py | 27 ++++++++++++++++++--- 3 files changed, 42 insertions(+), 16 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10662.rst diff --git a/doc/build/changelog/unreleased_20/10662.rst b/doc/build/changelog/unreleased_20/10662.rst new file mode 100644 index 00000000000..5be613d8e23 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10662.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, engine + :tickets: 10662 + + Fixed URL-encoding of the username and password components of + :class:`.engine.URL` objects when converting them to string using the + :meth:`_engine.URL.render_as_string` method, by using Python standard + library ``urllib.parse.quote`` while allowing for plus signs and spaces to + remain unchanged as supported by SQLAlchemy's non-standard URL parsing, + rather than the legacy home-grown routine from many years ago. Pull request + courtesy of Xavier NUNN. diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index 5cf5ec7b4b7..04ae5e91fbb 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -32,6 +32,7 @@ from typing import Type from typing import Union from urllib.parse import parse_qsl +from urllib.parse import quote from urllib.parse import quote_plus from urllib.parse import unquote @@ -621,17 +622,17 @@ def render_as_string(self, hide_password: bool = True) -> str: """ s = self.drivername + "://" if self.username is not None: - s += _sqla_url_quote(self.username) + s += quote(self.username, safe=" +") if self.password is not None: s += ":" + ( "***" if hide_password - else _sqla_url_quote(str(self.password)) + else quote(str(self.password), safe=" +") ) s += "@" if self.host is not None: if ":" in self.host: - s += "[%s]" % self.host + s += f"[{self.host}]" else: s += self.host if self.port is not None: @@ -642,7 +643,7 @@ def render_as_string(self, hide_password: bool = True) -> str: keys = list(self.query) keys.sort() s += "?" 
+ "&".join( - "%s=%s" % (quote_plus(k), quote_plus(element)) + f"{quote_plus(k)}={quote_plus(element)}" for k in keys for element in util.to_list(self.query[k]) ) @@ -885,10 +886,10 @@ def _parse_url(name: str) -> URL: components["query"] = query if components["username"] is not None: - components["username"] = _sqla_url_unquote(components["username"]) + components["username"] = unquote(components["username"]) if components["password"] is not None: - components["password"] = _sqla_url_unquote(components["password"]) + components["password"] = unquote(components["password"]) ipv4host = components.pop("ipv4host") ipv6host = components.pop("ipv6host") @@ -904,10 +905,3 @@ def _parse_url(name: str) -> URL: raise exc.ArgumentError( "Could not parse SQLAlchemy URL from string '%s'" % name ) - - -def _sqla_url_quote(text: str) -> str: - return re.sub(r"[:@/]", lambda m: "%%%X" % ord(m.group(0)), text) - - -_sqla_url_unquote = unquote diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py index 4c144a4a31a..34dc1d7aa82 100644 --- a/test/engine/test_parseconnect.py +++ b/test/engine/test_parseconnect.py @@ -62,13 +62,33 @@ class URLTest(fixtures.TestBase): "dbtype://username:password@hostspec/test database with@atsign", "dbtype://username:password@hostspec?query=but_no_db", "dbtype://username:password@hostspec:450?query=but_no_db", + "dbtype://username:password with spaces@hostspec:450?query=but_no_db", + "dbtype+apitype://username with space+and+plus:" + "password with space+and+plus@" + "hostspec:450?query=but_no_db", + "dbtype://user%25%26%7C:pass%25%26%7C@hostspec:499?query=but_no_db", + "dbtype://user🐍測試:pass🐍測試@hostspec:499?query=but_no_db", ) def test_rfc1738(self, text): u = url.make_url(text) assert u.drivername in ("dbtype", "dbtype+apitype") - assert u.username in ("username", None) - assert u.password in ("password", "apples/oranges", None) + assert u.username in ( + "username", + "user%&|", + "username with space+and+plus", + "user🐍測試", + None, + ) + assert u.password in ( + "password", + "password with spaces", + "password with space+and+plus", + "apples/oranges", + "pass%&|", + "pass🐍測試", + None, + ) assert u.host in ( "hostspec", "127.0.0.1", @@ -87,7 +107,8 @@ def test_rfc1738(self, text): "E:/work/src/LEM/db/hello.db", None, ), u.database - eq_(u.render_as_string(hide_password=False), text) + + eq_(url.make_url(u.render_as_string(hide_password=False)), u) def test_rfc1738_password(self): u = url.make_url("dbtype://user:pass word + other%3Awords@host/dbname") From 8b2bf92f223c9036fee84e127f0eaf9573a9c2ee Mon Sep 17 00:00:00 2001 From: Kevin Kirsche Date: Mon, 4 Dec 2023 13:25:24 -0500 Subject: [PATCH 033/544] Update `TZDateTime` type decorator example to align with python docs This change updates the `TZDateTime` type decorator to use the timezone awareness checks described in the Python documentation located here: https://docs.python.org/3/library/datetime.html#determining-if-an-object-is-aware-or-naive The specific lines state: > A [`datetime`](https://docs.python.org/3/library/datetime.html#datetime.datetime) object `d` is aware if both of the following hold: > > `d.tzinfo is not None` > > `d.tzinfo.utcoffset(d)` does not return `None` > > Otherwise, `d` is naive. 
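[Editor's illustration, not text from the original commit message: a hedged sketch of roughly how the documented ``TZDateTime`` recipe reads once the corrected awareness check is in place; only the changed condition is taken from the patch below, while the surrounding class follows the existing ``custom_types.rst`` example.]

    import datetime

    from sqlalchemy import DateTime
    from sqlalchemy.types import TypeDecorator


    class TZDateTime(TypeDecorator):
        impl = DateTime
        cache_ok = True

        def process_bind_param(self, value, dialect):
            if value is not None:
                # "Aware" per the Python docs: tzinfo is set AND
                # utcoffset() does not return None.
                if not value.tzinfo or value.tzinfo.utcoffset(value) is None:
                    raise TypeError("tzinfo is required")
                value = value.astimezone(datetime.timezone.utc).replace(
                    tzinfo=None
                )
            return value

        def process_result_value(self, value, dialect):
            if value is not None:
                value = value.replace(tzinfo=datetime.timezone.utc)
            return value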
Closes: #10719 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10719 Pull-request-sha: bb30cb3cfe57f326addec21a6cae5f81184c2e74 Change-Id: I1ac51c1ec2820c3f224a79b7af5057fe2b3a55e2 (cherry picked from commit e1477c152c5a1e097399300883fcd6b23a6dfabf) --- doc/build/core/custom_types.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/core/custom_types.rst b/doc/build/core/custom_types.rst index 6ae9e066ace..b9d8953b4e8 100644 --- a/doc/build/core/custom_types.rst +++ b/doc/build/core/custom_types.rst @@ -156,7 +156,7 @@ denormalize:: def process_bind_param(self, value, dialect): if value is not None: - if not value.tzinfo: + if not value.tzinfo or value.tzinfo.utcoffset(value) is None: raise TypeError("tzinfo is required") value = value.astimezone(datetime.timezone.utc).replace(tzinfo=None) return value From bbd71627f067b7039b09ede974f6ea9cbd8c08da Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Dec 2023 10:52:38 +0100 Subject: [PATCH 034/544] Bump actions/setup-python from 4 to 5 (#10737) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cherry picked from commit 9adf8ab1bb72253ac15d516e4a259aa88176d92f) --- .github/workflows/create-wheels.yaml | 2 +- .github/workflows/run-on-pr.yaml | 4 ++-- .github/workflows/run-test.yaml | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index b5c0126be68..4e242599fe4 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -82,7 +82,7 @@ jobs: - name: Set up Python for twine and pure-python wheel - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.11" diff --git a/.github/workflows/run-on-pr.yaml b/.github/workflows/run-on-pr.yaml index c19e7a59018..0790c793304 100644 --- a/.github/workflows/run-on-pr.yaml +++ b/.github/workflows/run-on-pr.yaml @@ -40,7 +40,7 @@ jobs: uses: actions/checkout@v4 - name: Set up python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} architecture: ${{ matrix.architecture }} @@ -75,7 +75,7 @@ jobs: uses: actions/checkout@v4 - name: Set up python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} architecture: ${{ matrix.architecture }} diff --git a/.github/workflows/run-test.yaml b/.github/workflows/run-test.yaml index fa2fa54f2ea..6eae9e8bc72 100644 --- a/.github/workflows/run-test.yaml +++ b/.github/workflows/run-test.yaml @@ -72,7 +72,7 @@ jobs: uses: actions/checkout@v4 - name: Set up python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} architecture: ${{ matrix.architecture }} @@ -171,7 +171,7 @@ jobs: uses: actions/checkout@v4 - name: Set up python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} architecture: ${{ matrix.architecture }} From 
961039f9a491d38f63baa6442fcf0e366aa051eb Mon Sep 17 00:00:00 2001 From: Mehdi GMIRA Date: Fri, 8 Dec 2023 11:54:11 +0100 Subject: [PATCH 035/544] fix(attribute_keyed_dict): using Any instead of TypeVar (#10746) Co-authored-by: Mehdi Gmira (cherry picked from commit 86f335c29e9f4d9a8e2b28dd75301f28f5f9f4f1) --- lib/sqlalchemy/orm/mapped_collection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/orm/mapped_collection.py b/lib/sqlalchemy/orm/mapped_collection.py index 6a0ee22b3b6..a75789f851d 100644 --- a/lib/sqlalchemy/orm/mapped_collection.py +++ b/lib/sqlalchemy/orm/mapped_collection.py @@ -231,7 +231,7 @@ def __reduce__(self) -> Tuple[Type[_AttrGetter], Tuple[str]]: def attribute_keyed_dict( attr_name: str, *, ignore_unpopulated_attribute: bool = False -) -> Type[KeyFuncDict[_KT, _KT]]: +) -> Type[KeyFuncDict[Any, Any]]: """A dictionary-based collection type with attribute-based keying. .. versionchanged:: 2.0 Renamed :data:`.attribute_mapped_collection` to From e793b35a60584aae52f2e334e996cab5f3a45179 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 9 Dec 2023 10:43:37 -0500 Subject: [PATCH 036/544] ensure test suite runs w/o greenlet This is a reopen of #6136 essentially that repaired the test suite to run without greenlet but now this has regressed. add a tox target that explicitly uninstalls greenlet, will add to CI. This also changes 2.0 in that the full tox target will omit dbdrivers that require greenlet. Fixes: #10747 Change-Id: Ia7d786d781e591539a388bfbe17b00a59f0e86d9 (cherry picked from commit 0bd686df43f572cec658c586082f299ab2cd756f) --- doc/build/changelog/unreleased_20/10747.rst | 9 +++++++++ lib/sqlalchemy/testing/provision.py | 1 + tox.ini | 7 ++++++- 3 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/10747.rst diff --git a/doc/build/changelog/unreleased_20/10747.rst b/doc/build/changelog/unreleased_20/10747.rst new file mode 100644 index 00000000000..ac8133ac735 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10747.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, tests + :tickets: 10747 + + Improvements to the test suite to further harden its ability to run + when Python ``greenlet`` is not installed. There is now a tox + target that includes the token "nogreenlet" that will run the suite + with greenlet not installed (note that it still temporarily installs + greenlet as part of the tox config, however). 
diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py index dcea52d3ba1..884d558138a 100644 --- a/lib/sqlalchemy/testing/provision.py +++ b/lib/sqlalchemy/testing/provision.py @@ -74,6 +74,7 @@ def setup_config(db_url, options, file_config, follower_ident): # hooks dialect = sa_url.make_url(db_url).get_dialect() + dialect.load_provisioning() if follower_ident: diff --git a/tox.ini b/tox.ini index 6acbe66366c..d60c30ee861 100644 --- a/tox.ini +++ b/tox.ini @@ -116,6 +116,7 @@ setenv= sqlite_file: SQLITE={env:TOX_SQLITE_FILE:--db sqlite_file} sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric --dbdriver aiosqlite} + sqlite-nogreenlet: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric} py{37,38,39}-sqlite_file: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite --dbdriver pysqlcipher} @@ -125,6 +126,7 @@ setenv= postgresql: POSTGRESQL={env:TOX_POSTGRESQL:--db postgresql} postgresql: EXTRA_PG_DRIVERS={env:EXTRA_PG_DRIVERS:--dbdriver psycopg2 --dbdriver asyncpg --dbdriver pg8000 --dbdriver psycopg --dbdriver psycopg_async} + postgresql-nogreenlet: EXTRA_PG_DRIVERS={env:EXTRA_PG_DRIVERS:--dbdriver psycopg2 --dbdriver pg8000 --dbdriver psycopg} # limit driver list for memusage target memusage: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite} @@ -134,10 +136,13 @@ setenv= mysql: MYSQL={env:TOX_MYSQL:--db mysql} mysql: EXTRA_MYSQL_DRIVERS={env:EXTRA_MYSQL_DRIVERS:--dbdriver mysqldb --dbdriver pymysql --dbdriver asyncmy --dbdriver aiomysql --dbdriver mariadbconnector} + mysql-nogreenlet: EXTRA_MYSQL_DRIVERS={env:EXTRA_MYSQL_DRIVERS:--dbdriver mysqldb --dbdriver pymysql --dbdriver mariadbconnector} mssql: MSSQL={env:TOX_MSSQL:--db mssql} py{3,37,38,39,310,311}-mssql: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver aioodbc --dbdriver pymssql} + py{3,37,38,39,310,311}-mssql-nogreenlet: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver pymssql} py312-mssql: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver aioodbc} + py312-mssql-nogreenlet: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc} oracle,mssql,sqlite_file: IDENTS=--write-idents db_idents.txt @@ -166,7 +171,7 @@ commands= # this line is only meaningful when usedevelop=True is enabled. we use # that flag for coverage mode. nocext: sh -c "rm -f lib/sqlalchemy/*.so" - + nogreenlet: pip uninstall -y greenlet {env:BASECOMMAND} {env:WORKERS} {env:SQLITE:} {env:EXTRA_SQLITE_DRIVERS:} {env:POSTGRESQL:} {env:EXTRA_PG_DRIVERS:} {env:MYSQL:} {env:EXTRA_MYSQL_DRIVERS:} {env:ORACLE:} {env:EXTRA_ORACLE_DRIVERS:} {env:MSSQL:} {env:EXTRA_MSSQL_DRIVERS:} {env:IDENTS:} {env:PYTEST_EXCLUDES:} {env:COVERAGE:} {posargs} oracle,mssql,sqlite_file: python reap_dbs.py db_idents.txt From 9528b5977d97993df8c8fe47121ddb8a98dee95a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 11 Dec 2023 10:55:25 -0500 Subject: [PATCH 037/544] restore ValueError when greenlet not installed continuing for #10747, add a test asserting we dont get an endless loop and get a clean ValueError instead when greenlet not installed and async functions are used. 
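[Editor's sketch, not text from the original commit message: assuming SQLAlchemy is importable in an environment where greenlet is not installed, the new test below asserts behavior along these lines; the import path shown is the utility module where the greenlet fallback lives.]

    from sqlalchemy.util.concurrency import await_only


    async def async_fn():
        pass


    try:
        # With the fix, this fails cleanly rather than looping when the
        # greenlet library is absent.
        await_only(async_fn())
    except ValueError as err:
        print(err)
        # the greenlet library is required to use this function.
        # No module named 'greenlet'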
Fixes: #10747 Change-Id: I54dffe8577025e2ef3a59f5ca9ab7f4362d4d91f (cherry picked from commit 35a0854cae1e23271963e7781c65495e9c84f872) --- lib/sqlalchemy/testing/requirements.py | 12 ++++++++++++ test/base/test_concurrency_py3k.py | 15 +++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index 5d1f3fb1663..4dd5176a3ee 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -1609,6 +1609,18 @@ def async_dialect(self): def asyncio(self): return self.greenlet + @property + def no_greenlet(self): + def go(config): + try: + import greenlet # noqa: F401 + except ImportError: + return True + else: + return False + + return exclusions.only_if(go) + @property def greenlet(self): def go(config): diff --git a/test/base/test_concurrency_py3k.py b/test/base/test_concurrency_py3k.py index b4fb34d0259..63a9f850800 100644 --- a/test/base/test_concurrency_py3k.py +++ b/test/base/test_concurrency_py3k.py @@ -264,3 +264,18 @@ def prime(): t.join() is_true(run[0]) + + +class GracefulNoGreenletTest(fixtures.TestBase): + __requires__ = ("no_greenlet",) + + def test_await_only_graceful(self): + async def async_fn(): + pass + + with expect_raises_message( + ValueError, + "the greenlet library is required to use this " + "function. No module named 'greenlet'", + ): + await_only(async_fn()) From 3164696020dc0e96c98fefeeb3f3445eeddc9902 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 12 Dec 2023 14:57:38 -0500 Subject: [PATCH 038/544] copy stack related elements to str compiler Fixed issue in stringify for SQL elements, where a specific dialect is not passed, where a dialect-specific element such as the PostgreSQL "on conflict do update" construct is encountered and then fails to provide for a stringify dialect with the appropriate state to render the construct, leading to internal errors. Fixed issue where stringifying or compiling a :class:`.CTE` that was against a DML construct such as an :func:`_sql.insert` construct would fail to stringify, due to a mis-detection that the statement overall is an INSERT, leading to internal errors. Fixes: #10753 Change-Id: I783eca3fc7bbc1794fedd325d58181dbcc7e0b75 (cherry picked from commit 0248efb761bec4bdcea76bc6bbe3c09934f6b527) --- doc/build/changelog/unreleased_20/10753.rst | 17 ++++++ lib/sqlalchemy/sql/compiler.py | 59 ++++++++++++++++----- test/sql/test_compiler.py | 47 ++++++++++++++++ test/sql/test_cte.py | 30 +++++++++++ 4 files changed, 141 insertions(+), 12 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10753.rst diff --git a/doc/build/changelog/unreleased_20/10753.rst b/doc/build/changelog/unreleased_20/10753.rst new file mode 100644 index 00000000000..5b714ed1973 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10753.rst @@ -0,0 +1,17 @@ +.. change:: + :tags: bug, sql + :tickets: 10753 + + Fixed issue in stringify for SQL elements, where a specific dialect is not + passed, where a dialect-specific element such as the PostgreSQL "on + conflict do update" construct is encountered and then fails to provide for + a stringify dialect with the appropriate state to render the construct, + leading to internal errors. + +.. change:: + :tags: bug, sql + + Fixed issue where stringifying or compiling a :class:`.CTE` that was + against a DML construct such as an :func:`_sql.insert` construct would fail + to stringify, due to a mis-detection that the statement overall is an + INSERT, leading to internal errors. 
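For context, a small illustrative sketch of the statements affected (table and column names mirror the new tests added further below; the snippet itself is not part of the patch). Prior to this change, stringifying these without an explicit dialect could raise internal errors::

    from sqlalchemy import column, select, table
    from sqlalchemy.dialects.postgresql import insert

    my_table = table("my_table", column("id"), column("data"))

    insert_stmt = insert(my_table).values(
        id="some_existing_id", data="inserted value"
    )
    do_update_stmt = insert_stmt.on_conflict_do_update(
        index_elements=["id"], set_=dict(data="updated value")
    )

    # a dialect-specific construct reached through plain str() compilation
    print(select(do_update_stmt.cte()))

    # a CTE built directly against a DML statement
    t1 = table("table_1", column("id"), column("val"))
    print(t1.insert().returning(t1.c.id, t1.c.val).cte())
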
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index cb6899c5e9a..b4b8bcfd26e 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -1343,6 +1343,7 @@ def __init__( column_keys: Optional[Sequence[str]] = None, for_executemany: bool = False, linting: Linting = NO_LINTING, + _supporting_against: Optional[SQLCompiler] = None, **kwargs: Any, ): """Construct a new :class:`.SQLCompiler` object. @@ -1445,6 +1446,24 @@ def __init__( self.bindtemplate = BIND_TEMPLATES[dialect.paramstyle] + if _supporting_against: + self.__dict__.update( + { + k: v + for k, v in _supporting_against.__dict__.items() + if k + not in { + "state", + "dialect", + "preparer", + "positional", + "_numeric_binds", + "compilation_bindtemplate", + "bindtemplate", + } + } + ) + if self.state is CompilerState.STRING_APPLIED: if self.positional: if self._numeric_binds: @@ -5595,13 +5614,19 @@ def apply_placeholders(keys, formatted): ) batchnum += 1 - def visit_insert(self, insert_stmt, visited_bindparam=None, **kw): + def visit_insert( + self, insert_stmt, visited_bindparam=None, visiting_cte=None, **kw + ): compile_state = insert_stmt._compile_state_factory( insert_stmt, self, **kw ) insert_stmt = compile_state.statement - toplevel = not self.stack + if visiting_cte is not None: + kw["visiting_cte"] = visiting_cte + toplevel = False + else: + toplevel = not self.stack if toplevel: self.isinsert = True @@ -5629,14 +5654,12 @@ def visit_insert(self, insert_stmt, visited_bindparam=None, **kw): # params inside them. After multiple attempts to figure this out, # this very simplistic "count after" works and is # likely the least amount of callcounts, though looks clumsy - if self.positional: + if self.positional and visiting_cte is None: # if we are inside a CTE, don't count parameters # here since they wont be for insertmanyvalues. keep # visited_bindparam at None so no counting happens. 
# see #9173 - has_visiting_cte = "visiting_cte" in kw - if not has_visiting_cte: - visited_bindparam = [] + visited_bindparam = [] crud_params_struct = crud._get_crud_params( self, @@ -5990,13 +6013,18 @@ def update_from_clause( "criteria within UPDATE" ) - def visit_update(self, update_stmt, **kw): + def visit_update(self, update_stmt, visiting_cte=None, **kw): compile_state = update_stmt._compile_state_factory( update_stmt, self, **kw ) update_stmt = compile_state.statement - toplevel = not self.stack + if visiting_cte is not None: + kw["visiting_cte"] = visiting_cte + toplevel = False + else: + toplevel = not self.stack + if toplevel: self.isupdate = True if not self.dml_compile_state: @@ -6147,13 +6175,18 @@ def delete_table_clause(self, delete_stmt, from_table, extra_froms, **kw): self, asfrom=True, iscrud=True, **kw ) - def visit_delete(self, delete_stmt, **kw): + def visit_delete(self, delete_stmt, visiting_cte=None, **kw): compile_state = delete_stmt._compile_state_factory( delete_stmt, self, **kw ) delete_stmt = compile_state.statement - toplevel = not self.stack + if visiting_cte is not None: + kw["visiting_cte"] = visiting_cte + toplevel = False + else: + toplevel = not self.stack + if toplevel: self.isdelete = True if not self.dml_compile_state: @@ -6312,9 +6345,11 @@ def visit_unsupported_compilation(self, element, err, **kw): url = util.preloaded.engine_url dialect = url.URL.create(element.stringify_dialect).get_dialect()() - compiler = dialect.statement_compiler(dialect, None) + compiler = dialect.statement_compiler( + dialect, None, _supporting_against=self + ) if not isinstance(compiler, StrSQLCompiler): - return compiler.process(element) + return compiler.process(element, **kw) return super().visit_unsupported_compilation(element, err) diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index 3bd1bacc6d8..d6bc098964c 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -5974,6 +5974,53 @@ def visit_widget(self, element, **kw): ): eq_(str(Grouping(Widget())), "(widget)") + def test_dialect_sub_compile_has_stack(self): + """test #10753""" + + class Widget(ColumnElement): + __visit_name__ = "widget" + stringify_dialect = "sqlite" + + def visit_widget(self, element, **kw): + assert self.stack + return "widget" + + with mock.patch( + "sqlalchemy.dialects.sqlite.base.SQLiteCompiler.visit_widget", + visit_widget, + create=True, + ): + eq_(str(select(Widget())), "SELECT widget AS anon_1") + + def test_dialect_sub_compile_has_stack_pg_specific(self): + """test #10753""" + my_table = table( + "my_table", column("id"), column("data"), column("user_email") + ) + + from sqlalchemy.dialects.postgresql import insert + + insert_stmt = insert(my_table).values( + id="some_existing_id", data="inserted value" + ) + + do_update_stmt = insert_stmt.on_conflict_do_update( + index_elements=["id"], set_=dict(data="updated value") + ) + + # note! two different bound parameter formats. It's weird yes, + # but this is what I want. They are stringifying without using the + # correct dialect. We could use the PG compiler at the point of + # the insert() but that still would not accommodate params in other + # parts of the statement. 
+ eq_ignore_whitespace( + str(select(do_update_stmt.cte())), + "WITH anon_1 AS (INSERT INTO my_table (id, data) " + "VALUES (:param_1, :param_2) " + "ON CONFLICT (id) " + "DO UPDATE SET data = %(param_3)s) SELECT FROM anon_1", + ) + def test_dialect_sub_compile_w_binds(self): """test sub-compile into a new compiler where state != CompilerState.COMPILING, but we have to render a bindparam diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py index d044212aa60..23ac87a2148 100644 --- a/test/sql/test_cte.py +++ b/test/sql/test_cte.py @@ -1383,6 +1383,36 @@ def test_insert_w_cte_in_scalar_subquery(self, dialect): else: assert False + @testing.variation("operation", ["insert", "update", "delete"]) + def test_stringify_standalone_dml_cte(self, operation): + """test issue discovered as part of #10753""" + + t1 = table("table_1", column("id"), column("val")) + + if operation.insert: + stmt = t1.insert() + expected = ( + "INSERT INTO table_1 (id, val) VALUES (:id, :val) " + "RETURNING table_1.id, table_1.val" + ) + elif operation.update: + stmt = t1.update() + expected = ( + "UPDATE table_1 SET id=:id, val=:val " + "RETURNING table_1.id, table_1.val" + ) + elif operation.delete: + stmt = t1.delete() + expected = "DELETE FROM table_1 RETURNING table_1.id, table_1.val" + else: + operation.fail() + + stmt = stmt.returning(t1.c.id, t1.c.val) + + cte = stmt.cte() + + self.assert_compile(cte, expected) + @testing.combinations( ("default_enhanced",), ("postgresql",), From e4467a2b869506f3e826bef483211950a44b1071 Mon Sep 17 00:00:00 2001 From: Iuri de Silvio Date: Sun, 17 Dec 2023 18:39:28 +0100 Subject: [PATCH 039/544] Fix engine connect example in migration guide (#10757) You can't fetchall after connection was closed. (cherry picked from commit ca9adf3028095ff28bdef823d6f6cefc12815b3c) --- doc/build/changelog/migration_20.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/changelog/migration_20.rst b/doc/build/changelog/migration_20.rst index fe86338ee21..794d1d80fb1 100644 --- a/doc/build/changelog/migration_20.rst +++ b/doc/build/changelog/migration_20.rst @@ -296,7 +296,7 @@ as a bonus our program is much clearer:: # select() now accepts column / table expressions positionally result = connection.execute(select(foo.c.id)) - print(result.fetchall()) + print(result.fetchall()) The goal of "2.0 deprecations mode" is that a program which runs with no :class:`_exc.RemovedIn20Warning` warnings with "2.0 deprecations mode" turned From df6f3a232393a647052bf6b52d73e4529f7d69e9 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 19 Dec 2023 09:00:03 -0500 Subject: [PATCH 040/544] ensure Bundle / DML RETURNING has test support, full impl Ensured the use case of :class:`.Bundle` objects used in the ``returning()`` portion of ORM-enabled INSERT, UPDATE and DELETE statements is tested and works fully. This was never explicitly implemented or tested previously and did not work correctly in the 1.4 series; in the 2.0 series, ORM UPDATE/DELETE with WHERE criteria was missing an implementation method preventing :class:`.Bundle` objects from working. 
Fixes: #10776 Change-Id: I32298e65ac590a12b47dd6ba00b7d56038b8a450 (cherry picked from commit 6e089c3dbf7e7348da84dfc62cc1c6100a257fd4) --- doc/build/changelog/unreleased_20/10776.rst | 10 +++ lib/sqlalchemy/orm/context.py | 7 ++ test/orm/dml/test_bulk_statements.py | 91 +++++++++++++++++++++ test/orm/dml/test_update_delete_where.py | 40 +++++++++ 4 files changed, 148 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/10776.rst diff --git a/doc/build/changelog/unreleased_20/10776.rst b/doc/build/changelog/unreleased_20/10776.rst new file mode 100644 index 00000000000..4a6889fdb7a --- /dev/null +++ b/doc/build/changelog/unreleased_20/10776.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, orm + :tickets: 10776 + + Ensured the use case of :class:`.Bundle` objects used in the + ``returning()`` portion of ORM-enabled INSERT, UPDATE and DELETE statements + is tested and works fully. This was never explicitly implemented or + tested previously and did not work correctly in the 1.4 series; in the 2.0 + series, ORM UPDATE/DELETE with WHERE criteria was missing an implementation + method preventing :class:`.Bundle` objects from working. diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index 2f5e4ce8b7b..3e73d80e716 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -2865,6 +2865,13 @@ def setup_compile_state(self, compile_state): for ent in self._entities: ent.setup_compile_state(compile_state) + def setup_dml_returning_compile_state( + self, + compile_state: ORMCompileState, + adapter: DMLReturningColFilter, + ) -> None: + return self.setup_compile_state(compile_state) + def row_processor(self, context, result): procs, labels, extra = zip( *[ent.row_processor(context, result) for ent in self._entities] diff --git a/test/orm/dml/test_bulk_statements.py b/test/orm/dml/test_bulk_statements.py index 7af47de8186..1e5c17c9de4 100644 --- a/test/orm/dml/test_bulk_statements.py +++ b/test/orm/dml/test_bulk_statements.py @@ -23,6 +23,7 @@ from sqlalchemy import testing from sqlalchemy import update from sqlalchemy.orm import aliased +from sqlalchemy.orm import Bundle from sqlalchemy.orm import column_property from sqlalchemy.orm import load_only from sqlalchemy.orm import Mapped @@ -381,6 +382,68 @@ class User(ComparableEntity, decl_base): eq_(result.all(), [User(id=1, name="John", age=30)]) + @testing.requires.insert_returning + @testing.variation( + "insert_type", + ["bulk", ("values", testing.requires.multivalues_inserts), "single"], + ) + def test_insert_returning_bundle(self, decl_base, insert_type): + """test #10776""" + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(Identity(), primary_key=True) + + name: Mapped[str] = mapped_column() + x: Mapped[int] + y: Mapped[int] + + decl_base.metadata.create_all(testing.db) + insert_stmt = insert(User).returning( + User.name, Bundle("mybundle", User.id, User.x, User.y) + ) + + s = fixture_session() + + if insert_type.bulk: + result = s.execute( + insert_stmt, + [ + {"name": "some name 1", "x": 1, "y": 2}, + {"name": "some name 2", "x": 2, "y": 3}, + {"name": "some name 3", "x": 3, "y": 4}, + ], + ) + elif insert_type.values: + result = s.execute( + insert_stmt.values( + [ + {"name": "some name 1", "x": 1, "y": 2}, + {"name": "some name 2", "x": 2, "y": 3}, + {"name": "some name 3", "x": 3, "y": 4}, + ], + ) + ) + elif insert_type.single: + result = s.execute( + insert_stmt, {"name": "some name 1", "x": 1, "y": 2} + ) + else: + insert_type.fail() + + if 
insert_type.single: + eq_(result.all(), [("some name 1", (1, 1, 2))]) + else: + eq_( + result.all(), + [ + ("some name 1", (1, 1, 2)), + ("some name 2", (2, 2, 3)), + ("some name 3", (3, 3, 4)), + ], + ) + @testing.variation( "use_returning", [(True, testing.requires.insert_returning), False] ) @@ -794,6 +857,34 @@ class A(decl_base): result = s.execute(stmt, data) eq_(result.all(), [(1, 5, 9), (2, 5, 9), (3, 5, 9)]) + @testing.requires.update_returning + def test_bulk_update_returning_bundle(self, decl_base): + class A(decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column( + primary_key=True, autoincrement=False + ) + + x: Mapped[int] + y: Mapped[int] + + decl_base.metadata.create_all(testing.db) + + s = fixture_session() + + s.add_all( + [A(id=1, x=1, y=1), A(id=2, x=2, y=2), A(id=3, x=3, y=3)], + ) + s.commit() + + stmt = update(A).returning(Bundle("mybundle", A.id, A.x), A.y) + + data = {"x": 5, "y": 9} + + result = s.execute(stmt, data) + eq_(result.all(), [((1, 5), 9), ((2, 5), 9), ((3, 5), 9)]) + def test_bulk_update_w_where_one(self, decl_base): """test use case in #9595""" diff --git a/test/orm/dml/test_update_delete_where.py b/test/orm/dml/test_update_delete_where.py index 03468972d56..cbf27d018b7 100644 --- a/test/orm/dml/test_update_delete_where.py +++ b/test/orm/dml/test_update_delete_where.py @@ -21,6 +21,7 @@ from sqlalchemy import values from sqlalchemy.orm import aliased from sqlalchemy.orm import backref +from sqlalchemy.orm import Bundle from sqlalchemy.orm import exc as orm_exc from sqlalchemy.orm import immediateload from sqlalchemy.orm import joinedload @@ -1351,6 +1352,45 @@ def test_fetch_w_explicit_returning(self, crud_type): # to point to the class, so you can test eq with sets eq_(set(result.all()), expected) + @testing.requires.update_returning + @testing.variation("crud_type", ["update", "delete"]) + @testing.combinations( + "auto", + "evaluate", + "fetch", + False, + argnames="synchronize_session", + ) + def test_crud_returning_bundle(self, crud_type, synchronize_session): + """test #10776""" + User = self.classes.User + + sess = fixture_session() + + if crud_type.update: + stmt = ( + update(User) + .filter(User.age > 29) + .values({"age": User.age - 10}) + .execution_options(synchronize_session=synchronize_session) + .returning(Bundle("mybundle", User.id, User.age), User.name) + ) + expected = {((4, 27), "jane"), ((2, 37), "jack")} + elif crud_type.delete: + stmt = ( + delete(User) + .filter(User.age > 29) + .execution_options(synchronize_session=synchronize_session) + .returning(Bundle("mybundle", User.id, User.age), User.name) + ) + expected = {((2, 47), "jack"), ((4, 37), "jane")} + else: + crud_type.fail() + + result = sess.execute(stmt) + + eq_(set(result.all()), expected) + @testing.requires.delete_returning @testing.requires.returning_star def test_delete_returning_star(self): From 1fad31cb948d5e1b7421e39d84bc18179875fd26 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 20 Dec 2023 10:56:18 -0500 Subject: [PATCH 041/544] use a standard function to check for iterable collections Fixed 2.0 regression in :class:`.MutableList` where a routine that detects sequences would not correctly filter out string or bytes instances, making it impossible to assign a string value to a specific index (while non-sequence values would work fine). 
Fixes: #10784 Change-Id: I829cd2a1ef555184de8e6a752f39df65f69f6943 (cherry picked from commit 99da5ebab36da61b7bfa0b868f50974d6a4c4655) --- doc/build/changelog/unreleased_20/10784.rst | 8 +++++ lib/sqlalchemy/ext/mutable.py | 5 +-- lib/sqlalchemy/sql/coercions.py | 4 +-- lib/sqlalchemy/util/__init__.py | 1 + lib/sqlalchemy/util/_collections.py | 6 ++-- lib/sqlalchemy/util/typing.py | 7 +++++ test/base/test_utils.py | 26 ++++++++++++++++ test/ext/test_mutable.py | 34 +++++++++++++++++++-- 8 files changed, 80 insertions(+), 11 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10784.rst diff --git a/doc/build/changelog/unreleased_20/10784.rst b/doc/build/changelog/unreleased_20/10784.rst new file mode 100644 index 00000000000..a67d5b6392b --- /dev/null +++ b/doc/build/changelog/unreleased_20/10784.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, orm + :tickets: 10784 + + Fixed 2.0 regression in :class:`.MutableList` where a routine that detects + sequences would not correctly filter out string or bytes instances, making + it impossible to assign a string value to a specific index (while + non-sequence values would work fine). diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index 0f82518aaa1..ff4dea08661 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -378,6 +378,7 @@ def __setstate__(self, state): from .. import event from .. import inspect from .. import types +from .. import util from ..orm import Mapper from ..orm._typing import _ExternalEntityType from ..orm._typing import _O @@ -909,10 +910,10 @@ def __setstate__(self, state: Iterable[_T]) -> None: self[:] = state def is_scalar(self, value: _T | Iterable[_T]) -> TypeGuard[_T]: - return not isinstance(value, Iterable) + return not util.is_non_string_iterable(value) def is_iterable(self, value: _T | Iterable[_T]) -> TypeGuard[Iterable[_T]]: - return isinstance(value, Iterable) + return util.is_non_string_iterable(value) def __setitem__( self, index: SupportsIndex | slice, value: _T | Iterable[_T] diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index c4d340713ba..3926e557a94 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -851,9 +851,7 @@ def _warn_for_implicit_coercion(self, elem): ) def _literal_coercion(self, element, expr, operator, **kw): - if isinstance(element, collections_abc.Iterable) and not isinstance( - element, str - ): + if util.is_non_string_iterable(element): non_literal_expressions: Dict[ Optional[operators.ColumnOperators], operators.ColumnOperators, diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py index c804f968878..b60dcf2d948 100644 --- a/lib/sqlalchemy/util/__init__.py +++ b/lib/sqlalchemy/util/__init__.py @@ -157,3 +157,4 @@ from .langhelpers import warn_limited as warn_limited from .langhelpers import wrap_callable as wrap_callable from .preloaded import preload_module as preload_module +from .typing import is_non_string_iterable as is_non_string_iterable diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index a0b1977ee50..1e602165c80 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -9,7 +9,6 @@ """Collection classes and helpers.""" from __future__ import annotations -import collections.abc as collections_abc import operator import threading import types @@ -36,6 +35,7 @@ import weakref from ._has_cy import HAS_CYEXTENSION +from .typing import is_non_string_iterable 
from .typing import Literal from .typing import Protocol @@ -419,9 +419,7 @@ def coerce_generator_arg(arg: Any) -> List[Any]: def to_list(x: Any, default: Optional[List[Any]] = None) -> List[Any]: if x is None: return default # type: ignore - if not isinstance(x, collections_abc.Iterable) or isinstance( - x, (str, bytes) - ): + if not is_non_string_iterable(x): return [x] elif isinstance(x, list): return x diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index aad5709451d..faf71c89a29 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -9,6 +9,7 @@ from __future__ import annotations import builtins +import collections.abc as collections_abc import re import sys import typing @@ -296,6 +297,12 @@ def is_pep593(type_: Optional[_AnnotationScanType]) -> bool: return type_ is not None and typing_get_origin(type_) is Annotated +def is_non_string_iterable(obj: Any) -> TypeGuard[Iterable[Any]]: + return isinstance(obj, collections_abc.Iterable) and not isinstance( + obj, (str, bytes) + ) + + def is_literal(type_: _AnnotationScanType) -> bool: return get_origin(type_) is Literal diff --git a/test/base/test_utils.py b/test/base/test_utils.py index 7dcf0968a7c..de8712c8523 100644 --- a/test/base/test_utils.py +++ b/test/base/test_utils.py @@ -1,4 +1,5 @@ import copy +from decimal import Decimal import inspect from pathlib import Path import pickle @@ -31,6 +32,7 @@ from sqlalchemy.util import compat from sqlalchemy.util import FastIntFlag from sqlalchemy.util import get_callable_argspec +from sqlalchemy.util import is_non_string_iterable from sqlalchemy.util import langhelpers from sqlalchemy.util import preloaded from sqlalchemy.util import WeakSequence @@ -1550,6 +1552,30 @@ def __ne__(self, other): return True +class MiscTest(fixtures.TestBase): + @testing.combinations( + (["one", "two", "three"], True), + (("one", "two", "three"), True), + ((), True), + ("four", False), + (252, False), + (Decimal("252"), False), + (b"four", False), + (iter("four"), True), + (b"", False), + ("", False), + (None, False), + ({"dict": "value"}, True), + ({}, True), + ({"set", "two"}, True), + (set(), True), + (util.immutabledict(), True), + (util.immutabledict({"key": "value"}), True), + ) + def test_non_string_iterable_check(self, fixture, expected): + is_(is_non_string_iterable(fixture), expected) + + class IdentitySetTest(fixtures.TestBase): obj_type = object diff --git a/test/ext/test_mutable.py b/test/ext/test_mutable.py index dffdac8d842..42378477786 100644 --- a/test/ext/test_mutable.py +++ b/test/ext/test_mutable.py @@ -542,7 +542,7 @@ def test_coerce_raise(self): data={1, 2, 3}, ) - def test_in_place_mutation(self): + def test_in_place_mutation_int(self): sess = fixture_session() f1 = Foo(data=[1, 2]) @@ -554,7 +554,19 @@ def test_in_place_mutation(self): eq_(f1.data, [3, 2]) - def test_in_place_slice_mutation(self): + def test_in_place_mutation_str(self): + sess = fixture_session() + + f1 = Foo(data=["one", "two"]) + sess.add(f1) + sess.commit() + + f1.data[0] = "three" + sess.commit() + + eq_(f1.data, ["three", "two"]) + + def test_in_place_slice_mutation_int(self): sess = fixture_session() f1 = Foo(data=[1, 2, 3, 4]) @@ -566,6 +578,18 @@ def test_in_place_slice_mutation(self): eq_(f1.data, [1, 5, 6, 4]) + def test_in_place_slice_mutation_str(self): + sess = fixture_session() + + f1 = Foo(data=["one", "two", "three", "four"]) + sess.add(f1) + sess.commit() + + f1.data[1:3] = "five", "six" + sess.commit() + + eq_(f1.data, ["one", "five", "six", 
"four"]) + def test_del_slice(self): sess = fixture_session() @@ -1240,6 +1264,12 @@ class Foo(Mixin, Base): __tablename__ = "foo" id = Column(Integer, primary_key=True) + def test_in_place_mutation_str(self): + """this test is hardcoded to integer, skip strings""" + + def test_in_place_slice_mutation_str(self): + """this test is hardcoded to integer, skip strings""" + class MutableListWithScalarPickleTest( _MutableListTestBase, fixtures.MappedTest From 9ad5ff80cbcb38d35390d733684041bd888fa9bf Mon Sep 17 00:00:00 2001 From: Yilei Yang Date: Thu, 21 Dec 2023 02:47:03 -0500 Subject: [PATCH 042/544] Use a copy of `self.contents` in this list comprehension. Improved a fix first implemented for :ticket:`3208` released in version 0.9.8, where the registry of classes used internally by declarative could be subject to a race condition in the case where individual mapped classes are being garbage collected at the same time while new mapped classes are being constructed, as can happen in some test suite configurations or dynamic class creation environments. In addition to the weakref check already added, the list of items being iterated is also copied first to avoid "list changed while iterating" errors. Pull request courtesy Yilei Yang. Fixes: #10782 Closes: #10783 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10783 Pull-request-sha: 354e97b640430120d0c193a4efe487f293d4768b Change-Id: I04ccc92472bf1004dad0fb785e16b180f58f101d (cherry picked from commit 0fe5d3ca51884b85b4059ed05b53f02172325e70) --- doc/build/changelog/unreleased_14/10782.rst | 15 +++++++++++++++ lib/sqlalchemy/orm/clsregistry.py | 4 ++-- 2 files changed, 17 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_14/10782.rst diff --git a/doc/build/changelog/unreleased_14/10782.rst b/doc/build/changelog/unreleased_14/10782.rst new file mode 100644 index 00000000000..d7b219a3652 --- /dev/null +++ b/doc/build/changelog/unreleased_14/10782.rst @@ -0,0 +1,15 @@ +.. change:: + :tags: bug, orm + :tickets: 10782 + :versions: 2.0.24, 1.4.51 + + Improved a fix first implemented for :ticket:`3208` released in version + 0.9.8, where the registry of classes used internally by declarative could + be subject to a race condition in the case where individual mapped classes + are being garbage collected at the same time while new mapped classes are + being constructed, as can happen in some test suite configurations or + dynamic class creation environments. In addition to the weakref check + already added, the list of items being iterated is also copied first to + avoid "list changed while iterating" errors. Pull request courtesy Yilei + Yang. 
+ diff --git a/lib/sqlalchemy/orm/clsregistry.py b/lib/sqlalchemy/orm/clsregistry.py index 4f4dab895e4..bb062c5c981 100644 --- a/lib/sqlalchemy/orm/clsregistry.py +++ b/lib/sqlalchemy/orm/clsregistry.py @@ -239,10 +239,10 @@ def _remove_item(self, ref: weakref.ref[Type[Any]]) -> None: def add_item(self, item: Type[Any]) -> None: # protect against class registration race condition against # asynchronous garbage collection calling _remove_item, - # [ticket:3208] + # [ticket:3208] and [ticket:10782] modules = { cls.__module__ - for cls in [ref() for ref in self.contents] + for cls in [ref() for ref in list(self.contents)] if cls is not None } if item.__module__ in modules: From 56f7b5391a34eb013fee7150a72062756d2da11a Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 5 Dec 2023 23:18:57 +0100 Subject: [PATCH 043/544] Deprecate async_fallback mode Deprecate the async_fallback mode and await_fallback function. Additionally, this commit modifies the use of athrow to no longer use the "util" compat function which is removed; this has since been determined that it's not needed. Change-Id: I37e37400b6954f5ac7c957790932838862930453 --- .../unreleased_20/async_fallback.rst | 7 +++++++ lib/sqlalchemy/dialects/mysql/base.py | 2 +- lib/sqlalchemy/dialects/postgresql/asyncpg.py | 11 ---------- lib/sqlalchemy/engine/create.py | 8 ++++++++ lib/sqlalchemy/ext/asyncio/base.py | 2 +- lib/sqlalchemy/util/__init__.py | 1 - lib/sqlalchemy/util/_concurrency_py3k.py | 5 +++++ lib/sqlalchemy/util/compat.py | 20 ------------------- test/engine/test_deprecations.py | 18 +++++++++++++++++ 9 files changed, 40 insertions(+), 34 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/async_fallback.rst diff --git a/doc/build/changelog/unreleased_20/async_fallback.rst b/doc/build/changelog/unreleased_20/async_fallback.rst new file mode 100644 index 00000000000..a0eccb5580b --- /dev/null +++ b/doc/build/changelog/unreleased_20/async_fallback.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: change, asyncio + + The ``async_fallback`` dialect argument is now deprecated, and will be + removed in SQLAlchemy 2.1. This flag has not been used for SQLAlchemy's + test suite for some time. asyncio dialects can still run in a synchronous + style by running code within a greenlet using :func:`_util.greenlet_spawn`. diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 749d42ea120..c51b3eefca5 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1571,7 +1571,7 @@ def visit_false(self, element, **kw): def get_select_precolumns(self, select, **kw): """Add special MySQL keywords in place of DISTINCT. - .. deprecated:: 1.4 This usage is deprecated. + .. deprecated:: 1.4 This usage is deprecated. :meth:`_expression.Select.prefix_with` should be used for special keywords at the start of a SELECT. diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index acfd2e5afbb..00dbe6d9594 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -25,17 +25,6 @@ from sqlalchemy.ext.asyncio import create_async_engine engine = create_async_engine("postgresql+asyncpg://user:pass@hostname/dbname") -The dialect can also be run as a "synchronous" dialect within the -:func:`_sa.create_engine` function, which will pass "await" calls into -an ad-hoc event loop. This mode of operation is of **limited use** -and is for special testing scenarios only. 
The mode can be enabled by -adding the SQLAlchemy-specific flag ``async_fallback`` to the URL -in conjunction with :func:`_sa.create_engine`:: - - # for testing purposes only; do not use in production! - engine = create_engine("postgresql+asyncpg://user:pass@hostname/dbname?async_fallback=true") - - .. versionadded:: 1.4 .. note:: diff --git a/lib/sqlalchemy/engine/create.py b/lib/sqlalchemy/engine/create.py index 684550e558c..86e801f8d52 100644 --- a/lib/sqlalchemy/engine/create.py +++ b/lib/sqlalchemy/engine/create.py @@ -616,6 +616,14 @@ def pop_kwarg(key: str, default: Optional[Any] = None) -> Any: # assemble connection arguments (cargs_tup, cparams) = dialect.create_connect_args(u) cparams.update(pop_kwarg("connect_args", {})) + + if "async_fallback" in cparams and util.asbool(cparams["async_fallback"]): + util.warn_deprecated( + "The async_fallback dialect argument is deprecated and will be " + "removed in SQLAlchemy 2.1.", + "2.0", + ) + cargs = list(cargs_tup) # allow mutability # look for existing pool or create diff --git a/lib/sqlalchemy/ext/asyncio/base.py b/lib/sqlalchemy/ext/asyncio/base.py index 251f5212542..69d9cce55c8 100644 --- a/lib/sqlalchemy/ext/asyncio/base.py +++ b/lib/sqlalchemy/ext/asyncio/base.py @@ -182,7 +182,7 @@ async def __aexit__( # tell if we get the same exception back value = typ() try: - await util.athrow(self.gen, typ, value, traceback) + await self.gen.athrow(value) except StopAsyncIteration as exc: # Suppress StopIteration *unless* it's the same exception that # was passed to throw(). This prevents a StopIteration diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py index c804f968878..6f409c9e293 100644 --- a/lib/sqlalchemy/util/__init__.py +++ b/lib/sqlalchemy/util/__init__.py @@ -49,7 +49,6 @@ from ._collections import WeakSequence as WeakSequence from .compat import anext_ as anext_ from .compat import arm as arm -from .compat import athrow as athrow from .compat import b as b from .compat import b64decode as b64decode from .compat import b64encode as b64encode diff --git a/lib/sqlalchemy/util/_concurrency_py3k.py b/lib/sqlalchemy/util/_concurrency_py3k.py index 83201dd95c7..47da59779fb 100644 --- a/lib/sqlalchemy/util/_concurrency_py3k.py +++ b/lib/sqlalchemy/util/_concurrency_py3k.py @@ -138,6 +138,11 @@ def await_fallback(awaitable: Awaitable[_T]) -> _T: :param awaitable: The coroutine to call. + .. deprecated:: 2.0.24 The ``await_fallback()`` function will be removed + in SQLAlchemy 2.1. Use :func:`_util.await_only` instead, running the + function / program / etc. within a top-level greenlet that is set up + using :func:`_util.greenlet_spawn`. 
+ """ # this is called in the context greenlet while running fn diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py index a4464324cd5..73cdafea5db 100644 --- a/lib/sqlalchemy/util/compat.py +++ b/lib/sqlalchemy/util/compat.py @@ -19,8 +19,6 @@ import sys import typing from typing import Any -from typing import AsyncGenerator -from typing import Awaitable from typing import Callable from typing import Dict from typing import Iterable @@ -102,24 +100,6 @@ def inspect_getfullargspec(func: Callable[..., Any]) -> FullArgSpec: ) -if py312: - # we are 95% certain this form of athrow works in former Python - # versions, however we are unable to get confirmation; - # see https://github.com/python/cpython/issues/105269 where have - # been unable to get a straight answer so far - def athrow( # noqa - gen: AsyncGenerator[_T_co, Any], typ: Any, value: Any, traceback: Any - ) -> Awaitable[_T_co]: - return gen.athrow(value) - -else: - - def athrow( # noqa - gen: AsyncGenerator[_T_co, Any], typ: Any, value: Any, traceback: Any - ) -> Awaitable[_T_co]: - return gen.athrow(typ, value, traceback) - - if py39: # python stubs don't have a public type for this. not worth # making a protocol diff --git a/test/engine/test_deprecations.py b/test/engine/test_deprecations.py index f6fa21f29dd..9041a6af102 100644 --- a/test/engine/test_deprecations.py +++ b/test/engine/test_deprecations.py @@ -500,3 +500,21 @@ def test_implicit_returning_engine_parameter(self, implicit_returning): ) # parameter has no effect + + +class AsyncFallbackDeprecationTest(fixtures.TestBase): + __requires__ = ("greenlet",) + + def test_async_fallback_deprecated(self): + with assertions.expect_deprecated( + "The async_fallback dialect argument is deprecated and will be " + "removed in SQLAlchemy 2.1.", + ): + create_engine( + "postgresql+asyncpg://?async_fallback=True", module=mock.Mock() + ) + + def test_async_fallback_false_is_ok(self): + create_engine( + "postgresql+asyncpg://?async_fallback=False", module=mock.Mock() + ) From 6bd8921d888ad71bed5b703c9ef05170afad5d16 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 28 Dec 2023 11:10:26 -0500 Subject: [PATCH 044/544] changelog edits Change-Id: I115807ccca74e55e96389d7bb723da3893bcc965 (cherry picked from commit 1a2748152b0f2feb527c6a04054f88d4a659a818) --- doc/build/changelog/unreleased_20/10717.rst | 2 +- doc/build/changelog/unreleased_20/10732.rst | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/build/changelog/unreleased_20/10717.rst b/doc/build/changelog/unreleased_20/10717.rst index 2cd93034554..ccdcd80bb9e 100644 --- a/doc/build/changelog/unreleased_20/10717.rst +++ b/doc/build/changelog/unreleased_20/10717.rst @@ -4,7 +4,7 @@ Adjusted the asyncpg dialect such that when the ``terminate()`` method is used to discard an invalidated connection, the dialect will first attempt - to gracefully close the conneciton using ``.close()`` with a timeout, if + to gracefully close the connection using ``.close()`` with a timeout, if the operation is proceeding within an async event loop context only. This allows the asyncpg driver to attend to finalizing a ``TimeoutError`` including being able to close a long-running query server side, which diff --git a/doc/build/changelog/unreleased_20/10732.rst b/doc/build/changelog/unreleased_20/10732.rst index 0961b05d739..fb1c22a980d 100644 --- a/doc/build/changelog/unreleased_20/10732.rst +++ b/doc/build/changelog/unreleased_20/10732.rst @@ -1,9 +1,9 @@ .. 
change:: :tags: bug, orm - :tickets: 10668 + :tickets: 10732 Modified the ``__init_subclass__()`` method used by - :class:`_orm.MappedAsDataclass`, :class:`_orm.DeclarativeBase`` and + :class:`_orm.MappedAsDataclass`, :class:`_orm.DeclarativeBase` and :class:`_orm.DeclarativeBaseNoMeta` to accept arbitrary ``**kw`` and to propagate them to the ``super()`` call, allowing greater flexibility in arranging custom superclasses and mixins which make use of From e29e68fe349bd70abb0dd23c9e25b42b83153210 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 28 Dec 2023 11:17:52 -0500 Subject: [PATCH 045/544] changelog formatting fixes Change-Id: Ie6a975b9750fbf99c553f9b4f48a0305cf912443 --- .../changelog/unreleased_20/sql_func_typing.rst | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/build/changelog/unreleased_20/sql_func_typing.rst b/doc/build/changelog/unreleased_20/sql_func_typing.rst index f4ea6f40c33..7fc310ec7c1 100644 --- a/doc/build/changelog/unreleased_20/sql_func_typing.rst +++ b/doc/build/changelog/unreleased_20/sql_func_typing.rst @@ -1,7 +1,7 @@ - .. change:: - :tags: bug, typing - :tickets: 6810 +.. change:: + :tags: bug, typing + :tickets: 6810 - Completed pep-484 typing for the ``sqlalchemy.sql.functions`` module. - :func:`_sql.select` constructs made against ``func`` elements should now - have filled-in return types. + Completed pep-484 typing for the ``sqlalchemy.sql.functions`` module. + :func:`_sql.select` constructs made against ``func`` elements should now + have filled-in return types. From aab08b711a98ebd3e731ca9c304bbf7e047e0e9a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 28 Dec 2023 11:19:39 -0500 Subject: [PATCH 046/544] - 2.0.24 --- doc/build/changelog/changelog_20.rst | 133 +++++++++++++++++- doc/build/changelog/unreleased_20/10597.rst | 10 -- doc/build/changelog/unreleased_20/10654.rst | 8 -- doc/build/changelog/unreleased_20/10662.rst | 11 -- doc/build/changelog/unreleased_20/10668.rst | 9 -- doc/build/changelog/unreleased_20/10717.rst | 11 -- doc/build/changelog/unreleased_20/10732.rst | 12 -- doc/build/changelog/unreleased_20/10747.rst | 9 -- doc/build/changelog/unreleased_20/10753.rst | 17 --- doc/build/changelog/unreleased_20/10776.rst | 10 -- doc/build/changelog/unreleased_20/10784.rst | 8 -- .../unreleased_20/async_fallback.rst | 7 - .../unreleased_20/sql_func_typing.rst | 7 - doc/build/conf.py | 4 +- 14 files changed, 134 insertions(+), 122 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/10597.rst delete mode 100644 doc/build/changelog/unreleased_20/10654.rst delete mode 100644 doc/build/changelog/unreleased_20/10662.rst delete mode 100644 doc/build/changelog/unreleased_20/10668.rst delete mode 100644 doc/build/changelog/unreleased_20/10717.rst delete mode 100644 doc/build/changelog/unreleased_20/10732.rst delete mode 100644 doc/build/changelog/unreleased_20/10747.rst delete mode 100644 doc/build/changelog/unreleased_20/10753.rst delete mode 100644 doc/build/changelog/unreleased_20/10776.rst delete mode 100644 doc/build/changelog/unreleased_20/10784.rst delete mode 100644 doc/build/changelog/unreleased_20/async_fallback.rst delete mode 100644 doc/build/changelog/unreleased_20/sql_func_typing.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index a7d7b204837..e14498e1b81 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,138 @@ .. 
changelog:: :version: 2.0.24 - :include_notes_from: unreleased_20 + :released: December 28, 2023 + + .. change:: + :tags: bug, orm + :tickets: 10597 + + Fixed issue where use of :func:`_orm.foreign` annotation on a + non-initialized :func:`_orm.mapped_column` construct would produce an + expression without a type, which was then not updated at initialization + time of the actual column, leading to issues such as relationships not + determining ``use_get`` appropriately. + + + .. change:: + :tags: bug, schema + :tickets: 10654 + + Fixed issue where error reporting for unexpected schema item when creating + objects like :class:`_schema.Table` would incorrectly handle an argument + that was itself passed as a tuple, leading to a formatting error. The + error message has been modernized to use f-strings. + + .. change:: + :tags: bug, engine + :tickets: 10662 + + Fixed URL-encoding of the username and password components of + :class:`.engine.URL` objects when converting them to string using the + :meth:`_engine.URL.render_as_string` method, by using Python standard + library ``urllib.parse.quote`` while allowing for plus signs and spaces to + remain unchanged as supported by SQLAlchemy's non-standard URL parsing, + rather than the legacy home-grown routine from many years ago. Pull request + courtesy of Xavier NUNN. + + .. change:: + :tags: bug, orm + :tickets: 10668 + + Improved the error message produced when the unit of work process sets the + value of a primary key column to NULL due to a related object with a + dependency rule on that column being deleted, to include not just the + destination object and column name but also the source column from which + the NULL value is originating. Pull request courtesy Jan Vollmer. + + .. change:: + :tags: bug, postgresql + :tickets: 10717 + + Adjusted the asyncpg dialect such that when the ``terminate()`` method is + used to discard an invalidated connection, the dialect will first attempt + to gracefully close the connection using ``.close()`` with a timeout, if + the operation is proceeding within an async event loop context only. This + allows the asyncpg driver to attend to finalizing a ``TimeoutError`` + including being able to close a long-running query server side, which + otherwise can keep running after the program has exited. + + .. change:: + :tags: bug, orm + :tickets: 10732 + + Modified the ``__init_subclass__()`` method used by + :class:`_orm.MappedAsDataclass`, :class:`_orm.DeclarativeBase` and + :class:`_orm.DeclarativeBaseNoMeta` to accept arbitrary ``**kw`` and to + propagate them to the ``super()`` call, allowing greater flexibility in + arranging custom superclasses and mixins which make use of + ``__init_subclass__()`` keyword arguments. Pull request courtesy Michael + Oliver. + + + .. change:: + :tags: bug, tests + :tickets: 10747 + + Improvements to the test suite to further harden its ability to run + when Python ``greenlet`` is not installed. There is now a tox + target that includes the token "nogreenlet" that will run the suite + with greenlet not installed (note that it still temporarily installs + greenlet as part of the tox config, however). + + .. change:: + :tags: bug, sql + :tickets: 10753 + + Fixed issue in stringify for SQL elements, where a specific dialect is not + passed, where a dialect-specific element such as the PostgreSQL "on + conflict do update" construct is encountered and then fails to provide for + a stringify dialect with the appropriate state to render the construct, + leading to internal errors. 
+ + .. change:: + :tags: bug, sql + + Fixed issue where stringifying or compiling a :class:`.CTE` that was + against a DML construct such as an :func:`_sql.insert` construct would fail + to stringify, due to a mis-detection that the statement overall is an + INSERT, leading to internal errors. + + .. change:: + :tags: bug, orm + :tickets: 10776 + + Ensured the use case of :class:`.Bundle` objects used in the + ``returning()`` portion of ORM-enabled INSERT, UPDATE and DELETE statements + is tested and works fully. This was never explicitly implemented or + tested previously and did not work correctly in the 1.4 series; in the 2.0 + series, ORM UPDATE/DELETE with WHERE criteria was missing an implementation + method preventing :class:`.Bundle` objects from working. + + .. change:: + :tags: bug, orm + :tickets: 10784 + + Fixed 2.0 regression in :class:`.MutableList` where a routine that detects + sequences would not correctly filter out string or bytes instances, making + it impossible to assign a string value to a specific index (while + non-sequence values would work fine). + + .. change:: + :tags: change, asyncio + + The ``async_fallback`` dialect argument is now deprecated, and will be + removed in SQLAlchemy 2.1. This flag has not been used for SQLAlchemy's + test suite for some time. asyncio dialects can still run in a synchronous + style by running code within a greenlet using :func:`_util.greenlet_spawn`. + + .. change:: + :tags: bug, typing + :tickets: 6810 + + Completed pep-484 typing for the ``sqlalchemy.sql.functions`` module. + :func:`_sql.select` constructs made against ``func`` elements should now + have filled-in return types. .. changelog:: :version: 2.0.23 diff --git a/doc/build/changelog/unreleased_20/10597.rst b/doc/build/changelog/unreleased_20/10597.rst deleted file mode 100644 index 97645188296..00000000000 --- a/doc/build/changelog/unreleased_20/10597.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10597 - - Fixed issue where use of :func:`_orm.foreign` annotation on a - non-initialized :func:`_orm.mapped_column` construct would produce an - expression without a type, which was then not updated at initialization - time of the actual column, leading to issues such as relationships not - determining ``use_get`` appropriately. - diff --git a/doc/build/changelog/unreleased_20/10654.rst b/doc/build/changelog/unreleased_20/10654.rst deleted file mode 100644 index bb9b25e04d0..00000000000 --- a/doc/build/changelog/unreleased_20/10654.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, schema - :tickets: 10654 - - Fixed issue where error reporting for unexpected schema item when creating - objects like :class:`_schema.Table` would incorrectly handle an argument - that was itself passed as a tuple, leading to a formatting error. The - error message has been modernized to use f-strings. diff --git a/doc/build/changelog/unreleased_20/10662.rst b/doc/build/changelog/unreleased_20/10662.rst deleted file mode 100644 index 5be613d8e23..00000000000 --- a/doc/build/changelog/unreleased_20/10662.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. 
change:: - :tags: bug, engine - :tickets: 10662 - - Fixed URL-encoding of the username and password components of - :class:`.engine.URL` objects when converting them to string using the - :meth:`_engine.URL.render_as_string` method, by using Python standard - library ``urllib.parse.quote`` while allowing for plus signs and spaces to - remain unchanged as supported by SQLAlchemy's non-standard URL parsing, - rather than the legacy home-grown routine from many years ago. Pull request - courtesy of Xavier NUNN. diff --git a/doc/build/changelog/unreleased_20/10668.rst b/doc/build/changelog/unreleased_20/10668.rst deleted file mode 100644 index 560aac85e9a..00000000000 --- a/doc/build/changelog/unreleased_20/10668.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10668 - - Improved the error message produced when the unit of work process sets the - value of a primary key column to NULL due to a related object with a - dependency rule on that column being deleted, to include not just the - destination object and column name but also the source column from which - the NULL value is originating. Pull request courtesy Jan Vollmer. diff --git a/doc/build/changelog/unreleased_20/10717.rst b/doc/build/changelog/unreleased_20/10717.rst deleted file mode 100644 index ccdcd80bb9e..00000000000 --- a/doc/build/changelog/unreleased_20/10717.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: bug, postgresql - :tickets: 10717 - - Adjusted the asyncpg dialect such that when the ``terminate()`` method is - used to discard an invalidated connection, the dialect will first attempt - to gracefully close the connection using ``.close()`` with a timeout, if - the operation is proceeding within an async event loop context only. This - allows the asyncpg driver to attend to finalizing a ``TimeoutError`` - including being able to close a long-running query server side, which - otherwise can keep running after the program has exited. diff --git a/doc/build/changelog/unreleased_20/10732.rst b/doc/build/changelog/unreleased_20/10732.rst deleted file mode 100644 index fb1c22a980d..00000000000 --- a/doc/build/changelog/unreleased_20/10732.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10732 - - Modified the ``__init_subclass__()`` method used by - :class:`_orm.MappedAsDataclass`, :class:`_orm.DeclarativeBase` and - :class:`_orm.DeclarativeBaseNoMeta` to accept arbitrary ``**kw`` and to - propagate them to the ``super()`` call, allowing greater flexibility in - arranging custom superclasses and mixins which make use of - ``__init_subclass__()`` keyword arguments. Pull request courtesy Michael - Oliver. - diff --git a/doc/build/changelog/unreleased_20/10747.rst b/doc/build/changelog/unreleased_20/10747.rst deleted file mode 100644 index ac8133ac735..00000000000 --- a/doc/build/changelog/unreleased_20/10747.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, tests - :tickets: 10747 - - Improvements to the test suite to further harden its ability to run - when Python ``greenlet`` is not installed. There is now a tox - target that includes the token "nogreenlet" that will run the suite - with greenlet not installed (note that it still temporarily installs - greenlet as part of the tox config, however). diff --git a/doc/build/changelog/unreleased_20/10753.rst b/doc/build/changelog/unreleased_20/10753.rst deleted file mode 100644 index 5b714ed1973..00000000000 --- a/doc/build/changelog/unreleased_20/10753.rst +++ /dev/null @@ -1,17 +0,0 @@ -.. 
change:: - :tags: bug, sql - :tickets: 10753 - - Fixed issue in stringify for SQL elements, where a specific dialect is not - passed, where a dialect-specific element such as the PostgreSQL "on - conflict do update" construct is encountered and then fails to provide for - a stringify dialect with the appropriate state to render the construct, - leading to internal errors. - -.. change:: - :tags: bug, sql - - Fixed issue where stringifying or compiling a :class:`.CTE` that was - against a DML construct such as an :func:`_sql.insert` construct would fail - to stringify, due to a mis-detection that the statement overall is an - INSERT, leading to internal errors. diff --git a/doc/build/changelog/unreleased_20/10776.rst b/doc/build/changelog/unreleased_20/10776.rst deleted file mode 100644 index 4a6889fdb7a..00000000000 --- a/doc/build/changelog/unreleased_20/10776.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10776 - - Ensured the use case of :class:`.Bundle` objects used in the - ``returning()`` portion of ORM-enabled INSERT, UPDATE and DELETE statements - is tested and works fully. This was never explicitly implemented or - tested previously and did not work correctly in the 1.4 series; in the 2.0 - series, ORM UPDATE/DELETE with WHERE criteria was missing an implementation - method preventing :class:`.Bundle` objects from working. diff --git a/doc/build/changelog/unreleased_20/10784.rst b/doc/build/changelog/unreleased_20/10784.rst deleted file mode 100644 index a67d5b6392b..00000000000 --- a/doc/build/changelog/unreleased_20/10784.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10784 - - Fixed 2.0 regression in :class:`.MutableList` where a routine that detects - sequences would not correctly filter out string or bytes instances, making - it impossible to assign a string value to a specific index (while - non-sequence values would work fine). diff --git a/doc/build/changelog/unreleased_20/async_fallback.rst b/doc/build/changelog/unreleased_20/async_fallback.rst deleted file mode 100644 index a0eccb5580b..00000000000 --- a/doc/build/changelog/unreleased_20/async_fallback.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: change, asyncio - - The ``async_fallback`` dialect argument is now deprecated, and will be - removed in SQLAlchemy 2.1. This flag has not been used for SQLAlchemy's - test suite for some time. asyncio dialects can still run in a synchronous - style by running code within a greenlet using :func:`_util.greenlet_spawn`. diff --git a/doc/build/changelog/unreleased_20/sql_func_typing.rst b/doc/build/changelog/unreleased_20/sql_func_typing.rst deleted file mode 100644 index 7fc310ec7c1..00000000000 --- a/doc/build/changelog/unreleased_20/sql_func_typing.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, typing - :tickets: 6810 - - Completed pep-484 typing for the ``sqlalchemy.sql.functions`` module. - :func:`_sql.select` constructs made against ``func`` elements should now - have filled-in return types. diff --git a/doc/build/conf.py b/doc/build/conf.py index d047d6daef8..9ff7e3768a9 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. 
-release = "2.0.23" +release = "2.0.24" -release_date = "November 2, 2023" +release_date = "December 28, 2023" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 341eee70434b783891e62f02bd59f7336b918b1b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 28 Dec 2023 11:23:13 -0500 Subject: [PATCH 047/544] Version 2.0.25 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index e14498e1b81..e07119e419b 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.25 + :include_notes_from: unreleased_20 + .. changelog:: :version: 2.0.24 :released: December 28, 2023 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 88ff36dcc73..4687b4915fc 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.24" +__version__ = "2.0.25" def __go(lcls: Any) -> None: From 429cbfe64c493c1cef532da892baf6871a6b2f90 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 28 Dec 2023 16:02:48 -0500 Subject: [PATCH 048/544] pop prefetch values from committed_state when they are available Fixed issue where when making use of the :paramref:`_orm.relationship.post_update` feature at the same time as using a mapper version_id_col could lead to a situation where the second UPDATE statement emitted by the post-update feature would fail to make use of the correct version identifier, assuming an UPDATE was already emitted in that flush which had already bumped the version counter. Fixes: #10800 Change-Id: I3fccdb26ebbd2d987bb4f0e894449b7413556054 (cherry picked from commit 46ec57e5cc5c66616087453a090754f4d0853c0c) --- doc/build/changelog/unreleased_20/10800.rst | 10 +++ lib/sqlalchemy/orm/persistence.py | 13 +++- test/orm/test_versioning.py | 86 +++++++++++++++++++++ 3 files changed, 107 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10800.rst diff --git a/doc/build/changelog/unreleased_20/10800.rst b/doc/build/changelog/unreleased_20/10800.rst new file mode 100644 index 00000000000..346ae1f5ace --- /dev/null +++ b/doc/build/changelog/unreleased_20/10800.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, orm + :tickets: 10800 + + Fixed issue where when making use of the + :paramref:`_orm.relationship.post_update` feature at the same time as using + a mapper version_id_col could lead to a situation where the second UPDATE + statement emitted by the post-update feature would fail to make use of the + correct version identifier, assuming an UPDATE was already emitted in that + flush which had already bumped the version counter. 
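As an illustrative sketch of the mapping shape involved (class and table names mirror the new test added below; the snippet itself is not part of the patch): a :paramref:`_orm.relationship.post_update` relationship on a mapper that also declares ``version_id_col`` emits a second UPDATE in the same flush, and that second UPDATE must carry the version identifier already bumped by the first one::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.orm import declarative_base, relationship

    Base = declarative_base()


    class User(Base):
        __tablename__ = "user"

        id = Column(Integer, primary_key=True)


    class Parent(Base):
        __tablename__ = "parent"

        id = Column(Integer, primary_key=True)
        version_id = Column(Integer)
        updated_by_id = Column(Integer, ForeignKey("user.id"))

        # post_update defers this column to a second UPDATE statement; with
        # version_id_col present, that UPDATE must use the bumped version
        updated_by = relationship(
            "User", foreign_keys=[updated_by_id], post_update=True
        )

        __mapper_args__ = {"version_id_col": version_id}
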
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index 3f537fb7616..1728b4ac88c 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -1659,9 +1659,18 @@ def _postfetch( for c in prefetch_cols: if c.key in params and c in mapper._columntoproperty: - dict_[mapper._columntoproperty[c].key] = params[c.key] + pkey = mapper._columntoproperty[c].key + + # set prefetched value in dict and also pop from committed_state, + # since this is new database state that replaces whatever might + # have previously been fetched (see #10800). this is essentially a + # shorthand version of set_committed_value(), which could also be + # used here directly (with more overhead) + dict_[pkey] = params[c.key] + state.committed_state.pop(pkey, None) + if refresh_flush: - load_evt_attrs.append(mapper._columntoproperty[c].key) + load_evt_attrs.append(pkey) if refresh_flush and load_evt_attrs: mapper.class_manager.dispatch.refresh_flush( diff --git a/test/orm/test_versioning.py b/test/orm/test_versioning.py index 7f52af71561..a0325059a81 100644 --- a/test/orm/test_versioning.py +++ b/test/orm/test_versioning.py @@ -2029,3 +2029,89 @@ def test_round_trip(self, fixture_session): fixture_session.commit() eq_(f1.version, 2) + + +class PostUpdateVersioningTest(fixtures.DeclarativeMappedTest): + """test for #10800""" + + @classmethod + def setup_classes(cls): + Base = cls.DeclarativeBasic + + class User(Base): + __tablename__ = "user" + + id = Column(Integer, primary_key=True) + + class Parent(Base): + __tablename__ = "parent" + + id = Column(Integer, primary_key=True) + version_id = Column(Integer) + updated_by_id = Column( + Integer, + ForeignKey("user.id"), + ) + + updated_by = relationship( + "User", + foreign_keys=[updated_by_id], + post_update=True, + ) + + __mapper_args__ = { + "version_id_col": version_id, + } + + def test_bumped_version_id(self): + User, Parent = self.classes("User", "Parent") + + session = fixture_session() + u1 = User(id=1) + u2 = User(id=2) + p1 = Parent(id=1, updated_by=u1) + session.add(u1) + session.add(u2) + session.add(p1) + + u2id = u2.id + session.commit() + session.close() + + p1 = session.get(Parent, 1) + p1.updated_by + p1.version_id = p1.version_id + p1.updated_by_id = u2id + assert "version_id" in inspect(p1).committed_state + + with self.sql_execution_asserter(testing.db) as asserter: + session.commit() + + asserter.assert_( + CompiledSQL( + "UPDATE parent SET version_id=:version_id, " + "updated_by_id=:updated_by_id WHERE parent.id = :parent_id " + "AND parent.version_id = :parent_version_id", + [ + { + "version_id": 2, + "updated_by_id": 2, + "parent_id": 1, + "parent_version_id": 1, + } + ], + ), + CompiledSQL( + "UPDATE parent SET version_id=:version_id, " + "updated_by_id=:updated_by_id WHERE parent.id = :parent_id " + "AND parent.version_id = :parent_version_id", + [ + { + "version_id": 3, + "updated_by_id": 2, + "parent_id": 1, + "parent_version_id": 2, + } + ], + ), + ) From 4e5809edd037f8fec8f80d684413e0c12c406855 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 28 Dec 2023 23:50:20 +0100 Subject: [PATCH 049/544] Remove unused method in SimpleResultMetaData Change-Id: I859c52613da84519bacbf55a105e3a16bb8e9728 (cherry picked from commit 0da6e5dd22c33d57fc206825f58dccc3d7c3b61c) --- lib/sqlalchemy/engine/result.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index acbe6f09236..2e7f1db34c6 100644 --- 
a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -329,9 +329,6 @@ def __setstate__(self, state: Dict[str, Any]) -> None: _tuplefilter=_tuplefilter, ) - def _contains(self, value: Any, row: Row[Any]) -> bool: - return value in row._data - def _index_for_key(self, key: Any, raiseerr: bool = True) -> int: if int in key.__class__.__mro__: key = self._keys[key] From 3a49862bcdf3921d03c6fd622c69b1ebfa33aac8 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 29 Dec 2023 11:45:55 -0500 Subject: [PATCH 050/544] document alternative relationship to AC patterns using event hook to defer construction this is for 2.0 and above only as it includes a typed mapping recipe as well. Fixes: #4660 Change-Id: I9478c7f451c0e58096cca60c0725396fed339abf (cherry picked from commit b44018d46b0b306bae04e0b0ea2e11ca78ef64e9) --- doc/build/orm/join_conditions.rst | 83 ++++++++++++++++++++++++++++--- 1 file changed, 75 insertions(+), 8 deletions(-) diff --git a/doc/build/orm/join_conditions.rst b/doc/build/orm/join_conditions.rst index 2e6d2d936b3..61f5e451210 100644 --- a/doc/build/orm/join_conditions.rst +++ b/doc/build/orm/join_conditions.rst @@ -763,14 +763,6 @@ complexity is kept within the middle. Relationship to Aliased Class ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. versionadded:: 1.3 - The :class:`.AliasedClass` construct can now be specified as the - target of a :func:`_orm.relationship`, replacing the previous approach - of using non-primary mappers, which had limitations such that they did - not inherit sub-relationships of the mapped entity as well as that they - required complex configuration against an alternate selectable. The - recipes in this section are now updated to use :class:`.AliasedClass`. - In the previous section, we illustrated a technique where we used :paramref:`_orm.relationship.secondary` in order to place additional tables within a join condition. There is one complex join case where @@ -847,6 +839,81 @@ With the above mapping, a simple join looks like: {execsql}SELECT a.id AS a_id, a.b_id AS a_b_id FROM a JOIN (b JOIN d ON d.b_id = b.id JOIN c ON c.id = d.c_id) ON a.b_id = b.id +Integrating AliasedClass Mappings with Typing and Avoiding Early Mapper Configuration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The creation of the :func:`_orm.aliased` construct against a mapped class +forces the :func:`_orm.configure_mappers` step to proceed, which will resolve +all current classes and their relationships. This may be problematic if +unrelated mapped classes needed by the current mappings have not yet been +declared, or if the configuration of the relationship itself needs access +to as-yet undeclared classes. Additionally, SQLAlchemy's Declarative pattern +works with Python typing most effectively when relationships are declared +up front. 
+ +To organize the construction of the relationship to work with these issues, a +configure level event hook like :meth:`.MapperEvents.before_mapper_configured` +may be used, which will invoke the configuration code only when all mappings +are ready for configuration:: + + from sqlalchemy import event + + + class A(Base): + __tablename__ = "a" + + id = mapped_column(Integer, primary_key=True) + b_id = mapped_column(ForeignKey("b.id")) + + + @event.listens_for(A, "before_mapper_configured") + def _configure_ab_relationship(mapper, cls): + # do the above configuration in a configuration hook + + j = join(B, D, D.b_id == B.id).join(C, C.id == D.c_id) + B_viacd = aliased(B, j, flat=True) + A.b = relationship(B_viacd, primaryjoin=A.b_id == j.c.b_id) + +Above, the function ``_configure_ab_relationship()`` will be invoked only +when a fully configured version of ``A`` is requested, at which point the +classes ``B``, ``D`` and ``C`` would be available. + +For an approach that integrates with inline typing, a similar technique can be +used to effectively generate a "singleton" creation pattern for the aliased +class where it is late-initialized as a global variable, which can then be used +in the relationship inline:: + + from typing import Any + + B_viacd: Any = None + b_viacd_join: Any = None + + + class A(Base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True) + b_id: Mapped[int] = mapped_column(ForeignKey("b.id")) + + # 1. the relationship can be declared using lambdas, allowing it to resolve + # to targets that are late-configured + b: Mapped[B] = relationship( + lambda: B_viacd, primaryjoin=lambda: A.b_id == b_viacd_join.c.b_id + ) + + + # 2. configure the targets of the relationship using a before_mapper_configured + # hook. + @event.listens_for(A, "before_mapper_configured") + def _configure_ab_relationship(mapper, cls): + # 3. set up the join() and AliasedClass as globals from within + # the configuration hook. + + global B_viacd, b_viacd_join + + b_viacd_join = join(B, D, D.b_id == B.id).join(C, C.id == D.c_id) + B_viacd = aliased(B, b_viacd_join, flat=True) + Using the AliasedClass target in Queries ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From a52dcbeb10330c3f67cf3a6de263676341dea63b Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 30 Dec 2023 00:14:39 +0100 Subject: [PATCH 051/544] fix typo in session.reset docs Change-Id: I6073cc623f216ffad8c18396001191b38eccc129 (cherry picked from commit 02472e8b65ac4062f2c3e7cee19608c801fba14c) --- lib/sqlalchemy/orm/scoping.py | 6 +++--- lib/sqlalchemy/orm/session.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py index ab632bdd564..4af98da0658 100644 --- a/lib/sqlalchemy/orm/scoping.py +++ b/lib/sqlalchemy/orm/scoping.py @@ -534,12 +534,12 @@ def reset(self) -> None: behalf of the :class:`_orm.scoping.scoped_session` class. This method provides for same "reset-only" behavior that the - :meth:_orm.Session.close method has provided historically, where the + :meth:`_orm.Session.close` method has provided historically, where the state of the :class:`_orm.Session` is reset as though the object were brand new, and ready to be used again. - The method may then be useful for :class:`_orm.Session` objects + This method may then be useful for :class:`_orm.Session` objects which set :paramref:`_orm.Session.close_resets_only` to ``False``, - so that "reset only" behavior is still available from this method. 
+ so that "reset only" behavior is still available. .. versionadded:: 2.0.22 diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index d8619812719..161b3201566 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -2472,12 +2472,12 @@ def reset(self) -> None: :class:`_orm.Session`, resetting the session to its initial state. This method provides for same "reset-only" behavior that the - :meth:_orm.Session.close method has provided historically, where the + :meth:`_orm.Session.close` method has provided historically, where the state of the :class:`_orm.Session` is reset as though the object were brand new, and ready to be used again. - The method may then be useful for :class:`_orm.Session` objects + This method may then be useful for :class:`_orm.Session` objects which set :paramref:`_orm.Session.close_resets_only` to ``False``, - so that "reset only" behavior is still available from this method. + so that "reset only" behavior is still available. .. versionadded:: 2.0.22 From 8772041cc62ba9eaa5b0fee89d9da325b526f267 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 30 Dec 2023 10:36:40 -0500 Subject: [PATCH 052/544] support pep695 when resolving type map types Added preliminary support for Python 3.12 pep-695 type alias structures, when resolving custom type maps for ORM Annotated Declarative mappings. Fixes: #10807 Change-Id: Ia28123ce1d6d1fd6bae5e8a037be4754c890f281 (cherry picked from commit 692525492986a109877d881b2f2936b610b9066f) --- doc/build/changelog/unreleased_20/10807.rst | 7 +++ lib/sqlalchemy/orm/decl_api.py | 5 ++ lib/sqlalchemy/sql/type_api.py | 5 +- lib/sqlalchemy/testing/requirements.py | 6 ++ lib/sqlalchemy/util/typing.py | 8 ++- setup.cfg | 2 +- .../test_tm_future_annotations_sync.py | 62 +++++++++++++++++++ test/orm/declarative/test_typed_mapping.py | 62 +++++++++++++++++++ 8 files changed, 153 insertions(+), 4 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10807.rst diff --git a/doc/build/changelog/unreleased_20/10807.rst b/doc/build/changelog/unreleased_20/10807.rst new file mode 100644 index 00000000000..afceef63e30 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10807.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: usecase, orm + :tickets: 10807 + + Added preliminary support for Python 3.12 pep-695 type alias structures, + when resolving custom type maps for ORM Annotated Declarative mappings. + diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index 60bd2ae4901..fe7ed146219 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -77,6 +77,7 @@ from ..util.typing import is_generic from ..util.typing import is_literal from ..util.typing import is_newtype +from ..util.typing import is_pep695 from ..util.typing import Literal from ..util.typing import Self @@ -1264,6 +1265,10 @@ def _resolve_type( elif is_newtype(python_type): python_type_type = flatten_newtype(python_type) search = ((python_type, python_type_type),) + elif is_pep695(python_type): + python_type_type = python_type.__value__ + flattened = None + search = ((python_type, python_type_type),) else: python_type_type = cast("Type[Any]", python_type) flattened = None diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 9226b01e61a..ab387bc7afb 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -39,6 +39,7 @@ from .. 
import util from ..util.typing import Protocol from ..util.typing import Self +from ..util.typing import TypeAliasType from ..util.typing import TypedDict from ..util.typing import TypeGuard @@ -67,7 +68,9 @@ _TE = TypeVar("_TE", bound="TypeEngine[Any]") _CT = TypeVar("_CT", bound=Any) -_MatchedOnType = Union["GenericProtocol[Any]", NewType, Type[Any]] +_MatchedOnType = Union[ + "GenericProtocol[Any]", TypeAliasType, NewType, Type[Any] +] class _NoValueInList(Enum): diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index 4dd5176a3ee..be700a420cc 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -1530,6 +1530,12 @@ def python311(self): lambda: util.py311, "Python 3.11 or above required" ) + @property + def python312(self): + return exclusions.only_if( + lambda: util.py312, "Python 3.12 or above required" + ) + @property def cpython(self): return exclusions.only_if( diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index faf71c89a29..83735f93b74 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -53,7 +53,7 @@ from typing_extensions import TypedDict as TypedDict # 3.8 from typing_extensions import TypeGuard as TypeGuard # 3.10 from typing_extensions import Self as Self # 3.11 - + from typing_extensions import TypeAliasType as TypeAliasType # 3.12 _T = TypeVar("_T", bound=Any) _KT = TypeVar("_KT") @@ -77,7 +77,7 @@ _AnnotationScanType = Union[ - Type[Any], str, ForwardRef, NewType, "GenericProtocol[Any]" + Type[Any], str, ForwardRef, NewType, TypeAliasType, "GenericProtocol[Any]" ] @@ -319,6 +319,10 @@ def is_generic(type_: _AnnotationScanType) -> TypeGuard[GenericProtocol[Any]]: return hasattr(type_, "__args__") and hasattr(type_, "__origin__") +def is_pep695(type_: _AnnotationScanType) -> TypeGuard[TypeAliasType]: + return isinstance(type_, TypeAliasType) + + def flatten_newtype(type_: NewType) -> Type[Any]: super_type = type_.__supertype__ while is_newtype(super_type): diff --git a/setup.cfg b/setup.cfg index c8594c17885..093961626f6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -39,7 +39,7 @@ package_dir = install_requires = importlib-metadata;python_version<"3.8" greenlet != 0.4.17;(platform_machine=='aarch64' or (platform_machine=='ppc64le' or (platform_machine=='x86_64' or (platform_machine=='amd64' or (platform_machine=='AMD64' or (platform_machine=='win32' or platform_machine=='WIN32')))))) - typing-extensions >= 4.2.0 + typing-extensions >= 4.6.0 [options.extras_require] asyncio = diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index e61900418e2..e64834b39d7 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -31,6 +31,8 @@ from typing_extensions import get_args as get_args from typing_extensions import Literal as Literal +from typing_extensions import TypeAlias as TypeAlias +from typing_extensions import TypedDict from sqlalchemy import BIGINT from sqlalchemy import BigInteger @@ -93,6 +95,31 @@ from sqlalchemy.util.typing import Annotated +class _SomeDict1(TypedDict): + type: Literal["1"] + + +class _SomeDict2(TypedDict): + type: Literal["2"] + + +_UnionTypeAlias: TypeAlias = Union[_SomeDict1, _SomeDict2] + +_StrTypeAlias: TypeAlias = str + +_StrPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] +_UnionPep695: TypeAlias = str + +if compat.py312: + exec( + """ +type _UnionPep695 = 
_SomeDict1 | _SomeDict2 +type _StrPep695 = str +""", + globals(), + ) + + def expect_annotation_syntax_error(name): return expect_raises_message( sa_exc.ArgumentError, @@ -731,6 +758,41 @@ class MyClass(decl_base): is_true(MyClass.__table__.c.data_two.nullable) eq_(MyClass.__table__.c.data_three.type.length, 50) + def test_plain_typealias_as_typemap_keys( + self, decl_base: Type[DeclarativeBase] + ): + decl_base.registry.update_type_annotation_map( + {_UnionTypeAlias: JSON, _StrTypeAlias: String(30)} + ) + + class Test(decl_base): + __tablename__ = "test" + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[_StrTypeAlias] + structure: Mapped[_UnionTypeAlias] + + eq_(Test.__table__.c.data.type.length, 30) + is_(Test.__table__.c.structure.type._type_affinity, JSON) + + @testing.requires.python312 + def test_pep695_typealias_as_typemap_keys( + self, decl_base: Type[DeclarativeBase] + ): + """test #10807""" + + decl_base.registry.update_type_annotation_map( + {_UnionPep695: JSON, _StrPep695: String(30)} + ) + + class Test(decl_base): + __tablename__ = "test" + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[_StrPep695] # type: ignore + structure: Mapped[_UnionPep695] # type: ignore + + eq_(Test.__table__.c.data.type.length, 30) + is_(Test.__table__.c.structure.type._type_affinity, JSON) + @testing.requires.python310 def test_we_got_all_attrs_test_annotated(self): argnames = _py_inspect.getfullargspec(mapped_column) diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 8da83ccb9d6..44327324cab 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -22,6 +22,8 @@ from typing_extensions import get_args as get_args from typing_extensions import Literal as Literal +from typing_extensions import TypeAlias as TypeAlias +from typing_extensions import TypedDict from sqlalchemy import BIGINT from sqlalchemy import BigInteger @@ -84,6 +86,31 @@ from sqlalchemy.util.typing import Annotated +class _SomeDict1(TypedDict): + type: Literal["1"] + + +class _SomeDict2(TypedDict): + type: Literal["2"] + + +_UnionTypeAlias: TypeAlias = Union[_SomeDict1, _SomeDict2] + +_StrTypeAlias: TypeAlias = str + +_StrPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] +_UnionPep695: TypeAlias = str + +if compat.py312: + exec( + """ +type _UnionPep695 = _SomeDict1 | _SomeDict2 +type _StrPep695 = str +""", + globals(), + ) + + def expect_annotation_syntax_error(name): return expect_raises_message( sa_exc.ArgumentError, @@ -722,6 +749,41 @@ class MyClass(decl_base): is_true(MyClass.__table__.c.data_two.nullable) eq_(MyClass.__table__.c.data_three.type.length, 50) + def test_plain_typealias_as_typemap_keys( + self, decl_base: Type[DeclarativeBase] + ): + decl_base.registry.update_type_annotation_map( + {_UnionTypeAlias: JSON, _StrTypeAlias: String(30)} + ) + + class Test(decl_base): + __tablename__ = "test" + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[_StrTypeAlias] + structure: Mapped[_UnionTypeAlias] + + eq_(Test.__table__.c.data.type.length, 30) + is_(Test.__table__.c.structure.type._type_affinity, JSON) + + @testing.requires.python312 + def test_pep695_typealias_as_typemap_keys( + self, decl_base: Type[DeclarativeBase] + ): + """test #10807""" + + decl_base.registry.update_type_annotation_map( + {_UnionPep695: JSON, _StrPep695: String(30)} + ) + + class Test(decl_base): + __tablename__ = "test" + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[_StrPep695] 
# type: ignore + structure: Mapped[_UnionPep695] # type: ignore + + eq_(Test.__table__.c.data.type.length, 30) + is_(Test.__table__.c.structure.type._type_affinity, JSON) + @testing.requires.python310 def test_we_got_all_attrs_test_annotated(self): argnames = _py_inspect.getfullargspec(mapped_column) From f459ddd5dae2aca8aa3ccaf775ff80453f22aad1 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 1 Jan 2024 12:49:10 -0500 Subject: [PATCH 053/544] add a generic argument to _HasClauseElement Further enhancements to pep-484 typing to allow SQL functions from :attr:`_sql.func` derived elements to work more effectively with ORM-mapped attributes. Fixes: #10801 Change-Id: Ib8222d888a2d8c3fbeab0d1bf5edb535916d4721 (cherry picked from commit 74a31c56ed931921f89026faf50768c86801376f) --- doc/build/changelog/unreleased_20/10801.rst | 7 +++++ lib/sqlalchemy/ext/hybrid.py | 6 ++-- lib/sqlalchemy/orm/_typing.py | 2 +- lib/sqlalchemy/orm/relationships.py | 5 ++- lib/sqlalchemy/sql/_elements_constructors.py | 4 +-- lib/sqlalchemy/sql/_typing.py | 31 ++++++++++--------- .../typing/plain_files/sql/functions_again.py | 14 +++++++++ 7 files changed, 46 insertions(+), 23 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10801.rst diff --git a/doc/build/changelog/unreleased_20/10801.rst b/doc/build/changelog/unreleased_20/10801.rst new file mode 100644 index 00000000000..a35a5485d58 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10801.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, typing + :tickets: 10801 + + Further enhancements to pep-484 typing to allow SQL functions from + :attr:`_sql.func` derived elements to work more effectively with ORM-mapped + attributes. diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index 615f166b479..5aadc329e56 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -930,7 +930,7 @@ def __call__(s, self: Any) -> None: class _HybridExprCallableType(Protocol[_T_co]): def __call__( s, cls: Any - ) -> Union[_HasClauseElement, SQLColumnExpression[_T_co]]: + ) -> Union[_HasClauseElement[_T_co], SQLColumnExpression[_T_co]]: ... 
@@ -1447,7 +1447,7 @@ class Comparator(interfaces.PropComparator[_T]): classes for usage with hybrids.""" def __init__( - self, expression: Union[_HasClauseElement, SQLColumnExpression[_T]] + self, expression: Union[_HasClauseElement[_T], SQLColumnExpression[_T]] ): self.expression = expression @@ -1482,7 +1482,7 @@ class ExprComparator(Comparator[_T]): def __init__( self, cls: Type[Any], - expression: Union[_HasClauseElement, SQLColumnExpression[_T]], + expression: Union[_HasClauseElement[_T], SQLColumnExpression[_T]], hybrid: hybrid_property[_T], ): self.cls = cls diff --git a/lib/sqlalchemy/orm/_typing.py b/lib/sqlalchemy/orm/_typing.py index 07f5e61a0ff..78aac6d572d 100644 --- a/lib/sqlalchemy/orm/_typing.py +++ b/lib/sqlalchemy/orm/_typing.py @@ -78,7 +78,7 @@ _ORMColumnExprArgument = Union[ ColumnElement[_T], - _HasClauseElement, + _HasClauseElement[_T], roles.ExpressionElementRole[_T], ] diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index 7ea30d7b180..7e27eff4177 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -179,7 +179,10 @@ ORMBackrefArgument = Union[str, Tuple[str, Dict[str, Any]]] _ORMColCollectionElement = Union[ - ColumnClause[Any], _HasClauseElement, roles.DMLColumnRole, "Mapped[Any]" + ColumnClause[Any], + _HasClauseElement[Any], + roles.DMLColumnRole, + "Mapped[Any]", ] _ORMColCollectionArgument = Union[ str, diff --git a/lib/sqlalchemy/sql/_elements_constructors.py b/lib/sqlalchemy/sql/_elements_constructors.py index 23e275ed5d7..a51e4a2cf4c 100644 --- a/lib/sqlalchemy/sql/_elements_constructors.py +++ b/lib/sqlalchemy/sql/_elements_constructors.py @@ -436,10 +436,8 @@ def outparam( return BindParameter(key, None, type_=type_, unique=False, isoutparam=True) -# mypy insists that BinaryExpression and _HasClauseElement protocol overlap. -# they do not. at all. bug in mypy? @overload -def not_(clause: BinaryExpression[_T]) -> BinaryExpression[_T]: # type: ignore +def not_(clause: BinaryExpression[_T]) -> BinaryExpression[_T]: ... diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index 003cc51245a..f99e93fbac6 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -11,6 +11,7 @@ from typing import Any from typing import Callable from typing import Dict +from typing import Generic from typing import Iterable from typing import Mapping from typing import NoReturn @@ -52,7 +53,6 @@ from .elements import SQLCoreOperations from .elements import TextClause from .lambdas import LambdaElement - from .roles import ColumnsClauseRole from .roles import FromClauseRole from .schema import Column from .selectable import Alias @@ -72,6 +72,7 @@ from ..util.typing import TypeGuard _T = TypeVar("_T", bound=Any) +_T_co = TypeVar("_T_co", bound=Any, covariant=True) _CE = TypeVar("_CE", bound="ColumnElement[Any]") @@ -79,10 +80,10 @@ _CLE = TypeVar("_CLE", bound="ClauseElement") -class _HasClauseElement(Protocol): +class _HasClauseElement(Protocol, Generic[_T_co]): """indicates a class that has a __clause_element__() method""" - def __clause_element__(self) -> ColumnsClauseRole: + def __clause_element__(self) -> roles.ExpressionElementRole[_T_co]: ... 
@@ -112,8 +113,8 @@ def __call__(self, obj: _CE) -> _CE: roles.ColumnsClauseRole, Literal["*", 1], Type[Any], - Inspectable[_HasClauseElement], - _HasClauseElement, + Inspectable[_HasClauseElement[Any]], + _HasClauseElement[Any], ) @@ -127,7 +128,7 @@ def __call__(self, obj: _CE) -> _CE: str, "TextClause", "ColumnElement[_T]", - _HasClauseElement, + _HasClauseElement[_T], roles.ExpressionElementRole[_T], ] @@ -137,8 +138,8 @@ def __call__(self, obj: _CE) -> _CE: "SQLCoreOperations[_T]", Literal["*", 1], Type[_T], - Inspectable[_HasClauseElement], - _HasClauseElement, + Inspectable[_HasClauseElement[_T]], + _HasClauseElement[_T], ] """open-ended SELECT columns clause argument. @@ -172,7 +173,7 @@ def __call__(self, obj: _CE) -> _CE: _ColumnExpressionArgument = Union[ "ColumnElement[_T]", - _HasClauseElement, + _HasClauseElement[_T], "SQLCoreOperations[_T]", roles.ExpressionElementRole[_T], Callable[[], "ColumnElement[_T]"], @@ -212,8 +213,8 @@ def __call__(self, obj: _CE) -> _CE: _FromClauseArgument = Union[ roles.FromClauseRole, Type[Any], - Inspectable[_HasClauseElement], - _HasClauseElement, + Inspectable[_HasClauseElement[Any]], + _HasClauseElement[Any], ] """A FROM clause, like we would send to select().select_from(). @@ -240,7 +241,7 @@ def __call__(self, obj: _CE) -> _CE: _DMLColumnArgument = Union[ str, - _HasClauseElement, + _HasClauseElement[Any], roles.DMLColumnRole, "SQLCoreOperations[Any]", ] @@ -271,8 +272,8 @@ def __call__(self, obj: _CE) -> _CE: "Alias", "CTE", Type[Any], - Inspectable[_HasClauseElement], - _HasClauseElement, + Inspectable[_HasClauseElement[Any]], + _HasClauseElement[Any], ] _PropagateAttrsType = util.immutabledict[str, Any] @@ -364,7 +365,7 @@ def is_quoted_name(s: str) -> TypeGuard[quoted_name]: return hasattr(s, "quote") -def is_has_clause_element(s: object) -> TypeGuard[_HasClauseElement]: +def is_has_clause_element(s: object) -> TypeGuard[_HasClauseElement[Any]]: return hasattr(s, "__clause_element__") diff --git a/test/typing/plain_files/sql/functions_again.py b/test/typing/plain_files/sql/functions_again.py index 5173d1fe082..87ade922468 100644 --- a/test/typing/plain_files/sql/functions_again.py +++ b/test/typing/plain_files/sql/functions_again.py @@ -1,4 +1,5 @@ from sqlalchemy import func +from sqlalchemy import select from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column @@ -27,3 +28,16 @@ class Foo(Base): reveal_type(func.row_number().filter()) # EXPECTED_TYPE: FunctionFilter[Any] reveal_type(func.row_number().filter(Foo.a > 0)) + + +# test #10801 +# EXPECTED_TYPE: max[int] +reveal_type(func.max(Foo.b)) + + +stmt1 = select( + Foo.a, + func.min(Foo.b), +).group_by(Foo.a) +# EXPECTED_TYPE: Select[Tuple[int, int]] +reveal_type(stmt1) From c7948f81d32fe125bf6bbb2de423a73522089a56 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 1 Jan 2024 17:04:05 -0500 Subject: [PATCH 054/544] 2024 setup Change-Id: I2853d99bbc19c94227e2b88d450873197013bdfb --- lib/sqlalchemy/__init__.py | 2 +- lib/sqlalchemy/connectors/__init__.py | 2 +- lib/sqlalchemy/connectors/aioodbc.py | 2 +- lib/sqlalchemy/connectors/asyncio.py | 2 +- lib/sqlalchemy/connectors/pyodbc.py | 2 +- lib/sqlalchemy/cyextension/__init__.py | 2 +- lib/sqlalchemy/cyextension/collections.pyx | 2 +- lib/sqlalchemy/cyextension/immutabledict.pxd | 2 +- lib/sqlalchemy/cyextension/immutabledict.pyx | 2 +- lib/sqlalchemy/cyextension/processors.pyx | 2 +- lib/sqlalchemy/cyextension/resultproxy.pyx | 2 +- lib/sqlalchemy/cyextension/util.pyx | 2 
+- lib/sqlalchemy/dialects/__init__.py | 2 +- lib/sqlalchemy/dialects/_typing.py | 2 +- lib/sqlalchemy/dialects/mssql/__init__.py | 2 +- lib/sqlalchemy/dialects/mssql/aioodbc.py | 2 +- lib/sqlalchemy/dialects/mssql/base.py | 2 +- lib/sqlalchemy/dialects/mssql/information_schema.py | 2 +- lib/sqlalchemy/dialects/mssql/json.py | 2 +- lib/sqlalchemy/dialects/mssql/provision.py | 2 +- lib/sqlalchemy/dialects/mssql/pymssql.py | 2 +- lib/sqlalchemy/dialects/mssql/pyodbc.py | 2 +- lib/sqlalchemy/dialects/mysql/__init__.py | 2 +- lib/sqlalchemy/dialects/mysql/aiomysql.py | 2 +- lib/sqlalchemy/dialects/mysql/asyncmy.py | 2 +- lib/sqlalchemy/dialects/mysql/base.py | 2 +- lib/sqlalchemy/dialects/mysql/cymysql.py | 2 +- lib/sqlalchemy/dialects/mysql/dml.py | 2 +- lib/sqlalchemy/dialects/mysql/enumerated.py | 2 +- lib/sqlalchemy/dialects/mysql/expression.py | 2 +- lib/sqlalchemy/dialects/mysql/json.py | 2 +- lib/sqlalchemy/dialects/mysql/mariadb.py | 2 +- lib/sqlalchemy/dialects/mysql/mariadbconnector.py | 2 +- lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 2 +- lib/sqlalchemy/dialects/mysql/mysqldb.py | 2 +- lib/sqlalchemy/dialects/mysql/provision.py | 2 +- lib/sqlalchemy/dialects/mysql/pymysql.py | 2 +- lib/sqlalchemy/dialects/mysql/pyodbc.py | 2 +- lib/sqlalchemy/dialects/mysql/reflection.py | 2 +- lib/sqlalchemy/dialects/mysql/reserved_words.py | 2 +- lib/sqlalchemy/dialects/mysql/types.py | 2 +- lib/sqlalchemy/dialects/oracle/__init__.py | 2 +- lib/sqlalchemy/dialects/oracle/base.py | 2 +- lib/sqlalchemy/dialects/oracle/cx_oracle.py | 2 +- lib/sqlalchemy/dialects/oracle/dictionary.py | 2 +- lib/sqlalchemy/dialects/oracle/oracledb.py | 2 +- lib/sqlalchemy/dialects/oracle/provision.py | 2 +- lib/sqlalchemy/dialects/oracle/types.py | 2 +- lib/sqlalchemy/dialects/postgresql/__init__.py | 2 +- lib/sqlalchemy/dialects/postgresql/_psycopg_common.py | 2 +- lib/sqlalchemy/dialects/postgresql/array.py | 2 +- lib/sqlalchemy/dialects/postgresql/asyncpg.py | 2 +- lib/sqlalchemy/dialects/postgresql/base.py | 2 +- lib/sqlalchemy/dialects/postgresql/dml.py | 2 +- lib/sqlalchemy/dialects/postgresql/ext.py | 2 +- lib/sqlalchemy/dialects/postgresql/hstore.py | 2 +- lib/sqlalchemy/dialects/postgresql/json.py | 2 +- lib/sqlalchemy/dialects/postgresql/named_types.py | 2 +- lib/sqlalchemy/dialects/postgresql/operators.py | 2 +- lib/sqlalchemy/dialects/postgresql/pg8000.py | 2 +- lib/sqlalchemy/dialects/postgresql/pg_catalog.py | 2 +- lib/sqlalchemy/dialects/postgresql/provision.py | 2 +- lib/sqlalchemy/dialects/postgresql/psycopg.py | 2 +- lib/sqlalchemy/dialects/postgresql/psycopg2.py | 2 +- lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py | 2 +- lib/sqlalchemy/dialects/postgresql/ranges.py | 2 +- lib/sqlalchemy/dialects/postgresql/types.py | 2 +- lib/sqlalchemy/dialects/sqlite/__init__.py | 2 +- lib/sqlalchemy/dialects/sqlite/aiosqlite.py | 2 +- lib/sqlalchemy/dialects/sqlite/base.py | 2 +- lib/sqlalchemy/dialects/sqlite/dml.py | 2 +- lib/sqlalchemy/dialects/sqlite/json.py | 2 +- lib/sqlalchemy/dialects/sqlite/provision.py | 2 +- lib/sqlalchemy/dialects/sqlite/pysqlcipher.py | 2 +- lib/sqlalchemy/dialects/sqlite/pysqlite.py | 2 +- lib/sqlalchemy/engine/__init__.py | 2 +- lib/sqlalchemy/engine/_py_processors.py | 2 +- lib/sqlalchemy/engine/_py_row.py | 2 +- lib/sqlalchemy/engine/_py_util.py | 2 +- lib/sqlalchemy/engine/base.py | 2 +- lib/sqlalchemy/engine/characteristics.py | 2 +- lib/sqlalchemy/engine/create.py | 2 +- lib/sqlalchemy/engine/cursor.py | 2 +- lib/sqlalchemy/engine/default.py | 2 +- 
lib/sqlalchemy/engine/events.py | 2 +- lib/sqlalchemy/engine/interfaces.py | 2 +- lib/sqlalchemy/engine/mock.py | 2 +- lib/sqlalchemy/engine/processors.py | 2 +- lib/sqlalchemy/engine/reflection.py | 2 +- lib/sqlalchemy/engine/result.py | 2 +- lib/sqlalchemy/engine/row.py | 2 +- lib/sqlalchemy/engine/strategies.py | 2 +- lib/sqlalchemy/engine/url.py | 2 +- lib/sqlalchemy/engine/util.py | 2 +- lib/sqlalchemy/event/__init__.py | 2 +- lib/sqlalchemy/event/api.py | 2 +- lib/sqlalchemy/event/attr.py | 2 +- lib/sqlalchemy/event/base.py | 2 +- lib/sqlalchemy/event/legacy.py | 2 +- lib/sqlalchemy/event/registry.py | 2 +- lib/sqlalchemy/events.py | 2 +- lib/sqlalchemy/exc.py | 2 +- lib/sqlalchemy/ext/__init__.py | 2 +- lib/sqlalchemy/ext/associationproxy.py | 2 +- lib/sqlalchemy/ext/asyncio/__init__.py | 2 +- lib/sqlalchemy/ext/asyncio/base.py | 2 +- lib/sqlalchemy/ext/asyncio/engine.py | 2 +- lib/sqlalchemy/ext/asyncio/exc.py | 2 +- lib/sqlalchemy/ext/asyncio/result.py | 2 +- lib/sqlalchemy/ext/asyncio/scoping.py | 2 +- lib/sqlalchemy/ext/asyncio/session.py | 2 +- lib/sqlalchemy/ext/automap.py | 2 +- lib/sqlalchemy/ext/baked.py | 2 +- lib/sqlalchemy/ext/compiler.py | 2 +- lib/sqlalchemy/ext/declarative/__init__.py | 2 +- lib/sqlalchemy/ext/declarative/extensions.py | 2 +- lib/sqlalchemy/ext/horizontal_shard.py | 2 +- lib/sqlalchemy/ext/hybrid.py | 2 +- lib/sqlalchemy/ext/indexable.py | 2 +- lib/sqlalchemy/ext/instrumentation.py | 2 +- lib/sqlalchemy/ext/mutable.py | 2 +- lib/sqlalchemy/ext/mypy/__init__.py | 2 +- lib/sqlalchemy/ext/mypy/apply.py | 2 +- lib/sqlalchemy/ext/mypy/decl_class.py | 2 +- lib/sqlalchemy/ext/mypy/infer.py | 2 +- lib/sqlalchemy/ext/mypy/names.py | 2 +- lib/sqlalchemy/ext/mypy/plugin.py | 2 +- lib/sqlalchemy/ext/mypy/util.py | 2 +- lib/sqlalchemy/ext/orderinglist.py | 2 +- lib/sqlalchemy/ext/serializer.py | 2 +- lib/sqlalchemy/future/__init__.py | 2 +- lib/sqlalchemy/future/engine.py | 2 +- lib/sqlalchemy/inspection.py | 2 +- lib/sqlalchemy/log.py | 2 +- lib/sqlalchemy/orm/__init__.py | 2 +- lib/sqlalchemy/orm/_orm_constructors.py | 2 +- lib/sqlalchemy/orm/_typing.py | 2 +- lib/sqlalchemy/orm/attributes.py | 2 +- lib/sqlalchemy/orm/base.py | 2 +- lib/sqlalchemy/orm/bulk_persistence.py | 2 +- lib/sqlalchemy/orm/clsregistry.py | 2 +- lib/sqlalchemy/orm/collections.py | 2 +- lib/sqlalchemy/orm/context.py | 2 +- lib/sqlalchemy/orm/decl_api.py | 2 +- lib/sqlalchemy/orm/decl_base.py | 2 +- lib/sqlalchemy/orm/dependency.py | 2 +- lib/sqlalchemy/orm/descriptor_props.py | 2 +- lib/sqlalchemy/orm/dynamic.py | 2 +- lib/sqlalchemy/orm/evaluator.py | 2 +- lib/sqlalchemy/orm/events.py | 2 +- lib/sqlalchemy/orm/exc.py | 2 +- lib/sqlalchemy/orm/identity.py | 2 +- lib/sqlalchemy/orm/instrumentation.py | 2 +- lib/sqlalchemy/orm/interfaces.py | 2 +- lib/sqlalchemy/orm/loading.py | 2 +- lib/sqlalchemy/orm/mapped_collection.py | 2 +- lib/sqlalchemy/orm/mapper.py | 2 +- lib/sqlalchemy/orm/path_registry.py | 2 +- lib/sqlalchemy/orm/persistence.py | 2 +- lib/sqlalchemy/orm/properties.py | 2 +- lib/sqlalchemy/orm/query.py | 2 +- lib/sqlalchemy/orm/relationships.py | 2 +- lib/sqlalchemy/orm/scoping.py | 2 +- lib/sqlalchemy/orm/session.py | 2 +- lib/sqlalchemy/orm/state.py | 2 +- lib/sqlalchemy/orm/state_changes.py | 2 +- lib/sqlalchemy/orm/strategies.py | 2 +- lib/sqlalchemy/orm/strategy_options.py | 2 +- lib/sqlalchemy/orm/sync.py | 2 +- lib/sqlalchemy/orm/unitofwork.py | 2 +- lib/sqlalchemy/orm/util.py | 2 +- lib/sqlalchemy/orm/writeonly.py | 2 +- lib/sqlalchemy/pool/__init__.py | 2 +- 
lib/sqlalchemy/pool/base.py | 2 +- lib/sqlalchemy/pool/events.py | 2 +- lib/sqlalchemy/pool/impl.py | 2 +- lib/sqlalchemy/schema.py | 2 +- lib/sqlalchemy/sql/__init__.py | 2 +- lib/sqlalchemy/sql/_dml_constructors.py | 2 +- lib/sqlalchemy/sql/_elements_constructors.py | 2 +- lib/sqlalchemy/sql/_orm_types.py | 2 +- lib/sqlalchemy/sql/_py_util.py | 2 +- lib/sqlalchemy/sql/_selectable_constructors.py | 2 +- lib/sqlalchemy/sql/_typing.py | 2 +- lib/sqlalchemy/sql/annotation.py | 2 +- lib/sqlalchemy/sql/base.py | 2 +- lib/sqlalchemy/sql/cache_key.py | 2 +- lib/sqlalchemy/sql/coercions.py | 2 +- lib/sqlalchemy/sql/compiler.py | 2 +- lib/sqlalchemy/sql/crud.py | 2 +- lib/sqlalchemy/sql/ddl.py | 2 +- lib/sqlalchemy/sql/default_comparator.py | 2 +- lib/sqlalchemy/sql/dml.py | 2 +- lib/sqlalchemy/sql/elements.py | 2 +- lib/sqlalchemy/sql/events.py | 2 +- lib/sqlalchemy/sql/expression.py | 2 +- lib/sqlalchemy/sql/functions.py | 2 +- lib/sqlalchemy/sql/lambdas.py | 2 +- lib/sqlalchemy/sql/naming.py | 2 +- lib/sqlalchemy/sql/operators.py | 2 +- lib/sqlalchemy/sql/roles.py | 2 +- lib/sqlalchemy/sql/schema.py | 2 +- lib/sqlalchemy/sql/selectable.py | 2 +- lib/sqlalchemy/sql/sqltypes.py | 2 +- lib/sqlalchemy/sql/traversals.py | 2 +- lib/sqlalchemy/sql/type_api.py | 2 +- lib/sqlalchemy/sql/util.py | 2 +- lib/sqlalchemy/sql/visitors.py | 2 +- lib/sqlalchemy/testing/__init__.py | 2 +- lib/sqlalchemy/testing/assertions.py | 2 +- lib/sqlalchemy/testing/assertsql.py | 2 +- lib/sqlalchemy/testing/asyncio.py | 2 +- lib/sqlalchemy/testing/config.py | 2 +- lib/sqlalchemy/testing/engines.py | 2 +- lib/sqlalchemy/testing/entities.py | 2 +- lib/sqlalchemy/testing/exclusions.py | 2 +- lib/sqlalchemy/testing/fixtures/__init__.py | 2 +- lib/sqlalchemy/testing/fixtures/base.py | 2 +- lib/sqlalchemy/testing/fixtures/mypy.py | 2 +- lib/sqlalchemy/testing/fixtures/orm.py | 2 +- lib/sqlalchemy/testing/fixtures/sql.py | 2 +- lib/sqlalchemy/testing/pickleable.py | 2 +- lib/sqlalchemy/testing/plugin/__init__.py | 2 +- lib/sqlalchemy/testing/plugin/bootstrap.py | 2 +- lib/sqlalchemy/testing/plugin/plugin_base.py | 2 +- lib/sqlalchemy/testing/plugin/pytestplugin.py | 2 +- lib/sqlalchemy/testing/profiling.py | 2 +- lib/sqlalchemy/testing/provision.py | 2 +- lib/sqlalchemy/testing/requirements.py | 2 +- lib/sqlalchemy/testing/schema.py | 2 +- lib/sqlalchemy/testing/suite/__init__.py | 2 +- lib/sqlalchemy/testing/suite/test_cte.py | 2 +- lib/sqlalchemy/testing/suite/test_ddl.py | 2 +- lib/sqlalchemy/testing/suite/test_deprecations.py | 2 +- lib/sqlalchemy/testing/suite/test_dialect.py | 2 +- lib/sqlalchemy/testing/suite/test_insert.py | 2 +- lib/sqlalchemy/testing/suite/test_reflection.py | 2 +- lib/sqlalchemy/testing/suite/test_results.py | 2 +- lib/sqlalchemy/testing/suite/test_rowcount.py | 2 +- lib/sqlalchemy/testing/suite/test_select.py | 2 +- lib/sqlalchemy/testing/suite/test_sequence.py | 2 +- lib/sqlalchemy/testing/suite/test_types.py | 2 +- lib/sqlalchemy/testing/suite/test_unicode_ddl.py | 2 +- lib/sqlalchemy/testing/suite/test_update_delete.py | 2 +- lib/sqlalchemy/testing/util.py | 2 +- lib/sqlalchemy/testing/warnings.py | 2 +- lib/sqlalchemy/types.py | 2 +- lib/sqlalchemy/util/__init__.py | 2 +- lib/sqlalchemy/util/_collections.py | 2 +- lib/sqlalchemy/util/_concurrency_py3k.py | 2 +- lib/sqlalchemy/util/_has_cy.py | 2 +- lib/sqlalchemy/util/_py_collections.py | 2 +- lib/sqlalchemy/util/compat.py | 2 +- lib/sqlalchemy/util/concurrency.py | 2 +- lib/sqlalchemy/util/deprecations.py | 2 +- lib/sqlalchemy/util/langhelpers.py 
| 2 +- lib/sqlalchemy/util/preloaded.py | 2 +- lib/sqlalchemy/util/queue.py | 2 +- lib/sqlalchemy/util/tool_support.py | 2 +- lib/sqlalchemy/util/topological.py | 2 +- lib/sqlalchemy/util/typing.py | 2 +- 261 files changed, 261 insertions(+), 261 deletions(-) diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 4687b4915fc..1cd1abfd07f 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -1,5 +1,5 @@ # __init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/connectors/__init__.py b/lib/sqlalchemy/connectors/__init__.py index 1969d7236bc..f1cae0b3ceb 100644 --- a/lib/sqlalchemy/connectors/__init__.py +++ b/lib/sqlalchemy/connectors/__init__.py @@ -1,5 +1,5 @@ # connectors/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/connectors/aioodbc.py b/lib/sqlalchemy/connectors/aioodbc.py index c6986366e1c..9b861639a49 100644 --- a/lib/sqlalchemy/connectors/aioodbc.py +++ b/lib/sqlalchemy/connectors/aioodbc.py @@ -1,5 +1,5 @@ # connectors/aioodbc.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/connectors/asyncio.py b/lib/sqlalchemy/connectors/asyncio.py index 997407ccd58..6ba228a6253 100644 --- a/lib/sqlalchemy/connectors/asyncio.py +++ b/lib/sqlalchemy/connectors/asyncio.py @@ -1,5 +1,5 @@ # connectors/asyncio.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py index 49712a57c41..7e1cd3afe8f 100644 --- a/lib/sqlalchemy/connectors/pyodbc.py +++ b/lib/sqlalchemy/connectors/pyodbc.py @@ -1,5 +1,5 @@ # connectors/pyodbc.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/cyextension/__init__.py b/lib/sqlalchemy/cyextension/__init__.py index 67aa690e02f..88a4d903967 100644 --- a/lib/sqlalchemy/cyextension/__init__.py +++ b/lib/sqlalchemy/cyextension/__init__.py @@ -1,5 +1,5 @@ # cyextension/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/cyextension/collections.pyx b/lib/sqlalchemy/cyextension/collections.pyx index a45b5d90433..86d24852b3f 100644 --- a/lib/sqlalchemy/cyextension/collections.pyx +++ b/lib/sqlalchemy/cyextension/collections.pyx @@ -1,5 +1,5 @@ # cyextension/collections.pyx -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/cyextension/immutabledict.pxd b/lib/sqlalchemy/cyextension/immutabledict.pxd index d733d48affd..76f22893168 100644 --- 
a/lib/sqlalchemy/cyextension/immutabledict.pxd +++ b/lib/sqlalchemy/cyextension/immutabledict.pxd @@ -1,5 +1,5 @@ # cyextension/immutabledict.pxd -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/cyextension/immutabledict.pyx b/lib/sqlalchemy/cyextension/immutabledict.pyx index d43d465febe..b37eccc4c39 100644 --- a/lib/sqlalchemy/cyextension/immutabledict.pyx +++ b/lib/sqlalchemy/cyextension/immutabledict.pyx @@ -1,5 +1,5 @@ # cyextension/immutabledict.pyx -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/cyextension/processors.pyx b/lib/sqlalchemy/cyextension/processors.pyx index 03d8411c336..3d714569fa0 100644 --- a/lib/sqlalchemy/cyextension/processors.pyx +++ b/lib/sqlalchemy/cyextension/processors.pyx @@ -1,5 +1,5 @@ # cyextension/processors.pyx -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/cyextension/resultproxy.pyx b/lib/sqlalchemy/cyextension/resultproxy.pyx index e81df51f38d..b6e357a1f35 100644 --- a/lib/sqlalchemy/cyextension/resultproxy.pyx +++ b/lib/sqlalchemy/cyextension/resultproxy.pyx @@ -1,5 +1,5 @@ # cyextension/resultproxy.pyx -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/cyextension/util.pyx b/lib/sqlalchemy/cyextension/util.pyx index 63daddf4640..cb17acd69c0 100644 --- a/lib/sqlalchemy/cyextension/util.pyx +++ b/lib/sqlalchemy/cyextension/util.pyx @@ -1,5 +1,5 @@ # cyextension/util.pyx -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py index 055d087cf24..7d5cc1c9c2f 100644 --- a/lib/sqlalchemy/dialects/__init__.py +++ b/lib/sqlalchemy/dialects/__init__.py @@ -1,5 +1,5 @@ # dialects/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/_typing.py b/lib/sqlalchemy/dialects/_typing.py index 9d2500e48e8..9ee6e4bca1c 100644 --- a/lib/sqlalchemy/dialects/_typing.py +++ b/lib/sqlalchemy/dialects/_typing.py @@ -1,5 +1,5 @@ # dialects/_typing.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/__init__.py b/lib/sqlalchemy/dialects/mssql/__init__.py index c601cba1f30..19ab7c422c9 100644 --- a/lib/sqlalchemy/dialects/mssql/__init__.py +++ b/lib/sqlalchemy/dialects/mssql/__init__.py @@ -1,5 +1,5 @@ # dialects/mssql/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of 
SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/aioodbc.py b/lib/sqlalchemy/dialects/mssql/aioodbc.py index e9d22155a74..65945d97559 100644 --- a/lib/sqlalchemy/dialects/mssql/aioodbc.py +++ b/lib/sqlalchemy/dialects/mssql/aioodbc.py @@ -1,5 +1,5 @@ # dialects/mssql/aioodbc.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 952a7a1f690..e015dccdc99 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -1,5 +1,5 @@ # dialects/mssql/base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py index 2c30c55b6e0..11771638832 100644 --- a/lib/sqlalchemy/dialects/mssql/information_schema.py +++ b/lib/sqlalchemy/dialects/mssql/information_schema.py @@ -1,5 +1,5 @@ # dialects/mssql/information_schema.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/json.py b/lib/sqlalchemy/dialects/mssql/json.py index f79d6e3ed5e..18bea09d0f1 100644 --- a/lib/sqlalchemy/dialects/mssql/json.py +++ b/lib/sqlalchemy/dialects/mssql/json.py @@ -1,5 +1,5 @@ # dialects/mssql/json.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/provision.py b/lib/sqlalchemy/dialects/mssql/provision.py index 1913c95717a..143d386c45e 100644 --- a/lib/sqlalchemy/dialects/mssql/provision.py +++ b/lib/sqlalchemy/dialects/mssql/provision.py @@ -1,5 +1,5 @@ # dialects/mssql/provision.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py index 5351be1131e..ea1f9bd3a7e 100644 --- a/lib/sqlalchemy/dialects/mssql/pymssql.py +++ b/lib/sqlalchemy/dialects/mssql/pymssql.py @@ -1,5 +1,5 @@ # dialects/mssql/pymssql.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py index 17c4e4c830d..f27dee1bd59 100644 --- a/lib/sqlalchemy/dialects/mssql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py @@ -1,5 +1,5 @@ # dialects/mssql/pyodbc.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py index 49d859b418d..60bac87443d 100644 --- a/lib/sqlalchemy/dialects/mysql/__init__.py +++ 
b/lib/sqlalchemy/dialects/mysql/__init__.py @@ -1,5 +1,5 @@ # dialects/mysql/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/aiomysql.py b/lib/sqlalchemy/dialects/mysql/aiomysql.py index c8a0a36abcd..65482a76b27 100644 --- a/lib/sqlalchemy/dialects/mysql/aiomysql.py +++ b/lib/sqlalchemy/dialects/mysql/aiomysql.py @@ -1,5 +1,5 @@ # dialects/mysql/aiomysql.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/asyncmy.py b/lib/sqlalchemy/dialects/mysql/asyncmy.py index d55129bed81..9928a879ec5 100644 --- a/lib/sqlalchemy/dialects/mysql/asyncmy.py +++ b/lib/sqlalchemy/dialects/mysql/asyncmy.py @@ -1,5 +1,5 @@ # dialects/mysql/asyncmy.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index c51b3eefca5..78bf4636afd 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1,5 +1,5 @@ # dialects/mysql/base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py index a96a71eb4c3..f199aa4e895 100644 --- a/lib/sqlalchemy/dialects/mysql/cymysql.py +++ b/lib/sqlalchemy/dialects/mysql/cymysql.py @@ -1,5 +1,5 @@ # dialects/mysql/cymysql.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/dml.py b/lib/sqlalchemy/dialects/mysql/dml.py index aba60103f7f..e4005c267e4 100644 --- a/lib/sqlalchemy/dialects/mysql/dml.py +++ b/lib/sqlalchemy/dialects/mysql/dml.py @@ -1,5 +1,5 @@ # dialects/mysql/dml.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/enumerated.py b/lib/sqlalchemy/dialects/mysql/enumerated.py index a70d499e436..96499d7bee2 100644 --- a/lib/sqlalchemy/dialects/mysql/enumerated.py +++ b/lib/sqlalchemy/dialects/mysql/enumerated.py @@ -1,5 +1,5 @@ # dialects/mysql/enumerated.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/expression.py b/lib/sqlalchemy/dialects/mysql/expression.py index 0c41aeb727b..b81b58afc28 100644 --- a/lib/sqlalchemy/dialects/mysql/expression.py +++ b/lib/sqlalchemy/dialects/mysql/expression.py @@ -1,5 +1,5 @@ # dialects/mysql/expression.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/json.py b/lib/sqlalchemy/dialects/mysql/json.py index 8359e4d36ad..ebe4a34d212 
100644 --- a/lib/sqlalchemy/dialects/mysql/json.py +++ b/lib/sqlalchemy/dialects/mysql/json.py @@ -1,5 +1,5 @@ # dialects/mysql/json.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/mariadb.py b/lib/sqlalchemy/dialects/mysql/mariadb.py index 17f858184fc..10a05f9cb36 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadb.py +++ b/lib/sqlalchemy/dialects/mysql/mariadb.py @@ -1,5 +1,5 @@ # dialects/mysql/mariadb.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py index 3ee9c1e0053..2fe3a192aa9 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py @@ -1,5 +1,5 @@ # dialects/mysql/mariadbconnector.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index 73254530164..b1523392d8c 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -1,5 +1,5 @@ # dialects/mysql/mysqlconnector.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py index d42cdc9b0fd..d46d159d4cd 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py @@ -1,5 +1,5 @@ # dialects/mysql/mysqldb.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/provision.py b/lib/sqlalchemy/dialects/mysql/provision.py index b3584ee5c7e..3f05bcee74d 100644 --- a/lib/sqlalchemy/dialects/mysql/provision.py +++ b/lib/sqlalchemy/dialects/mysql/provision.py @@ -1,5 +1,5 @@ # dialects/mysql/provision.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py index 6e87173be97..830e4416c79 100644 --- a/lib/sqlalchemy/dialects/mysql/pymysql.py +++ b/lib/sqlalchemy/dialects/mysql/pymysql.py @@ -1,5 +1,5 @@ # dialects/mysql/pymysql.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py index 87be2827b50..428c8dfd385 100644 --- a/lib/sqlalchemy/dialects/mysql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py @@ -1,5 +1,5 @@ # dialects/mysql/pyodbc.py -# Copyright (C) 2005-2023 the SQLAlchemy authors 
and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/reflection.py b/lib/sqlalchemy/dialects/mysql/reflection.py index d678bc9f4a6..74c60f07b58 100644 --- a/lib/sqlalchemy/dialects/mysql/reflection.py +++ b/lib/sqlalchemy/dialects/mysql/reflection.py @@ -1,5 +1,5 @@ # dialects/mysql/reflection.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/reserved_words.py b/lib/sqlalchemy/dialects/mysql/reserved_words.py index b092428cf32..009988a6085 100644 --- a/lib/sqlalchemy/dialects/mysql/reserved_words.py +++ b/lib/sqlalchemy/dialects/mysql/reserved_words.py @@ -1,5 +1,5 @@ # dialects/mysql/reserved_words.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/types.py b/lib/sqlalchemy/dialects/mysql/types.py index 3fc96e61076..f563ead357f 100644 --- a/lib/sqlalchemy/dialects/mysql/types.py +++ b/lib/sqlalchemy/dialects/mysql/types.py @@ -1,5 +1,5 @@ # dialects/mysql/types.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py index 49464d6de71..e2c8d327a06 100644 --- a/lib/sqlalchemy/dialects/oracle/__init__.py +++ b/lib/sqlalchemy/dialects/oracle/__init__.py @@ -1,5 +1,5 @@ # dialects/oracle/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 10dd69e99df..1a6144a28eb 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -1,5 +1,5 @@ # dialects/oracle/base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index 95b7abe3b87..440ccad2bc1 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -1,5 +1,5 @@ # dialects/oracle/cx_oracle.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/dictionary.py b/lib/sqlalchemy/dialects/oracle/dictionary.py index 5d4056ad2af..63479b9fcc6 100644 --- a/lib/sqlalchemy/dialects/oracle/dictionary.py +++ b/lib/sqlalchemy/dialects/oracle/dictionary.py @@ -1,5 +1,5 @@ # dialects/oracle/dictionary.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git 
a/lib/sqlalchemy/dialects/oracle/oracledb.py b/lib/sqlalchemy/dialects/oracle/oracledb.py index c4e2b1ffffd..4c6e62446c0 100644 --- a/lib/sqlalchemy/dialects/oracle/oracledb.py +++ b/lib/sqlalchemy/dialects/oracle/oracledb.py @@ -1,5 +1,5 @@ # dialects/oracle/oracledb.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/provision.py b/lib/sqlalchemy/dialects/oracle/provision.py index c9100192e17..b33c1525cd5 100644 --- a/lib/sqlalchemy/dialects/oracle/provision.py +++ b/lib/sqlalchemy/dialects/oracle/provision.py @@ -1,5 +1,5 @@ # dialects/oracle/provision.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/types.py b/lib/sqlalchemy/dialects/oracle/types.py index bc9e563ff75..36caaa05e60 100644 --- a/lib/sqlalchemy/dialects/oracle/types.py +++ b/lib/sqlalchemy/dialects/oracle/types.py @@ -1,5 +1,5 @@ # dialects/oracle/types.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py index 5e327a6eefe..f85c1e990da 100644 --- a/lib/sqlalchemy/dialects/postgresql/__init__.py +++ b/lib/sqlalchemy/dialects/postgresql/__init__.py @@ -1,5 +1,5 @@ # dialects/postgresql/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py b/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py index 95f549dc68f..46858c9f22c 100644 --- a/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py +++ b/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py @@ -1,5 +1,5 @@ # dialects/postgresql/_psycopg_common.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index 5c677059b75..9e81e8368c0 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -1,5 +1,5 @@ # dialects/postgresql/array.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 00dbe6d9594..7e93b1232e1 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -1,5 +1,5 @@ # dialects/postgresql/asyncpg.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index ea7ac156fe1..e5f3cb50c58 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ 
b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1,5 +1,5 @@ # dialects/postgresql/base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/dml.py b/lib/sqlalchemy/dialects/postgresql/dml.py index 26300c27de3..f227d0fac52 100644 --- a/lib/sqlalchemy/dialects/postgresql/dml.py +++ b/lib/sqlalchemy/dialects/postgresql/dml.py @@ -1,5 +1,5 @@ # dialects/postgresql/dml.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/ext.py b/lib/sqlalchemy/dialects/postgresql/ext.py index 22815d9fd64..7fc08953fcc 100644 --- a/lib/sqlalchemy/dialects/postgresql/ext.py +++ b/lib/sqlalchemy/dialects/postgresql/ext.py @@ -1,5 +1,5 @@ # dialects/postgresql/ext.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py index 0ef548e7948..04c8cf16015 100644 --- a/lib/sqlalchemy/dialects/postgresql/hstore.py +++ b/lib/sqlalchemy/dialects/postgresql/hstore.py @@ -1,5 +1,5 @@ # dialects/postgresql/hstore.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index a0f1814a7a8..dff12e7f498 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -1,5 +1,5 @@ # dialects/postgresql/json.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/named_types.py b/lib/sqlalchemy/dialects/postgresql/named_types.py index 26d690ccd30..a0a34a96488 100644 --- a/lib/sqlalchemy/dialects/postgresql/named_types.py +++ b/lib/sqlalchemy/dialects/postgresql/named_types.py @@ -1,5 +1,5 @@ # dialects/postgresql/named_types.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/operators.py b/lib/sqlalchemy/dialects/postgresql/operators.py index a712022bcb7..53e175f9c54 100644 --- a/lib/sqlalchemy/dialects/postgresql/operators.py +++ b/lib/sqlalchemy/dialects/postgresql/operators.py @@ -1,5 +1,5 @@ # dialects/postgresql/operators.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py index d0de5cd8947..fd7d9a37880 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg8000.py +++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py @@ -1,5 +1,5 @@ # dialects/postgresql/pg8000.py -# Copyright (C) 2005-2023 the 
SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py index 25bd6bb99d9..7b44bc93f7b 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py +++ b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py @@ -1,5 +1,5 @@ # dialects/postgresql/pg_catalog.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/provision.py b/lib/sqlalchemy/dialects/postgresql/provision.py index 9fafaed9baa..a87bb932066 100644 --- a/lib/sqlalchemy/dialects/postgresql/provision.py +++ b/lib/sqlalchemy/dialects/postgresql/provision.py @@ -1,5 +1,5 @@ # dialects/postgresql/provision.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg.py b/lib/sqlalchemy/dialects/postgresql/psycopg.py index 2c7fd592dea..6ab6097985a 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg.py @@ -1,5 +1,5 @@ # dialects/postgresql/psycopg.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index ef960c297d0..0b89149ec9d 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -1,5 +1,5 @@ # dialects/postgresql/psycopg2.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py index df8675bf864..3cc3b69fb34 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py @@ -1,5 +1,5 @@ # dialects/postgresql/psycopg2cffi.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py index ede52634fe8..6faf5e11cd0 100644 --- a/lib/sqlalchemy/dialects/postgresql/ranges.py +++ b/lib/sqlalchemy/dialects/postgresql/ranges.py @@ -1,5 +1,5 @@ # dialects/postgresql/ranges.py -# Copyright (C) 2013-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2013-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/types.py b/lib/sqlalchemy/dialects/postgresql/types.py index 75abab8384a..879389989c0 100644 --- a/lib/sqlalchemy/dialects/postgresql/types.py +++ b/lib/sqlalchemy/dialects/postgresql/types.py @@ -1,5 +1,5 @@ # dialects/postgresql/types.py -# Copyright (C) 2013-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2013-2024 the SQLAlchemy authors and contributors # # # This module is 
part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py index 18edf67f109..45f088e2147 100644 --- a/lib/sqlalchemy/dialects/sqlite/__init__.py +++ b/lib/sqlalchemy/dialects/sqlite/__init__.py @@ -1,5 +1,5 @@ # dialects/sqlite/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py index e553d4700fb..6c915634d11 100644 --- a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py @@ -1,5 +1,5 @@ # dialects/sqlite/aiosqlite.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 1052c3d4d3d..59ba49c25ec 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -1,5 +1,5 @@ # dialects/sqlite/base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/dml.py b/lib/sqlalchemy/dialects/sqlite/dml.py index 007502e9153..42e5b0fc7a5 100644 --- a/lib/sqlalchemy/dialects/sqlite/dml.py +++ b/lib/sqlalchemy/dialects/sqlite/dml.py @@ -1,5 +1,5 @@ # dialects/sqlite/dml.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/json.py b/lib/sqlalchemy/dialects/sqlite/json.py index 6a8f374f944..ec2980297e2 100644 --- a/lib/sqlalchemy/dialects/sqlite/json.py +++ b/lib/sqlalchemy/dialects/sqlite/json.py @@ -1,5 +1,5 @@ # dialects/sqlite/json.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/provision.py b/lib/sqlalchemy/dialects/sqlite/provision.py index 397ef10088f..f18568b0b33 100644 --- a/lib/sqlalchemy/dialects/sqlite/provision.py +++ b/lib/sqlalchemy/dialects/sqlite/provision.py @@ -1,5 +1,5 @@ # dialects/sqlite/provision.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py index df8d7c5d83e..388a4dff817 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py @@ -1,5 +1,5 @@ # dialects/sqlite/pysqlcipher.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index d22cbd6f110..e2487790071 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ 
b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -1,5 +1,5 @@ # dialects/sqlite/pysqlite.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py index 843f970257a..af0f7ee8bec 100644 --- a/lib/sqlalchemy/engine/__init__.py +++ b/lib/sqlalchemy/engine/__init__.py @@ -1,5 +1,5 @@ # engine/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/_py_processors.py b/lib/sqlalchemy/engine/_py_processors.py index bedfaeedfba..2cc35b501eb 100644 --- a/lib/sqlalchemy/engine/_py_processors.py +++ b/lib/sqlalchemy/engine/_py_processors.py @@ -1,5 +1,5 @@ # engine/_py_processors.py -# Copyright (C) 2010-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2010-2024 the SQLAlchemy authors and contributors # # Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com # diff --git a/lib/sqlalchemy/engine/_py_row.py b/lib/sqlalchemy/engine/_py_row.py index 50705a76550..4e1dd7d430d 100644 --- a/lib/sqlalchemy/engine/_py_row.py +++ b/lib/sqlalchemy/engine/_py_row.py @@ -1,5 +1,5 @@ # engine/_py_row.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/_py_util.py b/lib/sqlalchemy/engine/_py_util.py index 2ef9d03ffd8..2be4322abbc 100644 --- a/lib/sqlalchemy/engine/_py_util.py +++ b/lib/sqlalchemy/engine/_py_util.py @@ -1,5 +1,5 @@ # engine/_py_util.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index bcf6ca2280f..6d8cc667045 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -1,5 +1,5 @@ # engine/base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/characteristics.py b/lib/sqlalchemy/engine/characteristics.py index aed2fd6b385..7dd3a2f31e3 100644 --- a/lib/sqlalchemy/engine/characteristics.py +++ b/lib/sqlalchemy/engine/characteristics.py @@ -1,5 +1,5 @@ # engine/characteristics.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/create.py b/lib/sqlalchemy/engine/create.py index 86e801f8d52..16130ca4f10 100644 --- a/lib/sqlalchemy/engine/create.py +++ b/lib/sqlalchemy/engine/create.py @@ -1,5 +1,5 @@ # engine/create.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index ff6e311a743..a46a9af16ff 100644 --- a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ 
-1,5 +1,5 @@ # engine/cursor.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 553d8f0bea1..0d8054a9db9 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -1,5 +1,5 @@ # engine/default.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/events.py b/lib/sqlalchemy/engine/events.py index 4f6353080b7..b8e8936b94c 100644 --- a/lib/sqlalchemy/engine/events.py +++ b/lib/sqlalchemy/engine/events.py @@ -1,5 +1,5 @@ # engine/events.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 4bf0d3e9e7d..9060f599e01 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -1,5 +1,5 @@ # engine/interfaces.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/mock.py b/lib/sqlalchemy/engine/mock.py index 618ea1d85ef..c9fa5eb31a7 100644 --- a/lib/sqlalchemy/engine/mock.py +++ b/lib/sqlalchemy/engine/mock.py @@ -1,5 +1,5 @@ # engine/mock.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/processors.py b/lib/sqlalchemy/engine/processors.py index bdca5351c52..610e03d5a1c 100644 --- a/lib/sqlalchemy/engine/processors.py +++ b/lib/sqlalchemy/engine/processors.py @@ -1,5 +1,5 @@ # engine/processors.py -# Copyright (C) 2010-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2010-2024 the SQLAlchemy authors and contributors # # Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com # diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 66e94429cb1..ef1e566c29e 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -1,5 +1,5 @@ # engine/reflection.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 2e7f1db34c6..c9d51e06677 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -1,5 +1,5 @@ # engine/result.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py index d2bb2e4c9a6..f6209352288 100644 --- a/lib/sqlalchemy/engine/row.py +++ b/lib/sqlalchemy/engine/row.py @@ -1,5 +1,5 @@ # engine/row.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 
2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py index f884f203c9e..30c331e8d44 100644 --- a/lib/sqlalchemy/engine/strategies.py +++ b/lib/sqlalchemy/engine/strategies.py @@ -1,5 +1,5 @@ # engine/strategies.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index 04ae5e91fbb..31e94f441a2 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -1,5 +1,5 @@ # engine/url.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py index 9b147a7014b..3d95ac58625 100644 --- a/lib/sqlalchemy/engine/util.py +++ b/lib/sqlalchemy/engine/util.py @@ -1,5 +1,5 @@ # engine/util.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/__init__.py b/lib/sqlalchemy/event/__init__.py index 20a20d18e61..9b54f07fc42 100644 --- a/lib/sqlalchemy/event/__init__.py +++ b/lib/sqlalchemy/event/__init__.py @@ -1,5 +1,5 @@ # event/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/api.py b/lib/sqlalchemy/event/api.py index bb1dbea0fc9..4a39d10f406 100644 --- a/lib/sqlalchemy/event/api.py +++ b/lib/sqlalchemy/event/api.py @@ -1,5 +1,5 @@ # event/api.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py index 0aa34198305..0a0d9571f71 100644 --- a/lib/sqlalchemy/event/attr.py +++ b/lib/sqlalchemy/event/attr.py @@ -1,5 +1,5 @@ # event/attr.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py index f92b2ede3cd..18a34624783 100644 --- a/lib/sqlalchemy/event/base.py +++ b/lib/sqlalchemy/event/base.py @@ -1,5 +1,5 @@ # event/base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/legacy.py b/lib/sqlalchemy/event/legacy.py index f3a7d04acee..067b7205840 100644 --- a/lib/sqlalchemy/event/legacy.py +++ b/lib/sqlalchemy/event/legacy.py @@ -1,5 +1,5 @@ # event/legacy.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py index 
fb2fed815f1..c048735e21a 100644 --- a/lib/sqlalchemy/event/registry.py +++ b/lib/sqlalchemy/event/registry.py @@ -1,5 +1,5 @@ # event/registry.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py index 0124d14dd5f..8c3bf01cf6a 100644 --- a/lib/sqlalchemy/events.py +++ b/lib/sqlalchemy/events.py @@ -1,5 +1,5 @@ # events.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index 0e90c60e565..c4025a2b8cb 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -1,5 +1,5 @@ # exc.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/__init__.py b/lib/sqlalchemy/ext/__init__.py index e3af738b7ce..f03ed945f35 100644 --- a/lib/sqlalchemy/ext/__init__.py +++ b/lib/sqlalchemy/ext/__init__.py @@ -1,5 +1,5 @@ # ext/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index 31df1345348..9c5a0e4bd40 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -1,5 +1,5 @@ # ext/associationproxy.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/__init__.py b/lib/sqlalchemy/ext/asyncio/__init__.py index 8564db6f22e..78c707b26d8 100644 --- a/lib/sqlalchemy/ext/asyncio/__init__.py +++ b/lib/sqlalchemy/ext/asyncio/__init__.py @@ -1,5 +1,5 @@ # ext/asyncio/__init__.py -# Copyright (C) 2020-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/base.py b/lib/sqlalchemy/ext/asyncio/base.py index 69d9cce55c8..330651b074f 100644 --- a/lib/sqlalchemy/ext/asyncio/base.py +++ b/lib/sqlalchemy/ext/asyncio/base.py @@ -1,5 +1,5 @@ # ext/asyncio/base.py -# Copyright (C) 2020-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index bf968cc3884..5c4ec8cd050 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -1,5 +1,5 @@ # ext/asyncio/engine.py -# Copyright (C) 2020-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/exc.py b/lib/sqlalchemy/ext/asyncio/exc.py index 3f937679b93..1cf6f363860 100644 --- a/lib/sqlalchemy/ext/asyncio/exc.py +++ b/lib/sqlalchemy/ext/asyncio/exc.py @@ -1,5 +1,5 @@ # 
ext/asyncio/exc.py -# Copyright (C) 2020-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/result.py b/lib/sqlalchemy/ext/asyncio/result.py index a13e106ff31..2f664bcd623 100644 --- a/lib/sqlalchemy/ext/asyncio/result.py +++ b/lib/sqlalchemy/ext/asyncio/result.py @@ -1,5 +1,5 @@ # ext/asyncio/result.py -# Copyright (C) 2020-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/scoping.py b/lib/sqlalchemy/ext/asyncio/scoping.py index 4c68f53ffa8..a5127b86613 100644 --- a/lib/sqlalchemy/ext/asyncio/scoping.py +++ b/lib/sqlalchemy/ext/asyncio/scoping.py @@ -1,5 +1,5 @@ # ext/asyncio/scoping.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/session.py b/lib/sqlalchemy/ext/asyncio/session.py index 30232e59cbb..cdca94a9abb 100644 --- a/lib/sqlalchemy/ext/asyncio/session.py +++ b/lib/sqlalchemy/ext/asyncio/session.py @@ -1,5 +1,5 @@ # ext/asyncio/session.py -# Copyright (C) 2020-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py index 18568c7f28f..9247c730e7f 100644 --- a/lib/sqlalchemy/ext/automap.py +++ b/lib/sqlalchemy/ext/automap.py @@ -1,5 +1,5 @@ # ext/automap.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py index 82db494e411..60f7ae66447 100644 --- a/lib/sqlalchemy/ext/baked.py +++ b/lib/sqlalchemy/ext/baked.py @@ -1,5 +1,5 @@ # ext/baked.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py index 39a55410305..01462ad0b48 100644 --- a/lib/sqlalchemy/ext/compiler.py +++ b/lib/sqlalchemy/ext/compiler.py @@ -1,5 +1,5 @@ # ext/compiler.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/declarative/__init__.py b/lib/sqlalchemy/ext/declarative/__init__.py index 2f6b2f23fa8..37da40377b6 100644 --- a/lib/sqlalchemy/ext/declarative/__init__.py +++ b/lib/sqlalchemy/ext/declarative/__init__.py @@ -1,5 +1,5 @@ # ext/declarative/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/declarative/extensions.py b/lib/sqlalchemy/ext/declarative/extensions.py index acc9d08cfbf..c0f7e340580 100644 --- a/lib/sqlalchemy/ext/declarative/extensions.py +++ b/lib/sqlalchemy/ext/declarative/extensions.py @@ -1,5 +1,5 @@ # 
ext/declarative/extensions.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py index 963bd005a4b..4fff1bf8970 100644 --- a/lib/sqlalchemy/ext/horizontal_shard.py +++ b/lib/sqlalchemy/ext/horizontal_shard.py @@ -1,5 +1,5 @@ # ext/horizontal_shard.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index 5aadc329e56..5a35c0a27e6 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -1,5 +1,5 @@ # ext/hybrid.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/indexable.py b/lib/sqlalchemy/ext/indexable.py index b3d90a6e926..3c419308a69 100644 --- a/lib/sqlalchemy/ext/indexable.py +++ b/lib/sqlalchemy/ext/indexable.py @@ -1,5 +1,5 @@ # ext/indexable.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py index 688c762e72b..e84dde26877 100644 --- a/lib/sqlalchemy/ext/instrumentation.py +++ b/lib/sqlalchemy/ext/instrumentation.py @@ -1,5 +1,5 @@ # ext/instrumentation.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index ff4dea08661..0dc65c007b3 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -1,5 +1,5 @@ # ext/mutable.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/__init__.py b/lib/sqlalchemy/ext/mypy/__init__.py index 8a2e38098e3..de2c02ee9f1 100644 --- a/lib/sqlalchemy/ext/mypy/__init__.py +++ b/lib/sqlalchemy/ext/mypy/__init__.py @@ -1,5 +1,5 @@ # ext/mypy/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/apply.py b/lib/sqlalchemy/ext/mypy/apply.py index e18cd08a3fe..4185d29b948 100644 --- a/lib/sqlalchemy/ext/mypy/apply.py +++ b/lib/sqlalchemy/ext/mypy/apply.py @@ -1,5 +1,5 @@ # ext/mypy/apply.py -# Copyright (C) 2021-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/decl_class.py b/lib/sqlalchemy/ext/mypy/decl_class.py index 9e2dcbb9aba..d7dff91cbd8 100644 --- a/lib/sqlalchemy/ext/mypy/decl_class.py +++ b/lib/sqlalchemy/ext/mypy/decl_class.py @@ -1,5 +1,5 @@ # ext/mypy/decl_class.py -# Copyright (C) 2021-2023 the 
SQLAlchemy authors and contributors +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/infer.py b/lib/sqlalchemy/ext/mypy/infer.py index f7626bdf6b1..09b3c443ab0 100644 --- a/lib/sqlalchemy/ext/mypy/infer.py +++ b/lib/sqlalchemy/ext/mypy/infer.py @@ -1,5 +1,5 @@ # ext/mypy/infer.py -# Copyright (C) 2021-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/names.py b/lib/sqlalchemy/ext/mypy/names.py index 256e0be636a..35b4e2ba819 100644 --- a/lib/sqlalchemy/ext/mypy/names.py +++ b/lib/sqlalchemy/ext/mypy/names.py @@ -1,5 +1,5 @@ # ext/mypy/names.py -# Copyright (C) 2021-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/plugin.py b/lib/sqlalchemy/ext/mypy/plugin.py index 862d7d2166f..00eb4d1cc03 100644 --- a/lib/sqlalchemy/ext/mypy/plugin.py +++ b/lib/sqlalchemy/ext/mypy/plugin.py @@ -1,5 +1,5 @@ # ext/mypy/plugin.py -# Copyright (C) 2021-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/util.py b/lib/sqlalchemy/ext/mypy/util.py index 238c82a54f2..10cdb56b050 100644 --- a/lib/sqlalchemy/ext/mypy/util.py +++ b/lib/sqlalchemy/ext/mypy/util.py @@ -1,5 +1,5 @@ # ext/mypy/util.py -# Copyright (C) 2021-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py index a6c42ff0936..1a12cf38c69 100644 --- a/lib/sqlalchemy/ext/orderinglist.py +++ b/lib/sqlalchemy/ext/orderinglist.py @@ -1,5 +1,5 @@ # ext/orderinglist.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py index 706bff29fb0..f21e997a227 100644 --- a/lib/sqlalchemy/ext/serializer.py +++ b/lib/sqlalchemy/ext/serializer.py @@ -1,5 +1,5 @@ # ext/serializer.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/future/__init__.py b/lib/sqlalchemy/future/__init__.py index c76360fcfff..8ce36ccbc24 100644 --- a/lib/sqlalchemy/future/__init__.py +++ b/lib/sqlalchemy/future/__init__.py @@ -1,5 +1,5 @@ # future/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/future/engine.py b/lib/sqlalchemy/future/engine.py index bc43f4601c4..b55cda08d94 100644 --- a/lib/sqlalchemy/future/engine.py +++ b/lib/sqlalchemy/future/engine.py @@ -1,5 +1,5 @@ # future/engine.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # 
This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/inspection.py b/lib/sqlalchemy/inspection.py index 1fe37d925f2..7123d0608db 100644 --- a/lib/sqlalchemy/inspection.py +++ b/lib/sqlalchemy/inspection.py @@ -1,5 +1,5 @@ # inspection.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/log.py b/lib/sqlalchemy/log.py index 55c1a3eb44f..9046e33b75f 100644 --- a/lib/sqlalchemy/log.py +++ b/lib/sqlalchemy/log.py @@ -1,5 +1,5 @@ # log.py -# Copyright (C) 2006-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2006-2024 the SQLAlchemy authors and contributors # # Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk # diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py index f6888aeee45..70a11294314 100644 --- a/lib/sqlalchemy/orm/__init__.py +++ b/lib/sqlalchemy/orm/__init__.py @@ -1,5 +1,5 @@ # orm/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index df36c386416..53577b4da3e 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -1,5 +1,5 @@ # orm/_orm_constructors.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/_typing.py b/lib/sqlalchemy/orm/_typing.py index 78aac6d572d..fa6dd7c3c3a 100644 --- a/lib/sqlalchemy/orm/_typing.py +++ b/lib/sqlalchemy/orm/_typing.py @@ -1,5 +1,5 @@ # orm/_typing.py -# Copyright (C) 2022-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index 1098359ecaa..dc9743b8b3d 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -1,5 +1,5 @@ # orm/attributes.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py index 362346cc2a8..50f6703b5ed 100644 --- a/lib/sqlalchemy/orm/base.py +++ b/lib/sqlalchemy/orm/base.py @@ -1,5 +1,5 @@ # orm/base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py index 31caedc3785..3f558d2d405 100644 --- a/lib/sqlalchemy/orm/bulk_persistence.py +++ b/lib/sqlalchemy/orm/bulk_persistence.py @@ -1,5 +1,5 @@ # orm/bulk_persistence.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/clsregistry.py b/lib/sqlalchemy/orm/clsregistry.py index bb062c5c981..2cce129cbfe 100644 --- 
a/lib/sqlalchemy/orm/clsregistry.py +++ b/lib/sqlalchemy/orm/clsregistry.py @@ -1,5 +1,5 @@ # orm/clsregistry.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py index 3a4964c4609..534a1b64861 100644 --- a/lib/sqlalchemy/orm/collections.py +++ b/lib/sqlalchemy/orm/collections.py @@ -1,5 +1,5 @@ # orm/collections.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index 3e73d80e716..7ab7e6279ea 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -1,5 +1,5 @@ # orm/context.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index 60bd2ae4901..95cec3b874d 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -1,5 +1,5 @@ # orm/decl_api.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index 6e8578863ed..3786cfe4a8f 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -1,5 +1,5 @@ # orm/decl_base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py index e941dbcbf47..9bdd92428e2 100644 --- a/lib/sqlalchemy/orm/dependency.py +++ b/lib/sqlalchemy/orm/dependency.py @@ -1,5 +1,5 @@ # orm/dependency.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py index c1fe9de85ca..a70f0b3ec37 100644 --- a/lib/sqlalchemy/orm/descriptor_props.py +++ b/lib/sqlalchemy/orm/descriptor_props.py @@ -1,5 +1,5 @@ # orm/descriptor_props.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py index 1d0c03606c8..d5db03a19db 100644 --- a/lib/sqlalchemy/orm/dynamic.py +++ b/lib/sqlalchemy/orm/dynamic.py @@ -1,5 +1,5 @@ # orm/dynamic.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py index f3796f03d1e..f2644548c11 100644 --- a/lib/sqlalchemy/orm/evaluator.py +++ b/lib/sqlalchemy/orm/evaluator.py @@ -1,5 +1,5 @@ # orm/evaluator.py -# Copyright (C) 
2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index 1a54dfd49a5..f0ba693f443 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -1,5 +1,5 @@ # orm/events.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py index f30e50350ba..8ab831002ab 100644 --- a/lib/sqlalchemy/orm/exc.py +++ b/lib/sqlalchemy/orm/exc.py @@ -1,5 +1,5 @@ # orm/exc.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py index 81140a94ef5..23682f7ef22 100644 --- a/lib/sqlalchemy/orm/identity.py +++ b/lib/sqlalchemy/orm/identity.py @@ -1,5 +1,5 @@ # orm/identity.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py index b12d80ac4f7..9360e4a3435 100644 --- a/lib/sqlalchemy/orm/instrumentation.py +++ b/lib/sqlalchemy/orm/instrumentation.py @@ -1,5 +1,5 @@ # orm/instrumentation.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index fed07334fb5..3d9018257e8 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -1,5 +1,5 @@ # orm/interfaces.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py index 624e8c199ab..1de71f9c71c 100644 --- a/lib/sqlalchemy/orm/loading.py +++ b/lib/sqlalchemy/orm/loading.py @@ -1,5 +1,5 @@ # orm/loading.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/mapped_collection.py b/lib/sqlalchemy/orm/mapped_collection.py index a75789f851d..24ac0cc1b95 100644 --- a/lib/sqlalchemy/orm/mapped_collection.py +++ b/lib/sqlalchemy/orm/mapped_collection.py @@ -1,5 +1,5 @@ # orm/mapped_collection.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index c66d876e087..9b8ddb10770 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -1,5 +1,5 @@ # orm/mapper.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and 
is released under diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py index 354552a5a40..c97afe7e613 100644 --- a/lib/sqlalchemy/orm/path_registry.py +++ b/lib/sqlalchemy/orm/path_registry.py @@ -1,5 +1,5 @@ # orm/path_registry.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index 1728b4ac88c..0c2529d5d13 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -1,5 +1,5 @@ # orm/persistence.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index 4bb396edc5d..6e2e73dc46f 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -1,5 +1,5 @@ # orm/properties.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 5da7ee9b228..a13e23fc192 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -1,5 +1,5 @@ # orm/query.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index 7e27eff4177..b19d23b277b 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -1,5 +1,5 @@ # orm/relationships.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py index 4af98da0658..ff8271fafdb 100644 --- a/lib/sqlalchemy/orm/scoping.py +++ b/lib/sqlalchemy/orm/scoping.py @@ -1,5 +1,5 @@ # orm/scoping.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index 161b3201566..e80a8af6e29 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -1,5 +1,5 @@ # orm/session.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py index d9e1f854d77..e628a682c6c 100644 --- a/lib/sqlalchemy/orm/state.py +++ b/lib/sqlalchemy/orm/state.py @@ -1,5 +1,5 @@ # orm/state.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/state_changes.py b/lib/sqlalchemy/orm/state_changes.py index 3d74ff2de22..56963c6af1d 100644 --- 
a/lib/sqlalchemy/orm/state_changes.py +++ b/lib/sqlalchemy/orm/state_changes.py @@ -1,5 +1,5 @@ # orm/state_changes.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index 1e58f4091a6..d7671e07941 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -1,5 +1,5 @@ # orm/strategies.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index c62851e1b3b..7de0e31d4d3 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -1,5 +1,5 @@ # orm/strategy_options.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py index dbe8fb7a251..db09a3e9027 100644 --- a/lib/sqlalchemy/orm/sync.py +++ b/lib/sqlalchemy/orm/sync.py @@ -1,5 +1,5 @@ # orm/sync.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py index 20fe022076b..7e2df2b0eff 100644 --- a/lib/sqlalchemy/orm/unitofwork.py +++ b/lib/sqlalchemy/orm/unitofwork.py @@ -1,5 +1,5 @@ # orm/unitofwork.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index ea2f1a12e93..a6186df7f28 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -1,5 +1,5 @@ # orm/util.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/writeonly.py b/lib/sqlalchemy/orm/writeonly.py index 416a0399f93..2f54fc9a86f 100644 --- a/lib/sqlalchemy/orm/writeonly.py +++ b/lib/sqlalchemy/orm/writeonly.py @@ -1,5 +1,5 @@ # orm/writeonly.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/pool/__init__.py b/lib/sqlalchemy/pool/__init__.py index c25a8f85d87..29fd652931f 100644 --- a/lib/sqlalchemy/pool/__init__.py +++ b/lib/sqlalchemy/pool/__init__.py @@ -1,5 +1,5 @@ # pool/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/pool/base.py b/lib/sqlalchemy/pool/base.py index 90ad1d4764c..18e0171989e 100644 --- a/lib/sqlalchemy/pool/base.py +++ b/lib/sqlalchemy/pool/base.py @@ -1,5 +1,5 @@ # pool/base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors 
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/pool/events.py b/lib/sqlalchemy/pool/events.py index 8e06fdbd2be..99d180abc99 100644 --- a/lib/sqlalchemy/pool/events.py +++ b/lib/sqlalchemy/pool/events.py @@ -1,5 +1,5 @@ # pool/events.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/pool/impl.py b/lib/sqlalchemy/pool/impl.py index ced015088cb..5bb2dd7778d 100644 --- a/lib/sqlalchemy/pool/impl.py +++ b/lib/sqlalchemy/pool/impl.py @@ -1,5 +1,5 @@ # pool/impl.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py index 19782bd7cfd..9edca4e5cce 100644 --- a/lib/sqlalchemy/schema.py +++ b/lib/sqlalchemy/schema.py @@ -1,5 +1,5 @@ # schema.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py index a81509fed74..9e0d2ca2a79 100644 --- a/lib/sqlalchemy/sql/__init__.py +++ b/lib/sqlalchemy/sql/__init__.py @@ -1,5 +1,5 @@ # sql/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_dml_constructors.py b/lib/sqlalchemy/sql/_dml_constructors.py index 5c0cc6247a9..a7ead521f86 100644 --- a/lib/sqlalchemy/sql/_dml_constructors.py +++ b/lib/sqlalchemy/sql/_dml_constructors.py @@ -1,5 +1,5 @@ # sql/_dml_constructors.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_elements_constructors.py b/lib/sqlalchemy/sql/_elements_constructors.py index a51e4a2cf4c..9dd2a58a1b8 100644 --- a/lib/sqlalchemy/sql/_elements_constructors.py +++ b/lib/sqlalchemy/sql/_elements_constructors.py @@ -1,5 +1,5 @@ # sql/_elements_constructors.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_orm_types.py b/lib/sqlalchemy/sql/_orm_types.py index 26e289c779f..bccb533ca0e 100644 --- a/lib/sqlalchemy/sql/_orm_types.py +++ b/lib/sqlalchemy/sql/_orm_types.py @@ -1,5 +1,5 @@ # sql/_orm_types.py -# Copyright (C) 2022-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_py_util.py b/lib/sqlalchemy/sql/_py_util.py index edff0d66910..df372bf5d54 100644 --- a/lib/sqlalchemy/sql/_py_util.py +++ b/lib/sqlalchemy/sql/_py_util.py @@ -1,5 +1,5 @@ # sql/_py_util.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released 
under diff --git a/lib/sqlalchemy/sql/_selectable_constructors.py b/lib/sqlalchemy/sql/_selectable_constructors.py index 41e8b6eb164..c1e0fa9dc40 100644 --- a/lib/sqlalchemy/sql/_selectable_constructors.py +++ b/lib/sqlalchemy/sql/_selectable_constructors.py @@ -1,5 +1,5 @@ # sql/_selectable_constructors.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index f99e93fbac6..024507ec798 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -1,5 +1,5 @@ # sql/_typing.py -# Copyright (C) 2022-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/annotation.py b/lib/sqlalchemy/sql/annotation.py index 08ff47d3d64..14e48bd2b8c 100644 --- a/lib/sqlalchemy/sql/annotation.py +++ b/lib/sqlalchemy/sql/annotation.py @@ -1,5 +1,5 @@ # sql/annotation.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 104c5958a07..f8e3e73883a 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -1,5 +1,5 @@ # sql/base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/cache_key.py b/lib/sqlalchemy/sql/cache_key.py index 831b90809b2..640a27de2d7 100644 --- a/lib/sqlalchemy/sql/cache_key.py +++ b/lib/sqlalchemy/sql/cache_key.py @@ -1,5 +1,5 @@ # sql/cache_key.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index 3926e557a94..3d33924d894 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -1,5 +1,5 @@ # sql/coercions.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index b4b8bcfd26e..6c82bab8316 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -1,5 +1,5 @@ # sql/compiler.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py index e51403eceda..fc6f51de1cc 100644 --- a/lib/sqlalchemy/sql/crud.py +++ b/lib/sqlalchemy/sql/crud.py @@ -1,5 +1,5 @@ # sql/crud.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index 06bbcae2e4b..7fcd7e9f8d8 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ 
b/lib/sqlalchemy/sql/ddl.py @@ -1,5 +1,5 @@ # sql/ddl.py -# Copyright (C) 2009-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2009-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py index 5dbf3e3573f..939b14c5d4c 100644 --- a/lib/sqlalchemy/sql/default_comparator.py +++ b/lib/sqlalchemy/sql/default_comparator.py @@ -1,5 +1,5 @@ # sql/default_comparator.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index 4ca6ed338f4..e51e0a59e9c 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -1,5 +1,5 @@ # sql/dml.py -# Copyright (C) 2009-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2009-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 531be31555e..4a65fe67294 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -1,5 +1,5 @@ # sql/elements.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/events.py b/lib/sqlalchemy/sql/events.py index 43102ecc2ae..1a6a9a6a7d0 100644 --- a/lib/sqlalchemy/sql/events.py +++ b/lib/sqlalchemy/sql/events.py @@ -1,5 +1,5 @@ # sql/events.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py index b25fb50d40f..ba42445d013 100644 --- a/lib/sqlalchemy/sql/expression.py +++ b/lib/sqlalchemy/sql/expression.py @@ -1,5 +1,5 @@ # sql/expression.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index 5b54f46ab73..dfa6f9df5ca 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -1,5 +1,5 @@ # sql/functions.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/lambdas.py b/lib/sqlalchemy/sql/lambdas.py index 7aef605ac72..a53ebae7973 100644 --- a/lib/sqlalchemy/sql/lambdas.py +++ b/lib/sqlalchemy/sql/lambdas.py @@ -1,5 +1,5 @@ # sql/lambdas.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/naming.py b/lib/sqlalchemy/sql/naming.py index a0daa2ca860..7213ddb297e 100644 --- a/lib/sqlalchemy/sql/naming.py +++ b/lib/sqlalchemy/sql/naming.py @@ -1,5 +1,5 @@ # sql/naming.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and 
contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index 1d3f2f483f6..98a0abbaa75 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -1,5 +1,5 @@ # sql/operators.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/roles.py b/lib/sqlalchemy/sql/roles.py index 6f299224328..42c561cb4b7 100644 --- a/lib/sqlalchemy/sql/roles.py +++ b/lib/sqlalchemy/sql/roles.py @@ -1,5 +1,5 @@ # sql/roles.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 556e6c81534..fcde05721cf 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -1,5 +1,5 @@ # sql/schema.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 91b939e0af5..ee33d46616a 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -1,5 +1,5 @@ # sql/selectable.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index 7e866cc032d..91e382de694 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -1,5 +1,5 @@ # sql/sqltypes.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/traversals.py b/lib/sqlalchemy/sql/traversals.py index 5758dff3c43..6c44d52175e 100644 --- a/lib/sqlalchemy/sql/traversals.py +++ b/lib/sqlalchemy/sql/traversals.py @@ -1,5 +1,5 @@ # sql/traversals.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 9226b01e61a..e60f09c2394 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -1,5 +1,5 @@ # sql/type_api.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index 19551831fe3..617bf56a6a4 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -1,5 +1,5 @@ # sql/util.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py index cccebe65ba8..2f06ae71a06 100644 --- 
a/lib/sqlalchemy/sql/visitors.py +++ b/lib/sqlalchemy/sql/visitors.py @@ -1,5 +1,5 @@ # sql/visitors.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py index b218774b0d2..d3a6f32c716 100644 --- a/lib/sqlalchemy/testing/__init__.py +++ b/lib/sqlalchemy/testing/__init__.py @@ -1,5 +1,5 @@ # testing/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py index e7b4161672c..baef79d1817 100644 --- a/lib/sqlalchemy/testing/assertions.py +++ b/lib/sqlalchemy/testing/assertions.py @@ -1,5 +1,5 @@ # testing/assertions.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py index 3865497ff4c..e061f269a85 100644 --- a/lib/sqlalchemy/testing/assertsql.py +++ b/lib/sqlalchemy/testing/assertsql.py @@ -1,5 +1,5 @@ # testing/assertsql.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/asyncio.py b/lib/sqlalchemy/testing/asyncio.py index 4236dcf92e2..17dc861c95a 100644 --- a/lib/sqlalchemy/testing/asyncio.py +++ b/lib/sqlalchemy/testing/asyncio.py @@ -1,5 +1,5 @@ # testing/asyncio.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py index 8430203dee2..ed24851df04 100644 --- a/lib/sqlalchemy/testing/config.py +++ b/lib/sqlalchemy/testing/config.py @@ -1,5 +1,5 @@ # testing/config.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py index 749f9c160e8..65b182b9e07 100644 --- a/lib/sqlalchemy/testing/engines.py +++ b/lib/sqlalchemy/testing/engines.py @@ -1,5 +1,5 @@ # testing/engines.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/entities.py b/lib/sqlalchemy/testing/entities.py index 3c43f04613f..8f0f36bd1c4 100644 --- a/lib/sqlalchemy/testing/entities.py +++ b/lib/sqlalchemy/testing/entities.py @@ -1,5 +1,5 @@ # testing/entities.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py index 09cf5b3247a..7dca583f8ec 100644 --- a/lib/sqlalchemy/testing/exclusions.py +++ 
b/lib/sqlalchemy/testing/exclusions.py @@ -1,5 +1,5 @@ # testing/exclusions.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/fixtures/__init__.py b/lib/sqlalchemy/testing/fixtures/__init__.py index 932051ce8ed..5981fb583d2 100644 --- a/lib/sqlalchemy/testing/fixtures/__init__.py +++ b/lib/sqlalchemy/testing/fixtures/__init__.py @@ -1,5 +1,5 @@ # testing/fixtures/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/fixtures/base.py b/lib/sqlalchemy/testing/fixtures/base.py index 199ae7134ea..0697f4902f2 100644 --- a/lib/sqlalchemy/testing/fixtures/base.py +++ b/lib/sqlalchemy/testing/fixtures/base.py @@ -1,5 +1,5 @@ # testing/fixtures/base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/fixtures/mypy.py b/lib/sqlalchemy/testing/fixtures/mypy.py index 80e5ee07335..730c7bdc234 100644 --- a/lib/sqlalchemy/testing/fixtures/mypy.py +++ b/lib/sqlalchemy/testing/fixtures/mypy.py @@ -1,5 +1,5 @@ # testing/fixtures/mypy.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/fixtures/orm.py b/lib/sqlalchemy/testing/fixtures/orm.py index da622c068cf..5ddd21ec64b 100644 --- a/lib/sqlalchemy/testing/fixtures/orm.py +++ b/lib/sqlalchemy/testing/fixtures/orm.py @@ -1,5 +1,5 @@ # testing/fixtures/orm.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/fixtures/sql.py b/lib/sqlalchemy/testing/fixtures/sql.py index 911dddda312..1448510625d 100644 --- a/lib/sqlalchemy/testing/fixtures/sql.py +++ b/lib/sqlalchemy/testing/fixtures/sql.py @@ -1,5 +1,5 @@ # testing/fixtures/sql.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/pickleable.py b/lib/sqlalchemy/testing/pickleable.py index 89155a84190..761891ad4ac 100644 --- a/lib/sqlalchemy/testing/pickleable.py +++ b/lib/sqlalchemy/testing/pickleable.py @@ -1,5 +1,5 @@ # testing/pickleable.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/plugin/__init__.py b/lib/sqlalchemy/testing/plugin/__init__.py index 16031a9824b..0f987773195 100644 --- a/lib/sqlalchemy/testing/plugin/__init__.py +++ b/lib/sqlalchemy/testing/plugin/__init__.py @@ -1,5 +1,5 @@ # testing/plugin/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git 
a/lib/sqlalchemy/testing/plugin/bootstrap.py b/lib/sqlalchemy/testing/plugin/bootstrap.py index e331224b210..d0d375458ed 100644 --- a/lib/sqlalchemy/testing/plugin/bootstrap.py +++ b/lib/sqlalchemy/testing/plugin/bootstrap.py @@ -1,5 +1,5 @@ # testing/plugin/bootstrap.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py index 1f17fc595f6..11eb35cfa9b 100644 --- a/lib/sqlalchemy/testing/plugin/plugin_base.py +++ b/lib/sqlalchemy/testing/plugin/plugin_base.py @@ -1,5 +1,5 @@ # testing/plugin/plugin_base.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py index 47644e3d28b..2752813515a 100644 --- a/lib/sqlalchemy/testing/plugin/pytestplugin.py +++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py @@ -1,5 +1,5 @@ # testing/plugin/pytestplugin.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py index 5471b1cfd48..b9093c9017a 100644 --- a/lib/sqlalchemy/testing/profiling.py +++ b/lib/sqlalchemy/testing/profiling.py @@ -1,5 +1,5 @@ # testing/profiling.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py index 884d558138a..a0ccc15294a 100644 --- a/lib/sqlalchemy/testing/provision.py +++ b/lib/sqlalchemy/testing/provision.py @@ -1,5 +1,5 @@ # testing/provision.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index 4dd5176a3ee..5e95f94a2c2 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -1,5 +1,5 @@ # testing/requirements.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/schema.py b/lib/sqlalchemy/testing/schema.py index 72ef9754ef5..7dfd33d4d09 100644 --- a/lib/sqlalchemy/testing/schema.py +++ b/lib/sqlalchemy/testing/schema.py @@ -1,5 +1,5 @@ # testing/schema.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/__init__.py b/lib/sqlalchemy/testing/suite/__init__.py index 08f31c6c06d..a146cb3163c 100644 --- a/lib/sqlalchemy/testing/suite/__init__.py +++ b/lib/sqlalchemy/testing/suite/__init__.py @@ -1,5 +1,5 @@ # testing/suite/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and 
contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_cte.py b/lib/sqlalchemy/testing/suite/test_cte.py index f73a5a6a781..5d37880e1eb 100644 --- a/lib/sqlalchemy/testing/suite/test_cte.py +++ b/lib/sqlalchemy/testing/suite/test_cte.py @@ -1,5 +1,5 @@ # testing/suite/test_cte.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_ddl.py b/lib/sqlalchemy/testing/suite/test_ddl.py index 2256a03163e..3d9b8ec13d0 100644 --- a/lib/sqlalchemy/testing/suite/test_ddl.py +++ b/lib/sqlalchemy/testing/suite/test_ddl.py @@ -1,5 +1,5 @@ # testing/suite/test_ddl.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_deprecations.py b/lib/sqlalchemy/testing/suite/test_deprecations.py index 793b401ba85..07970c03ecb 100644 --- a/lib/sqlalchemy/testing/suite/test_deprecations.py +++ b/lib/sqlalchemy/testing/suite/test_deprecations.py @@ -1,5 +1,5 @@ # testing/suite/test_deprecations.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_dialect.py b/lib/sqlalchemy/testing/suite/test_dialect.py index 68ae800330a..696472037d1 100644 --- a/lib/sqlalchemy/testing/suite/test_dialect.py +++ b/lib/sqlalchemy/testing/suite/test_dialect.py @@ -1,5 +1,5 @@ # testing/suite/test_dialect.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index e03d4c6430c..cc30945cab6 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -1,5 +1,5 @@ # testing/suite/test_insert.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index 26839ab8777..f0d4dca1c26 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -1,5 +1,5 @@ # testing/suite/test_reflection.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_results.py b/lib/sqlalchemy/testing/suite/test_results.py index 3e688c7cebc..b3f432fb76c 100644 --- a/lib/sqlalchemy/testing/suite/test_results.py +++ b/lib/sqlalchemy/testing/suite/test_results.py @@ -1,5 +1,5 @@ # testing/suite/test_results.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released 
under diff --git a/lib/sqlalchemy/testing/suite/test_rowcount.py b/lib/sqlalchemy/testing/suite/test_rowcount.py index 651e746d46b..c48ed355c91 100644 --- a/lib/sqlalchemy/testing/suite/test_rowcount.py +++ b/lib/sqlalchemy/testing/suite/test_rowcount.py @@ -1,5 +1,5 @@ # testing/suite/test_rowcount.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py index 4825c53a396..866bf09cb5d 100644 --- a/lib/sqlalchemy/testing/suite/test_select.py +++ b/lib/sqlalchemy/testing/suite/test_select.py @@ -1,5 +1,5 @@ # testing/suite/test_select.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_sequence.py b/lib/sqlalchemy/testing/suite/test_sequence.py index b3f63076ae4..138616f1399 100644 --- a/lib/sqlalchemy/testing/suite/test_sequence.py +++ b/lib/sqlalchemy/testing/suite/test_sequence.py @@ -1,5 +1,5 @@ # testing/suite/test_sequence.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py index c9a5d6c2601..4a7c1f199e1 100644 --- a/lib/sqlalchemy/testing/suite/test_types.py +++ b/lib/sqlalchemy/testing/suite/test_types.py @@ -1,5 +1,5 @@ # testing/suite/test_types.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_unicode_ddl.py b/lib/sqlalchemy/testing/suite/test_unicode_ddl.py index cd7f6309bd4..1f15ab5647f 100644 --- a/lib/sqlalchemy/testing/suite/test_unicode_ddl.py +++ b/lib/sqlalchemy/testing/suite/test_unicode_ddl.py @@ -1,5 +1,5 @@ # testing/suite/test_unicode_ddl.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_update_delete.py b/lib/sqlalchemy/testing/suite/test_update_delete.py index 17238a0205f..a46d8fad87e 100644 --- a/lib/sqlalchemy/testing/suite/test_update_delete.py +++ b/lib/sqlalchemy/testing/suite/test_update_delete.py @@ -1,5 +1,5 @@ # testing/suite/test_update_delete.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py index cf24b43a969..a6ce6ca3cc2 100644 --- a/lib/sqlalchemy/testing/util.py +++ b/lib/sqlalchemy/testing/util.py @@ -1,5 +1,5 @@ # testing/util.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py index 6a2ac08e39e..baef037f73e 100644 --- 
a/lib/sqlalchemy/testing/warnings.py +++ b/lib/sqlalchemy/testing/warnings.py @@ -1,5 +1,5 @@ # testing/warnings.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py index dfe6d2edb7c..a5bb56cf661 100644 --- a/lib/sqlalchemy/types.py +++ b/lib/sqlalchemy/types.py @@ -1,5 +1,5 @@ # types.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py index 31630755fc8..69424e7ccb6 100644 --- a/lib/sqlalchemy/util/__init__.py +++ b/lib/sqlalchemy/util/__init__.py @@ -1,5 +1,5 @@ # util/__init__.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index 1e602165c80..615577b742a 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -1,5 +1,5 @@ # util/_collections.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/_concurrency_py3k.py b/lib/sqlalchemy/util/_concurrency_py3k.py index 47da59779fb..82a349409b0 100644 --- a/lib/sqlalchemy/util/_concurrency_py3k.py +++ b/lib/sqlalchemy/util/_concurrency_py3k.py @@ -1,5 +1,5 @@ # util/_concurrency_py3k.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/_has_cy.py b/lib/sqlalchemy/util/_has_cy.py index 37e0c4e891c..7713e236aca 100644 --- a/lib/sqlalchemy/util/_has_cy.py +++ b/lib/sqlalchemy/util/_has_cy.py @@ -1,5 +1,5 @@ # util/_has_cy.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/_py_collections.py b/lib/sqlalchemy/util/_py_collections.py index 7dba5092bcf..745620d92a2 100644 --- a/lib/sqlalchemy/util/_py_collections.py +++ b/lib/sqlalchemy/util/_py_collections.py @@ -1,5 +1,5 @@ # util/_py_collections.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py index 73cdafea5db..e1b5e661433 100644 --- a/lib/sqlalchemy/util/compat.py +++ b/lib/sqlalchemy/util/compat.py @@ -1,5 +1,5 @@ # util/compat.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/concurrency.py b/lib/sqlalchemy/util/concurrency.py index 1141cbc165a..65a62052125 100644 --- a/lib/sqlalchemy/util/concurrency.py +++ b/lib/sqlalchemy/util/concurrency.py @@ -1,5 +1,5 @@ # 
util/concurrency.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py index 26d9924898b..3034715b5e6 100644 --- a/lib/sqlalchemy/util/deprecations.py +++ b/lib/sqlalchemy/util/deprecations.py @@ -1,5 +1,5 @@ # util/deprecations.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 9c56487c400..0a8d87b1880 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -1,5 +1,5 @@ # util/langhelpers.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/preloaded.py b/lib/sqlalchemy/util/preloaded.py index c5b4a0fabb8..e91ce685450 100644 --- a/lib/sqlalchemy/util/preloaded.py +++ b/lib/sqlalchemy/util/preloaded.py @@ -1,5 +1,5 @@ # util/preloaded.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/queue.py b/lib/sqlalchemy/util/queue.py index b641c910c71..08ee9ead8e1 100644 --- a/lib/sqlalchemy/util/queue.py +++ b/lib/sqlalchemy/util/queue.py @@ -1,5 +1,5 @@ # util/queue.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/tool_support.py b/lib/sqlalchemy/util/tool_support.py index 4a9f9473de5..a203a2ab75a 100644 --- a/lib/sqlalchemy/util/tool_support.py +++ b/lib/sqlalchemy/util/tool_support.py @@ -1,5 +1,5 @@ # util/tool_support.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py index 8c6a663f602..aebbb436cec 100644 --- a/lib/sqlalchemy/util/topological.py +++ b/lib/sqlalchemy/util/topological.py @@ -1,5 +1,5 @@ # util/topological.py -# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index faf71c89a29..0a10e9e8b09 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -1,5 +1,5 @@ # util/typing.py -# Copyright (C) 2022-2023 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under From c65e4f4471cd10051476caaadcc92d7a7eb557b4 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 1 Jan 2024 16:54:58 -0500 Subject: [PATCH 055/544] ensure correct lock type propagated in pool recreate Fixed critical issue in asyncio version of the connection pool where calling :meth:`_asyncio.AsyncEngine.dispose` 
would produce a new connection pool that did not fully re-establish the use of asyncio-compatible mutexes, leading to the use of a plain ``threading.Lock()`` which would then cause deadlocks in an asyncio context when using concurrency features like ``asyncio.gather()``. Fixes: #10813 Change-Id: I95ec698b6a1ba79555aa0b28e6bce65fedf3b1fe (cherry picked from commit 2ed32bbf891b8f7e6c151071b4711319d9aa84f0) --- doc/build/changelog/unreleased_14/10813.rst | 11 +++++++++++ lib/sqlalchemy/event/attr.py | 22 ++++++++++++++++++--- test/ext/asyncio/test_engine_py3k.py | 20 +++++++++++++++++++ 3 files changed, 50 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_14/10813.rst diff --git a/doc/build/changelog/unreleased_14/10813.rst b/doc/build/changelog/unreleased_14/10813.rst new file mode 100644 index 00000000000..d4f72d8e0b2 --- /dev/null +++ b/doc/build/changelog/unreleased_14/10813.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, asyncio + :tickets: 10813 + :versions: 1.4.51, 2.0.25 + + Fixed critical issue in asyncio version of the connection pool where + calling :meth:`_asyncio.AsyncEngine.dispose` would produce a new connection + pool that did not fully re-establish the use of asyncio-compatible mutexes, + leading to the use of a plain ``threading.Lock()`` which would then cause + deadlocks in an asyncio context when using concurrency features like + ``asyncio.gather()``. diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py index 0aa34198305..1d5dff749cc 100644 --- a/lib/sqlalchemy/event/attr.py +++ b/lib/sqlalchemy/event/attr.py @@ -404,7 +404,12 @@ def __exit__( class _CompoundListener(_InstanceLevelDispatch[_ET]): - __slots__ = "_exec_once_mutex", "_exec_once", "_exec_w_sync_once" + __slots__ = ( + "_exec_once_mutex", + "_exec_once", + "_exec_w_sync_once", + "_is_asyncio", + ) _exec_once_mutex: _MutexProtocol parent_listeners: Collection[_ListenerFnType] @@ -412,11 +417,18 @@ class _CompoundListener(_InstanceLevelDispatch[_ET]): _exec_once: bool _exec_w_sync_once: bool + def __init__(self, *arg: Any, **kw: Any): + super().__init__(*arg, **kw) + self._is_asyncio = False + def _set_asyncio(self) -> None: - self._exec_once_mutex = AsyncAdaptedLock() + self._is_asyncio = True def _memoized_attr__exec_once_mutex(self) -> _MutexProtocol: - return threading.Lock() + if self._is_asyncio: + return AsyncAdaptedLock() + else: + return threading.Lock() def _exec_once_impl( self, retry_on_exception: bool, *args: Any, **kw: Any @@ -525,6 +537,7 @@ class _ListenerCollection(_CompoundListener[_ET]): propagate: Set[_ListenerFnType] def __init__(self, parent: _ClsLevelDispatch[_ET], target_cls: Type[_ET]): + super().__init__() if target_cls not in parent._clslevel: parent.update_subclass(target_cls) self._exec_once = False @@ -564,6 +577,9 @@ def _update( existing_listeners.extend(other_listeners) + if other._is_asyncio: + self._set_asyncio() + to_associate = other.propagate.union(other_listeners) registry._stored_in_collection_multi(self, other, to_associate) diff --git a/test/ext/asyncio/test_engine_py3k.py b/test/ext/asyncio/test_engine_py3k.py index 7289d5494eb..8c816b1a32b 100644 --- a/test/ext/asyncio/test_engine_py3k.py +++ b/test/ext/asyncio/test_engine_py3k.py @@ -1396,3 +1396,23 @@ def test_regen_trans_but_not_conn(self, connection_no_trans): async_t2 = async_conn.get_transaction() is_(async_t1, async_t2) + + +class PoolRegenTest(EngineFixture): + @testing.requires.queue_pool + @async_test + @testing.variation("do_dispose", [True, False]) + async 
def test_gather_after_dispose(self, testing_engine, do_dispose): + engine = testing_engine( + asyncio=True, options=dict(pool_size=10, max_overflow=10) + ) + + async def thing(engine): + async with engine.connect() as conn: + await conn.exec_driver_sql("select 1") + + if do_dispose: + await engine.dispose() + + tasks = [thing(engine) for _ in range(10)] + await asyncio.gather(*tasks) From edf683ab7dfa29d6eb8e4a7b153b310bdc94ef2d Mon Sep 17 00:00:00 2001 From: jonathan vanasco Date: Fri, 24 Sep 2021 17:48:09 -0400 Subject: [PATCH 056/544] add new notes on viewonly section Updated join_conditions documentation to explain the limits of mutation tracking on advanced relationships and illustrate potential ways to remedy the situation. Instead of simply writing a note, the (functional) code from the original issue was turned into a tutorial that explains the various approaches. Fixes: #4201 Change-Id: Id8bd163777688efd799d9b41f1c9edfce2f4dfad (cherry picked from commit 6f08bb70c6908061636ab01c3b579812cbd9f06c) --- doc/build/glossary.rst | 13 ++ doc/build/orm/join_conditions.rst | 253 +++++++++++++++++++++++- lib/sqlalchemy/orm/_orm_constructors.py | 13 +- 3 files changed, 267 insertions(+), 12 deletions(-) diff --git a/doc/build/glossary.rst b/doc/build/glossary.rst index c3e49cacf61..d6aaba83826 100644 --- a/doc/build/glossary.rst +++ b/doc/build/glossary.rst @@ -811,6 +811,19 @@ Glossary :ref:`session_basics` + flush + flushing + flushed + + This refers to the actual process used by the :term:`unit of work` + to emit changes to a database. In SQLAlchemy this process occurs + via the :class:`_orm.Session` object and is usually automatic, but + can also be controlled manually. + + .. seealso:: + + :ref:`session_flushing` + expire expired expires diff --git a/doc/build/orm/join_conditions.rst b/doc/build/orm/join_conditions.rst index 61f5e451210..a4a905c74cc 100644 --- a/doc/build/orm/join_conditions.rst +++ b/doc/build/orm/join_conditions.rst @@ -752,10 +752,17 @@ there's just "one" table on both the "left" and the "right" side; the complexity is kept within the middle. .. warning:: A relationship like the above is typically marked as - ``viewonly=True`` and should be considered as read-only. While there are + ``viewonly=True``, using :paramref:`_orm.relationship.viewonly`, + and should be considered as read-only. While there are sometimes ways to make relationships like the above writable, this is generally complicated and error prone. +.. seealso:: + + :ref:`relationship_viewonly_notes` + + + .. _relationship_non_primary_mapper: .. _relationship_aliased_class: @@ -1053,3 +1060,247 @@ of special Python attributes. .. seealso:: :ref:`mapper_hybrids` + +.. _relationship_viewonly_notes: + +Notes on using the viewonly relationship parameter +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The :paramref:`_orm.relationship.viewonly` parameter when applied to a +:func:`_orm.relationship` construct indicates that this :func:`_orm.relationship` +will not take part in any ORM :term:`unit of work` operations, and additionally +that the attribute does not expect to participate within in-Python mutations +of its represented collection. This means +that while the viewonly relationship may refer to a mutable Python collection +like a list or set, making changes to that list or set as present on a +mapped instance will have **no effect** on the ORM flush process. 
+ +To explore this scenario consider this mapping:: + + from __future__ import annotations + + import datetime + + from sqlalchemy import and_ + from sqlalchemy import ForeignKey + from sqlalchemy import func + from sqlalchemy.orm import DeclarativeBase + from sqlalchemy.orm import Mapped + from sqlalchemy.orm import mapped_column + from sqlalchemy.orm import relationship + + + class Base(DeclarativeBase): + pass + + + class User(Base): + __tablename__ = "user_account" + + id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[str | None] + + all_tasks: Mapped[list[Task]] = relationship() + + current_week_tasks: Mapped[list[Task]] = relationship( + primaryjoin=lambda: and_( + User.id == Task.user_account_id, + # this expression works on PostgreSQL but may not be supported + # by other database engines + Task.task_date >= func.now() - datetime.timedelta(days=7), + ), + viewonly=True, + ) + + + class Task(Base): + __tablename__ = "task" + + id: Mapped[int] = mapped_column(primary_key=True) + user_account_id: Mapped[int] = mapped_column(ForeignKey("user_account.id")) + description: Mapped[str | None] + task_date: Mapped[datetime.datetime] = mapped_column(server_default=func.now()) + + user: Mapped[User] = relationship(back_populates="current_week_tasks") + +The following sections will note different aspects of this configuration. + +In-Python mutations including backrefs are not appropriate with viewonly=True +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The above mapping targets the ``User.current_week_tasks`` viewonly relationship +as the :term:`backref` target of the ``Task.user`` attribute. This is not +currently flagged by SQLAlchemy's ORM configuration process, however it is a +configuration error. Changing the ``.user`` attribute on a ``Task`` will not +affect the ``.current_week_tasks`` attribute:: + + >>> u1 = User() + >>> t1 = Task(task_date=datetime.datetime.now()) + >>> t1.user = u1 + >>> u1.current_week_tasks + [] + +There is another parameter called :paramref:`_orm.relationship.sync_backref` +which can be turned on here to allow ``.current_week_tasks`` to be mutated in this +case, however this is not considered to be a best practice with a viewonly +relationship, which instead should not be relied upon for in-Python mutations. + +In this mapping, backrefs can be configured between ``User.all_tasks`` and +``Task.user``, as these are both not viewonly and will synchronize normally. + +Beyond the issue of backref mutations being disabled for viewonly relationships, +plain changes to the ``User.all_tasks`` collection in Python +are also not reflected in the ``User.current_week_tasks`` collection until +changes have been flushed to the database. + +Overall, for a use case where a custom collection should respond immediately to +in-Python mutations, the viewonly relationship is generally not appropriate. A +better approach is to use the :ref:`hybrids_toplevel` feature of SQLAlchemy, or +for instance-only cases to use a Python ``@property``, where a user-defined +collection that is generated in terms of the current Python instance can be +implemented.
To change our example to work this way, we repair the +:paramref:`_orm.relationship.back_populates` parameter on ``Task.user`` to +reference ``User.all_tasks``, and +then illustrate a simple ``@property`` that will deliver results in terms of +the immediate ``User.all_tasks`` collection:: + + class User(Base): + __tablename__ = "user_account" + + id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[str | None] + + all_tasks: Mapped[list[Task]] = relationship(back_populates="user") + + @property + def current_week_tasks(self) -> list[Task]: + past_seven_days = datetime.datetime.now() - datetime.timedelta(days=7) + return [t for t in self.all_tasks if t.task_date >= past_seven_days] + + + class Task(Base): + __tablename__ = "task" + + id: Mapped[int] = mapped_column(primary_key=True) + user_account_id: Mapped[int] = mapped_column(ForeignKey("user_account.id")) + description: Mapped[str | None] + task_date: Mapped[datetime.datetime] = mapped_column(server_default=func.now()) + + user: Mapped[User] = relationship(back_populates="all_tasks") + +Using an in-Python collection calculated on the fly each time, we are guaranteed +to have the correct answer at all times, without the need to use a database +at all:: + + >>> u1 = User() + >>> t1 = Task(task_date=datetime.datetime.now()) + >>> t1.user = u1 + >>> u1.current_week_tasks + [<__main__.Task object at 0x7f3d699523c0>] + + +viewonly=True collections / attributes do not get re-queried until expired +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Continuing with the original viewonly attribute, if we do in fact make changes +to the ``User.all_tasks`` collection on a :term:`persistent` object, the +viewonly collection can only show the net result of this change after **two** +things occur. The first is that the change to ``User.all_tasks`` is +:term:`flushed`, so that the new data is available in the database, at least +within the scope of the local transaction. The second is that the ``User.current_week_tasks`` +attribute is :term:`expired` and reloaded via a new SQL query to the database. + +To support this requirement, the simplest flow to use is one where the +**viewonly relationship is consumed only in operations that are primarily read +only to start with**. Such as below, if we retrieve a ``User`` fresh from +the database, the collection will be current:: + + >>> with Session(e) as sess: + ... u1 = sess.scalar(select(User).where(User.id == 1)) + ... print(u1.current_week_tasks) + [<__main__.Task object at 0x7f8711b906b0>] + + +When we make modifications to ``u1.all_tasks``, if we want to see these changes +reflected in the ``u1.current_week_tasks`` viewonly relationship, these changes need to be flushed +and the ``u1.current_week_tasks`` attribute needs to be expired, so that +it will :term:`lazy load` on next access. The simplest approach to this is +to use :meth:`_orm.Session.commit`, keeping the :paramref:`_orm.Session.expire_on_commit` +parameter set at its default of ``True``:: + + >>> with Session(e) as sess: + ... u1 = sess.scalar(select(User).where(User.id == 1)) + ... u1.all_tasks.append(Task(task_date=datetime.datetime.now())) + ... sess.commit() + ... 
print(u1.current_week_tasks) + [<__main__.Task object at 0x7f8711b90ec0>, <__main__.Task object at 0x7f8711b90a10>] + +Above, the call to :meth:`_orm.Session.commit` flushed the changes to ``u1.all_tasks`` +to the database, then expired all objects, so that when we accessed ``u1.current_week_tasks``, +a :term:`lazy load` occurred which fetched the contents for this attribute +freshly from the database. + +To intercept operations without actually committing the transaction, +the attribute needs to be explicitly :term:`expired` +first. A simplistic way to do this is to call :meth:`_orm.Session.expire` directly. In +the example below, :meth:`_orm.Session.flush` sends pending changes to the +database, then :meth:`_orm.Session.expire` is used to expire the ``u1.current_week_tasks`` +collection so that it re-fetches on next access:: + + >>> with Session(e) as sess: + ... u1 = sess.scalar(select(User).where(User.id == 1)) + ... u1.all_tasks.append(Task(task_date=datetime.datetime.now())) + ... sess.flush() + ... sess.expire(u1, ["current_week_tasks"]) + ... print(u1.current_week_tasks) + [<__main__.Task object at 0x7fd95a4c8c50>, <__main__.Task object at 0x7fd95a4c8c80>] + +We can in fact skip the call to :meth:`_orm.Session.flush`, assuming a +:class:`_orm.Session` that keeps :paramref:`_orm.Session.autoflush` at its +default value of ``True``, as the expired ``current_week_tasks`` attribute will +trigger autoflush when accessed after expiration:: + + >>> with Session(e) as sess: + ... u1 = sess.scalar(select(User).where(User.id == 1)) + ... u1.all_tasks.append(Task(task_date=datetime.datetime.now())) + ... sess.expire(u1, ["current_week_tasks"]) + ... print(u1.current_week_tasks) # triggers autoflush before querying + [<__main__.Task object at 0x7fd95a4c8c50>, <__main__.Task object at 0x7fd95a4c8c80>] + +Continuing with the above approach to something more elaborate, we can apply +the expiration programmatically when the related ``User.all_tasks`` collection +changes, using :ref:`event hooks `. This is an **advanced +technique**, where simpler architectures like ``@property`` or sticking to +read-only use cases should be examined first. In our simple example, this +would be configured as:: + + from sqlalchemy import event, inspect + + + @event.listens_for(User.all_tasks, "append") + @event.listens_for(User.all_tasks, "remove") + @event.listens_for(User.all_tasks, "bulk_replace") + def _expire_User_current_week_tasks(target, value, initiator): + inspect(target).session.expire(target, ["current_week_tasks"]) + +With the above hooks, mutation operations are intercepted and result in +the ``User.current_week_tasks`` collection being expired automatically:: + + >>> with Session(e) as sess: + ... u1 = sess.scalar(select(User).where(User.id == 1)) + ... u1.all_tasks.append(Task(task_date=datetime.datetime.now())) + ... print(u1.current_week_tasks) + [<__main__.Task object at 0x7f66d093ccb0>, <__main__.Task object at 0x7f66d093cce0>] + +The :class:`_orm.AttributeEvents` event hooks used above are also triggered +by backref mutations, so with the above hooks a change to ``Task.user`` is +also intercepted:: + + >>> with Session(e) as sess: + ... u1 = sess.scalar(select(User).where(User.id == 1)) + ... t1 = Task(task_date=datetime.datetime.now()) + ... t1.user = u1 + ... sess.add(t1) + ...
print(u1.current_week_tasks) + [<__main__.Task object at 0x7f3b0c070d10>, <__main__.Task object at 0x7f3b0c057d10>] + diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index 53577b4da3e..a541c1fdf1e 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -1688,19 +1688,10 @@ class that will be synchronized with this one. It is usually the full set of related objects, to prevent modifications of the collection from resulting in persistence operations. - When using the :paramref:`_orm.relationship.viewonly` flag in - conjunction with backrefs, the originating relationship for a - particular state change will not produce state changes within the - viewonly relationship. This is the behavior implied by - :paramref:`_orm.relationship.sync_backref` being set to False. - - .. versionchanged:: 1.3.17 - the - :paramref:`_orm.relationship.sync_backref` flag is set to False - when using viewonly in conjunction with backrefs. - .. seealso:: - :paramref:`_orm.relationship.sync_backref` + :ref:`relationship_viewonly_notes` - more details on best practices + when using :paramref:`_orm.relationship.viewonly`. :param sync_backref: A boolean that enables the events used to synchronize the in-Python From 2ce6010c924e963dcd9245b99e583611be5fb061 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 2 Jan 2024 11:36:20 -0500 Subject: [PATCH 057/544] force uselist=False for all collection class not present Fixed issue where ORM Annotated Declarative would mis-interpret the left hand side of a relationship without any collection specified as uselist=True if the left type were given as a class and not a string, without using future-style annotations. Fixes: #10815 Change-Id: I85daccec03f7e6ea3b49eb07c06e0f85e361a1c0 (cherry picked from commit c1139c2e5d2f14738798d3c0deb876286014c808) --- doc/build/changelog/unreleased_20/10815.rst | 8 ++++++ lib/sqlalchemy/orm/relationships.py | 15 ++++------ .../test_tm_future_annotations_sync.py | 28 ++++++++++++++++++- test/orm/declarative/test_typed_mapping.py | 28 ++++++++++++++++++- 4 files changed, 68 insertions(+), 11 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10815.rst diff --git a/doc/build/changelog/unreleased_20/10815.rst b/doc/build/changelog/unreleased_20/10815.rst new file mode 100644 index 00000000000..2240764aebc --- /dev/null +++ b/doc/build/changelog/unreleased_20/10815.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, orm + :tickets: 10815 + + Fixed issue where ORM Annotated Declarative would mis-interpret the left + hand side of a relationship without any collection specified as + uselist=True if the left type were given as a class and not a string, + without using future-style annotations. 
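For orientation only, here is a minimal sketch of the mapping shape that the changelog entry above describes, assuming SQLAlchemy 2.0 Annotated Declarative without future-style annotations; the class names and assertions are illustrative and are not part of the patch. The left-hand annotation names the related class directly (not as a quoted string) and specifies no collection, so the relationship is expected to behave as a scalar, i.e. ``uselist=False``::

    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import DeclarativeBase
    from sqlalchemy.orm import Mapped
    from sqlalchemy.orm import mapped_column
    from sqlalchemy.orm import relationship


    class Base(DeclarativeBase):
        pass


    class Child(Base):
        __tablename__ = "child"

        id: Mapped[int] = mapped_column(primary_key=True)
        parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))

        # string form of the annotation, since Parent is defined below
        parent: Mapped["Parent"] = relationship(back_populates="child")


    class Parent(Base):
        __tablename__ = "parent"

        id: Mapped[int] = mapped_column(primary_key=True)

        # related class given directly, with no collection annotation;
        # interpreted as a scalar relationship, i.e. uselist=False
        child: Mapped[Child] = relationship(back_populates="parent")


    # one-to-one behavior: assigning one side populates the other
    c1 = Child()
    p1 = Parent(child=c1)
    assert p1.child is c1
    assert c1.parent is p1
    assert not Parent.__mapper__.attrs["child"].uselist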
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index b19d23b277b..bcdd79cb75a 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -1814,15 +1814,12 @@ def declarative_scan( argument, originating_module ) - # we don't allow the collection class to be a - # __forward_arg__ right now, so if we see a forward arg here, - # we know there was no collection class either - if ( - self.collection_class is None - and not is_write_only - and not is_dynamic - ): - self.uselist = False + if ( + self.collection_class is None + and not is_write_only + and not is_dynamic + ): + self.uselist = False # ticket #8759 # if a lead argument was given to relationship(), like diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index e64834b39d7..e2a442767ae 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -2750,7 +2750,7 @@ class B(decl_base): is_false(B.__mapper__.attrs["a"].uselist) is_false(B.__mapper__.attrs["a_warg"].uselist) - def test_one_to_one_example(self, decl_base: Type[DeclarativeBase]): + def test_one_to_one_example_quoted(self, decl_base: Type[DeclarativeBase]): """test example in the relationship docs will derive uselist=False correctly""" @@ -2774,6 +2774,32 @@ class Child(decl_base): is_(p1.child, c1) is_(c1.parent, p1) + def test_one_to_one_example_non_quoted( + self, decl_base: Type[DeclarativeBase] + ): + """test example in the relationship docs will derive uselist=False + correctly""" + + class Child(decl_base): + __tablename__ = "child" + + id: Mapped[int] = mapped_column(primary_key=True) + parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id")) + parent: Mapped["Parent"] = relationship(back_populates="child") + + class Parent(decl_base): + __tablename__ = "parent" + + id: Mapped[int] = mapped_column(primary_key=True) + child: Mapped[Child] = relationship( # noqa: F821 + back_populates="parent" + ) + + c1 = Child() + p1 = Parent(child=c1) + is_(p1.child, c1) + is_(c1.parent, p1) + def test_collection_class_dict_no_collection(self, decl_base): class A(decl_base): __tablename__ = "a" diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 44327324cab..0f1bb452d52 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -2741,7 +2741,7 @@ class B(decl_base): is_false(B.__mapper__.attrs["a"].uselist) is_false(B.__mapper__.attrs["a_warg"].uselist) - def test_one_to_one_example(self, decl_base: Type[DeclarativeBase]): + def test_one_to_one_example_quoted(self, decl_base: Type[DeclarativeBase]): """test example in the relationship docs will derive uselist=False correctly""" @@ -2765,6 +2765,32 @@ class Child(decl_base): is_(p1.child, c1) is_(c1.parent, p1) + def test_one_to_one_example_non_quoted( + self, decl_base: Type[DeclarativeBase] + ): + """test example in the relationship docs will derive uselist=False + correctly""" + + class Child(decl_base): + __tablename__ = "child" + + id: Mapped[int] = mapped_column(primary_key=True) + parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id")) + parent: Mapped["Parent"] = relationship(back_populates="child") + + class Parent(decl_base): + __tablename__ = "parent" + + id: Mapped[int] = mapped_column(primary_key=True) + child: Mapped[Child] = relationship( # noqa: F821 + back_populates="parent" + ) + + 
c1 = Child()
+        p1 = Parent(child=c1)
+        is_(p1.child, c1)
+        is_(c1.parent, p1)
+
     def test_collection_class_dict_no_collection(self, decl_base):
         class A(decl_base):
             __tablename__ = "a"

From 35151512c5966657fa9dfcea2f9db7940be839f9 Mon Sep 17 00:00:00 2001
From: Federico Caselli
Date: Thu, 21 Dec 2023 23:41:56 +0100
Subject: [PATCH 058/544] Add oracledb_async driver support

Added support for :ref:`oracledb` in async mode.
The current implementation has some limitations, preventing
support for :meth:`_asyncio.AsyncConnection.stream`.
Improved support is planned for the 2.1 release of SQLAlchemy.

Fixes: #10679
Change-Id: Iff123cf6241bcfa0fbac57529b80f933951be0a7
(cherry picked from commit dca7673fb6c0fd8292ce26676ec479527b52015a)
---
 doc/build/changelog/unreleased_20/10679.rst   |   8 +
 lib/sqlalchemy/connectors/aioodbc.py          |  13 --
 lib/sqlalchemy/connectors/asyncio.py          |   9 +-
 lib/sqlalchemy/dialects/oracle/__init__.py    |   6 +-
 lib/sqlalchemy/dialects/oracle/cx_oracle.py   |   2 +
 lib/sqlalchemy/dialects/oracle/oracledb.py    | 205 +++++++++++++++++-
 lib/sqlalchemy/dialects/postgresql/psycopg.py |   3 +
 lib/sqlalchemy/ext/asyncio/engine.py          |   5 +
 lib/sqlalchemy/testing/provision.py           |   5 +-
 setup.cfg                                     |   1 +
 test/dialect/oracle/test_dialect.py           |  23 +-
 test/dialect/oracle/test_types.py             |  33 +--
 test/ext/asyncio/test_engine_py3k.py          |  56 +++--
 test/ext/asyncio/test_session_py3k.py         |  21 +-
 test/sql/test_operators.py                    |   2 +-
 tox.ini                                       |   2 +-
 16 files changed, 332 insertions(+), 62 deletions(-)
 create mode 100644 doc/build/changelog/unreleased_20/10679.rst

diff --git a/doc/build/changelog/unreleased_20/10679.rst b/doc/build/changelog/unreleased_20/10679.rst
new file mode 100644
index 00000000000..485a87ea75d
--- /dev/null
+++ b/doc/build/changelog/unreleased_20/10679.rst
@@ -0,0 +1,8 @@
+.. change::
+    :tags: oracle, asyncio
+    :tickets: 10679
+
+    Added support for :ref:`oracledb` in async mode.
+    The current implementation has some limitation, preventing
+    the support for :meth:`_asyncio.AsyncConnection.stream`.
+    Improved support if planned for the 2.1 release of SQLAlchemy.
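A short sketch of how the new asyncio mode is selected, using the placeholder
connection URL that appears in the dialect documentation added by this patch;
the async implementation is chosen automatically when the URL is handed to
:func:`_asyncio.create_async_engine`::

    from sqlalchemy.ext.asyncio import create_async_engine

    # the explicit "oracledb_async" driver name selects the async dialect;
    # plain "oracle+oracledb://" also works when used with create_async_engine
    engine = create_async_engine(
        "oracle+oracledb_async://scott:tiger@localhost/?service_name=XEPDB1"
    )

Note that the async variant requires a newer ``oracledb`` release than the
sync dialect, per the ``_min_version`` values set in this patch.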
diff --git a/lib/sqlalchemy/connectors/aioodbc.py b/lib/sqlalchemy/connectors/aioodbc.py index 9b861639a49..3b5c3b4978e 100644 --- a/lib/sqlalchemy/connectors/aioodbc.py +++ b/lib/sqlalchemy/connectors/aioodbc.py @@ -170,18 +170,5 @@ def get_pool_class(cls, url): else: return pool.AsyncAdaptedQueuePool - def _do_isolation_level(self, connection, autocommit, isolation_level): - connection.set_autocommit(autocommit) - connection.set_isolation_level(isolation_level) - - def _do_autocommit(self, connection, value): - connection.set_autocommit(value) - - def set_readonly(self, connection, value): - connection.set_read_only(value) - - def set_deferrable(self, connection, value): - connection.set_deferrable(value) - def get_driver_connection(self, connection): return connection._connection diff --git a/lib/sqlalchemy/connectors/asyncio.py b/lib/sqlalchemy/connectors/asyncio.py index 6ba228a6253..0b44f23a025 100644 --- a/lib/sqlalchemy/connectors/asyncio.py +++ b/lib/sqlalchemy/connectors/asyncio.py @@ -35,10 +35,13 @@ def __init__(self, adapt_connection): self.await_ = adapt_connection.await_ cursor = self._connection.cursor() + self._cursor = self._aenter_cursor(cursor) - self._cursor = self.await_(cursor.__aenter__()) self._rows = collections.deque() + def _aenter_cursor(self, cursor): + return self.await_(cursor.__aenter__()) + @property def description(self): return self._cursor.description @@ -77,10 +80,6 @@ async def _execute_async(self, operation, parameters): result = await self._cursor.execute(operation, parameters or ()) if self._cursor.description and not self.server_side: - # aioodbc has a "fake" async result, so we have to pull it out - # of that here since our default result is not async. - # we could just as easily grab "_rows" here and be done with it - # but this is safer. self._rows = collections.deque(await self._cursor.fetchall()) return result diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py index e2c8d327a06..d855122ee0c 100644 --- a/lib/sqlalchemy/dialects/oracle/__init__.py +++ b/lib/sqlalchemy/dialects/oracle/__init__.py @@ -5,7 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors - +from types import ModuleType from . import base # noqa from . import cx_oracle # noqa @@ -33,6 +33,10 @@ from .base import VARCHAR from .base import VARCHAR2 +# Alias oracledb also as oracledb_async +oracledb_async = type( + "oracledb_async", (ModuleType,), {"dialect": oracledb.dialect_async} +) base.dialect = dialect = cx_oracle.dialect diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index 440ccad2bc1..e8ed3ab5cb2 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -815,6 +815,8 @@ def _generate_out_parameter_vars(self): out_parameters[name] = self.cursor.var( dbtype, + # this is fine also in oracledb_async since + # the driver will await the read coroutine outconverter=lambda value: value.read(), arraysize=len_params, ) diff --git a/lib/sqlalchemy/dialects/oracle/oracledb.py b/lib/sqlalchemy/dialects/oracle/oracledb.py index 4c6e62446c0..1229573ad1e 100644 --- a/lib/sqlalchemy/dialects/oracle/oracledb.py +++ b/lib/sqlalchemy/dialects/oracle/oracledb.py @@ -23,6 +23,31 @@ :ref:`cx_oracle` - all of cx_Oracle's notes apply to the oracledb driver as well. 
+The SQLAlchemy ``oracledb`` dialect provides both a sync and an async +implementation under the same dialect name. The proper version is +selected depending on how the engine is created: + +* calling :func:`_sa.create_engine` with ``oracle+oracledb://...`` will + automatically select the sync version, e.g.:: + + from sqlalchemy import create_engine + sync_engine = create_engine("oracle+oracledb://scott:tiger@localhost/?service_name=XEPDB1") + +* calling :func:`_asyncio.create_async_engine` with + ``oracle+oracledb://...`` will automatically select the async version, + e.g.:: + + from sqlalchemy.ext.asyncio import create_async_engine + asyncio_engine = create_async_engine("oracle+oracledb://scott:tiger@localhost/?service_name=XEPDB1") + +The asyncio version of the dialect may also be specified explicitly using the +``oracledb_async`` suffix, as:: + + from sqlalchemy.ext.asyncio import create_async_engine + asyncio_engine = create_async_engine("oracle+oracledb_async://scott:tiger@localhost/?service_name=XEPDB1") + +.. versionadded:: 2.0.25 added support for the async version of oracledb. + Thick mode support ------------------ @@ -49,15 +74,32 @@ .. versionadded:: 2.0.0 added support for oracledb driver. """ # noqa +from __future__ import annotations + +import collections import re +from typing import Any +from typing import TYPE_CHECKING from .cx_oracle import OracleDialect_cx_oracle as _OracleDialect_cx_oracle from ... import exc +from ... import pool +from ...connectors.asyncio import AsyncAdapt_dbapi_connection +from ...connectors.asyncio import AsyncAdapt_dbapi_cursor +from ...connectors.asyncio import AsyncAdaptFallback_dbapi_connection +from ...util import asbool +from ...util import await_fallback +from ...util import await_only + +if TYPE_CHECKING: + from oracledb import AsyncConnection + from oracledb import AsyncCursor class OracleDialect_oracledb(_OracleDialect_cx_oracle): supports_statement_cache = True driver = "oracledb" + _min_version = (1,) def __init__( self, @@ -92,6 +134,10 @@ def import_dbapi(cls): def is_thin_mode(cls, connection): return connection.connection.dbapi_connection.thin + @classmethod + def get_async_dialect_cls(cls, url): + return OracleDialectAsync_oracledb + def _load_version(self, dbapi_module): version = (0, 0, 0) if dbapi_module is not None: @@ -101,10 +147,165 @@ def _load_version(self, dbapi_module): int(x) for x in m.group(1, 2, 3) if x is not None ) self.oracledb_ver = version - if self.oracledb_ver < (1,) and self.oracledb_ver > (0, 0, 0): + if ( + self.oracledb_ver > (0, 0, 0) + and self.oracledb_ver < self._min_version + ): raise exc.InvalidRequestError( - "oracledb version 1 and above are supported" + f"oracledb version {self._min_version} and above are supported" + ) + + +class AsyncAdapt_oracledb_cursor(AsyncAdapt_dbapi_cursor): + _cursor: AsyncCursor + __slots__ = () + + @property + def outputtypehandler(self): + return self._cursor.outputtypehandler + + @outputtypehandler.setter + def outputtypehandler(self, value): + self._cursor.outputtypehandler = value + + def var(self, *args, **kwargs): + return self._cursor.var(*args, **kwargs) + + def close(self): + self._rows.clear() + self._cursor.close() + + def setinputsizes(self, *args: Any, **kwargs: Any) -> Any: + return self._cursor.setinputsizes(*args, **kwargs) + + def _aenter_cursor(self, cursor: AsyncCursor) -> AsyncCursor: + try: + return cursor.__enter__() + except Exception as error: + self._adapt_connection._handle_exception(error) + + async def _execute_async(self, operation, 
parameters): + # override to not use mutex, oracledb already has mutex + + if parameters is None: + result = await self._cursor.execute(operation) + else: + result = await self._cursor.execute(operation, parameters) + + if self._cursor.description and not self.server_side: + self._rows = collections.deque(await self._cursor.fetchall()) + return result + + async def _executemany_async( + self, + operation, + seq_of_parameters, + ): + # override to not use mutex, oracledb already has mutex + return await self._cursor.executemany(operation, seq_of_parameters) + + def __enter__(self): + return self + + def __exit__(self, type_: Any, value: Any, traceback: Any) -> None: + self.close() + + +class AsyncAdapt_oracledb_connection(AsyncAdapt_dbapi_connection): + _connection: AsyncConnection + __slots__ = () + + thin = True + + _cursor_cls = AsyncAdapt_oracledb_cursor + _ss_cursor_cls = None + + @property + def autocommit(self): + return self._connection.autocommit + + @autocommit.setter + def autocommit(self, value): + self._connection.autocommit = value + + @property + def outputtypehandler(self): + return self._connection.outputtypehandler + + @outputtypehandler.setter + def outputtypehandler(self, value): + self._connection.outputtypehandler = value + + @property + def version(self): + return self._connection.version + + @property + def stmtcachesize(self): + return self._connection.stmtcachesize + + @stmtcachesize.setter + def stmtcachesize(self, value): + self._connection.stmtcachesize = value + + def cursor(self): + return AsyncAdapt_oracledb_cursor(self) + + +class AsyncAdaptFallback_oracledb_connection( + AsyncAdaptFallback_dbapi_connection, AsyncAdapt_oracledb_connection +): + __slots__ = () + + +class OracledbAdaptDBAPI: + def __init__(self, oracledb) -> None: + self.oracledb = oracledb + + for k, v in self.oracledb.__dict__.items(): + if k != "connect": + self.__dict__[k] = v + + def connect(self, *arg, **kw): + async_fallback = kw.pop("async_fallback", False) + creator_fn = kw.pop("async_creator_fn", self.oracledb.connect_async) + + if asbool(async_fallback): + return AsyncAdaptFallback_oracledb_connection( + self, await_fallback(creator_fn(*arg, **kw)) + ) + + else: + return AsyncAdapt_oracledb_connection( + self, await_only(creator_fn(*arg, **kw)) ) +class OracleDialectAsync_oracledb(OracleDialect_oracledb): + is_async = True + supports_statement_cache = True + + _min_version = (2,) + + # thick_mode mode is not supported by asyncio, oracledb will raise + @classmethod + def import_dbapi(cls): + import oracledb + + return OracledbAdaptDBAPI(oracledb) + + @classmethod + def get_pool_class(cls, url): + async_fallback = url.query.get("async_fallback", False) + + if asbool(async_fallback): + return pool.FallbackAsyncAdaptedQueuePool + else: + return pool.AsyncAdaptedQueuePool + + def get_driver_connection(self, connection): + return connection._connection + + dialect = OracleDialect_oracledb +dialect_async = OracleDialectAsync_oracledb diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg.py b/lib/sqlalchemy/dialects/postgresql/psycopg.py index 6ab6097985a..df3d50e4867 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg.py @@ -79,6 +79,8 @@ if TYPE_CHECKING: from typing import Iterable + from psycopg import AsyncConnection + logger = logging.getLogger("sqlalchemy.dialects.postgresql") @@ -619,6 +621,7 @@ def __iter__(self): class AsyncAdapt_psycopg_connection(AdaptedConnection): + _connection: AsyncConnection __slots__ = () await_ = 
staticmethod(await_only) diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index 5c4ec8cd050..02b70ecd583 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -573,6 +573,11 @@ async def stream( :meth:`.AsyncConnection.stream_scalars` """ + if not self.dialect.supports_server_side_cursors: + raise exc.InvalidRequestError( + "Cant use `stream` or `stream_scalars` with the current " + "dialect since it does not support server side cursors." + ) result = await greenlet_spawn( self._proxied.execute, diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py index a0ccc15294a..e50c6eb5d5d 100644 --- a/lib/sqlalchemy/testing/provision.py +++ b/lib/sqlalchemy/testing/provision.py @@ -147,7 +147,10 @@ def generate_db_urls(db_urls, extra_drivers): ] for url_obj, dialect in urls_plus_dialects: - backend_to_driver_we_already_have[dialect.name].add(dialect.driver) + # use get_driver_name instead of dialect.driver to account for + # "_async" virtual drivers like oracledb and psycopg + driver_name = url_obj.get_driver_name() + backend_to_driver_we_already_have[dialect.name].add(driver_name) backend_to_driver_we_need = {} diff --git a/setup.cfg b/setup.cfg index 093961626f6..45151ef4d4c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -185,4 +185,5 @@ docker_mssql = mssql+pyodbc://scott:tiger^5HHH@127.0.0.1:1433/test?driver=ODBC+D oracle = oracle+cx_oracle://scott:tiger@oracle18c/xe cxoracle = oracle+cx_oracle://scott:tiger@oracle18c/xe oracledb = oracle+oracledb://scott:tiger@oracle18c/xe +oracledb_async = oracle+oracledb_async://scott:tiger@oracle18c/xe docker_oracle = oracle+cx_oracle://scott:tiger@127.0.0.1:1521/?service_name=FREEPDB1 diff --git a/test/dialect/oracle/test_dialect.py b/test/dialect/oracle/test_dialect.py index 93cf0b74578..68ee3f71800 100644 --- a/test/dialect/oracle/test_dialect.py +++ b/test/dialect/oracle/test_dialect.py @@ -36,6 +36,7 @@ from sqlalchemy.testing import is_false from sqlalchemy.testing import is_true from sqlalchemy.testing.assertions import expect_raises_message +from sqlalchemy.testing.assertions import is_ from sqlalchemy.testing.schema import Column from sqlalchemy.testing.schema import pep435_enum from sqlalchemy.testing.schema import Table @@ -69,6 +70,8 @@ def test_minimum_version(self): class OracleDbDialectTest(fixtures.TestBase): + __only_on__ = "oracle+oracledb" + def test_oracledb_version_parse(self): dialect = oracledb.OracleDialect_oracledb() @@ -84,19 +87,36 @@ def check(version): def test_minimum_version(self): with expect_raises_message( exc.InvalidRequestError, - "oracledb version 1 and above are supported", + r"oracledb version \(1,\) and above are supported", ): oracledb.OracleDialect_oracledb(dbapi=Mock(version="0.1.5")) dialect = oracledb.OracleDialect_oracledb(dbapi=Mock(version="7.1.0")) eq_(dialect.oracledb_ver, (7, 1, 0)) + def test_get_dialect(self): + u = url.URL.create("oracle://") + d = oracledb.OracleDialect_oracledb.get_dialect_cls(u) + is_(d, oracledb.OracleDialect_oracledb) + d = oracledb.OracleDialect_oracledb.get_async_dialect_cls(u) + is_(d, oracledb.OracleDialectAsync_oracledb) + d = oracledb.OracleDialectAsync_oracledb.get_dialect_cls(u) + is_(d, oracledb.OracleDialectAsync_oracledb) + d = oracledb.OracleDialectAsync_oracledb.get_dialect_cls(u) + is_(d, oracledb.OracleDialectAsync_oracledb) + + def test_async_version(self): + e = create_engine("oracle+oracledb_async://") + is_true(isinstance(e.dialect, 
oracledb.OracleDialectAsync_oracledb)) + class OracledbMode(fixtures.TestBase): __backend__ = True __only_on__ = "oracle+oracledb" def _run_in_process(self, fn, fn_kw=None): + if config.db.dialect.is_async: + config.skip_test("thick mode unsupported in async mode") ctx = get_context("spawn") queue = ctx.Queue() process = ctx.Process( @@ -202,6 +222,7 @@ def get_isolation_level(connection): testing.db.dialect.get_isolation_level(dbapi_conn), "READ COMMITTED", ) + conn.close() def test_graceful_failure_isolation_level_not_available(self): engine = engines.testing_engine() diff --git a/test/dialect/oracle/test_types.py b/test/dialect/oracle/test_types.py index 82a81612e1e..d236bf2841e 100644 --- a/test/dialect/oracle/test_types.py +++ b/test/dialect/oracle/test_types.py @@ -50,6 +50,7 @@ from sqlalchemy.testing.schema import Column from sqlalchemy.testing.schema import Table from sqlalchemy.util import b +from sqlalchemy.util.concurrency import await_fallback def exec_sql(conn, sql, *args, **kwargs): @@ -998,13 +999,23 @@ def insert_data(cls, connection): for i in range(1, 11): connection.execute(binary_table.insert(), dict(id=i, data=stream)) + def _read_lob(self, engine, row): + if engine.dialect.is_async: + data = await_fallback(row._mapping["data"].read()) + bindata = await_fallback(row._mapping["bindata"].read()) + else: + data = row._mapping["data"].read() + bindata = row._mapping["bindata"].read() + return data, bindata + def test_lobs_without_convert(self): engine = testing_engine(options=dict(auto_convert_lobs=False)) t = self.tables.z_test with engine.begin() as conn: row = conn.execute(t.select().where(t.c.id == 1)).first() - eq_(row._mapping["data"].read(), "this is text 1") - eq_(row._mapping["bindata"].read(), b("this is binary 1")) + data, bindata = self._read_lob(engine, row) + eq_(data, "this is text 1") + eq_(bindata, b("this is binary 1")) def test_lobs_with_convert(self, connection): t = self.tables.z_test @@ -1028,17 +1039,13 @@ def test_lobs_without_convert_many_rows(self): results = result.fetchall() def go(): - eq_( - [ - dict( - id=row._mapping["id"], - data=row._mapping["data"].read(), - bindata=row._mapping["bindata"].read(), - ) - for row in results - ], - self.data, - ) + actual = [] + for row in results: + data, bindata = self._read_lob(engine, row) + actual.append( + dict(id=row._mapping["id"], data=data, bindata=bindata) + ) + eq_(actual, self.data) # this comes from cx_Oracle because these are raw # cx_Oracle.Variable objects diff --git a/test/ext/asyncio/test_engine_py3k.py b/test/ext/asyncio/test_engine_py3k.py index 8c816b1a32b..59c623127bc 100644 --- a/test/ext/asyncio/test_engine_py3k.py +++ b/test/ext/asyncio/test_engine_py3k.py @@ -785,6 +785,27 @@ async def async_creator(x, y, *, z=None): finally: await greenlet_spawn(conn.close) + @testing.combinations("stream", "stream_scalars", argnames="method") + @async_test + async def test_server_side_required_for_scalars( + self, async_engine, method + ): + with mock.patch.object( + async_engine.dialect, "supports_server_side_cursors", False + ): + async with async_engine.connect() as c: + with expect_raises_message( + exc.InvalidRequestError, + "Cant use `stream` or `stream_scalars` with the current " + "dialect since it does not support server side cursors.", + ): + if method == "stream": + await c.stream(select(1)) + elif method == "stream_scalars": + await c.stream_scalars(select(1)) + else: + testing.fail(method) + class AsyncCreatePoolTest(fixtures.TestBase): @config.fixture @@ -857,44 +878,44 @@ async 
def test_no_async_listeners_pool_event(self, async_engine): ): event.listen(async_engine, "checkout", mock.Mock()) + def select1(self, engine): + if engine.dialect.name == "oracle": + return "select 1 from dual" + else: + return "select 1" + @async_test async def test_sync_before_cursor_execute_engine(self, async_engine): canary = mock.Mock() event.listen(async_engine.sync_engine, "before_cursor_execute", canary) + s1 = self.select1(async_engine) async with async_engine.connect() as conn: sync_conn = conn.sync_connection - await conn.execute(text("select 1")) + await conn.execute(text(s1)) eq_( canary.mock_calls, - [ - mock.call( - sync_conn, mock.ANY, "select 1", mock.ANY, mock.ANY, False - ) - ], + [mock.call(sync_conn, mock.ANY, s1, mock.ANY, mock.ANY, False)], ) @async_test async def test_sync_before_cursor_execute_connection(self, async_engine): canary = mock.Mock() + s1 = self.select1(async_engine) async with async_engine.connect() as conn: sync_conn = conn.sync_connection event.listen( async_engine.sync_engine, "before_cursor_execute", canary ) - await conn.execute(text("select 1")) + await conn.execute(text(s1)) eq_( canary.mock_calls, - [ - mock.call( - sync_conn, mock.ANY, "select 1", mock.ANY, mock.ANY, False - ) - ], + [mock.call(sync_conn, mock.ANY, s1, mock.ANY, mock.ANY, False)], ) @async_test @@ -932,6 +953,9 @@ async def test_inspect_connection(self, async_engine): class AsyncResultTest(EngineFixture): + __backend__ = True + __requires__ = ("server_side_cursors", "async_dialect") + @async_test async def test_no_ss_cursor_w_execute(self, async_engine): users = self.tables.users @@ -1259,7 +1283,13 @@ def test_sync_dbapi_raises(self): def async_engine(self): engine = create_engine("sqlite:///:memory:", future=True) engine.dialect.is_async = True - return _async_engine.AsyncEngine(engine) + engine.dialect.supports_server_side_cursors = True + with mock.patch.object( + engine.dialect.execution_ctx_cls, + "create_server_side_cursor", + engine.dialect.execution_ctx_cls.create_default_cursor, + ): + yield _async_engine.AsyncEngine(engine) @async_test @combinations( diff --git a/test/ext/asyncio/test_session_py3k.py b/test/ext/asyncio/test_session_py3k.py index e38a0cc52a9..2d6ce09da3a 100644 --- a/test/ext/asyncio/test_session_py3k.py +++ b/test/ext/asyncio/test_session_py3k.py @@ -4,7 +4,6 @@ from typing import List from typing import Optional -from sqlalchemy import Column from sqlalchemy import event from sqlalchemy import exc from sqlalchemy import ForeignKey @@ -47,6 +46,7 @@ from sqlalchemy.testing.assertions import not_in from sqlalchemy.testing.entities import ComparableEntity from sqlalchemy.testing.provision import normalize_sequence +from sqlalchemy.testing.schema import Column from .test_engine_py3k import AsyncFixture as _AsyncFixture from ...orm import _fixtures @@ -314,6 +314,7 @@ async def test_stream_partitions(self, async_session, kw): @testing.combinations("statement", "execute", argnames="location") @async_test + @testing.requires.server_side_cursors async def test_no_ss_cursor_w_execute(self, async_session, location): User = self.classes.User @@ -767,7 +768,9 @@ async def go(legacy_inactive_history_style): class A: __tablename__ = "a" - id = Column(Integer, primary_key=True) + id = Column( + Integer, primary_key=True, test_needs_autoincrement=True + ) b = relationship( "B", uselist=False, @@ -779,7 +782,9 @@ class A: @registry.mapped class B: __tablename__ = "b" - id = Column(Integer, primary_key=True) + id = Column( + Integer, primary_key=True, 
test_needs_autoincrement=True + ) a_id = Column(ForeignKey("a.id")) async with async_engine.begin() as conn: @@ -790,14 +795,8 @@ class B: return go @testing.combinations( - ( - "legacy_style", - True, - ), - ( - "new_style", - False, - ), + ("legacy_style", True), + ("new_style", False), argnames="_legacy_inactive_history_style", id_="ia", ) diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py index af51010c761..c841e364db5 100644 --- a/test/sql/test_operators.py +++ b/test/sql/test_operators.py @@ -86,7 +86,7 @@ def operate(self, op, *other, **kwargs): class DefaultColumnComparatorTest( testing.AssertsCompiledSQL, fixtures.TestBase ): - dialect = "default_enhanced" + dialect = __dialect__ = "default_enhanced" @testing.combinations((operators.desc_op, desc), (operators.asc_op, asc)) def test_scalar(self, operator, compare_to): diff --git a/tox.ini b/tox.ini index d60c30ee861..7919ef338dd 100644 --- a/tox.ini +++ b/tox.ini @@ -110,7 +110,7 @@ setenv= oracle: WORKERS={env:TOX_WORKERS:-n2 --max-worker-restart=5} oracle: ORACLE={env:TOX_ORACLE:--db oracle} - oracle: EXTRA_ORACLE_DRIVERS={env:EXTRA_ORACLE_DRIVERS:--dbdriver cx_oracle --dbdriver oracledb} + oracle: EXTRA_ORACLE_DRIVERS={env:EXTRA_ORACLE_DRIVERS:--dbdriver cx_oracle --dbdriver oracledb --dbdriver oracledb_async} sqlite: SQLITE={env:TOX_SQLITE:--db sqlite} sqlite_file: SQLITE={env:TOX_SQLITE_FILE:--db sqlite_file} From dd990d934b2432a3874653a5bafd3f6bb04d2957 Mon Sep 17 00:00:00 2001 From: Paul McMillan Date: Tue, 2 Jan 2024 11:51:48 -0800 Subject: [PATCH 059/544] Fix typo in dataclasses docs (#10809) (cherry picked from commit e1cb7496485549e6548c0ea0806011415cf6137c) --- doc/build/orm/dataclasses.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/orm/dataclasses.rst b/doc/build/orm/dataclasses.rst index 19fabe9f835..1fa37938ec6 100644 --- a/doc/build/orm/dataclasses.rst +++ b/doc/build/orm/dataclasses.rst @@ -41,7 +41,7 @@ decorator. limited and is currently known to be supported by Pyright_ as well as Mypy_ as of **version 1.2**. Note that Mypy 1.1.1 introduced :pep:`681` support but did not correctly accommodate Python descriptors - which will lead to errors when using SQLAlhcemy's ORM mapping scheme. + which will lead to errors when using SQLAlchemy's ORM mapping scheme. .. seealso:: From fd58f058692b022d31bf99bf85b70633094e63ab Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 2 Jan 2024 11:16:13 -0500 Subject: [PATCH 060/544] refactor any_ / all_ Improved compilation of :func:`_sql.any_` / :func:`_sql.all_` in the context of a negation of boolean comparison, will now render ``NOT (expr)`` rather than reversing the equality operator to not equals, allowing finer-grained control of negations for these non-typical operators. 
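A rough illustration of the new behavior, modeled on the tests added in this
patch (the table and column names are illustrative): negating a comparison
against ``any_()`` now wraps the whole expression in ``NOT`` rather than
flipping the comparison operator::

    from sqlalchemy import ARRAY, Column, Integer, MetaData, Table, any_, not_

    t = Table("tab1", MetaData(), Column("arrval", ARRAY(Integer)))

    expr = not_(5 == any_(t.c.arrval))
    # with this change, compiles along the lines of:
    #   NOT (:param_1 = ANY (tab1.arrval))
    print(expr)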
Fixes: #10817 Change-Id: If0b324b1220ad3c7f053af91e8a61c81015f312a (cherry picked from commit f3ca2350a5d0a34d86ceb934682798438f769e59) --- doc/build/changelog/unreleased_20/10817.rst | 8 ++ lib/sqlalchemy/sql/default_comparator.py | 3 +- lib/sqlalchemy/sql/elements.py | 12 ++- lib/sqlalchemy/sql/operators.py | 16 ++-- lib/sqlalchemy/sql/sqltypes.py | 24 ++---- test/sql/test_operators.py | 91 +++++++++++++++------ 6 files changed, 104 insertions(+), 50 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10817.rst diff --git a/doc/build/changelog/unreleased_20/10817.rst b/doc/build/changelog/unreleased_20/10817.rst new file mode 100644 index 00000000000..69634d06dca --- /dev/null +++ b/doc/build/changelog/unreleased_20/10817.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, sql + :tickets: 10817 + + Improved compilation of :func:`_sql.any_` / :func:`_sql.all_` in the + context of a negation of boolean comparison, will now render ``NOT (expr)`` + rather than reversing the equality operator to not equals, allowing + finer-grained control of negations for these non-typical operators. diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py index 939b14c5d4c..072acafed30 100644 --- a/lib/sqlalchemy/sql/default_comparator.py +++ b/lib/sqlalchemy/sql/default_comparator.py @@ -56,7 +56,6 @@ def _boolean_compare( negate_op: Optional[OperatorType] = None, reverse: bool = False, _python_is_types: Tuple[Type[Any], ...] = (type(None), bool), - _any_all_expr: bool = False, result_type: Optional[TypeEngine[bool]] = None, **kwargs: Any, ) -> OperatorExpression[bool]: @@ -90,7 +89,7 @@ def _boolean_compare( negate=negate_op, modifiers=kwargs, ) - elif _any_all_expr: + elif expr._is_collection_aggregate: obj = coercions.expect( roles.ConstExprRole, element=obj, operator=op, expr=expr ) diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 4a65fe67294..f05b2887d72 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -801,6 +801,7 @@ class CompilerColumnElement( __slots__ = () _propagate_attrs = util.EMPTY_DICT + _is_collection_aggregate = False # SQLCoreOperations should be suiting the ExpressionElementRole @@ -1405,6 +1406,7 @@ class ColumnElement( _is_column_element = True _insert_sentinel: bool = False _omit_from_statements = False + _is_collection_aggregate = False foreign_keys: AbstractSet[ForeignKey] = frozenset() @@ -2359,6 +2361,8 @@ class TextClause( _omit_from_statements = False + _is_collection_aggregate = False + @property def _hide_froms(self) -> Iterable[FromClause]: return () @@ -2964,6 +2968,9 @@ def _construct_for_op( *(left_flattened + right_flattened), ) + if right._is_collection_aggregate: + negate = None + return BinaryExpression( left, right, op, type_=type_, negate=negate, modifiers=modifiers ) @@ -3802,6 +3809,7 @@ class CollectionAggregate(UnaryExpression[_T]): """ inherit_cache = True + _is_collection_aggregate = True @classmethod def _create_any( @@ -3843,7 +3851,7 @@ def operate(self, op, *other, **kwargs): raise exc.ArgumentError( "Only comparison operators may be used with ANY/ALL" ) - kwargs["reverse"] = kwargs["_any_all_expr"] = True + kwargs["reverse"] = True return self.comparator.operate(operators.mirror(op), *other, **kwargs) def reverse_operate(self, op, other, **kwargs): @@ -4031,7 +4039,7 @@ def _negate(self): modifiers=self.modifiers, ) else: - return super()._negate() + return self.self_group()._negate() class Slice(ColumnElement[Any]): diff --git 
a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index 98a0abbaa75..b177e01ef1e 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -1819,10 +1819,10 @@ def any_(self) -> ColumnOperators: See the documentation for :func:`_sql.any_` for examples. .. note:: be sure to not confuse the newer - :meth:`_sql.ColumnOperators.any_` method with its older - :class:`_types.ARRAY`-specific counterpart, the - :meth:`_types.ARRAY.Comparator.any` method, which a different - calling syntax and usage pattern. + :meth:`_sql.ColumnOperators.any_` method with the **legacy** + version of this method, the :meth:`_types.ARRAY.Comparator.any` + method that's specific to :class:`_types.ARRAY`, which uses a + different calling style. """ return self.operate(any_op) @@ -1834,10 +1834,10 @@ def all_(self) -> ColumnOperators: See the documentation for :func:`_sql.all_` for examples. .. note:: be sure to not confuse the newer - :meth:`_sql.ColumnOperators.all_` method with its older - :class:`_types.ARRAY`-specific counterpart, the - :meth:`_types.ARRAY.Comparator.all` method, which a different - calling syntax and usage pattern. + :meth:`_sql.ColumnOperators.all_` method with the **legacy** + version of this method, the :meth:`_types.ARRAY.Comparator.all` + method that's specific to :class:`_types.ARRAY`, which uses a + different calling style. """ return self.operate(all_op) diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index 91e382de694..0963e8ed200 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -2924,7 +2924,7 @@ def contains(self, *arg, **kw): def any(self, other, operator=None): """Return ``other operator ANY (array)`` clause. - .. note:: This method is an :class:`_types.ARRAY` - specific + .. legacy:: This method is an :class:`_types.ARRAY` - specific construct that is now superseded by the :func:`_sql.any_` function, which features a different calling style. The :func:`_sql.any_` function is also mirrored at the method level @@ -2958,9 +2958,8 @@ def any(self, other, operator=None): arr_type = self.type - # send plain BinaryExpression so that negate remains at None, - # leading to NOT expr for negation. - return elements.BinaryExpression( + return elements.CollectionAggregate._create_any(self.expr).operate( + operators.mirror(operator), coercions.expect( roles.BinaryElementRole, element=other, @@ -2968,19 +2967,17 @@ def any(self, other, operator=None): expr=self.expr, bindparam_type=arr_type.item_type, ), - elements.CollectionAggregate._create_any(self.expr), - operator, ) @util.preload_module("sqlalchemy.sql.elements") def all(self, other, operator=None): """Return ``other operator ALL (array)`` clause. - .. note:: This method is an :class:`_types.ARRAY` - specific - construct that is now superseded by the :func:`_sql.any_` + .. legacy:: This method is an :class:`_types.ARRAY` - specific + construct that is now superseded by the :func:`_sql.all_` function, which features a different calling style. The - :func:`_sql.any_` function is also mirrored at the method level - via the :meth:`_sql.ColumnOperators.any_` method. + :func:`_sql.all_` function is also mirrored at the method level + via the :meth:`_sql.ColumnOperators.all_` method. Usage of array-specific :meth:`_types.ARRAY.Comparator.all` is as follows:: @@ -3010,9 +3007,8 @@ def all(self, other, operator=None): arr_type = self.type - # send plain BinaryExpression so that negate remains at None, - # leading to NOT expr for negation. 
- return elements.BinaryExpression( + return elements.CollectionAggregate._create_all(self.expr).operate( + operators.mirror(operator), coercions.expect( roles.BinaryElementRole, element=other, @@ -3020,8 +3016,6 @@ def all(self, other, operator=None): expr=self.expr, bindparam_type=arr_type.item_type, ), - elements.CollectionAggregate._create_all(self.expr), - operator, ) comparator_factory = Comparator diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py index af51010c761..7e61920aa29 100644 --- a/test/sql/test_operators.py +++ b/test/sql/test_operators.py @@ -4540,7 +4540,7 @@ def t_fixture(self): ) return t - @testing.combinations( + null_comparisons = testing.combinations( lambda col: any_(col) == None, lambda col: col.any_() == None, lambda col: any_(col) == null(), @@ -4551,12 +4551,23 @@ def t_fixture(self): lambda col: None == col.any_(), argnames="expr", ) + + @null_comparisons @testing.combinations("int", "array", argnames="datatype") def test_any_generic_null(self, datatype, expr, t_fixture): col = t_fixture.c.data if datatype == "int" else t_fixture.c.arrval self.assert_compile(expr(col), "NULL = ANY (tab1.%s)" % col.name) + @null_comparisons + @testing.combinations("int", "array", argnames="datatype") + def test_any_generic_null_negate(self, datatype, expr, t_fixture): + col = t_fixture.c.data if datatype == "int" else t_fixture.c.arrval + + self.assert_compile( + ~expr(col), "NOT (NULL = ANY (tab1.%s))" % col.name + ) + @testing.fixture( params=[ ("ANY", any_), @@ -4565,48 +4576,78 @@ def test_any_generic_null(self, datatype, expr, t_fixture): ("ALL", lambda x: x.all_()), ] ) - def operator(self, request): + def any_all_operators(self, request): return request.param + # test legacy array any() / all(). these are superseded by the + # any_() / all_() versions @testing.fixture( params=[ ("ANY", lambda x, *o: x.any(*o)), ("ALL", lambda x, *o: x.all(*o)), ] ) - def array_op(self, request): + def legacy_any_all_operators(self, request): return request.param - def test_array(self, t_fixture, operator): + def test_array(self, t_fixture, any_all_operators): t = t_fixture - op, fn = operator + op, fn = any_all_operators self.assert_compile( 5 == fn(t.c.arrval), f":param_1 = {op} (tab1.arrval)", checkparams={"param_1": 5}, ) - def test_comparator_array(self, t_fixture, operator): + def test_comparator_inline_negate(self, t_fixture, any_all_operators): t = t_fixture - op, fn = operator + op, fn = any_all_operators + self.assert_compile( + 5 != fn(t.c.arrval), + f":param_1 != {op} (tab1.arrval)", + checkparams={"param_1": 5}, + ) + + @testing.combinations( + (operator.eq, "="), + (operator.ne, "!="), + (operator.gt, ">"), + (operator.le, "<="), + argnames="operator,opstring", + ) + def test_comparator_outer_negate( + self, t_fixture, any_all_operators, operator, opstring + ): + """test #10817""" + t = t_fixture + op, fn = any_all_operators + self.assert_compile( + ~(operator(5, fn(t.c.arrval))), + f"NOT (:param_1 {opstring} {op} (tab1.arrval))", + checkparams={"param_1": 5}, + ) + + def test_comparator_array(self, t_fixture, any_all_operators): + t = t_fixture + op, fn = any_all_operators self.assert_compile( 5 > fn(t.c.arrval), f":param_1 > {op} (tab1.arrval)", checkparams={"param_1": 5}, ) - def test_comparator_array_wexpr(self, t_fixture, operator): + def test_comparator_array_wexpr(self, t_fixture, any_all_operators): t = t_fixture - op, fn = operator + op, fn = any_all_operators self.assert_compile( t.c.data > fn(t.c.arrval), f"tab1.data > {op} (tab1.arrval)", 
checkparams={}, ) - def test_illegal_ops(self, t_fixture, operator): + def test_illegal_ops(self, t_fixture, any_all_operators): t = t_fixture - op, fn = operator + op, fn = any_all_operators assert_raises_message( exc.ArgumentError, @@ -4622,10 +4663,10 @@ def test_illegal_ops(self, t_fixture, operator): t.c.data + fn(t.c.arrval), f"tab1.data + {op} (tab1.arrval)" ) - def test_bindparam_coercion(self, t_fixture, array_op): + def test_bindparam_coercion(self, t_fixture, legacy_any_all_operators): """test #7979""" t = t_fixture - op, fn = array_op + op, fn = legacy_any_all_operators expr = fn(t.c.arrval, bindparam("param")) expected = f"%(param)s = {op} (tab1.arrval)" @@ -4633,9 +4674,11 @@ def test_bindparam_coercion(self, t_fixture, array_op): self.assert_compile(expr, expected, dialect="postgresql") - def test_array_comparator_accessor(self, t_fixture, array_op): + def test_array_comparator_accessor( + self, t_fixture, legacy_any_all_operators + ): t = t_fixture - op, fn = array_op + op, fn = legacy_any_all_operators self.assert_compile( fn(t.c.arrval, 5, operator.gt), @@ -4643,9 +4686,11 @@ def test_array_comparator_accessor(self, t_fixture, array_op): checkparams={"arrval_1": 5}, ) - def test_array_comparator_negate_accessor(self, t_fixture, array_op): + def test_array_comparator_negate_accessor( + self, t_fixture, legacy_any_all_operators + ): t = t_fixture - op, fn = array_op + op, fn = legacy_any_all_operators self.assert_compile( ~fn(t.c.arrval, 5, operator.gt), @@ -4653,9 +4698,9 @@ def test_array_comparator_negate_accessor(self, t_fixture, array_op): checkparams={"arrval_1": 5}, ) - def test_array_expression(self, t_fixture, operator): + def test_array_expression(self, t_fixture, any_all_operators): t = t_fixture - op, fn = operator + op, fn = any_all_operators self.assert_compile( 5 == fn(t.c.arrval[5:6] + postgresql.array([3, 4])), @@ -4671,9 +4716,9 @@ def test_array_expression(self, t_fixture, operator): dialect="postgresql", ) - def test_subq(self, t_fixture, operator): + def test_subq(self, t_fixture, any_all_operators): t = t_fixture - op, fn = operator + op, fn = any_all_operators self.assert_compile( 5 == fn(select(t.c.data).where(t.c.data < 10).scalar_subquery()), @@ -4682,9 +4727,9 @@ def test_subq(self, t_fixture, operator): checkparams={"data_1": 10, "param_1": 5}, ) - def test_scalar_values(self, t_fixture, operator): + def test_scalar_values(self, t_fixture, any_all_operators): t = t_fixture - op, fn = operator + op, fn = any_all_operators self.assert_compile( 5 == fn(values(t.c.data).data([(1,), (42,)]).scalar_values()), From c106bc43046a2bbfd5894cba9f2789bf4c197b01 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 2 Jan 2024 13:03:40 -0500 Subject: [PATCH 061/544] allow literals for function arguments * Fixed the argument types passed to functions so that literal expressions like strings and ints are again interpreted correctly (:ticket:`10818`) this includes a reformatting of the changelog message from #10801 to read as a general "fixed regressions" list. 
Fixes: #10818 Change-Id: I65ad86e096241863e833608d45f0bdb6069f5896 (cherry picked from commit cc26af00e7483289cb2c2fb7c03e2d0c8fb63362) --- doc/build/changelog/unreleased_20/10801.rst | 15 ++- lib/sqlalchemy/sql/functions.py | 115 +++++++++++++----- .../typing/plain_files/sql/functions_again.py | 13 ++ tools/generate_sql_functions.py | 25 +++- 4 files changed, 129 insertions(+), 39 deletions(-) diff --git a/doc/build/changelog/unreleased_20/10801.rst b/doc/build/changelog/unreleased_20/10801.rst index a35a5485d58..a485e1babba 100644 --- a/doc/build/changelog/unreleased_20/10801.rst +++ b/doc/build/changelog/unreleased_20/10801.rst @@ -1,7 +1,14 @@ .. change:: :tags: bug, typing - :tickets: 10801 + :tickets: 10801, 10818 + + Fixed regressions caused by typing added to the ``sqlalchemy.sql.functions`` + module in version 2.0.24, as part of :ticket:`6810`: + + * Further enhancements to pep-484 typing to allow SQL functions from + :attr:`_sql.func` derived elements to work more effectively with ORM-mapped + attributes (:ticket:`10801`) + + * Fixed the argument types passed to functions so that literal expressions + like strings and ints are again interpreted correctly (:ticket:`10818`) - Further enhancements to pep-484 typing to allow SQL functions from - :attr:`_sql.func` derived elements to work more effectively with ORM-mapped - attributes. diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index dfa6f9df5ca..5cb5812d692 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -999,14 +999,16 @@ def cast(self) -> Type[Cast[Any]]: def char_length(self) -> Type[char_length]: ... - # appease mypy which seems to not want to accept _T from - # _ColumnExpressionArgument, as it includes non-generic types + # set ColumnElement[_T] as a separate overload, to appease mypy + # which seems to not want to accept _T from _ColumnExpressionArgument. + # this is even if all non-generic types are removed from it, so + # reasons remain unclear for why this does not work @overload def coalesce( self, col: ColumnElement[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> coalesce[_T]: ... @@ -1015,15 +1017,24 @@ def coalesce( def coalesce( self, col: _ColumnExpressionArgument[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> coalesce[_T]: ... + @overload def coalesce( self, - col: _ColumnExpressionArgument[_T], - *args: _ColumnExpressionArgument[Any], + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], + **kwargs: Any, + ) -> coalesce[_T]: + ... + + def coalesce( + self, + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> coalesce[_T]: ... @@ -1080,14 +1091,16 @@ def localtime(self) -> Type[localtime]: def localtimestamp(self) -> Type[localtimestamp]: ... - # appease mypy which seems to not want to accept _T from - # _ColumnExpressionArgument, as it includes non-generic types + # set ColumnElement[_T] as a separate overload, to appease mypy + # which seems to not want to accept _T from _ColumnExpressionArgument. + # this is even if all non-generic types are removed from it, so + # reasons remain unclear for why this does not work @overload def max( # noqa: A001 self, col: ColumnElement[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> max[_T]: ... 
@@ -1096,27 +1109,38 @@ def max( # noqa: A001 def max( # noqa: A001 self, col: _ColumnExpressionArgument[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> max[_T]: ... + @overload def max( # noqa: A001 self, - col: _ColumnExpressionArgument[_T], - *args: _ColumnExpressionArgument[Any], + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], + **kwargs: Any, + ) -> max[_T]: + ... + + def max( # noqa: A001 + self, + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> max[_T]: ... - # appease mypy which seems to not want to accept _T from - # _ColumnExpressionArgument, as it includes non-generic types + # set ColumnElement[_T] as a separate overload, to appease mypy + # which seems to not want to accept _T from _ColumnExpressionArgument. + # this is even if all non-generic types are removed from it, so + # reasons remain unclear for why this does not work @overload def min( # noqa: A001 self, col: ColumnElement[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> min[_T]: ... @@ -1125,15 +1149,24 @@ def min( # noqa: A001 def min( # noqa: A001 self, col: _ColumnExpressionArgument[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> min[_T]: ... + @overload def min( # noqa: A001 self, - col: _ColumnExpressionArgument[_T], - *args: _ColumnExpressionArgument[Any], + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], + **kwargs: Any, + ) -> min[_T]: + ... + + def min( # noqa: A001 + self, + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> min[_T]: ... @@ -1182,14 +1215,16 @@ def rollup(self) -> Type[rollup[Any]]: def session_user(self) -> Type[session_user]: ... - # appease mypy which seems to not want to accept _T from - # _ColumnExpressionArgument, as it includes non-generic types + # set ColumnElement[_T] as a separate overload, to appease mypy + # which seems to not want to accept _T from _ColumnExpressionArgument. + # this is even if all non-generic types are removed from it, so + # reasons remain unclear for why this does not work @overload def sum( # noqa: A001 self, col: ColumnElement[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> sum[_T]: ... @@ -1198,15 +1233,24 @@ def sum( # noqa: A001 def sum( # noqa: A001 self, col: _ColumnExpressionArgument[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> sum[_T]: ... + @overload def sum( # noqa: A001 self, - col: _ColumnExpressionArgument[_T], - *args: _ColumnExpressionArgument[Any], + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], + **kwargs: Any, + ) -> sum[_T]: + ... + + def sum( # noqa: A001 + self, + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> sum[_T]: ... 
@@ -1576,14 +1620,16 @@ class ReturnTypeFromArgs(GenericFunction[_T]): inherit_cache = True - # appease mypy which seems to not want to accept _T from - # _ColumnExpressionArgument, as it includes non-generic types + # set ColumnElement[_T] as a separate overload, to appease mypy which seems + # to not want to accept _T from _ColumnExpressionArgument. this is even if + # all non-generic types are removed from it, so reasons remain unclear for + # why this does not work @overload def __init__( self, col: ColumnElement[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ): ... @@ -1592,12 +1638,23 @@ def __init__( def __init__( self, col: _ColumnExpressionArgument[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ): ... - def __init__(self, *args: _ColumnExpressionArgument[Any], **kwargs: Any): + @overload + def __init__( + self, + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], + **kwargs: Any, + ): + ... + + def __init__( + self, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any + ): fn_args: Sequence[ColumnElement[Any]] = [ coercions.expect( roles.ExpressionElementRole, diff --git a/test/typing/plain_files/sql/functions_again.py b/test/typing/plain_files/sql/functions_again.py index 87ade922468..da656f2d1d9 100644 --- a/test/typing/plain_files/sql/functions_again.py +++ b/test/typing/plain_files/sql/functions_again.py @@ -15,6 +15,7 @@ class Foo(Base): id: Mapped[int] = mapped_column(primary_key=True) a: Mapped[int] b: Mapped[int] + c: Mapped[str] func.row_number().over(order_by=Foo.a, partition_by=Foo.b.desc()) @@ -41,3 +42,15 @@ class Foo(Base): ).group_by(Foo.a) # EXPECTED_TYPE: Select[Tuple[int, int]] reveal_type(stmt1) + +# test #10818 +# EXPECTED_TYPE: coalesce[str] +reveal_type(func.coalesce(Foo.c, "a", "b")) + + +stmt2 = select( + Foo.a, + func.coalesce(Foo.c, "a", "b"), +).group_by(Foo.a) +# EXPECTED_TYPE: Select[Tuple[int, str]] +reveal_type(stmt2) diff --git a/tools/generate_sql_functions.py b/tools/generate_sql_functions.py index 348b3344845..51422dc7e6b 100644 --- a/tools/generate_sql_functions.py +++ b/tools/generate_sql_functions.py @@ -62,14 +62,16 @@ def process_functions(filename: str, cmd: code_writer_cmd) -> str: textwrap.indent( f""" -# appease mypy which seems to not want to accept _T from -# _ColumnExpressionArgument, as it includes non-generic types +# set ColumnElement[_T] as a separate overload, to appease mypy +# which seems to not want to accept _T from _ColumnExpressionArgument. +# this is even if all non-generic types are removed from it, so +# reasons remain unclear for why this does not work @overload def {key}( {' # noqa: A001' if is_reserved_word else ''} self, col: ColumnElement[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> {fn_class.__name__}[_T]: ... @@ -78,15 +80,26 @@ def {key}( {' # noqa: A001' if is_reserved_word else ''} def {key}( {' # noqa: A001' if is_reserved_word else ''} self, col: _ColumnExpressionArgument[_T], - *args: _ColumnExpressionArgument[Any], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> {fn_class.__name__}[_T]: ... 
+ +@overload def {key}( {' # noqa: A001' if is_reserved_word else ''} self, - col: _ColumnExpressionArgument[_T], - *args: _ColumnExpressionArgument[Any], + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], + **kwargs: Any, +) -> {fn_class.__name__}[_T]: + ... + + +def {key}( {' # noqa: A001' if is_reserved_word else ''} + self, + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> {fn_class.__name__}[_T]: ... From d625a4716af07d35f22fdad5a1f376850165f9cd Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 2 Jan 2024 20:07:10 -0500 Subject: [PATCH 062/544] happy new year, continued Change-Id: Icf6e75119321e07311ae00e708a27239f0205106 --- LICENSE | 2 +- doc/build/conf.py | 2 +- doc/build/copyright.rst | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/LICENSE b/LICENSE index 7bf9bbe9683..967cdc5dc10 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright 2005-2023 SQLAlchemy authors and contributors . +Copyright 2005-2024 SQLAlchemy authors and contributors . Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/doc/build/conf.py b/doc/build/conf.py index 9ff7e3768a9..5ea06dc8f89 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -235,7 +235,7 @@ # General information about the project. project = "SQLAlchemy" -copyright = "2007-2023, the SQLAlchemy authors and contributors" # noqa +copyright = "2007-2024, the SQLAlchemy authors and contributors" # noqa # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/doc/build/copyright.rst b/doc/build/copyright.rst index aa4abac9b1d..b3a67ccf469 100644 --- a/doc/build/copyright.rst +++ b/doc/build/copyright.rst @@ -6,7 +6,7 @@ Appendix: Copyright This is the MIT license: ``_ -Copyright (c) 2005-2023 Michael Bayer and contributors. +Copyright (c) 2005-2024 Michael Bayer and contributors. SQLAlchemy is a trademark of Michael Bayer. Permission is hereby granted, free of charge, to any person obtaining a copy of this From 3a2948993256735e3bf99d4d99750c8e914b3101 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 2 Jan 2024 20:32:38 -0500 Subject: [PATCH 063/544] cherry-pick changelog from 1.4.51 (cherry picked from commit cd6d80e52b07e7c9858e55cfa2e5f32f5dee4b53) --- doc/build/changelog/changelog_14.rst | 38 ++++++++++++++++++++- doc/build/changelog/unreleased_14/10650.rst | 7 ---- doc/build/changelog/unreleased_14/10782.rst | 15 -------- doc/build/changelog/unreleased_14/10813.rst | 11 ------ 4 files changed, 37 insertions(+), 34 deletions(-) delete mode 100644 doc/build/changelog/unreleased_14/10650.rst delete mode 100644 doc/build/changelog/unreleased_14/10782.rst delete mode 100644 doc/build/changelog/unreleased_14/10813.rst diff --git a/doc/build/changelog/changelog_14.rst b/doc/build/changelog/changelog_14.rst index e593bb5d565..5300b0691b1 100644 --- a/doc/build/changelog/changelog_14.rst +++ b/doc/build/changelog/changelog_14.rst @@ -15,7 +15,43 @@ This document details individual issue-level changes made throughout .. changelog:: :version: 1.4.51 - :include_notes_from: unreleased_14 + :released: January 2, 2024 + + .. 
change:: + :tags: bug, mysql + :tickets: 10650 + :versions: 2.0.24 + + Fixed regression introduced by the fix in ticket :ticket:`10492` when using + pool pre-ping with PyMySQL version older than 1.0. + + .. change:: + :tags: bug, orm + :tickets: 10782 + :versions: 2.0.24, 1.4.51 + + Improved a fix first implemented for :ticket:`3208` released in version + 0.9.8, where the registry of classes used internally by declarative could + be subject to a race condition in the case where individual mapped classes + are being garbage collected at the same time while new mapped classes are + being constructed, as can happen in some test suite configurations or + dynamic class creation environments. In addition to the weakref check + already added, the list of items being iterated is also copied first to + avoid "list changed while iterating" errors. Pull request courtesy Yilei + Yang. + + + .. change:: + :tags: bug, asyncio + :tickets: 10813 + :versions: 1.4.51, 2.0.25 + + Fixed critical issue in asyncio version of the connection pool where + calling :meth:`_asyncio.AsyncEngine.dispose` would produce a new connection + pool that did not fully re-establish the use of asyncio-compatible mutexes, + leading to the use of a plain ``threading.Lock()`` which would then cause + deadlocks in an asyncio context when using concurrency features like + ``asyncio.gather()``. .. changelog:: :version: 1.4.50 diff --git a/doc/build/changelog/unreleased_14/10650.rst b/doc/build/changelog/unreleased_14/10650.rst deleted file mode 100644 index dce6b4c75a5..00000000000 --- a/doc/build/changelog/unreleased_14/10650.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, mysql - :tickets: 10650 - :versions: 2.0.24 - - Fixed regression introduced by the fix in ticket :ticket:`10492` when using - pool pre-ping with PyMySQL version older than 1.0. diff --git a/doc/build/changelog/unreleased_14/10782.rst b/doc/build/changelog/unreleased_14/10782.rst deleted file mode 100644 index d7b219a3652..00000000000 --- a/doc/build/changelog/unreleased_14/10782.rst +++ /dev/null @@ -1,15 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10782 - :versions: 2.0.24, 1.4.51 - - Improved a fix first implemented for :ticket:`3208` released in version - 0.9.8, where the registry of classes used internally by declarative could - be subject to a race condition in the case where individual mapped classes - are being garbage collected at the same time while new mapped classes are - being constructed, as can happen in some test suite configurations or - dynamic class creation environments. In addition to the weakref check - already added, the list of items being iterated is also copied first to - avoid "list changed while iterating" errors. Pull request courtesy Yilei - Yang. - diff --git a/doc/build/changelog/unreleased_14/10813.rst b/doc/build/changelog/unreleased_14/10813.rst deleted file mode 100644 index d4f72d8e0b2..00000000000 --- a/doc/build/changelog/unreleased_14/10813.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: bug, asyncio - :tickets: 10813 - :versions: 1.4.51, 2.0.25 - - Fixed critical issue in asyncio version of the connection pool where - calling :meth:`_asyncio.AsyncEngine.dispose` would produce a new connection - pool that did not fully re-establish the use of asyncio-compatible mutexes, - leading to the use of a plain ``threading.Lock()`` which would then cause - deadlocks in an asyncio context when using concurrency features like - ``asyncio.gather()``. 
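A minimal sketch of the usage pattern referenced in the asyncio note above,
with the database URL and driver as placeholder assumptions: the engine is
disposed, re-creating its pool, and concurrent work is then issued with
``asyncio.gather()`` against the new pool::

    import asyncio

    from sqlalchemy import text
    from sqlalchemy.ext.asyncio import create_async_engine


    async def main():
        engine = create_async_engine("sqlite+aiosqlite:///:memory:")
        # dispose() re-creates the pool; it must retain asyncio-compatible mutexes
        await engine.dispose()

        async def ping():
            async with engine.connect() as conn:
                await conn.execute(text("select 1"))

        # concurrent checkouts from the re-created pool
        await asyncio.gather(ping(), ping())


    asyncio.run(main())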
From a6c43a1c5610832e61795c17ae3898b5b63f76a0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 2 Jan 2024 20:32:39 -0500 Subject: [PATCH 064/544] cherry-pick changelog update for 1.4.52 (cherry picked from commit 966c45280825e24904b7adebe4fc10f81ea26347) --- doc/build/changelog/changelog_14.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/build/changelog/changelog_14.rst b/doc/build/changelog/changelog_14.rst index 5300b0691b1..164a10a469d 100644 --- a/doc/build/changelog/changelog_14.rst +++ b/doc/build/changelog/changelog_14.rst @@ -13,6 +13,10 @@ This document details individual issue-level changes made throughout :start-line: 5 +.. changelog:: + :version: 1.4.52 + :include_notes_from: unreleased_14 + .. changelog:: :version: 1.4.51 :released: January 2, 2024 From b9b1a0b827971c3876de1665cade856e4da6ee8a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 2 Jan 2024 20:40:16 -0500 Subject: [PATCH 065/544] changelog fixes Change-Id: Ie0e1d5d2df93e26f31004aff11196043fc665679 (cherry picked from commit 2328b5164125cb0fdb90e85f36d99ef1aa7e3705) --- doc/build/changelog/unreleased_20/10679.rst | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/doc/build/changelog/unreleased_20/10679.rst b/doc/build/changelog/unreleased_20/10679.rst index 485a87ea75d..835b626e98f 100644 --- a/doc/build/changelog/unreleased_20/10679.rst +++ b/doc/build/changelog/unreleased_20/10679.rst @@ -2,7 +2,9 @@ :tags: oracle, asyncio :tickets: 10679 - Added support for :ref:`oracledb` in async mode. - The current implementation has some limitation, preventing - the support for :meth:`_asyncio.AsyncConnection.stream`. - Improved support if planned for the 2.1 release of SQLAlchemy. + Added support for :ref:`oracledb` in asyncio mode, using the newly released + version of the ``oracledb`` DBAPI that includes asyncio support. For the + 2.0 series, this is a preview release, where the current implementation + does not yet have include support for + :meth:`_asyncio.AsyncConnection.stream`. Improved support is planned for + the 2.1 release of SQLAlchemy. From b836ca0a3a213dc387e08df3ba340e6ec3298985 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 2 Jan 2024 20:40:54 -0500 Subject: [PATCH 066/544] - 2.0.25 --- doc/build/changelog/changelog_20.rst | 65 ++++++++++++++++++++- doc/build/changelog/unreleased_20/10679.rst | 10 ---- doc/build/changelog/unreleased_20/10800.rst | 10 ---- doc/build/changelog/unreleased_20/10801.rst | 14 ----- doc/build/changelog/unreleased_20/10807.rst | 7 --- doc/build/changelog/unreleased_20/10815.rst | 8 --- doc/build/changelog/unreleased_20/10817.rst | 8 --- doc/build/conf.py | 4 +- 8 files changed, 66 insertions(+), 60 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/10679.rst delete mode 100644 doc/build/changelog/unreleased_20/10800.rst delete mode 100644 doc/build/changelog/unreleased_20/10801.rst delete mode 100644 doc/build/changelog/unreleased_20/10807.rst delete mode 100644 doc/build/changelog/unreleased_20/10815.rst delete mode 100644 doc/build/changelog/unreleased_20/10817.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index e07119e419b..5f4fac22703 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,70 @@ .. changelog:: :version: 2.0.25 - :include_notes_from: unreleased_20 + :released: January 2, 2024 + + .. 
change:: + :tags: oracle, asyncio + :tickets: 10679 + + Added support for :ref:`oracledb` in asyncio mode, using the newly released + version of the ``oracledb`` DBAPI that includes asyncio support. For the + 2.0 series, this is a preview release, where the current implementation + does not yet have include support for + :meth:`_asyncio.AsyncConnection.stream`. Improved support is planned for + the 2.1 release of SQLAlchemy. + + .. change:: + :tags: bug, orm + :tickets: 10800 + + Fixed issue where when making use of the + :paramref:`_orm.relationship.post_update` feature at the same time as using + a mapper version_id_col could lead to a situation where the second UPDATE + statement emitted by the post-update feature would fail to make use of the + correct version identifier, assuming an UPDATE was already emitted in that + flush which had already bumped the version counter. + + .. change:: + :tags: bug, typing + :tickets: 10801, 10818 + + Fixed regressions caused by typing added to the ``sqlalchemy.sql.functions`` + module in version 2.0.24, as part of :ticket:`6810`: + + * Further enhancements to pep-484 typing to allow SQL functions from + :attr:`_sql.func` derived elements to work more effectively with ORM-mapped + attributes (:ticket:`10801`) + + * Fixed the argument types passed to functions so that literal expressions + like strings and ints are again interpreted correctly (:ticket:`10818`) + + + .. change:: + :tags: usecase, orm + :tickets: 10807 + + Added preliminary support for Python 3.12 pep-695 type alias structures, + when resolving custom type maps for ORM Annotated Declarative mappings. + + + .. change:: + :tags: bug, orm + :tickets: 10815 + + Fixed issue where ORM Annotated Declarative would mis-interpret the left + hand side of a relationship without any collection specified as + uselist=True if the left type were given as a class and not a string, + without using future-style annotations. + + .. change:: + :tags: bug, sql + :tickets: 10817 + + Improved compilation of :func:`_sql.any_` / :func:`_sql.all_` in the + context of a negation of boolean comparison, will now render ``NOT (expr)`` + rather than reversing the equality operator to not equals, allowing + finer-grained control of negations for these non-typical operators. .. changelog:: :version: 2.0.24 diff --git a/doc/build/changelog/unreleased_20/10679.rst b/doc/build/changelog/unreleased_20/10679.rst deleted file mode 100644 index 835b626e98f..00000000000 --- a/doc/build/changelog/unreleased_20/10679.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: oracle, asyncio - :tickets: 10679 - - Added support for :ref:`oracledb` in asyncio mode, using the newly released - version of the ``oracledb`` DBAPI that includes asyncio support. For the - 2.0 series, this is a preview release, where the current implementation - does not yet have include support for - :meth:`_asyncio.AsyncConnection.stream`. Improved support is planned for - the 2.1 release of SQLAlchemy. diff --git a/doc/build/changelog/unreleased_20/10800.rst b/doc/build/changelog/unreleased_20/10800.rst deleted file mode 100644 index 346ae1f5ace..00000000000 --- a/doc/build/changelog/unreleased_20/10800.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. 
change:: - :tags: bug, orm - :tickets: 10800 - - Fixed issue where when making use of the - :paramref:`_orm.relationship.post_update` feature at the same time as using - a mapper version_id_col could lead to a situation where the second UPDATE - statement emitted by the post-update feature would fail to make use of the - correct version identifier, assuming an UPDATE was already emitted in that - flush which had already bumped the version counter. diff --git a/doc/build/changelog/unreleased_20/10801.rst b/doc/build/changelog/unreleased_20/10801.rst deleted file mode 100644 index a485e1babba..00000000000 --- a/doc/build/changelog/unreleased_20/10801.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. change:: - :tags: bug, typing - :tickets: 10801, 10818 - - Fixed regressions caused by typing added to the ``sqlalchemy.sql.functions`` - module in version 2.0.24, as part of :ticket:`6810`: - - * Further enhancements to pep-484 typing to allow SQL functions from - :attr:`_sql.func` derived elements to work more effectively with ORM-mapped - attributes (:ticket:`10801`) - - * Fixed the argument types passed to functions so that literal expressions - like strings and ints are again interpreted correctly (:ticket:`10818`) - diff --git a/doc/build/changelog/unreleased_20/10807.rst b/doc/build/changelog/unreleased_20/10807.rst deleted file mode 100644 index afceef63e30..00000000000 --- a/doc/build/changelog/unreleased_20/10807.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: usecase, orm - :tickets: 10807 - - Added preliminary support for Python 3.12 pep-695 type alias structures, - when resolving custom type maps for ORM Annotated Declarative mappings. - diff --git a/doc/build/changelog/unreleased_20/10815.rst b/doc/build/changelog/unreleased_20/10815.rst deleted file mode 100644 index 2240764aebc..00000000000 --- a/doc/build/changelog/unreleased_20/10815.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10815 - - Fixed issue where ORM Annotated Declarative would mis-interpret the left - hand side of a relationship without any collection specified as - uselist=True if the left type were given as a class and not a string, - without using future-style annotations. diff --git a/doc/build/changelog/unreleased_20/10817.rst b/doc/build/changelog/unreleased_20/10817.rst deleted file mode 100644 index 69634d06dca..00000000000 --- a/doc/build/changelog/unreleased_20/10817.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 10817 - - Improved compilation of :func:`_sql.any_` / :func:`_sql.all_` in the - context of a negation of boolean comparison, will now render ``NOT (expr)`` - rather than reversing the equality operator to not equals, allowing - finer-grained control of negations for these non-typical operators. diff --git a/doc/build/conf.py b/doc/build/conf.py index 5ea06dc8f89..b469037158d 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. 
-release = "2.0.24" +release = "2.0.25" -release_date = "December 28, 2023" +release_date = "January 2, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 68a249649313c2f5e78862b7843637a94da02eb0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 2 Jan 2024 21:22:17 -0500 Subject: [PATCH 067/544] Version 2.0.26 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 5f4fac22703..5bd0385fc5d 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.26 + :include_notes_from: unreleased_20 + .. changelog:: :version: 2.0.25 :released: January 2, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 1cd1abfd07f..544410f69c5 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.25" +__version__ = "2.0.26" def __go(lcls: Any) -> None: From 80f772a6c0d75dc316b4141d83f502f64a9852bd Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 8 Jan 2024 09:15:17 -0500 Subject: [PATCH 068/544] DBAPIConnection can be None for checkin event Fixed the type signature for the :meth:`.PoolEvents.checkin` event to indicate that the given :class:`.DBAPIConnection` argument may be ``None`` in the case where the connection has been invalidated. Change-Id: I4c6f0cf999f2ffb730909e2688eb3b0794ecf2ab (cherry picked from commit 071d3e2d2b11a96fc5a143530357244177259189) --- doc/build/changelog/unreleased_20/checkin_conn_none.rst | 6 ++++++ lib/sqlalchemy/pool/events.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/checkin_conn_none.rst diff --git a/doc/build/changelog/unreleased_20/checkin_conn_none.rst b/doc/build/changelog/unreleased_20/checkin_conn_none.rst new file mode 100644 index 00000000000..9aeed4784fd --- /dev/null +++ b/doc/build/changelog/unreleased_20/checkin_conn_none.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, typing + + Fixed the type signature for the :meth:`.PoolEvents.checkin` event to + indicate that the given :class:`.DBAPIConnection` argument may be ``None`` + in the case where the connection has been invalidated. diff --git a/lib/sqlalchemy/pool/events.py b/lib/sqlalchemy/pool/events.py index 99d180abc99..4b4f4e47851 100644 --- a/lib/sqlalchemy/pool/events.py +++ b/lib/sqlalchemy/pool/events.py @@ -173,7 +173,7 @@ def checkout( def checkin( self, - dbapi_connection: DBAPIConnection, + dbapi_connection: Optional[DBAPIConnection], connection_record: ConnectionPoolEntry, ) -> None: """Called when a connection returns to the pool. 
From 764168ecc05f83eb459523a7fc7de3deec27584b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateusz=20B=C4=85czek?= Date: Mon, 8 Jan 2024 19:44:18 +0100 Subject: [PATCH 069/544] Fix typo in 'Mapping Table Columns' documentation (#10842) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Mateusz Bączek (cherry picked from commit f309674e14072d27aaf1eae521acf4eb7f79a842) --- doc/build/orm/mapping_columns.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/orm/mapping_columns.rst b/doc/build/orm/mapping_columns.rst index 25c6604fafa..30220baebc8 100644 --- a/doc/build/orm/mapping_columns.rst +++ b/doc/build/orm/mapping_columns.rst @@ -4,6 +4,6 @@ Mapping Table Columns ===================== This section has been integrated into the -:ref:`orm_declarative_table_config_toplevel` Declarative section. +:ref:`orm_declarative_table_config_toplevel` section. From 85ae6c96d31d3d12e88cf989c90120418856fce7 Mon Sep 17 00:00:00 2001 From: Xiaokui Shu Date: Mon, 8 Jan 2024 13:44:53 -0500 Subject: [PATCH 070/544] fix code typo in doc:faq:sqlexpressions on `in_()` (#10845) (cherry picked from commit 66cb236856cb458f34b5aa1e4f2ec737e1e45f76) --- doc/build/faq/sqlexpressions.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/faq/sqlexpressions.rst b/doc/build/faq/sqlexpressions.rst index 051d5cca204..7a03bdb0362 100644 --- a/doc/build/faq/sqlexpressions.rst +++ b/doc/build/faq/sqlexpressions.rst @@ -319,7 +319,7 @@ known values are passed. "Expanding" parameters are used for string can be safely cached independently of the actual lists of values being passed to a particular invocation of :meth:`_sql.ColumnOperators.in_`:: - >>> stmt = select(A).where(A.id.in_[1, 2, 3]) + >>> stmt = select(A).where(A.id.in_([1, 2, 3])) To render the IN clause with real bound parameter symbols, use the ``render_postcompile=True`` flag with :meth:`_sql.ClauseElement.compile`: From 29ffe2ca2af2ee5390c2f195f2c5de0e73a769a2 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 9 Jan 2024 13:45:52 -0500 Subject: [PATCH 071/544] Add note that password parameter is not to be url encoded References: #10852 Change-Id: Ifa44513ce315214fa5d1b55d3e92b53889caeacc (cherry picked from commit d9ed5cb521d5e7a2b62646b43eaebc1ccf084b40) --- lib/sqlalchemy/engine/url.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index 31e94f441a2..db4f2879c7f 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -171,6 +171,11 @@ def create( :param password: database password. Is typically a string, but may also be an object that can be stringified with ``str()``. + .. note:: The password string should **not** be URL encoded when + passed as an argument to :meth:`_engine.URL.create`; the string + should contain the password characters exactly as they would be + typed. + .. note:: A password-producing object will be stringified only **once** per :class:`_engine.Engine` object. For dynamic password generation per connect, see :ref:`engines_dynamic_tokens`. 
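
The documentation change above distinguishes :meth:`_engine.URL.create`, which takes the password exactly as typed, from a plain URL string, where special characters must be percent-encoded. A short sketch, with placeholder credentials and database names (illustrative only, not part of the patch)::

    from urllib.parse import quote_plus

    from sqlalchemy import create_engine
    from sqlalchemy.engine import URL

    # URL.create(): pass the password verbatim, with no URL encoding
    url = URL.create(
        "postgresql+psycopg2",
        username="scott",
        password="p@ss w:rd",  # used exactly as given
        host="localhost",
        database="mydb",
    )
    engine = create_engine(url)

    # plain URL string: the same password must be percent-encoded
    engine = create_engine(
        "postgresql+psycopg2://scott:%s@localhost/mydb"
        % quote_plus("p@ss w:rd")
    )
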
From 4c17299e74c6d07f81f3416a7e1ff809a79c03d0 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 10 Jan 2024 19:30:53 +0100 Subject: [PATCH 072/544] remove unnecessary execution_options.merge_with in _execute_ddl Change-Id: Idcd886bf6ad5db28c4dc581a7f1e91e12f6f9a05 (cherry picked from commit 396b1e621f0576b2df9da8b728a21abc99951901) --- lib/sqlalchemy/engine/base.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 6d8cc667045..dcce3ed342b 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -1498,7 +1498,7 @@ def _execute_ddl( ) -> CursorResult[Any]: """Execute a schema.DDL object.""" - execution_options = ddl._execution_options.merge_with( + exec_opts = ddl._execution_options.merge_with( self._execution_options, execution_options ) @@ -1512,12 +1512,11 @@ def _execute_ddl( event_multiparams, event_params, ) = self._invoke_before_exec_event( - ddl, distilled_parameters, execution_options + ddl, distilled_parameters, exec_opts ) else: event_multiparams = event_params = None - exec_opts = self._execution_options.merge_with(execution_options) schema_translate_map = exec_opts.get("schema_translate_map", None) dialect = self.dialect @@ -1530,7 +1529,7 @@ def _execute_ddl( dialect.execution_ctx_cls._init_ddl, compiled, None, - execution_options, + exec_opts, compiled, ) if self._has_events or self.engine._has_events: @@ -1539,7 +1538,7 @@ def _execute_ddl( ddl, event_multiparams, event_params, - execution_options, + exec_opts, ret, ) return ret From e525704800bbc483b4ca22997157d75bc90b9a41 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 10 Jan 2024 22:31:59 -0500 Subject: [PATCH 073/544] catch OSError (base of ConnectionError) and asyncpg errors for terminate Fixed regression in the asyncpg dialect caused by :ticket:`10717` in release 2.0.24 where the change that now attempts to gracefully close the asyncpg connection before terminating would not fall back to ``terminate()`` for other potential connection-related exceptions other than a timeout error, not taking into account cases where the graceful ``.close()`` attempt fails for other reasons such as connection errors. Fixes: #10863 Change-Id: If1791bce26803f92547cdf26fb641996c7f638fa (cherry picked from commit eff3aa8ad6bf74181280a85bf03d401126c65b01) --- doc/build/changelog/unreleased_20/10863.rst | 11 +++++++++++ lib/sqlalchemy/dialects/postgresql/asyncpg.py | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/10863.rst diff --git a/doc/build/changelog/unreleased_20/10863.rst b/doc/build/changelog/unreleased_20/10863.rst new file mode 100644 index 00000000000..df722f8fe44 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10863.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, regression, postgresql + :tickets: 10863 + + Fixed regression in the asyncpg dialect caused by :ticket:`10717` in + release 2.0.24 where the change that now attempts to gracefully close the + asyncpg connection before terminating would not fall back to + ``terminate()`` for other potential connection-related exceptions other + than a timeout error, not taking into account cases where the graceful + ``.close()`` attempt fails for other reasons such as connection errors. 
+ diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 7e93b1232e1..85affdca3aa 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -890,7 +890,7 @@ def terminate(self): # try to gracefully close; see #10717 # timeout added in asyncpg 0.14.0 December 2017 self.await_(self._connection.close(timeout=2)) - except asyncio.TimeoutError: + except (asyncio.TimeoutError, OSError, self.dbapi.PostgresError): # in the case where we are recycling an old connection # that may have already been disconnected, close() will # fail with the above timeout. in this case, terminate From 9e33ba6cf8709081cd0f9afe483effca5708e1ef Mon Sep 17 00:00:00 2001 From: Ellis Valentiner Date: Mon, 8 Jan 2024 11:16:21 -0500 Subject: [PATCH 074/544] Support reflecting no inherit check constraint in pg. Added support for reflection of PostgreSQL CHECK constraints marked with "NO INHERIT", setting the key ``no_inherit=True`` in the reflected data. Pull request courtesy Ellis Valentiner. Fixes: #10777 Closes: #10778 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10778 Pull-request-sha: 058082ff6297f9ccdc4977e65ef024e9a093426e Change-Id: Ia33e29c0c57cf0076e8819311f4628d712fdc332 (cherry picked from commit 890b84e1693ce702ef0e20046cadc6e741274013) --- doc/build/changelog/unreleased_20/10777.rst | 7 ++++ lib/sqlalchemy/dialects/postgresql/base.py | 16 +++++++-- test/dialect/postgresql/test_reflection.py | 36 +++++++++++++++++++++ 3 files changed, 56 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10777.rst diff --git a/doc/build/changelog/unreleased_20/10777.rst b/doc/build/changelog/unreleased_20/10777.rst new file mode 100644 index 00000000000..cee5092e8d4 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10777.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: usecase, postgresql, reflection + :tickets: 10777 + + Added support for reflection of PostgreSQL CHECK constraints marked with + "NO INHERIT", setting the key ``no_inherit=True`` in the reflected data. + Pull request courtesy Ellis Valentiner. 
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index e5f3cb50c58..a56a10c01ca 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -4696,9 +4696,13 @@ def get_multi_check_constraints( # "CHECK (((a > 1) AND (a < 5))) NOT VALID" # "CHECK (some_boolean_function(a))" # "CHECK (((a\n < 1)\n OR\n (a\n >= 5))\n)" + # "CHECK (a NOT NULL) NO INHERIT" + # "CHECK (a NOT NULL) NO INHERIT NOT VALID" m = re.match( - r"^CHECK *\((.+)\)( NOT VALID)?$", src, flags=re.DOTALL + r"^CHECK *\((.+)\)( NO INHERIT)?( NOT VALID)?$", + src, + flags=re.DOTALL, ) if not m: util.warn("Could not parse CHECK constraint text: %r" % src) @@ -4712,8 +4716,14 @@ def get_multi_check_constraints( "sqltext": sqltext, "comment": comment, } - if m and m.group(2): - entry["dialect_options"] = {"not_valid": True} + if m: + do = {} + if " NOT VALID" in m.groups(): + do["not_valid"] = True + if " NO INHERIT" in m.groups(): + do["no_inherit"] = True + if do: + entry["dialect_options"] = do check_constraints[(schema, table_name)].append(entry) return check_constraints.items() diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index ab4fa2c038d..dd6c8aa88ee 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -2197,6 +2197,42 @@ def test_reflect_with_not_valid_check_constraint(self): ], ) + def test_reflect_with_no_inherit_check_constraint(self): + rows = [ + ("foo", "some name", "CHECK ((a IS NOT NULL)) NO INHERIT", None), + ( + "foo", + "some name", + "CHECK ((a IS NOT NULL)) NO INHERIT NOT VALID", + None, + ), + ] + conn = mock.Mock( + execute=lambda *arg, **kw: mock.MagicMock( + fetchall=lambda: rows, __iter__=lambda self: iter(rows) + ) + ) + check_constraints = testing.db.dialect.get_check_constraints( + conn, "foo" + ) + eq_( + check_constraints, + [ + { + "name": "some name", + "sqltext": "a IS NOT NULL", + "dialect_options": {"no_inherit": True}, + "comment": None, + }, + { + "name": "some name", + "sqltext": "a IS NOT NULL", + "dialect_options": {"not_valid": True, "no_inherit": True}, + "comment": None, + }, + ], + ) + def _apply_stm(self, connection, use_map): if use_map: return connection.execution_options( From a25abbbeb0933d47b61b2614e578cf06b5cb78a1 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 12 Jan 2024 09:29:28 -0500 Subject: [PATCH 075/544] add Identity() for remaining examples Fixed the performance example scripts in examples/performance to mostly work with the Oracle database, by adding the :class:`.Identity` construct to all the tables and allowing primary generation to occur on this backend. A few of the "raw DBAPI" cases still are not compatible with Oracle. Change-Id: I7ce19645ea78736dddfda6f33b9356ad75dee68f (cherry picked from commit 6e0a35dfd8bbd12c999abcae3309fe22e83b0444) --- doc/build/changelog/unreleased_20/examples.rst | 8 ++++++++ examples/performance/bulk_updates.py | 3 ++- examples/performance/large_resultsets.py | 3 ++- examples/performance/short_selects.py | 3 ++- examples/performance/single_inserts.py | 3 ++- 5 files changed, 16 insertions(+), 4 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/examples.rst diff --git a/doc/build/changelog/unreleased_20/examples.rst b/doc/build/changelog/unreleased_20/examples.rst new file mode 100644 index 00000000000..8ac2c567ed5 --- /dev/null +++ b/doc/build/changelog/unreleased_20/examples.rst @@ -0,0 +1,8 @@ +.. 
change:: + :tags: bug, examples + + Fixed the performance example scripts in examples/performance to mostly + work with the Oracle database, by adding the :class:`.Identity` construct + to all the tables and allowing primary generation to occur on this backend. + A few of the "raw DBAPI" cases still are not compatible with Oracle. + diff --git a/examples/performance/bulk_updates.py b/examples/performance/bulk_updates.py index c15d0f16726..8b782353df0 100644 --- a/examples/performance/bulk_updates.py +++ b/examples/performance/bulk_updates.py @@ -5,6 +5,7 @@ """ from sqlalchemy import Column from sqlalchemy import create_engine +from sqlalchemy import Identity from sqlalchemy import Integer from sqlalchemy import String from sqlalchemy.ext.declarative import declarative_base @@ -18,7 +19,7 @@ class Customer(Base): __tablename__ = "customer" - id = Column(Integer, primary_key=True) + id = Column(Integer, Identity(), primary_key=True) name = Column(String(255)) description = Column(String(255)) diff --git a/examples/performance/large_resultsets.py b/examples/performance/large_resultsets.py index 9c0d9fc4e21..b93459150e5 100644 --- a/examples/performance/large_resultsets.py +++ b/examples/performance/large_resultsets.py @@ -15,6 +15,7 @@ """ from sqlalchemy import Column from sqlalchemy import create_engine +from sqlalchemy import Identity from sqlalchemy import Integer from sqlalchemy import String from sqlalchemy.ext.declarative import declarative_base @@ -29,7 +30,7 @@ class Customer(Base): __tablename__ = "customer" - id = Column(Integer, primary_key=True) + id = Column(Integer, Identity(), primary_key=True) name = Column(String(255)) description = Column(String(255)) diff --git a/examples/performance/short_selects.py b/examples/performance/short_selects.py index d0e5f6e9d22..553c2fed5f0 100644 --- a/examples/performance/short_selects.py +++ b/examples/performance/short_selects.py @@ -8,6 +8,7 @@ from sqlalchemy import bindparam from sqlalchemy import Column from sqlalchemy import create_engine +from sqlalchemy import Identity from sqlalchemy import Integer from sqlalchemy import select from sqlalchemy import String @@ -28,7 +29,7 @@ class Customer(Base): __tablename__ = "customer" - id = Column(Integer, primary_key=True) + id = Column(Integer, Identity(), primary_key=True) name = Column(String(255)) description = Column(String(255)) q = Column(Integer) diff --git a/examples/performance/single_inserts.py b/examples/performance/single_inserts.py index 991d213a07b..904fda2d039 100644 --- a/examples/performance/single_inserts.py +++ b/examples/performance/single_inserts.py @@ -7,6 +7,7 @@ from sqlalchemy import bindparam from sqlalchemy import Column from sqlalchemy import create_engine +from sqlalchemy import Identity from sqlalchemy import Integer from sqlalchemy import pool from sqlalchemy import String @@ -21,7 +22,7 @@ class Customer(Base): __tablename__ = "customer" - id = Column(Integer, primary_key=True) + id = Column(Integer, Identity(), primary_key=True) name = Column(String(255)) description = Column(String(255)) From 0f5144597501bf846b966af37d620616fc045a7b Mon Sep 17 00:00:00 2001 From: David Evans Date: Mon, 15 Jan 2024 10:13:53 -0500 Subject: [PATCH 076/544] Fix type of CASE expressions which include NULLs Fixed issues in :func:`_sql.case` where the logic for determining the type of the expression could result in :class:`.NullType` if the last element in the "whens" had no type, or in other cases where the type could resolve to ``None``. 
The logic has been updated to scan all given expressions so that the first non-null type is used, as well as to always ensure a type is present. Pull request courtesy David Evans. updates to test suite to use modern fixture patterns by Mike Fixes: #10843 Closes: #10847 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10847 Pull-request-sha: 4fd5c39ab56de046e68c08f6c20cd1f7b2cb0e0d Change-Id: I40f905ac336a8a42b617ff9473dbd9c22ac57505 (cherry picked from commit 8f4ac0c0f07509d2f8a4bce9cbb07ac08ad04044) --- doc/build/changelog/unreleased_20/10843.rst | 10 ++ lib/sqlalchemy/sql/elements.py | 22 ++-- test/sql/test_case_statement.py | 115 +++++++++++++++----- 3 files changed, 110 insertions(+), 37 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10843.rst diff --git a/doc/build/changelog/unreleased_20/10843.rst b/doc/build/changelog/unreleased_20/10843.rst new file mode 100644 index 00000000000..838f6a8beb1 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10843.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, sql + :tickets: 10843 + + Fixed issues in :func:`_sql.case` where the logic for determining the + type of the expression could result in :class:`.NullType` if the last + element in the "whens" had no type, or in other cases where the type + could resolve to ``None``. The logic has been updated to scan all + given expressions so that the first non-null type is used, as well as + to always ensure a type is present. Pull request courtesy David Evans. diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index f05b2887d72..92a040ca0bb 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -3409,7 +3409,7 @@ def __init__( except TypeError: pass - whenlist = [ + self.whens = [ ( coercions.expect( roles.ExpressionElementRole, @@ -3421,24 +3421,28 @@ def __init__( for (c, r) in new_whens ] - if whenlist: - type_ = whenlist[-1][-1].type - else: - type_ = None - if value is None: self.value = None else: self.value = coercions.expect(roles.ExpressionElementRole, value) - self.type = cast(_T, type_) - self.whens = whenlist - if else_ is not None: self.else_ = coercions.expect(roles.ExpressionElementRole, else_) else: self.else_ = None + type_ = next( + ( + then.type + # Iterate `whens` in reverse to match previous behaviour + # where type of final element took priority + for *_, then in reversed(self.whens) + if not then.type._isnull + ), + self.else_.type if self.else_ is not None else type_api.NULLTYPE, + ) + self.type = cast(_T, type_) + @util.ro_non_memoized_property def _from_objects(self) -> List[FromClause]: return list( diff --git a/test/sql/test_case_statement.py b/test/sql/test_case_statement.py index 6907d213257..5e95d3cb2f7 100644 --- a/test/sql/test_case_statement.py +++ b/test/sql/test_case_statement.py @@ -5,7 +5,6 @@ from sqlalchemy import func from sqlalchemy import Integer from sqlalchemy import literal_column -from sqlalchemy import MetaData from sqlalchemy import select from sqlalchemy import String from sqlalchemy import Table @@ -13,50 +12,48 @@ from sqlalchemy import text from sqlalchemy.sql import column from sqlalchemy.sql import table +from sqlalchemy.sql.sqltypes import NullType from sqlalchemy.testing import AssertsCompiledSQL from sqlalchemy.testing import eq_ from sqlalchemy.testing import fixtures -info_table = None - - -class CaseTest(fixtures.TestBase, AssertsCompiledSQL): +class CaseTest(fixtures.TablesTest, AssertsCompiledSQL): __dialect__ = "default" + run_inserts = "once" + 
run_deletes = "never" + @classmethod - def setup_test_class(cls): - metadata = MetaData() - global info_table - info_table = Table( - "infos", + def define_tables(cls, metadata): + Table( + "info_table", metadata, Column("pk", Integer, primary_key=True), Column("info", String(30)), ) - with testing.db.begin() as conn: - info_table.create(conn) - - conn.execute( - info_table.insert(), - [ - {"pk": 1, "info": "pk_1_data"}, - {"pk": 2, "info": "pk_2_data"}, - {"pk": 3, "info": "pk_3_data"}, - {"pk": 4, "info": "pk_4_data"}, - {"pk": 5, "info": "pk_5_data"}, - {"pk": 6, "info": "pk_6_data"}, - ], - ) - @classmethod - def teardown_test_class(cls): - with testing.db.begin() as conn: - info_table.drop(conn) + def insert_data(cls, connection): + info_table = cls.tables.info_table + + connection.execute( + info_table.insert(), + [ + {"pk": 1, "info": "pk_1_data"}, + {"pk": 2, "info": "pk_2_data"}, + {"pk": 3, "info": "pk_3_data"}, + {"pk": 4, "info": "pk_4_data"}, + {"pk": 5, "info": "pk_5_data"}, + {"pk": 6, "info": "pk_6_data"}, + ], + ) + connection.commit() @testing.requires.subqueries def test_case(self, connection): + info_table = self.tables.info_table + inner = select( case( (info_table.c.pk < 3, "lessthan3"), @@ -222,6 +219,8 @@ def test_when_dicts(self, test_case, expected): ) def test_text_doesnt_explode(self, connection): + info_table = self.tables.info_table + for s in [ select( case( @@ -255,6 +254,8 @@ def test_text_doenst_explode_even_in_whenlist(self): ) def testcase_with_dict(self): + info_table = self.tables.info_table + query = select( case( { @@ -294,3 +295,61 @@ def testcase_with_dict(self): ("two", 2), ("other", 3), ] + + @testing.variation("add_else", [True, False]) + def test_type_of_case_expression_with_all_nulls(self, add_else): + info_table = self.tables.info_table + + expr = case( + (info_table.c.pk < 0, None), + (info_table.c.pk > 9, None), + else_=column("q") if add_else else None, + ) + + assert isinstance(expr.type, NullType) + + @testing.combinations( + lambda info_table: ( + [ + # test non-None in middle of WHENS takes precedence over Nones + (info_table.c.pk < 0, None), + (info_table.c.pk < 5, "five"), + (info_table.c.pk <= 9, info_table.c.pk), + (info_table.c.pk > 9, None), + ], + None, + ), + lambda info_table: ( + # test non-None ELSE takes precedence over WHENs that are None + [(info_table.c.pk < 0, None)], + info_table.c.pk, + ), + lambda info_table: ( + # test non-None WHEN takes precedence over non-None ELSE + [ + (info_table.c.pk < 0, None), + (info_table.c.pk <= 9, info_table.c.pk), + (info_table.c.pk > 9, None), + ], + column("q", String), + ), + lambda info_table: ( + # test last WHEN in list takes precedence + [ + (info_table.c.pk < 0, String), + (info_table.c.pk > 9, None), + (info_table.c.pk <= 9, info_table.c.pk), + ], + column("q", String), + ), + ) + def test_type_of_case_expression(self, when_lambda): + info_table = self.tables.info_table + + whens, else_ = testing.resolve_lambda( + when_lambda, info_table=info_table + ) + + expr = case(*whens, else_=else_) + + assert isinstance(expr.type, Integer) From 1c1f60a2e3f199b626edc565ae4a67687e192015 Mon Sep 17 00:00:00 2001 From: Gord Thompson Date: Sun, 14 Jan 2024 09:49:11 -0700 Subject: [PATCH 077/544] use ensure_closed() for async close, close() for terminate Fixed issue in asyncio dialects asyncmy and aiomysql, where their ``.close()`` method is apparently not a graceful close. 
replace with non-standard ``.ensure_closed()`` method that's awaitable and move ``.close()`` to the so-called "terminate" case. Fixes: #10893 Change-Id: I33d871e67854d85f770c46f699e41a6e73b6fbe0 (cherry picked from commit 4201b90210dcf60f9830df299be016dee315753b) --- doc/build/changelog/unreleased_20/10893.rst | 8 ++++++++ lib/sqlalchemy/dialects/mysql/aiomysql.py | 9 ++++++++- lib/sqlalchemy/dialects/mysql/asyncmy.py | 9 ++++++++- 3 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10893.rst diff --git a/doc/build/changelog/unreleased_20/10893.rst b/doc/build/changelog/unreleased_20/10893.rst new file mode 100644 index 00000000000..63507f38d56 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10893.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, mysql + :tickets: 10893 + + Fixed issue in asyncio dialects asyncmy and aiomysql, where their + ``.close()`` method is apparently not a graceful close. replace with + non-standard ``.ensure_closed()`` method that's awaitable and move + ``.close()`` to the so-called "terminate" case. diff --git a/lib/sqlalchemy/dialects/mysql/aiomysql.py b/lib/sqlalchemy/dialects/mysql/aiomysql.py index 65482a76b27..405fa82c8a5 100644 --- a/lib/sqlalchemy/dialects/mysql/aiomysql.py +++ b/lib/sqlalchemy/dialects/mysql/aiomysql.py @@ -202,10 +202,13 @@ def rollback(self): def commit(self): self.await_(self._connection.commit()) - def close(self): + def terminate(self): # it's not awaitable. self._connection.close() + def close(self) -> None: + self.await_(self._connection.ensure_closed()) + class AsyncAdaptFallback_aiomysql_connection(AsyncAdapt_aiomysql_connection): # TODO: base on connectors/asyncio.py @@ -285,6 +288,7 @@ class MySQLDialect_aiomysql(MySQLDialect_pymysql): _sscursor = AsyncAdapt_aiomysql_ss_cursor is_async = True + has_terminate = True @classmethod def import_dbapi(cls): @@ -301,6 +305,9 @@ def get_pool_class(cls, url): else: return pool.AsyncAdaptedQueuePool + def do_terminate(self, dbapi_connection) -> None: + dbapi_connection.terminate() + def create_connect_args(self, url): return super().create_connect_args( url, _translate_args=dict(username="user", database="db") diff --git a/lib/sqlalchemy/dialects/mysql/asyncmy.py b/lib/sqlalchemy/dialects/mysql/asyncmy.py index 9928a879ec5..7360044d20b 100644 --- a/lib/sqlalchemy/dialects/mysql/asyncmy.py +++ b/lib/sqlalchemy/dialects/mysql/asyncmy.py @@ -221,10 +221,13 @@ def rollback(self): def commit(self): self.await_(self._connection.commit()) - def close(self): + def terminate(self): # it's not awaitable. 
self._connection.close() + def close(self) -> None: + self.await_(self._connection.ensure_closed()) + class AsyncAdaptFallback_asyncmy_connection(AsyncAdapt_asyncmy_connection): __slots__ = () @@ -290,6 +293,7 @@ class MySQLDialect_asyncmy(MySQLDialect_pymysql): _sscursor = AsyncAdapt_asyncmy_ss_cursor is_async = True + has_terminate = True @classmethod def import_dbapi(cls): @@ -304,6 +308,9 @@ def get_pool_class(cls, url): else: return pool.AsyncAdaptedQueuePool + def do_terminate(self, dbapi_connection) -> None: + dbapi_connection.terminate() + def create_connect_args(self, url): return super().create_connect_args( url, _translate_args=dict(username="user", database="db") From deb71a9b4079dea57ce81875cbf34425c7f8e731 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Fri, 12 Jan 2024 19:17:30 +0100 Subject: [PATCH 078/544] Oracle default arraysize is now set by the driver Changed the default arraysize of the Oracle dialects so that the value set by the driver is used, that is 100 at the time of writing for both cx_oracle and oracledb. Previously the value was set to 50 by default. Fixes: #10877 Change-Id: Ie4c53f42437d3d7dbbad36398d7883472577f367 (cherry picked from commit c8214ad4389284dc9508e49aeca701e5bf164454) --- doc/build/changelog/unreleased_20/10877.rst | 7 +++++++ lib/sqlalchemy/dialects/oracle/cx_oracle.py | 15 ++++++++++----- lib/sqlalchemy/dialects/oracle/oracledb.py | 2 +- test/requirements.py | 2 +- 4 files changed, 19 insertions(+), 7 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10877.rst diff --git a/doc/build/changelog/unreleased_20/10877.rst b/doc/build/changelog/unreleased_20/10877.rst new file mode 100644 index 00000000000..8aaac983b45 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10877.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: oracle + :tickets: 10877 + + Changed the default arraysize of the Oracle dialects so that the value set + by the driver is used, that is 100 at the time of writing for both + cx_oracle and oracledb. Previously the value was set to 50 by default. diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index e8ed3ab5cb2..69ee82bd234 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -126,10 +126,15 @@ The parameters accepted by the cx_oracle dialect are as follows: -* ``arraysize`` - set the cx_oracle.arraysize value on cursors, defaulted - to 50. This setting is significant with cx_Oracle as the contents of LOB - objects are only readable within a "live" row (e.g. within a batch of - 50 rows). +* ``arraysize`` - set the cx_oracle.arraysize value on cursors; defaults + to ``None``, indicating that the driver default should be used (typically + the value is 100). This setting controls how many rows are buffered when + fetching rows, and can have a significant effect on performance when + modified. The setting is used for both ``cx_Oracle`` as well as + ``oracledb``. + + .. versionchanged:: 2.0.26 - changed the default value from 50 to None, + to use the default value of the driver itself. * ``auto_convert_lobs`` - defaults to True; See :ref:`cx_oracle_lob`. 
@@ -1033,7 +1038,7 @@ def __init__( self, auto_convert_lobs=True, coerce_to_decimal=True, - arraysize=50, + arraysize=None, encoding_errors=None, threaded=None, **kwargs, diff --git a/lib/sqlalchemy/dialects/oracle/oracledb.py b/lib/sqlalchemy/dialects/oracle/oracledb.py index 1229573ad1e..9cdec3b55ae 100644 --- a/lib/sqlalchemy/dialects/oracle/oracledb.py +++ b/lib/sqlalchemy/dialects/oracle/oracledb.py @@ -105,7 +105,7 @@ def __init__( self, auto_convert_lobs=True, coerce_to_decimal=True, - arraysize=50, + arraysize=None, encoding_errors=None, thick_mode=None, **kwargs, diff --git a/test/requirements.py b/test/requirements.py index 4a0b365c2b5..a5a389441dd 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -784,7 +784,7 @@ def order_by_col_from_union(self): #8221. """ - return fails_if(["mssql", "oracle>=12"]) + return fails_if(["mssql", "oracle < 23"]) @property def parens_in_union_contained_select_w_limit_offset(self): From dfbc8128b2f4047b03bb1b5fe6fb2b6cc4a7e096 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 17 Jan 2024 21:15:37 -0500 Subject: [PATCH 079/544] remove loader depth warning, replace with logging message Replaced the "loader depth is excessively deep" warning with a shorter message added to the caching badge within SQL logging, for those statements where the ORM disabled the cache due to a too-deep chain of loader options. The condition which this warning highlights is difficult to resolve and is generally just a limitation in the ORM's application of SQL caching. A future feature may include the ability to tune the threshold where caching is disabled, but for now the warning will no longer be a nuisance. Fixes: #10896 Change-Id: Ic3be2086d1db16f9a75390323f00a43ef72aca12 (cherry picked from commit 90f8ff08a4b15c7706778eb10088cd239591e4cc) --- doc/build/changelog/unreleased_20/10896.rst | 11 +++++ lib/sqlalchemy/engine/default.py | 8 +++- lib/sqlalchemy/orm/context.py | 17 ++++--- test/orm/test_recursive_loaders.py | 52 ++++++++++++++------- 4 files changed, 61 insertions(+), 27 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10896.rst diff --git a/doc/build/changelog/unreleased_20/10896.rst b/doc/build/changelog/unreleased_20/10896.rst new file mode 100644 index 00000000000..77224d974ca --- /dev/null +++ b/doc/build/changelog/unreleased_20/10896.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, orm + :tickets: 10896 + + Replaced the "loader depth is excessively deep" warning with a shorter + message added to the caching badge within SQL logging, for those statements + where the ORM disabled the cache due to a too-deep chain of loader options. + The condition which this warning highlights is difficult to resolve and is + generally just a limitation in the ORM's application of SQL caching. A + future feature may include the ability to tune the threshold where caching + is disabled, but for now the warning will no longer be a nuisance. 
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 0d8054a9db9..8fbfc234486 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -1577,7 +1577,13 @@ def _get_cache_stats(self) -> str: elif ch is CACHE_MISS: return "generated in %.5fs" % (now - gen_time,) elif ch is CACHING_DISABLED: - return "caching disabled %.5fs" % (now - gen_time,) + if "_cache_disable_reason" in self.execution_options: + return "caching disabled (%s) %.5fs " % ( + self.execution_options["_cache_disable_reason"], + now - gen_time, + ) + else: + return "caching disabled %.5fs" % (now - gen_time,) elif ch is NO_DIALECT_SUPPORT: return "dialect %s+%s does not support caching %.5fs" % ( self.dialect.name, diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index 7ab7e6279ea..b4178253185 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -517,15 +517,14 @@ def orm_pre_session_exec( and len(statement._compile_options._current_path) > 10 and execution_options.get("compiled_cache", True) is not None ): - util.warn( - "Loader depth for query is excessively deep; caching will " - "be disabled for additional loaders. For recursive eager " - "loaders consider using the recursion_depth feature. " - "Use the compiled_cache=None execution option to " - "skip this warning." - ) - execution_options = execution_options.union( - {"compiled_cache": None} + execution_options: util.immutabledict[ + str, Any + ] = execution_options.union( + { + "compiled_cache": None, + "_cache_disable_reason": "excess depth for " + "ORM loader options", + } ) bind_arguments["clause"] = statement diff --git a/test/orm/test_recursive_loaders.py b/test/orm/test_recursive_loaders.py index 10582e71131..e6ce5ccd7ef 100644 --- a/test/orm/test_recursive_loaders.py +++ b/test/orm/test_recursive_loaders.py @@ -1,3 +1,5 @@ +import logging.handlers + import sqlalchemy as sa from sqlalchemy import ForeignKey from sqlalchemy import Integer @@ -11,7 +13,6 @@ from sqlalchemy.orm import Session from sqlalchemy.testing import eq_ from sqlalchemy.testing import expect_raises_message -from sqlalchemy.testing import expect_warnings from sqlalchemy.testing import fixtures from sqlalchemy.testing.fixtures import fixture_session from sqlalchemy.testing.schema import Column @@ -258,13 +259,27 @@ def test_unlimited_recursion(self, loader_fn, limited_cache_conn): result = s.scalars(stmt) self._assert_depth(result.one(), 200) + @testing.fixture + def capture_log(self, testing_engine): + existing_level = logging.getLogger("sqlalchemy.engine").level + + buf = logging.handlers.BufferingHandler(100) + logging.getLogger("sqlalchemy.engine").addHandler(buf) + logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO) + yield buf + logging.getLogger("sqlalchemy.engine").setLevel(existing_level) + logging.getLogger("sqlalchemy.engine").removeHandler(buf) + @testing.combinations(selectinload, immediateload, argnames="loader_fn") @testing.combinations(4, 9, 12, 25, 41, 55, argnames="depth") @testing.variation("disable_cache", [True, False]) def test_warning_w_no_recursive_opt( - self, loader_fn, depth, limited_cache_conn, disable_cache + self, loader_fn, depth, limited_cache_conn, disable_cache, capture_log ): + buf = capture_log + connection = limited_cache_conn(27) + connection._echo = True Node = self.classes.Node @@ -280,21 +295,24 @@ def test_warning_w_no_recursive_opt( else: exec_opts = {} - # note this is a magic number, it's not important that it's exact, - # 
just that when someone makes a huge recursive thing, - # it warns - if depth > 8 and not disable_cache: - with expect_warnings( - "Loader depth for query is excessively deep; " - "caching will be disabled for additional loaders." - ): - with Session(connection) as s: - result = s.scalars(stmt, execution_options=exec_opts) - self._assert_depth(result.one(), depth) - else: - with Session(connection) as s: - result = s.scalars(stmt, execution_options=exec_opts) - self._assert_depth(result.one(), depth) + with Session(connection) as s: + result = s.scalars(stmt, execution_options=exec_opts) + self._assert_depth(result.one(), depth) + + if not disable_cache: + # note this is a magic number, it's not important that it's + # exact, just that when someone makes a huge recursive thing, + # it disables caching and notes in the logs + if depth > 8: + eq_( + buf.buffer[-1].message[0:55], + "[caching disabled (excess depth for " + "ORM loader options)", + ) + else: + assert buf.buffer[-1].message.startswith( + "[cached since" if i > 0 else "[generated in" + ) if disable_cache: clen = len(connection.engine._compiled_cache) From 8724111b526196d61a3233ece7303252750bcba6 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 20 Jan 2024 18:33:28 +0100 Subject: [PATCH 080/544] use sequence instead of list in result docs Change-Id: Iaed8505c495455f0d82e4b0cbcc7dffd2d833408 (cherry picked from commit 9fe5f4fcf2f36e35c7a6865bbaa29dc05617d01e) --- lib/sqlalchemy/engine/result.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index c9d51e06677..f1c18cf456f 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -1349,7 +1349,7 @@ def fetchone(self) -> Optional[Row[_TP]]: def fetchmany(self, size: Optional[int] = None) -> Sequence[Row[_TP]]: """Fetch many rows. - When all rows are exhausted, returns an empty list. + When all rows are exhausted, returns an empty sequence. This method is provided for backwards compatibility with SQLAlchemy 1.x.x. @@ -1357,7 +1357,7 @@ def fetchmany(self, size: Optional[int] = None) -> Sequence[Row[_TP]]: To fetch rows in groups, use the :meth:`_engine.Result.partitions` method. - :return: a list of :class:`_engine.Row` objects. + :return: a sequence of :class:`_engine.Row` objects. .. seealso:: @@ -1368,14 +1368,14 @@ def fetchmany(self, size: Optional[int] = None) -> Sequence[Row[_TP]]: return self._manyrow_getter(self, size) def all(self) -> Sequence[Row[_TP]]: - """Return all rows in a list. + """Return all rows in a sequence. Closes the result set after invocation. Subsequent invocations - will return an empty list. + will return an empty sequence. .. versionadded:: 1.4 - :return: a list of :class:`_engine.Row` objects. + :return: a sequence of :class:`_engine.Row` objects. .. seealso:: @@ -1773,7 +1773,7 @@ def fetchmany(self, size: Optional[int] = None) -> Sequence[_R]: return self._manyrow_getter(self, size) def all(self) -> Sequence[_R]: - """Return all scalar values in a list. + """Return all scalar values in a sequence. Equivalent to :meth:`_engine.Result.all` except that scalar values, rather than :class:`_engine.Row` objects, @@ -1877,7 +1877,7 @@ def fetchmany(self, size: Optional[int] = None) -> Sequence[_R]: ... def all(self) -> Sequence[_R]: # noqa: A001 - """Return all scalar values in a list. + """Return all scalar values in a sequence. 
Equivalent to :meth:`_engine.Result.all` except that tuple values, rather than :class:`_engine.Row` objects, @@ -2083,7 +2083,7 @@ def fetchmany(self, size: Optional[int] = None) -> Sequence[RowMapping]: return self._manyrow_getter(self, size) def all(self) -> Sequence[RowMapping]: - """Return all scalar values in a list. + """Return all scalar values in a sequence. Equivalent to :meth:`_engine.Result.all` except that :class:`_engine.RowMapping` values, rather than :class:`_engine.Row` From ef3202e7bfce990051da20bff6313205e7000880 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edgar=20Ram=C3=ADrez=20Mondrag=C3=B3n?= Date: Mon, 22 Jan 2024 02:29:44 -0500 Subject: [PATCH 081/544] Support specifying access method when creating Postgres tables ### Description ### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [x] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. Fixes #10904 **Have a nice day!** Closes: #10905 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10905 Pull-request-sha: 85f232a303a5543725dac42206cb2395fc34109e Change-Id: I5e2fc05a696eb6da71bbd695f0466e8552d203b6 (cherry picked from commit 46899918a6dda07cca07e30af2526134f9c38809) --- doc/build/changelog/unreleased_20/10904.rst | 11 ++++++ lib/sqlalchemy/dialects/postgresql/base.py | 38 +++++++++++++-------- test/dialect/postgresql/test_compiler.py | 16 ++++++++- 3 files changed, 50 insertions(+), 15 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10904.rst diff --git a/doc/build/changelog/unreleased_20/10904.rst b/doc/build/changelog/unreleased_20/10904.rst new file mode 100644 index 00000000000..3dc744dc185 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10904.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: usecase, postgresql + :tickets: 10904 + + Support the ``USING `` option for PostgreSQL ``CREATE TABLE`` to + specify the access method to use to store the contents for the new table. + Pull request courtesy Edgar Ramírez-Mondragón. + + .. seealso:: + + :ref:`postgresql_table_options` diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index a56a10c01ca..8ce6230e045 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1112,36 +1112,42 @@ def set_search_path(dbapi_connection, connection_record): Several options for CREATE TABLE are supported directly by the PostgreSQL dialect in conjunction with the :class:`_schema.Table` construct: -* ``TABLESPACE``:: +* ``INHERITS``:: - Table("some_table", metadata, ..., postgresql_tablespace='some_tablespace') + Table("some_table", metadata, ..., postgresql_inherits="some_supertable") - The above option is also available on the :class:`.Index` construct. 
+ Table("some_table", metadata, ..., postgresql_inherits=("t1", "t2", ...)) * ``ON COMMIT``:: Table("some_table", metadata, ..., postgresql_on_commit='PRESERVE ROWS') -* ``WITH OIDS``:: +* ``PARTITION BY``:: - Table("some_table", metadata, ..., postgresql_with_oids=True) + Table("some_table", metadata, ..., + postgresql_partition_by='LIST (part_column)') -* ``WITHOUT OIDS``:: + .. versionadded:: 1.2.6 - Table("some_table", metadata, ..., postgresql_with_oids=False) +* ``TABLESPACE``:: -* ``INHERITS``:: + Table("some_table", metadata, ..., postgresql_tablespace='some_tablespace') - Table("some_table", metadata, ..., postgresql_inherits="some_supertable") + The above option is also available on the :class:`.Index` construct. - Table("some_table", metadata, ..., postgresql_inherits=("t1", "t2", ...)) +* ``USING``:: -* ``PARTITION BY``:: + Table("some_table", metadata, ..., postgresql_using='heap') - Table("some_table", metadata, ..., - postgresql_partition_by='LIST (part_column)') + .. versionadded:: 2.0.26 - .. versionadded:: 1.2.6 +* ``WITH OIDS``:: + + Table("some_table", metadata, ..., postgresql_with_oids=True) + +* ``WITHOUT OIDS``:: + + Table("some_table", metadata, ..., postgresql_with_oids=False) .. seealso:: @@ -2395,6 +2401,9 @@ def post_create_table(self, table): if pg_opts["partition_by"]: table_opts.append("\n PARTITION BY %s" % pg_opts["partition_by"]) + if pg_opts["using"]: + table_opts.append("\n USING %s" % pg_opts["using"]) + if pg_opts["with_oids"] is True: table_opts.append("\n WITH OIDS") elif pg_opts["with_oids"] is False: @@ -3006,6 +3015,7 @@ class PGDialect(default.DefaultDialect): "with_oids": None, "on_commit": None, "inherits": None, + "using": None, }, ), ( diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index 5851a86e6d6..f890b7ba9ce 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -582,6 +582,19 @@ def test_create_table_with_oncommit_option(self): "CREATE TABLE atable (id INTEGER) ON COMMIT DROP", ) + def test_create_table_with_using_option(self): + m = MetaData() + tbl = Table( + "atable", + m, + Column("id", Integer), + postgresql_using="heap", + ) + self.assert_compile( + schema.CreateTable(tbl), + "CREATE TABLE atable (id INTEGER) USING heap", + ) + def test_create_table_with_multiple_options(self): m = MetaData() tbl = Table( @@ -591,10 +604,11 @@ def test_create_table_with_multiple_options(self): postgresql_tablespace="sometablespace", postgresql_with_oids=False, postgresql_on_commit="preserve_rows", + postgresql_using="heap", ) self.assert_compile( schema.CreateTable(tbl), - "CREATE TABLE atable (id INTEGER) WITHOUT OIDS " + "CREATE TABLE atable (id INTEGER) USING heap WITHOUT OIDS " "ON COMMIT PRESERVE ROWS TABLESPACE sometablespace", ) From bc460b34c2f5a532fefb327ab790b31c1b6220bd Mon Sep 17 00:00:00 2001 From: Georg Wicke-Arndt Date: Mon, 22 Jan 2024 10:22:43 -0500 Subject: [PATCH 082/544] Parse NOT NULL for MySQL generated columns Fixed issue where NULL/NOT NULL would not be properly reflected from a MySQL column that also specified the VIRTUAL or STORED directives. Pull request courtesy Georg Wicke-Arndt. 
Fixes: #10850 Closes: #10851 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10851 Pull-request-sha: fb9a81020c393231ca90a1e88342b11cf64414a1 Change-Id: I9a80d0db722c15682e18f0390a7b58e5979e73a1 (cherry picked from commit 14f30b85c7bea7839111bbe54576b290457e3a8d) --- doc/build/changelog/unreleased_20/10850.rst | 7 + lib/sqlalchemy/dialects/mysql/reflection.py | 7 +- test/dialect/mysql/test_reflection.py | 215 ++++++++++++-------- test/requirements.py | 4 + 4 files changed, 149 insertions(+), 84 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10850.rst diff --git a/doc/build/changelog/unreleased_20/10850.rst b/doc/build/changelog/unreleased_20/10850.rst new file mode 100644 index 00000000000..6b6b323ce88 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10850.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, mysql + :tickets: 10850 + + Fixed issue where NULL/NOT NULL would not be properly reflected from a + MySQL column that also specified the VIRTUAL or STORED directives. Pull + request courtesy Georg Wicke-Arndt. diff --git a/lib/sqlalchemy/dialects/mysql/reflection.py b/lib/sqlalchemy/dialects/mysql/reflection.py index 74c60f07b58..c764e8ccc7f 100644 --- a/lib/sqlalchemy/dialects/mysql/reflection.py +++ b/lib/sqlalchemy/dialects/mysql/reflection.py @@ -290,6 +290,9 @@ def _parse_column(self, line, state): # this can be "NULL" in the case of TIMESTAMP if spec.get("notnull", False) == "NOT NULL": col_kw["nullable"] = False + # For generated columns, the nullability is marked in a different place + if spec.get("notnull_generated", False) == "NOT NULL": + col_kw["nullable"] = False # AUTO_INCREMENT if spec.get("autoincr", False): @@ -452,7 +455,9 @@ def _prep_regexes(self): r"(?: +ON UPDATE [\-\w\.\(\)]+)?)" r"))?" r"(?: +(?:GENERATED ALWAYS)? ?AS +(?P\(" - r".*\))? ?(?PVIRTUAL|STORED)?)?" + r".*\))? ?(?PVIRTUAL|STORED)?" + r"(?: +(?P(?:NOT )?NULL))?" + r")?" r"(?: +(?PAUTO_INCREMENT))?" r"(?: +COMMENT +'(?P(?:''|[^'])*)')?" r"(?: +COLUMN_FORMAT +(?P\w+))?" diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py index f3d1f34599b..79e7198ef3d 100644 --- a/test/dialect/mysql/test_reflection.py +++ b/test/dialect/mysql/test_reflection.py @@ -764,103 +764,152 @@ def test_system_views(self): view_names = dialect.get_view_names(connection, "information_schema") self.assert_("TABLES" in view_names) - def test_nullable_reflection(self, metadata, connection): - """test reflection of NULL/NOT NULL, in particular with TIMESTAMP - defaults where MySQL is inconsistent in how it reports CREATE TABLE. 
- - """ - meta = metadata - - # this is ideally one table, but older MySQL versions choke - # on the multiple TIMESTAMP columns - row = connection.exec_driver_sql( - "show variables like '%%explicit_defaults_for_timestamp%%'" - ).first() - explicit_defaults_for_timestamp = row[1].lower() in ("on", "1", "true") - - reflected = [] - for idx, cols in enumerate( + @testing.combinations( + ( [ - [ - "x INTEGER NULL", - "y INTEGER NOT NULL", - "z INTEGER", - "q TIMESTAMP NULL", - ], - ["p TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP"], - ["r TIMESTAMP NOT NULL"], - ["s TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP"], - ["t TIMESTAMP"], - ["u TIMESTAMP DEFAULT CURRENT_TIMESTAMP"], - ] - ): - Table("nn_t%d" % idx, meta) # to allow DROP - - connection.exec_driver_sql( - """ - CREATE TABLE nn_t%d ( - %s - ) - """ - % (idx, ", \n".join(cols)) - ) - - reflected.extend( - { - "name": d["name"], - "nullable": d["nullable"], - "default": d["default"], - } - for d in inspect(connection).get_columns("nn_t%d" % idx) - ) - - if connection.dialect._is_mariadb_102: - current_timestamp = "current_timestamp()" - else: - current_timestamp = "CURRENT_TIMESTAMP" - - eq_( - reflected, + "x INTEGER NULL", + "y INTEGER NOT NULL", + "z INTEGER", + "q TIMESTAMP NULL", + ], [ {"name": "x", "nullable": True, "default": None}, {"name": "y", "nullable": False, "default": None}, {"name": "z", "nullable": True, "default": None}, {"name": "q", "nullable": True, "default": None}, - {"name": "p", "nullable": True, "default": current_timestamp}, + ], + ), + ( + ["p TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP"], + [ + { + "name": "p", + "nullable": True, + "default": "CURRENT_TIMESTAMP", + } + ], + ), + ( + ["r TIMESTAMP NOT NULL"], + [ { "name": "r", "nullable": False, - "default": None - if explicit_defaults_for_timestamp - else ( - "%(current_timestamp)s " - "ON UPDATE %(current_timestamp)s" - ) - % {"current_timestamp": current_timestamp}, - }, - {"name": "s", "nullable": False, "default": current_timestamp}, + "default": None, + "non_explicit_defaults_for_ts_default": ( + "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP" + ), + } + ], + ), + ( + ["s TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP"], + [ + { + "name": "s", + "nullable": False, + "default": "CURRENT_TIMESTAMP", + } + ], + ), + ( + ["t TIMESTAMP"], + [ { "name": "t", - "nullable": True - if explicit_defaults_for_timestamp - else False, - "default": None - if explicit_defaults_for_timestamp - else ( - "%(current_timestamp)s " - "ON UPDATE %(current_timestamp)s" - ) - % {"current_timestamp": current_timestamp}, - }, + "nullable": True, + "default": None, + "non_explicit_defaults_for_ts_nullable": False, + "non_explicit_defaults_for_ts_default": ( + "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP" + ), + } + ], + ), + ( + ["u TIMESTAMP DEFAULT CURRENT_TIMESTAMP"], + [ { "name": "u", - "nullable": True - if explicit_defaults_for_timestamp - else False, - "default": current_timestamp, - }, + "nullable": True, + "non_explicit_defaults_for_ts_nullable": False, + "default": "CURRENT_TIMESTAMP", + } ], - ) + ), + ( + ["v INTEGER GENERATED ALWAYS AS (4711) VIRTUAL NOT NULL"], + [ + { + "name": "v", + "nullable": False, + "default": None, + } + ], + testing.requires.mysql_notnull_generated_columns, + ), + argnames="ddl_columns,expected_reflected", + ) + def test_nullable_reflection( + self, metadata, connection, ddl_columns, expected_reflected + ): + """test reflection of NULL/NOT NULL, in particular with TIMESTAMP + defaults where MySQL is inconsistent in how it reports CREATE TABLE. 
+ + """ + row = connection.exec_driver_sql( + "show variables like '%%explicit_defaults_for_timestamp%%'" + ).first() + explicit_defaults_for_timestamp = row[1].lower() in ("on", "1", "true") + + def get_expected_default(er): + if ( + not explicit_defaults_for_timestamp + and "non_explicit_defaults_for_ts_default" in er + ): + default = er["non_explicit_defaults_for_ts_default"] + else: + default = er["default"] + + if default is not None and connection.dialect._is_mariadb_102: + default = default.replace( + "CURRENT_TIMESTAMP", "current_timestamp()" + ) + + return default + + def get_expected_nullable(er): + if ( + not explicit_defaults_for_timestamp + and "non_explicit_defaults_for_ts_nullable" in er + ): + return er["non_explicit_defaults_for_ts_nullable"] + else: + return er["nullable"] + + expected_reflected = [ + { + "name": er["name"], + "nullable": get_expected_nullable(er), + "default": get_expected_default(er), + } + for er in expected_reflected + ] + + Table("nullable_refl", metadata) + + cols_ddl = ", \n".join(ddl_columns) + connection.exec_driver_sql(f"CREATE TABLE nullable_refl ({cols_ddl})") + + reflected = [ + { + "name": d["name"], + "nullable": d["nullable"], + "default": d["default"], + } + for d in inspect(connection).get_columns("nullable_refl") + ] + eq_(reflected, expected_reflected) def test_reflection_with_unique_constraint(self, metadata, connection): insp = inspect(connection) diff --git a/test/requirements.py b/test/requirements.py index a5a389441dd..8b137fe4675 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -1693,6 +1693,10 @@ def mysql_for_update(self): def mysql_fsp(self): return only_if(["mysql >= 5.6.4", "mariadb"]) + @property + def mysql_notnull_generated_columns(self): + return only_if(["mysql >= 5.7"]) + @property def mysql_fully_case_sensitive(self): return only_if(self._has_mysql_fully_case_sensitive) From 8b506451ac6bdf1fc257ff2069d2547260fd5dd6 Mon Sep 17 00:00:00 2001 From: Martijn Pieters Date: Tue, 16 Jan 2024 07:03:09 -0500 Subject: [PATCH 083/544] Correct type hint for FunctionElement.table_valued() ### Description The documentation and the type annotations for `TableValueType()` clearly state that both strings and column expression arguments are accepted but the annotation omits `str`, which is the most common use case. ### Checklist This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. 
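For reference, a short usage sketch of the call form whose annotation is
corrected here (the JSON payload and column names are purely illustrative):

    from sqlalchemy import func, select

    # string names as well as column expressions are accepted at runtime;
    # previously type checkers flagged the string form because the
    # annotation omitted ``str``
    tv = func.json_each('{"a": 1, "b": 2}').table_valued("key", "value")
    stmt = select(tv.c.key, tv.c.value)

The runtime behavior is unchanged; only the annotation is widened.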
Closes: #10886 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10886 Pull-request-sha: 624a97f051b378516518a30d88e7f216456d1c50 Change-Id: I2a1d2eb9b70815c33a27dd238ff2a9f11e5f5a64 (cherry picked from commit 48d3ad2d90308905709d886fb38dc1de2e2e2478) --- lib/sqlalchemy/sql/functions.py | 3 ++- test/typing/plain_files/sql/functions_again.py | 6 ++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index 5cb5812d692..1ea68b87e60 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -68,6 +68,7 @@ from ._typing import _ByArgument from ._typing import _ColumnExpressionArgument from ._typing import _ColumnExpressionOrLiteralArgument + from ._typing import _ColumnExpressionOrStrLabelArgument from ._typing import _TypeEngineArgument from .base import _EntityNamespace from .elements import ClauseElement @@ -235,7 +236,7 @@ def scalar_table_valued( return ScalarFunctionColumn(self, name, type_) def table_valued( - self, *expr: _ColumnExpressionArgument[Any], **kw: Any + self, *expr: _ColumnExpressionOrStrLabelArgument[Any], **kw: Any ) -> TableValuedAlias: r"""Return a :class:`_sql.TableValuedAlias` representation of this :class:`_functions.FunctionElement` with table-valued expressions added. diff --git a/test/typing/plain_files/sql/functions_again.py b/test/typing/plain_files/sql/functions_again.py index da656f2d1d9..1919218f58d 100644 --- a/test/typing/plain_files/sql/functions_again.py +++ b/test/typing/plain_files/sql/functions_again.py @@ -54,3 +54,9 @@ class Foo(Base): ).group_by(Foo.a) # EXPECTED_TYPE: Select[Tuple[int, str]] reveal_type(stmt2) + + +# EXPECTED_TYPE: TableValuedAlias +reveal_type(func.json_each().table_valued("key", "value")) +# EXPECTED_TYPE: TableValuedAlias +reveal_type(func.json_each().table_valued(Foo.a, Foo.b)) From ba70a7d605c805ce9ea269873dd03f8b9aedfdd5 Mon Sep 17 00:00:00 2001 From: Gord Thompson Date: Tue, 23 Jan 2024 12:22:32 -0700 Subject: [PATCH 084/544] Update cascades.rst (#10918) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit "delete-cascade" → "delete-orphan" (cherry picked from commit d0bcf95cb022934d101aa94411f320c4e3bfb6aa) --- doc/build/orm/cascades.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/orm/cascades.rst b/doc/build/orm/cascades.rst index 4c1e365ef7d..20f96001e33 100644 --- a/doc/build/orm/cascades.rst +++ b/doc/build/orm/cascades.rst @@ -301,7 +301,7 @@ The feature by default works completely independently of database-configured In order to integrate more efficiently with this configuration, additional directives described at :ref:`passive_deletes` should be used. -.. warning:: Note that the ORM's "delete" and "delete-cascade" behavior applies +.. warning:: Note that the ORM's "delete" and "delete-orphan" behavior applies **only** to the use of the :meth:`_orm.Session.delete` method to mark individual ORM instances for deletion within the :term:`unit of work` process. 
It does **not** apply to "bulk" deletes, which would be emitted using From 5d02b4152f61585da2c65e54ea6f33620183421b Mon Sep 17 00:00:00 2001 From: Jeff Balogh Date: Tue, 23 Jan 2024 13:25:10 -0600 Subject: [PATCH 085/544] fix a docs typo (#10912) The code following this declaration suggests the employee.type should be `sysadmin` (cherry picked from commit abadb149597e5891551b84e47d3085c3f1753ef2) --- doc/build/orm/inheritance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst index fe3e06bf0f0..574b4fc739a 100644 --- a/doc/build/orm/inheritance.rst +++ b/doc/build/orm/inheritance.rst @@ -638,7 +638,7 @@ using :paramref:`_orm.Mapper.polymorphic_abstract` as follows:: class SysAdmin(Technologist): """a systems administrator""" - __mapper_args__ = {"polymorphic_identity": "engineer"} + __mapper_args__ = {"polymorphic_identity": "sysadmin"} In the above example, the new classes ``Technologist`` and ``Executive`` are ordinary mapped classes, and also indicate new columns to be added to the From 20d7fb68c2a8871d86f868bd7867b987e57425fe Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 23 Jan 2024 14:06:01 -0500 Subject: [PATCH 086/544] suffix index names with "_history" just like tables Fixed regression in history_meta example where the use of :meth:`_schema.MetaData.to_metadata` to make a copy of the history table would also copy indexes (which is a good thing), but causing naming conflicts indexes regardless of naming scheme used for those indexes. A "_history" suffix is now added to these indexes in the same way as is achieved for the table name. Fixes: #10920 Change-Id: I78823650956ff979d500bedbdbce261048894ce9 (cherry picked from commit dab1da6049d210843c16d96b20ae0efc063eead3) --- doc/build/changelog/unreleased_20/10920.rst | 11 +++ examples/versioned_history/history_meta.py | 3 + examples/versioned_history/test_versioning.py | 97 ++++++++++++++++++- 3 files changed, 110 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/10920.rst diff --git a/doc/build/changelog/unreleased_20/10920.rst b/doc/build/changelog/unreleased_20/10920.rst new file mode 100644 index 00000000000..e7bc7b8acdb --- /dev/null +++ b/doc/build/changelog/unreleased_20/10920.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, examples + :tickets: 10920 + + Fixed regression in history_meta example where the use of + :meth:`_schema.MetaData.to_metadata` to make a copy of the history table + would also copy indexes (which is a good thing), but causing naming + conflicts indexes regardless of naming scheme used for those indexes. A + "_history" suffix is now added to these indexes in the same way as is + achieved for the table name. 
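+
+    As a minimal illustration (the class and index names here are
+    hypothetical, assuming the example's ``Versioned`` mixin and a
+    declarative ``Base``)::
+
+        class SomeClass(Versioned, Base):
+            __tablename__ = "sometable"
+
+            id = Column(Integer, primary_key=True)
+            x = Column(Integer)
+
+            __table_args__ = (Index("my_index", x),)
+
+    The copied index on the generated ``sometable_history`` table is now
+    named ``my_index_history``, avoiding the previous naming conflict.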
+ diff --git a/examples/versioned_history/history_meta.py b/examples/versioned_history/history_meta.py index 806267cb414..3f26832b9ed 100644 --- a/examples/versioned_history/history_meta.py +++ b/examples/versioned_history/history_meta.py @@ -56,6 +56,9 @@ def _history_mapper(local_mapper): local_mapper.local_table.metadata, name=local_mapper.local_table.name + "_history", ) + for idx in history_table.indexes: + if idx.name is not None: + idx.name += "_history" for orig_c, history_c in zip( local_mapper.local_table.c, history_table.c diff --git a/examples/versioned_history/test_versioning.py b/examples/versioned_history/test_versioning.py index 7b9c82c60fa..ac122581a4f 100644 --- a/examples/versioned_history/test_versioning.py +++ b/examples/versioned_history/test_versioning.py @@ -8,11 +8,15 @@ from sqlalchemy import Column from sqlalchemy import create_engine from sqlalchemy import ForeignKey +from sqlalchemy import ForeignKeyConstraint +from sqlalchemy import Index from sqlalchemy import inspect from sqlalchemy import Integer from sqlalchemy import join from sqlalchemy import select from sqlalchemy import String +from sqlalchemy import testing +from sqlalchemy import UniqueConstraint from sqlalchemy.orm import clear_mappers from sqlalchemy.orm import column_property from sqlalchemy.orm import declarative_base @@ -31,7 +35,6 @@ from .history_meta import Versioned from .history_meta import versioned_session - warnings.simplefilter("error") @@ -127,6 +130,98 @@ class SomeClass(Versioned, self.Base, ComparableEntity): ], ) + @testing.variation( + "constraint_type", + [ + "index_single_col", + "composite_index", + "explicit_name_index", + "unique_constraint", + "unique_constraint_naming_conv", + "unique_constraint_explicit_name", + "fk_constraint", + "fk_constraint_naming_conv", + "fk_constraint_explicit_name", + ], + ) + def test_index_naming(self, constraint_type): + """test #10920""" + + if ( + constraint_type.unique_constraint_naming_conv + or constraint_type.fk_constraint_naming_conv + ): + self.Base.metadata.naming_convention = { + "ix": "ix_%(column_0_label)s", + "uq": "uq_%(table_name)s_%(column_0_name)s", + "fk": ( + "fk_%(table_name)s_%(column_0_name)s" + "_%(referred_table_name)s" + ), + } + + if ( + constraint_type.fk_constraint + or constraint_type.fk_constraint_naming_conv + or constraint_type.fk_constraint_explicit_name + ): + + class Related(self.Base): + __tablename__ = "related" + + id = Column(Integer, primary_key=True) + + class SomeClass(Versioned, self.Base): + __tablename__ = "sometable" + + id = Column(Integer, primary_key=True) + x = Column(Integer) + y = Column(Integer) + + # Index objects are copied and these have to have a new name + if constraint_type.index_single_col: + __table_args__ = ( + Index( + None, + x, + ), + ) + elif constraint_type.composite_index: + __table_args__ = (Index(None, x, y),) + elif constraint_type.explicit_name_index: + __table_args__ = (Index("my_index", x, y),) + # unique constraint objects are discarded. + elif ( + constraint_type.unique_constraint + or constraint_type.unique_constraint_naming_conv + ): + __table_args__ = (UniqueConstraint(x, y),) + elif constraint_type.unique_constraint_explicit_name: + __table_args__ = (UniqueConstraint(x, y, name="my_uq"),) + # foreign key constraint objects are copied and have the same + # name, but no database in Core has any problem with this as the + # names are local to the parent table. 
+ elif ( + constraint_type.fk_constraint + or constraint_type.fk_constraint_naming_conv + ): + __table_args__ = (ForeignKeyConstraint([x], [Related.id]),) + elif constraint_type.fk_constraint_explicit_name: + __table_args__ = ( + ForeignKeyConstraint([x], [Related.id], name="my_fk"), + ) + else: + constraint_type.fail() + + eq_( + set(idx.name + "_history" for idx in SomeClass.__table__.indexes), + set( + idx.name + for idx in SomeClass.__history_mapper__.local_table.indexes + ), + ) + self.create_tables() + def test_discussion_9546(self): class ThingExternal(Versioned, self.Base): __tablename__ = "things_external" From e83f80d7154727fbc83e74993943c5e6b686661b Mon Sep 17 00:00:00 2001 From: Eugene Toder Date: Thu, 25 Jan 2024 17:19:44 -0500 Subject: [PATCH 087/544] Fix typo in a docstring (#10925) * Fix typo in a docstring It's "compiled_cache" not "query_cache". * Update async engine as well (cherry picked from commit e7cda85d81038cf390a15b93d5276754a8cc2514) --- lib/sqlalchemy/engine/base.py | 2 +- lib/sqlalchemy/ext/asyncio/engine.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index dcce3ed342b..4a5fd15e34a 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -2990,7 +2990,7 @@ def clear_compiled_cache(self) -> None: This applies **only** to the built-in cache that is established via the :paramref:`_engine.create_engine.query_cache_size` parameter. It will not impact any dictionary caches that were passed via the - :paramref:`.Connection.execution_options.query_cache` parameter. + :paramref:`.Connection.execution_options.compiled_cache` parameter. .. versionadded:: 1.4 diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index 02b70ecd583..b5c8c7d76f7 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -1165,7 +1165,7 @@ def clear_compiled_cache(self) -> None: This applies **only** to the built-in cache that is established via the :paramref:`_engine.create_engine.query_cache_size` parameter. It will not impact any dictionary caches that were passed via the - :paramref:`.Connection.execution_options.query_cache` parameter. + :paramref:`.Connection.execution_options.compiled_cache` parameter. .. versionadded:: 1.4 From f5942427f394245c5c2e82ffe76ad7acce77dc7a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 26 Jan 2024 09:17:31 -0500 Subject: [PATCH 088/544] re-establish section on why __init__ not called on load this section got lost, leaving the FAQ to point to an empty document. Rewrite a new section introducing that __init__ is not called on load, illustrate strategies. I am not that happy with *where* this doc is, as this is supposed to be "mapping styles" high level introductory type stuff, but there's nowhere else for it. References: https://github.com/sqlalchemy/sqlalchemy/discussions/10923 Change-Id: Ie9260e4076bc82da0ef6dc11349a85beb0223a33 (cherry picked from commit f7b40c0102c33faf350917f5b98c61d4c6fbec90) --- doc/build/faq/sessions.rst | 2 +- doc/build/orm/mapping_styles.rst | 100 +++++++++++++++++++++++++++++++ lib/sqlalchemy/orm/events.py | 8 ++- 3 files changed, 107 insertions(+), 3 deletions(-) diff --git a/doc/build/faq/sessions.rst b/doc/build/faq/sessions.rst index a2c61c0a41d..a95580ef514 100644 --- a/doc/build/faq/sessions.rst +++ b/doc/build/faq/sessions.rst @@ -370,7 +370,7 @@ See :ref:`session_deleting_from_collections` for a description of this behavior. 
why isn't my ``__init__()`` called when I load objects? ------------------------------------------------------- -See :ref:`mapping_constructors` for a description of this behavior. +See :ref:`mapped_class_load_events` for a description of this behavior. how do I use ON DELETE CASCADE with SA's ORM? --------------------------------------------- diff --git a/doc/build/orm/mapping_styles.rst b/doc/build/orm/mapping_styles.rst index fbe4267be78..4e3e3183797 100644 --- a/doc/build/orm/mapping_styles.rst +++ b/doc/build/orm/mapping_styles.rst @@ -370,6 +370,13 @@ An object of type ``User`` above will have a constructor which allows Python dataclasses, and allows for a highly configurable constructor form. +.. warning:: + + The ``__init__()`` method of the class is called only when the object is + constructed in Python code, and **not when an object is loaded or refreshed + from the database**. See the next section :ref:`mapped_class_load_events` + for a primer on how to invoke special logic when objects are loaded. + A class that includes an explicit ``__init__()`` method will maintain that method, and no default constructor will be applied. @@ -404,6 +411,99 @@ will also feature the default constructor associated with the :class:`_orm.regis constructor when they are mapped via the :meth:`_orm.registry.map_imperatively` method. +.. _mapped_class_load_events: + +Maintaining Non-Mapped State Across Loads +------------------------------------------ + +The ``__init__()`` method of the mapped class is invoked when the object +is constructed directly in Python code:: + + u1 = User(name="some name", fullname="some fullname") + +However, when an object is loaded using the ORM :class:`_orm.Session`, +the ``__init__()`` method is **not** called:: + + u1 = session.scalars(select(User).where(User.name == "some name")).first() + +The reason for this is that when loaded from the database, the operation +used to construct the object, in the above example the ``User``, is more +analogous to **deserialization**, such as unpickling, rather than initial +construction. The majority of the object's important state is not being +assembled for the first time, it's being re-loaded from database rows. + +Therefore to maintain state within the object that is not part of the data +that's stored to the database, such that this state is present when objects +are loaded as well as constructed, there are two general approaches detailed +below. + +1. Use Python descriptors like ``@property``, rather than state, to dynamically + compute attributes as needed. + + For simple attributes, this is the simplest approach and the least error prone. + For example if an object ``Point`` with ``Point.x`` and ``Point.y`` wanted + an attribute with the sum of these attributes:: + + class Point(Base): + __tablename__ = "point" + id: Mapped[int] = mapped_column(primary_key=True) + x: Mapped[int] + y: Mapped[int] + + @property + def x_plus_y(self): + return self.x + self.y + + An advantage of using dynamic descriptors is that the value is computed + every time, meaning it maintains the correct value as the underlying + attributes (``x`` and ``y`` in this case) might change. + + Other forms of the above pattern include Python standard library + :ref:`cached_property ` + decorator (which is cached, and not re-computed each time), as well as SQLAlchemy's :class:`.hybrid_property` decorator which + allows for attributes that can work for SQL querying as well. + + +2. 
Establish state on-load using :meth:`.InstanceEvents.load`, and optionally + supplemental methods :meth:`.InstanceEvents.refresh` and :meth:`.InstanceEvents.refresh_flush`. + + These are event hooks that are invoked whenever the object is loaded + from the database, or when it is refreshed after being expired. Typically + only the :meth:`.InstanceEvents.load` is needed, since non-mapped local object + state is not affected by expiration operations. To revise the ``Point`` + example above looks like:: + + from sqlalchemy import event + + + class Point(Base): + __tablename__ = "point" + id: Mapped[int] = mapped_column(primary_key=True) + x: Mapped[int] + y: Mapped[int] + + def __init__(self, x, y, **kw): + super().__init__(x=x, y=y, **kw) + self.x_plus_y = x + y + + + @event.listens_for(Point, "load") + def receive_load(target, context): + target.x_plus_y = target.x + target.y + + If using the refresh events as well, the event hooks can be stacked on + top of one callable if needed, as:: + + @event.listens_for(Point, "load") + @event.listens_for(Point, "refresh") + @event.listens_for(Point, "refresh_flush") + def receive_load(target, context, attrs=None): + target.x_plus_y = target.x + target.y + + Above, the ``attrs`` attribute will be present for the ``refresh`` and + ``refresh_flush`` events and indicate a list of attribute names that are + being refreshed. + .. _orm_mapper_inspection: Runtime Introspection of Mapped classes, Instances and Mappers diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index f0ba693f443..b5c4f94a72a 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -494,14 +494,14 @@ def on_load(instance, context): .. seealso:: + :ref:`mapped_class_load_events` + :meth:`.InstanceEvents.init` :meth:`.InstanceEvents.refresh` :meth:`.SessionEvents.loaded_as_persistent` - :ref:`mapping_constructors` - """ def refresh( @@ -534,6 +534,8 @@ def refresh( .. seealso:: + :ref:`mapped_class_load_events` + :meth:`.InstanceEvents.load` """ @@ -577,6 +579,8 @@ def refresh_flush( .. seealso:: + :ref:`mapped_class_load_events` + :ref:`orm_server_defaults` :ref:`metadata_defaults_toplevel` From 50d0ac28fbdf98c57118caa868f0e51914791567 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 29 Jan 2024 20:09:56 +0100 Subject: [PATCH 089/544] Export array module from postgresql Before this module was shadowed by same named array classe. Change-Id: I6fc56795c9363a9a07466fd36fcd49d0fb9658f7 (cherry picked from commit 47716f5a45eb91361a5fdabb420144a1807ca8ae) --- lib/sqlalchemy/dialects/postgresql/__init__.py | 2 ++ lib/sqlalchemy/dialects/postgresql/base.py | 6 +++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py index f85c1e990da..8dfa54d3aca 100644 --- a/lib/sqlalchemy/dialects/postgresql/__init__.py +++ b/lib/sqlalchemy/dialects/postgresql/__init__.py @@ -8,6 +8,7 @@ from types import ModuleType +from . import array as arraylib # noqa # must be above base and other dialects from . import asyncpg # noqa from . import base from . 
import pg8000 # noqa @@ -86,6 +87,7 @@ from .types import TSQUERY from .types import TSVECTOR + # Alias psycopg also as psycopg_async psycopg_async = type( "psycopg_async", (ModuleType,), {"dialect": psycopg.dialect_async} diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 8ce6230e045..91d0bc417f7 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1418,13 +1418,13 @@ def update(): from typing import TYPE_CHECKING from typing import Union -from . import array as _array -from . import hstore as _hstore +from . import arraylib as _array from . import json as _json from . import pg_catalog from . import ranges as _ranges from .ext import _regconfig_fn from .ext import aggregate_order_by +from .hstore import HSTORE from .named_types import CreateDomainType as CreateDomainType # noqa: F401 from .named_types import CreateEnumType as CreateEnumType # noqa: F401 from .named_types import DOMAIN as DOMAIN # noqa: F401 @@ -1614,7 +1614,7 @@ def update(): ischema_names = { "_array": _array.ARRAY, - "hstore": _hstore.HSTORE, + "hstore": HSTORE, "json": _json.JSON, "jsonb": _json.JSONB, "int4range": _ranges.INT4RANGE, From 573d004e5f210a199d8b25335c71f973fee21a4b Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 31 Jan 2024 23:20:26 +0100 Subject: [PATCH 090/544] Update black to 24.1.1 Change-Id: Iadaea7b798d8e99302e1acb430dc7b758ca61137 --- .pre-commit-config.yaml | 2 +- doc/build/changelog/migration_05.rst | 6 +- doc/build/changelog/migration_08.rst | 3 +- doc/build/changelog/migration_14.rst | 6 +- doc/build/core/connections.rst | 6 +- doc/build/errors.rst | 6 +- doc/build/orm/basic_relationships.rst | 6 +- doc/build/orm/collection_api.rst | 3 +- doc/build/orm/extensions/mypy.rst | 3 +- doc/build/orm/inheritance.rst | 9 +- doc/build/orm/persistence_techniques.rst | 12 +- examples/asyncio/async_orm.py | 1 + examples/asyncio/async_orm_writeonly.py | 1 + examples/asyncio/basic.py | 1 - .../custom_attributes/custom_management.py | 1 + examples/dogpile_caching/caching_query.py | 2 +- examples/dogpile_caching/environment.py | 1 + examples/dogpile_caching/fixture_data.py | 1 + examples/dogpile_caching/model.py | 1 + .../dogpile_caching/relationship_caching.py | 1 + .../discriminator_on_association.py | 1 + examples/generic_associations/generic_fk.py | 1 + .../table_per_association.py | 1 + .../generic_associations/table_per_related.py | 1 + examples/inheritance/concrete.py | 1 + examples/inheritance/joined.py | 1 + examples/inheritance/single.py | 1 + .../materialized_paths/materialized_paths.py | 1 + examples/performance/__init__.py | 1 + examples/performance/bulk_updates.py | 1 + examples/performance/large_resultsets.py | 1 + examples/performance/short_selects.py | 1 + examples/performance/single_inserts.py | 1 + examples/sharding/asyncio.py | 1 + examples/sharding/separate_databases.py | 1 + .../sharding/separate_schema_translates.py | 1 + examples/sharding/separate_tables.py | 1 + examples/versioned_rows/versioned_rows.py | 1 + .../versioned_rows_w_versionid.py | 1 + lib/sqlalchemy/connectors/pyodbc.py | 8 +- lib/sqlalchemy/dialects/mssql/base.py | 20 +- .../dialects/mssql/information_schema.py | 1 + lib/sqlalchemy/dialects/mssql/pyodbc.py | 1 - lib/sqlalchemy/dialects/mysql/base.py | 36 +- lib/sqlalchemy/dialects/oracle/base.py | 40 +- lib/sqlalchemy/dialects/oracle/cx_oracle.py | 6 +- lib/sqlalchemy/dialects/postgresql/array.py | 14 +- lib/sqlalchemy/dialects/postgresql/asyncpg.py 
| 6 +- lib/sqlalchemy/dialects/postgresql/base.py | 66 +-- lib/sqlalchemy/dialects/postgresql/dml.py | 6 +- .../dialects/postgresql/named_types.py | 1 - lib/sqlalchemy/dialects/postgresql/ranges.py | 8 +- lib/sqlalchemy/dialects/postgresql/types.py | 20 +- lib/sqlalchemy/dialects/sqlite/base.py | 6 +- lib/sqlalchemy/dialects/sqlite/dml.py | 6 +- lib/sqlalchemy/engine/base.py | 60 ++- lib/sqlalchemy/engine/create.py | 12 +- lib/sqlalchemy/engine/cursor.py | 20 +- lib/sqlalchemy/engine/default.py | 48 +- lib/sqlalchemy/engine/interfaces.py | 53 +-- lib/sqlalchemy/engine/result.py | 68 +-- lib/sqlalchemy/engine/row.py | 12 +- lib/sqlalchemy/engine/url.py | 6 +- lib/sqlalchemy/event/attr.py | 6 +- lib/sqlalchemy/event/base.py | 15 +- lib/sqlalchemy/event/legacy.py | 12 +- lib/sqlalchemy/event/registry.py | 6 +- lib/sqlalchemy/exc.py | 9 +- lib/sqlalchemy/ext/associationproxy.py | 109 ++--- lib/sqlalchemy/ext/asyncio/base.py | 12 +- lib/sqlalchemy/ext/asyncio/engine.py | 48 +- lib/sqlalchemy/ext/asyncio/result.py | 53 +-- lib/sqlalchemy/ext/asyncio/scoping.py | 33 +- lib/sqlalchemy/ext/asyncio/session.py | 39 +- lib/sqlalchemy/ext/automap.py | 26 +- lib/sqlalchemy/ext/horizontal_shard.py | 6 +- lib/sqlalchemy/ext/hybrid.py | 54 +-- lib/sqlalchemy/ext/instrumentation.py | 28 +- lib/sqlalchemy/ext/mutable.py | 18 +- lib/sqlalchemy/ext/mypy/apply.py | 14 +- lib/sqlalchemy/ext/mypy/decl_class.py | 6 +- lib/sqlalchemy/ext/mypy/util.py | 14 +- lib/sqlalchemy/inspection.py | 23 +- lib/sqlalchemy/log.py | 6 +- lib/sqlalchemy/orm/_orm_constructors.py | 24 +- lib/sqlalchemy/orm/_typing.py | 34 +- lib/sqlalchemy/orm/attributes.py | 29 +- lib/sqlalchemy/orm/base.py | 91 ++-- lib/sqlalchemy/orm/bulk_persistence.py | 52 +-- lib/sqlalchemy/orm/clsregistry.py | 10 +- lib/sqlalchemy/orm/collections.py | 25 +- lib/sqlalchemy/orm/context.py | 64 +-- lib/sqlalchemy/orm/decl_api.py | 60 +-- lib/sqlalchemy/orm/decl_base.py | 21 +- lib/sqlalchemy/orm/dependency.py | 8 +- lib/sqlalchemy/orm/descriptor_props.py | 12 +- lib/sqlalchemy/orm/dynamic.py | 3 +- lib/sqlalchemy/orm/events.py | 6 +- lib/sqlalchemy/orm/instrumentation.py | 6 +- lib/sqlalchemy/orm/interfaces.py | 7 +- lib/sqlalchemy/orm/loading.py | 28 +- lib/sqlalchemy/orm/mapped_collection.py | 4 +- lib/sqlalchemy/orm/mapper.py | 22 +- lib/sqlalchemy/orm/path_registry.py | 33 +- lib/sqlalchemy/orm/persistence.py | 44 +- lib/sqlalchemy/orm/properties.py | 11 +- lib/sqlalchemy/orm/query.py | 67 +-- lib/sqlalchemy/orm/relationships.py | 32 +- lib/sqlalchemy/orm/scoping.py | 54 +-- lib/sqlalchemy/orm/session.py | 94 ++-- lib/sqlalchemy/orm/state.py | 6 +- lib/sqlalchemy/orm/strategies.py | 24 +- lib/sqlalchemy/orm/strategy_options.py | 20 +- lib/sqlalchemy/orm/util.py | 34 +- lib/sqlalchemy/orm/writeonly.py | 9 +- lib/sqlalchemy/pool/base.py | 30 +- lib/sqlalchemy/pool/impl.py | 17 +- lib/sqlalchemy/sql/_elements_constructors.py | 6 +- .../sql/_selectable_constructors.py | 37 +- lib/sqlalchemy/sql/_typing.py | 78 ++-- lib/sqlalchemy/sql/annotation.py | 47 +- lib/sqlalchemy/sql/base.py | 51 +-- lib/sqlalchemy/sql/cache_key.py | 77 ++-- lib/sqlalchemy/sql/coercions.py | 65 ++- lib/sqlalchemy/sql/compiler.py | 200 +++++---- lib/sqlalchemy/sql/crud.py | 38 +- lib/sqlalchemy/sql/ddl.py | 13 +- lib/sqlalchemy/sql/default_comparator.py | 16 +- lib/sqlalchemy/sql/dml.py | 162 +++---- lib/sqlalchemy/sql/elements.py | 413 +++++++----------- lib/sqlalchemy/sql/functions.py | 178 +++----- lib/sqlalchemy/sql/lambdas.py | 27 +- lib/sqlalchemy/sql/operators.py | 89 ++-- 
lib/sqlalchemy/sql/roles.py | 6 +- lib/sqlalchemy/sql/schema.py | 119 +++-- lib/sqlalchemy/sql/selectable.py | 139 +++--- lib/sqlalchemy/sql/sqltypes.py | 113 ++--- lib/sqlalchemy/sql/traversals.py | 25 +- lib/sqlalchemy/sql/type_api.py | 70 ++- lib/sqlalchemy/sql/util.py | 45 +- lib/sqlalchemy/sql/visitors.py | 72 ++- lib/sqlalchemy/testing/assertsql.py | 6 +- lib/sqlalchemy/testing/config.py | 19 +- lib/sqlalchemy/testing/engines.py | 6 +- lib/sqlalchemy/testing/exclusions.py | 12 +- lib/sqlalchemy/testing/fixtures/mypy.py | 16 +- lib/sqlalchemy/testing/plugin/pytestplugin.py | 6 +- lib/sqlalchemy/testing/suite/test_insert.py | 16 +- .../testing/suite/test_reflection.py | 6 +- .../testing/suite/test_update_delete.py | 16 +- lib/sqlalchemy/util/_collections.py | 18 +- lib/sqlalchemy/util/_concurrency_py3k.py | 9 +- lib/sqlalchemy/util/_py_collections.py | 6 +- lib/sqlalchemy/util/langhelpers.py | 24 +- lib/sqlalchemy/util/queue.py | 6 +- lib/sqlalchemy/util/typing.py | 51 +-- setup.cfg | 2 +- test/aaa_profiling/test_orm.py | 1 - test/dialect/mssql/test_compiler.py | 40 +- test/dialect/mssql/test_reflection.py | 11 +- test/dialect/mysql/test_compiler.py | 1 - test/dialect/mysql/test_for_update.py | 1 + test/dialect/postgresql/test_compiler.py | 2 - test/dialect/postgresql/test_dialect.py | 12 +- test/dialect/postgresql/test_query.py | 1 - test/dialect/postgresql/test_types.py | 36 +- test/dialect/test_sqlite.py | 4 +- test/engine/test_execute.py | 12 +- test/engine/test_reconnect.py | 6 +- test/ext/declarative/test_inheritance.py | 35 +- .../mypy/plugin_files/mapped_attr_assign.py | 1 + test/ext/mypy/plugin_files/typing_err3.py | 1 + test/ext/test_associationproxy.py | 22 +- test/ext/test_automap.py | 13 +- test/ext/test_compiler.py | 8 +- test/ext/test_extendedattr.py | 1 - test/orm/declarative/test_abs_import_only.py | 6 +- test/orm/declarative/test_dc_transforms.py | 6 +- test/orm/declarative/test_inheritance.py | 3 - test/orm/declarative/test_mixin.py | 2 - .../test_tm_future_annotations_sync.py | 48 +- test/orm/declarative/test_typed_mapping.py | 48 +- test/orm/inheritance/test_assorted_poly.py | 6 +- test/orm/inheritance/test_basic.py | 2 +- test/orm/inheritance/test_relationship.py | 8 +- test/orm/inheritance/test_single.py | 16 +- test/orm/test_assorted_eager.py | 1 + test/orm/test_composites.py | 34 +- test/orm/test_cycles.py | 1 + test/orm/test_deprecations.py | 13 +- test/orm/test_dynamic.py | 24 +- test/orm/test_eager_relations.py | 4 - test/orm/test_events.py | 15 +- test/orm/test_hasparent.py | 1 + test/orm/test_lazy_relations.py | 13 +- test/orm/test_mapper.py | 1 - test/orm/test_merge.py | 4 +- test/orm/test_options.py | 24 +- test/orm/test_relationship_criteria.py | 48 +- test/orm/test_relationships.py | 23 - test/orm/test_selectable.py | 1 + test/orm/test_transaction.py | 15 +- test/orm/test_unitofwork.py | 6 +- test/orm/test_unitofworkv2.py | 2 - test/perf/many_table_reflection.py | 6 +- test/sql/test_compiler.py | 13 +- test/sql/test_cte.py | 2 +- test/sql/test_defaults.py | 1 - test/sql/test_external_traversal.py | 5 - test/sql/test_insert_exec.py | 9 +- test/sql/test_lambdas.py | 22 +- test/sql/test_metadata.py | 10 +- test/sql/test_operators.py | 57 ++- test/sql/test_query.py | 2 - test/sql/test_quote.py | 1 - test/sql/test_resultset.py | 14 +- test/sql/test_returning.py | 9 +- test/sql/test_selectable.py | 2 +- test/sql/test_text.py | 1 - test/sql/test_types.py | 18 +- .../ext/asyncio/async_sessionmaker.py | 1 + test/typing/plain_files/orm/issue_9340.py | 3 +- 
.../plain_files/orm/mapped_covariant.py | 3 +- test/typing/plain_files/orm/relationship.py | 1 + .../orm/trad_relationship_uselist.py | 1 + .../orm/traditional_relationship.py | 1 + .../plain_files/sql/common_sql_element.py | 1 - tox.ini | 2 +- 228 files changed, 2307 insertions(+), 2899 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f169100aa60..d523c0499af 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/python/black - rev: 23.3.0 + rev: 24.1.1 hooks: - id: black diff --git a/doc/build/changelog/migration_05.rst b/doc/build/changelog/migration_05.rst index d26a22c0d00..8b48f13f6b4 100644 --- a/doc/build/changelog/migration_05.rst +++ b/doc/build/changelog/migration_05.rst @@ -443,8 +443,7 @@ Schema/Types :: - class MyType(AdaptOldConvertMethods, TypeEngine): - ... + class MyType(AdaptOldConvertMethods, TypeEngine): ... * The ``quote`` flag on ``Column`` and ``Table`` as well as the ``quote_schema`` flag on ``Table`` now control quoting @@ -589,8 +588,7 @@ Removed :: class MyQuery(Query): - def get(self, ident): - ... + def get(self, ident): ... session = sessionmaker(query_cls=MyQuery)() diff --git a/doc/build/changelog/migration_08.rst b/doc/build/changelog/migration_08.rst index 0f661cca790..7b42aae4744 100644 --- a/doc/build/changelog/migration_08.rst +++ b/doc/build/changelog/migration_08.rst @@ -1394,8 +1394,7 @@ yet, we'll be adding the ``inspector`` argument into it directly:: @event.listens_for(Table, "column_reflect") - def listen_for_col(inspector, table, column_info): - ... + def listen_for_col(inspector, table, column_info): ... :ticket:`2418` diff --git a/doc/build/changelog/migration_14.rst b/doc/build/changelog/migration_14.rst index ae93003ae65..aef07864d60 100644 --- a/doc/build/changelog/migration_14.rst +++ b/doc/build/changelog/migration_14.rst @@ -552,8 +552,7 @@ SQLAlchemy has for a long time used a parameter-injecting decorator to help reso mutually-dependent module imports, like this:: @util.dependency_for("sqlalchemy.sql.dml") - def insert(self, dml, *args, **kw): - ... + def insert(self, dml, *args, **kw): ... Where the above function would be rewritten to no longer have the ``dml`` parameter on the outside. This would confuse code-linting tools into seeing a missing parameter @@ -2274,8 +2273,7 @@ in any way:: addresses = relationship(Address, backref=backref("user", viewonly=True)) - class Address(Base): - ... + class Address(Base): ... u1 = session.query(User).filter_by(name="x").first() diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst index 994daa8f541..1de53fdc85a 100644 --- a/doc/build/core/connections.rst +++ b/doc/build/core/connections.rst @@ -1490,10 +1490,8 @@ Basic guidelines include: def my_stmt(parameter, thing=False): stmt = lambda_stmt(lambda: select(table)) - stmt += ( - lambda s: s.where(table.c.x > parameter) - if thing - else s.where(table.c.y == parameter) + stmt += lambda s: ( + s.where(table.c.x > parameter) if thing else s.where(table.c.y == parameter) ) return stmt diff --git a/doc/build/errors.rst b/doc/build/errors.rst index 48fdedeace0..55ac40ae5f6 100644 --- a/doc/build/errors.rst +++ b/doc/build/errors.rst @@ -1777,8 +1777,7 @@ and associating the :class:`_engine.Engine` with the Base = declarative_base(metadata=metadata_obj) - class MyClass(Base): - ... + class MyClass(Base): ... 
session = Session() @@ -1796,8 +1795,7 @@ engine:: Base = declarative_base() - class MyClass(Base): - ... + class MyClass(Base): ... session = Session() diff --git a/doc/build/orm/basic_relationships.rst b/doc/build/orm/basic_relationships.rst index 7e3ce5ec551..0860f69fcf5 100644 --- a/doc/build/orm/basic_relationships.rst +++ b/doc/build/orm/basic_relationships.rst @@ -1116,15 +1116,13 @@ class were available, we could also apply it afterwards:: # we create a Parent class which knows nothing about Child - class Parent(Base): - ... + class Parent(Base): ... # ... later, in Module B, which is imported after module A: - class Child(Base): - ... + class Child(Base): ... from module_a import Parent diff --git a/doc/build/orm/collection_api.rst b/doc/build/orm/collection_api.rst index 2d56bb9b2b0..3d05981a819 100644 --- a/doc/build/orm/collection_api.rst +++ b/doc/build/orm/collection_api.rst @@ -533,8 +533,7 @@ methods can be changed as well: ... @collection.iterator - def hey_use_this_instead_for_iteration(self): - ... + def hey_use_this_instead_for_iteration(self): ... There is no requirement to be "list-like" or "set-like" at all. Collection classes can be any shape, so long as they have the append, remove and iterate diff --git a/doc/build/orm/extensions/mypy.rst b/doc/build/orm/extensions/mypy.rst index 042af370914..8275e94866b 100644 --- a/doc/build/orm/extensions/mypy.rst +++ b/doc/build/orm/extensions/mypy.rst @@ -179,8 +179,7 @@ following:: ) name: Mapped[Optional[str]] = Mapped._special_method(Column(String)) - def __init__(self, id: Optional[int] = ..., name: Optional[str] = ...) -> None: - ... + def __init__(self, id: Optional[int] = ..., name: Optional[str] = ...) -> None: ... some_user = User(id=5, name="user") diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst index 574b4fc739a..3764270d8c4 100644 --- a/doc/build/orm/inheritance.rst +++ b/doc/build/orm/inheritance.rst @@ -203,12 +203,10 @@ and ``Employee``:: } - class Manager(Employee): - ... + class Manager(Employee): ... - class Engineer(Employee): - ... + class Engineer(Employee): ... If the foreign key constraint is on a table corresponding to a subclass, the relationship should target that subclass instead. In the example @@ -248,8 +246,7 @@ established between the ``Manager`` and ``Company`` classes:: } - class Engineer(Employee): - ... + class Engineer(Employee): ... Above, the ``Manager`` class will have a ``Manager.company`` attribute; ``Company`` will have a ``Company.managers`` attribute that always diff --git a/doc/build/orm/persistence_techniques.rst b/doc/build/orm/persistence_techniques.rst index 982f27ebdc6..69fad33b22a 100644 --- a/doc/build/orm/persistence_techniques.rst +++ b/doc/build/orm/persistence_techniques.rst @@ -713,20 +713,16 @@ connections:: pass - class User(BaseA): - ... + class User(BaseA): ... - class Address(BaseA): - ... + class Address(BaseA): ... - class GameInfo(BaseB): - ... + class GameInfo(BaseB): ... - class GameStats(BaseB): - ... + class GameStats(BaseB): ... Session = sessionmaker() diff --git a/examples/asyncio/async_orm.py b/examples/asyncio/async_orm.py index 592323be429..daf810c65d2 100644 --- a/examples/asyncio/async_orm.py +++ b/examples/asyncio/async_orm.py @@ -2,6 +2,7 @@ for asynchronous ORM use. 
""" + from __future__ import annotations import asyncio diff --git a/examples/asyncio/async_orm_writeonly.py b/examples/asyncio/async_orm_writeonly.py index 263c0d29198..8ddc0ecdb23 100644 --- a/examples/asyncio/async_orm_writeonly.py +++ b/examples/asyncio/async_orm_writeonly.py @@ -2,6 +2,7 @@ of ORM collections under asyncio. """ + from __future__ import annotations import asyncio diff --git a/examples/asyncio/basic.py b/examples/asyncio/basic.py index 6cfa9ed0144..5994fc765e7 100644 --- a/examples/asyncio/basic.py +++ b/examples/asyncio/basic.py @@ -6,7 +6,6 @@ """ - import asyncio from sqlalchemy import Column diff --git a/examples/custom_attributes/custom_management.py b/examples/custom_attributes/custom_management.py index aa9ea7a6899..da22ee3276c 100644 --- a/examples/custom_attributes/custom_management.py +++ b/examples/custom_attributes/custom_management.py @@ -9,6 +9,7 @@ """ + from sqlalchemy import Column from sqlalchemy import create_engine from sqlalchemy import ForeignKey diff --git a/examples/dogpile_caching/caching_query.py b/examples/dogpile_caching/caching_query.py index b1848631565..8c85d74811c 100644 --- a/examples/dogpile_caching/caching_query.py +++ b/examples/dogpile_caching/caching_query.py @@ -19,6 +19,7 @@ dogpile.cache constructs. """ + from dogpile.cache.api import NO_VALUE from sqlalchemy import event @@ -28,7 +29,6 @@ class ORMCache: - """An add-on for an ORM :class:`.Session` optionally loads full results from a dogpile cache region. diff --git a/examples/dogpile_caching/environment.py b/examples/dogpile_caching/environment.py index 4b5a317917b..4962826280a 100644 --- a/examples/dogpile_caching/environment.py +++ b/examples/dogpile_caching/environment.py @@ -2,6 +2,7 @@ bootstrap fixture data if necessary. """ + from hashlib import md5 import os diff --git a/examples/dogpile_caching/fixture_data.py b/examples/dogpile_caching/fixture_data.py index 8387a2cb275..775fb63b1a8 100644 --- a/examples/dogpile_caching/fixture_data.py +++ b/examples/dogpile_caching/fixture_data.py @@ -3,6 +3,7 @@ with a randomly selected postal code. """ + import random from .environment import Base diff --git a/examples/dogpile_caching/model.py b/examples/dogpile_caching/model.py index cae2ae27762..926a5fa5d68 100644 --- a/examples/dogpile_caching/model.py +++ b/examples/dogpile_caching/model.py @@ -7,6 +7,7 @@ City --(has a)--> Country """ + from sqlalchemy import Column from sqlalchemy import ForeignKey from sqlalchemy import Integer diff --git a/examples/dogpile_caching/relationship_caching.py b/examples/dogpile_caching/relationship_caching.py index 058d5522259..a5b654b06c8 100644 --- a/examples/dogpile_caching/relationship_caching.py +++ b/examples/dogpile_caching/relationship_caching.py @@ -6,6 +6,7 @@ term cache. """ + import os from sqlalchemy import select diff --git a/examples/generic_associations/discriminator_on_association.py b/examples/generic_associations/discriminator_on_association.py index f0f1d7ed99c..93c1b29ef98 100644 --- a/examples/generic_associations/discriminator_on_association.py +++ b/examples/generic_associations/discriminator_on_association.py @@ -15,6 +15,7 @@ objects, but is also slightly more complex. 
""" + from sqlalchemy import Column from sqlalchemy import create_engine from sqlalchemy import ForeignKey diff --git a/examples/generic_associations/generic_fk.py b/examples/generic_associations/generic_fk.py index 5c70f93aac5..d45166d333f 100644 --- a/examples/generic_associations/generic_fk.py +++ b/examples/generic_associations/generic_fk.py @@ -17,6 +17,7 @@ or "table_per_association" instead of this approach. """ + from sqlalchemy import and_ from sqlalchemy import Column from sqlalchemy import create_engine diff --git a/examples/generic_associations/table_per_association.py b/examples/generic_associations/table_per_association.py index 2e412869f08..04786bd49be 100644 --- a/examples/generic_associations/table_per_association.py +++ b/examples/generic_associations/table_per_association.py @@ -11,6 +11,7 @@ """ + from sqlalchemy import Column from sqlalchemy import create_engine from sqlalchemy import ForeignKey diff --git a/examples/generic_associations/table_per_related.py b/examples/generic_associations/table_per_related.py index 5b83e6e68f3..23c75b0b9d6 100644 --- a/examples/generic_associations/table_per_related.py +++ b/examples/generic_associations/table_per_related.py @@ -16,6 +16,7 @@ is completely automated. """ + from sqlalchemy import Column from sqlalchemy import create_engine from sqlalchemy import ForeignKey diff --git a/examples/inheritance/concrete.py b/examples/inheritance/concrete.py index f7f6b3ac641..e718e2fc350 100644 --- a/examples/inheritance/concrete.py +++ b/examples/inheritance/concrete.py @@ -1,4 +1,5 @@ """Concrete-table (table-per-class) inheritance example.""" + from __future__ import annotations from typing import Annotated diff --git a/examples/inheritance/joined.py b/examples/inheritance/joined.py index 7dee935fab2..c2ba6942cc8 100644 --- a/examples/inheritance/joined.py +++ b/examples/inheritance/joined.py @@ -1,4 +1,5 @@ """Joined-table (table-per-subclass) inheritance example.""" + from __future__ import annotations from typing import Annotated diff --git a/examples/inheritance/single.py b/examples/inheritance/single.py index 8da75dd7c45..6337bb4b2e4 100644 --- a/examples/inheritance/single.py +++ b/examples/inheritance/single.py @@ -1,4 +1,5 @@ """Single-table (table-per-hierarchy) inheritance example.""" + from __future__ import annotations from typing import Annotated diff --git a/examples/materialized_paths/materialized_paths.py b/examples/materialized_paths/materialized_paths.py index f458270c726..19d3ed491c1 100644 --- a/examples/materialized_paths/materialized_paths.py +++ b/examples/materialized_paths/materialized_paths.py @@ -26,6 +26,7 @@ descendants and changing the prefix. 
""" + from sqlalchemy import Column from sqlalchemy import create_engine from sqlalchemy import func diff --git a/examples/performance/__init__.py b/examples/performance/__init__.py index 7e24b9b8fdd..34db251e5c7 100644 --- a/examples/performance/__init__.py +++ b/examples/performance/__init__.py @@ -205,6 +205,7 @@ def test_subqueryload(n): """ # noqa + import argparse import cProfile import gc diff --git a/examples/performance/bulk_updates.py b/examples/performance/bulk_updates.py index 8b782353df0..de5e6dc27da 100644 --- a/examples/performance/bulk_updates.py +++ b/examples/performance/bulk_updates.py @@ -3,6 +3,7 @@ """ + from sqlalchemy import Column from sqlalchemy import create_engine from sqlalchemy import Identity diff --git a/examples/performance/large_resultsets.py b/examples/performance/large_resultsets.py index b93459150e5..36171411276 100644 --- a/examples/performance/large_resultsets.py +++ b/examples/performance/large_resultsets.py @@ -13,6 +13,7 @@ provide a huge amount of functionality. """ + from sqlalchemy import Column from sqlalchemy import create_engine from sqlalchemy import Identity diff --git a/examples/performance/short_selects.py b/examples/performance/short_selects.py index 553c2fed5f0..bc6a9c79ac4 100644 --- a/examples/performance/short_selects.py +++ b/examples/performance/short_selects.py @@ -3,6 +3,7 @@ """ + import random from sqlalchemy import bindparam diff --git a/examples/performance/single_inserts.py b/examples/performance/single_inserts.py index 904fda2d039..4b8132c50af 100644 --- a/examples/performance/single_inserts.py +++ b/examples/performance/single_inserts.py @@ -4,6 +4,7 @@ a database connection, inserts the row, commits and closes. """ + from sqlalchemy import bindparam from sqlalchemy import Column from sqlalchemy import create_engine diff --git a/examples/sharding/asyncio.py b/examples/sharding/asyncio.py index 4b32034c9f1..a63b0fcaaae 100644 --- a/examples/sharding/asyncio.py +++ b/examples/sharding/asyncio.py @@ -8,6 +8,7 @@ the routine that generates new primary keys. """ + from __future__ import annotations import asyncio diff --git a/examples/sharding/separate_databases.py b/examples/sharding/separate_databases.py index f836aaec00a..9a700734c51 100644 --- a/examples/sharding/separate_databases.py +++ b/examples/sharding/separate_databases.py @@ -1,4 +1,5 @@ """Illustrates sharding using distinct SQLite databases.""" + from __future__ import annotations import datetime diff --git a/examples/sharding/separate_schema_translates.py b/examples/sharding/separate_schema_translates.py index 095ae1cc698..fd754356e5d 100644 --- a/examples/sharding/separate_schema_translates.py +++ b/examples/sharding/separate_schema_translates.py @@ -4,6 +4,7 @@ In this example we will set a "shard id" at all times. 
""" + from __future__ import annotations import datetime diff --git a/examples/sharding/separate_tables.py b/examples/sharding/separate_tables.py index 1caaaf329b0..3084e9f0693 100644 --- a/examples/sharding/separate_tables.py +++ b/examples/sharding/separate_tables.py @@ -1,5 +1,6 @@ """Illustrates sharding using a single SQLite database, that will however have multiple tables using a naming convention.""" + from __future__ import annotations import datetime diff --git a/examples/versioned_rows/versioned_rows.py b/examples/versioned_rows/versioned_rows.py index 96d2e399ec1..80803b39329 100644 --- a/examples/versioned_rows/versioned_rows.py +++ b/examples/versioned_rows/versioned_rows.py @@ -3,6 +3,7 @@ row is inserted with the new data, keeping the old row intact. """ + from sqlalchemy import Column from sqlalchemy import create_engine from sqlalchemy import event diff --git a/examples/versioned_rows/versioned_rows_w_versionid.py b/examples/versioned_rows/versioned_rows_w_versionid.py index fcf8082814a..d030ed065cc 100644 --- a/examples/versioned_rows/versioned_rows_w_versionid.py +++ b/examples/versioned_rows/versioned_rows_w_versionid.py @@ -6,6 +6,7 @@ as the ability to see which row is the most "current" version. """ + from sqlalchemy import Boolean from sqlalchemy import Column from sqlalchemy import create_engine diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py index 7e1cd3afe8f..f204d80a8e9 100644 --- a/lib/sqlalchemy/connectors/pyodbc.py +++ b/lib/sqlalchemy/connectors/pyodbc.py @@ -217,9 +217,11 @@ def do_set_input_sizes( cursor.setinputsizes( [ - (dbtype, None, None) - if not isinstance(dbtype, tuple) - else dbtype + ( + (dbtype, None, None) + if not isinstance(dbtype, tuple) + else dbtype + ) for key, dbtype, sqltype in list_of_tuples ] ) diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index e015dccdc99..9f5b010dd7f 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -1426,7 +1426,6 @@ class ROWVERSION(TIMESTAMP): class NTEXT(sqltypes.UnicodeText): - """MSSQL NTEXT type, for variable-length unicode text up to 2^30 characters.""" @@ -1596,12 +1595,12 @@ class UNIQUEIDENTIFIER(sqltypes.Uuid[sqltypes._UUID_RETURN]): @overload def __init__( self: UNIQUEIDENTIFIER[_python_UUID], as_uuid: Literal[True] = ... - ): - ... + ): ... @overload - def __init__(self: UNIQUEIDENTIFIER[str], as_uuid: Literal[False] = ...): - ... + def __init__( + self: UNIQUEIDENTIFIER[str], as_uuid: Literal[False] = ... + ): ... def __init__(self, as_uuid: bool = True): """Construct a :class:`_mssql.UNIQUEIDENTIFIER` type. @@ -2483,10 +2482,12 @@ def _render_json_extract_from_binary(self, binary, operator, **kw): type_expression = "ELSE CAST(JSON_VALUE(%s, %s) AS %s)" % ( self.process(binary.left, **kw), self.process(binary.right, **kw), - "FLOAT" - if isinstance(binary.type, sqltypes.Float) - else "NUMERIC(%s, %s)" - % (binary.type.precision, binary.type.scale), + ( + "FLOAT" + if isinstance(binary.type, sqltypes.Float) + else "NUMERIC(%s, %s)" + % (binary.type.precision, binary.type.scale) + ), ) elif binary.type._type_affinity is sqltypes.Boolean: # the NULL handling is particularly weird with boolean, so @@ -2522,7 +2523,6 @@ def visit_sequence(self, seq, **kw): class MSSQLStrictCompiler(MSSQLCompiler): - """A subclass of MSSQLCompiler which disables the usage of bind parameters where not allowed natively by MS-SQL. 
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py index 11771638832..0c5f2372de8 100644 --- a/lib/sqlalchemy/dialects/mssql/information_schema.py +++ b/lib/sqlalchemy/dialects/mssql/information_schema.py @@ -207,6 +207,7 @@ class NumericSqlVariant(TypeDecorator): int 1 is returned as "\x01\x00\x00\x00". On python 3 it returns the correct value as string. """ + impl = Unicode cache_ok = True diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py index f27dee1bd59..76ea046de99 100644 --- a/lib/sqlalchemy/dialects/mssql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py @@ -369,7 +369,6 @@ def provide_token(dialect, conn_rec, cargs, cparams): class _ms_numeric_pyodbc: - """Turns Decimals with adjusted() < 0 or > 7 into strings. The routines here are needed for older pyodbc versions diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 78bf4636afd..dacbb7afa27 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1945,17 +1945,19 @@ def visit_create_index(self, create, **kw): columns = [ self.sql_compiler.process( - elements.Grouping(expr) - if ( - isinstance(expr, elements.BinaryExpression) - or ( - isinstance(expr, elements.UnaryExpression) - and expr.modifier - not in (operators.desc_op, operators.asc_op) + ( + elements.Grouping(expr) + if ( + isinstance(expr, elements.BinaryExpression) + or ( + isinstance(expr, elements.UnaryExpression) + and expr.modifier + not in (operators.desc_op, operators.asc_op) + ) + or isinstance(expr, functions.FunctionElement) ) - or isinstance(expr, functions.FunctionElement) - ) - else expr, + else expr + ), include_table=False, literal_binds=True, ) @@ -1984,12 +1986,14 @@ def visit_create_index(self, create, **kw): # mapping specifying the prefix length for each column of the # index columns = ", ".join( - "%s(%d)" % (expr, length[col.name]) - if col.name in length - else ( - "%s(%d)" % (expr, length[expr]) - if expr in length - else "%s" % expr + ( + "%s(%d)" % (expr, length[col.name]) + if col.name in length + else ( + "%s(%d)" % (expr, length[expr]) + if expr in length + else "%s" % expr + ) ) for col, expr in zip(index.expressions, columns) ) diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 1a6144a28eb..a6a81384154 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -1464,9 +1464,9 @@ def __init__( self.use_ansi = use_ansi self.optimize_limits = optimize_limits self.exclude_tablespaces = exclude_tablespaces - self.enable_offset_fetch = ( - self._supports_offset_fetch - ) = enable_offset_fetch + self.enable_offset_fetch = self._supports_offset_fetch = ( + enable_offset_fetch + ) def initialize(self, connection): super().initialize(connection) @@ -2523,10 +2523,12 @@ def get_multi_table_comment( return ( ( (schema, self.normalize_name(table)), - {"text": comment} - if comment is not None - and not comment.startswith(ignore_mat_view) - else default(), + ( + {"text": comment} + if comment is not None + and not comment.startswith(ignore_mat_view) + else default() + ), ) for table, comment in result ) @@ -3068,9 +3070,11 @@ def get_multi_unique_constraints( table_uc[constraint_name] = uc = { "name": constraint_name, "column_names": [], - "duplicates_index": constraint_name - if constraint_name_orig in index_names - else None, + "duplicates_index": ( + 
constraint_name + if constraint_name_orig in index_names + else None + ), } else: uc = table_uc[constraint_name] @@ -3082,9 +3086,11 @@ def get_multi_unique_constraints( return ( ( key, - list(unique_cons[key].values()) - if key in unique_cons - else default(), + ( + list(unique_cons[key].values()) + if key in unique_cons + else default() + ), ) for key in ( (schema, self.normalize_name(obj_name)) @@ -3207,9 +3213,11 @@ def get_multi_check_constraints( return ( ( key, - check_constraints[key] - if key in check_constraints - else default(), + ( + check_constraints[key] + if key in check_constraints + else default() + ), ) for key in ( (schema, self.normalize_name(obj_name)) diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index 69ee82bd234..93462246647 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -840,9 +840,9 @@ def _generate_out_parameter_vars(self): ) for param in self.parameters: - param[ - quoted_bind_names.get(name, name) - ] = out_parameters[name] + param[quoted_bind_names.get(name, name)] = ( + out_parameters[name] + ) def _generate_cursor_outputtype_handler(self): output_handlers = {} diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index 9e81e8368c0..e88c27d2de7 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -46,7 +46,6 @@ def All(other, arrexpr, operator=operators.eq): class array(expression.ExpressionClauseList[_T]): - """A PostgreSQL ARRAY literal. This is used to produce ARRAY literals in SQL expressions, e.g.:: @@ -110,17 +109,17 @@ def __init__(self, clauses, **kw): main_type = ( type_arg if type_arg is not None - else self._type_tuple[0] - if self._type_tuple - else sqltypes.NULLTYPE + else self._type_tuple[0] if self._type_tuple else sqltypes.NULLTYPE ) if isinstance(main_type, ARRAY): self.type = ARRAY( main_type.item_type, - dimensions=main_type.dimensions + 1 - if main_type.dimensions is not None - else 2, + dimensions=( + main_type.dimensions + 1 + if main_type.dimensions is not None + else 2 + ), ) else: self.type = ARRAY(main_type) @@ -226,7 +225,6 @@ class SomeOrmClass(Base): """ class Comparator(sqltypes.ARRAY.Comparator): - """Define comparison operations for :class:`_types.ARRAY`. 
Note that these operations are in addition to those provided diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 85affdca3aa..2c460412c09 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -791,9 +791,9 @@ def _handle_exception(self, error): translated_error = exception_mapping[super_]( "%s: %s" % (type(error), error) ) - translated_error.pgcode = ( - translated_error.sqlstate - ) = getattr(error, "sqlstate", None) + translated_error.pgcode = translated_error.sqlstate = ( + getattr(error, "sqlstate", None) + ) raise translated_error from error else: raise error diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 91d0bc417f7..3fe8900ac44 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -2092,9 +2092,11 @@ def fetch_clause(self, select, **kw): text += "\n FETCH FIRST (%s)%s ROWS %s" % ( self.process(select._fetch_clause, **kw), " PERCENT" if select._fetch_clause_options["percent"] else "", - "WITH TIES" - if select._fetch_clause_options["with_ties"] - else "ONLY", + ( + "WITH TIES" + if select._fetch_clause_options["with_ties"] + else "ONLY" + ), ) return text @@ -2264,9 +2266,11 @@ def visit_create_index(self, create, **kw): ", ".join( [ self.sql_compiler.process( - expr.self_group() - if not isinstance(expr, expression.ColumnClause) - else expr, + ( + expr.self_group() + if not isinstance(expr, expression.ColumnClause) + else expr + ), include_table=False, literal_binds=True, ) @@ -2591,17 +2595,21 @@ def visit_DOMAIN(self, type_, identifier_preparer=None, **kw): def visit_TIMESTAMP(self, type_, **kw): return "TIMESTAMP%s %s" % ( - "(%d)" % type_.precision - if getattr(type_, "precision", None) is not None - else "", + ( + "(%d)" % type_.precision + if getattr(type_, "precision", None) is not None + else "" + ), (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE", ) def visit_TIME(self, type_, **kw): return "TIME%s %s" % ( - "(%d)" % type_.precision - if getattr(type_, "precision", None) is not None - else "", + ( + "(%d)" % type_.precision + if getattr(type_, "precision", None) is not None + else "" + ), (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE", ) @@ -3107,9 +3115,7 @@ def set_deferrable(self, connection, value): def get_deferrable(self, connection): raise NotImplementedError() - def _split_multihost_from_url( - self, url: URL - ) -> Union[ + def _split_multihost_from_url(self, url: URL) -> Union[ Tuple[None, None], Tuple[Tuple[Optional[str], ...], Tuple[Optional[int], ...]], ]: @@ -3641,9 +3647,11 @@ def get_multi_columns( # dictionary with (name, ) if default search path or (schema, name) # as keys enums = dict( - ((rec["name"],), rec) - if rec["visible"] - else ((rec["schema"], rec["name"]), rec) + ( + ((rec["name"],), rec) + if rec["visible"] + else ((rec["schema"], rec["name"]), rec) + ) for rec in self._load_enums( connection, schema="*", info_cache=kw.get("info_cache") ) @@ -3671,9 +3679,9 @@ def _handle_array_type(attype): for row_dict in rows: # ensure that each table has an entry, even if it has no columns if row_dict["name"] is None: - columns[ - (schema, row_dict["table_name"]) - ] = ReflectionDefaults.columns() + columns[(schema, row_dict["table_name"])] = ( + ReflectionDefaults.columns() + ) continue table_cols = columns[(schema, row_dict["table_name"])] @@ -4036,13 +4044,15 @@ def get_multi_pk_constraint( return ( ( 
(schema, table_name), - { - "constrained_columns": [] if cols is None else cols, - "name": pk_name, - "comment": comment, - } - if pk_name is not None - else default(), + ( + { + "constrained_columns": [] if cols is None else cols, + "name": pk_name, + "comment": comment, + } + if pk_name is not None + else default() + ), ) for table_name, cols, pk_name, comment, _ in result ) diff --git a/lib/sqlalchemy/dialects/postgresql/dml.py b/lib/sqlalchemy/dialects/postgresql/dml.py index f227d0fac52..4404ecd37bf 100644 --- a/lib/sqlalchemy/dialects/postgresql/dml.py +++ b/lib/sqlalchemy/dialects/postgresql/dml.py @@ -257,9 +257,9 @@ def __init__( self.inferred_target_elements = index_elements self.inferred_target_whereclause = index_where elif constraint is None: - self.constraint_target = ( - self.inferred_target_elements - ) = self.inferred_target_whereclause = None + self.constraint_target = self.inferred_target_elements = ( + self.inferred_target_whereclause + ) = None class OnConflictDoNothing(OnConflictClause): diff --git a/lib/sqlalchemy/dialects/postgresql/named_types.py b/lib/sqlalchemy/dialects/postgresql/named_types.py index a0a34a96488..56bec1dc732 100644 --- a/lib/sqlalchemy/dialects/postgresql/named_types.py +++ b/lib/sqlalchemy/dialects/postgresql/named_types.py @@ -163,7 +163,6 @@ def visit_enum(self, enum): class ENUM(NamedType, type_api.NativeForEmulated, sqltypes.Enum): - """PostgreSQL ENUM type. This is a subclass of :class:`_types.Enum` which includes diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py index 6faf5e11cd0..980f1449359 100644 --- a/lib/sqlalchemy/dialects/postgresql/ranges.py +++ b/lib/sqlalchemy/dialects/postgresql/ranges.py @@ -723,12 +723,12 @@ class AbstractRange(sqltypes.TypeEngine[Range[_T]]): __abstract__ = True @overload - def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: - ... + def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ... @overload - def adapt(self, cls: Type[TypeEngineMixin], **kw: Any) -> TypeEngine[Any]: - ... + def adapt( + self, cls: Type[TypeEngineMixin], **kw: Any + ) -> TypeEngine[Any]: ... def adapt( self, diff --git a/lib/sqlalchemy/dialects/postgresql/types.py b/lib/sqlalchemy/dialects/postgresql/types.py index 879389989c0..2acf63bef61 100644 --- a/lib/sqlalchemy/dialects/postgresql/types.py +++ b/lib/sqlalchemy/dialects/postgresql/types.py @@ -38,15 +38,14 @@ class PGUuid(sqltypes.UUID[sqltypes._UUID_RETURN]): @overload def __init__( self: PGUuid[_python_UUID], as_uuid: Literal[True] = ... - ) -> None: - ... + ) -> None: ... @overload - def __init__(self: PGUuid[str], as_uuid: Literal[False] = ...) -> None: - ... + def __init__( + self: PGUuid[str], as_uuid: Literal[False] = ... + ) -> None: ... - def __init__(self, as_uuid: bool = True) -> None: - ... + def __init__(self, as_uuid: bool = True) -> None: ... class BYTEA(sqltypes.LargeBinary): @@ -129,14 +128,12 @@ def column_expression(self, column: Any): class OID(sqltypes.TypeEngine[int]): - """Provide the PostgreSQL OID type.""" __visit_name__ = "OID" class REGCONFIG(sqltypes.TypeEngine[str]): - """Provide the PostgreSQL REGCONFIG type. .. versionadded:: 2.0.0rc1 @@ -147,7 +144,6 @@ class REGCONFIG(sqltypes.TypeEngine[str]): class TSQUERY(sqltypes.TypeEngine[str]): - """Provide the PostgreSQL TSQUERY type. .. versionadded:: 2.0.0rc1 @@ -158,7 +154,6 @@ class TSQUERY(sqltypes.TypeEngine[str]): class REGCLASS(sqltypes.TypeEngine[str]): - """Provide the PostgreSQL REGCLASS type. .. 
versionadded:: 1.2.7 @@ -169,7 +164,6 @@ class REGCLASS(sqltypes.TypeEngine[str]): class TIMESTAMP(sqltypes.TIMESTAMP): - """Provide the PostgreSQL TIMESTAMP type.""" __visit_name__ = "TIMESTAMP" @@ -190,7 +184,6 @@ def __init__( class TIME(sqltypes.TIME): - """PostgreSQL TIME type.""" __visit_name__ = "TIME" @@ -211,7 +204,6 @@ def __init__( class INTERVAL(type_api.NativeForEmulated, sqltypes._AbstractInterval): - """PostgreSQL INTERVAL type.""" __visit_name__ = "INTERVAL" @@ -281,7 +273,6 @@ def __init__( class TSVECTOR(sqltypes.TypeEngine[str]): - """The :class:`_postgresql.TSVECTOR` type implements the PostgreSQL text search type TSVECTOR. @@ -298,7 +289,6 @@ class TSVECTOR(sqltypes.TypeEngine[str]): class CITEXT(sqltypes.TEXT): - """Provide the PostgreSQL CITEXT type. .. versionadded:: 2.0.7 diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 59ba49c25ec..6db8214652a 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -2030,9 +2030,9 @@ def __init__( ) if self.dbapi.sqlite_version_info < (3, 35) or util.pypy: - self.update_returning = ( - self.delete_returning - ) = self.insert_returning = False + self.update_returning = self.delete_returning = ( + self.insert_returning + ) = False if self.dbapi.sqlite_version_info < (3, 32, 0): # https://www.sqlite.org/limits.html diff --git a/lib/sqlalchemy/dialects/sqlite/dml.py b/lib/sqlalchemy/dialects/sqlite/dml.py index 42e5b0fc7a5..dcf5e4482ee 100644 --- a/lib/sqlalchemy/dialects/sqlite/dml.py +++ b/lib/sqlalchemy/dialects/sqlite/dml.py @@ -198,9 +198,9 @@ def __init__( self.inferred_target_elements = index_elements self.inferred_target_whereclause = index_where else: - self.constraint_target = ( - self.inferred_target_elements - ) = self.inferred_target_whereclause = None + self.constraint_target = self.inferred_target_elements = ( + self.inferred_target_whereclause + ) = None class OnConflictDoNothing(OnConflictClause): diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 4a5fd15e34a..e577839c17d 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -205,9 +205,9 @@ def _log_debug(self, message: str, *arg: Any, **kw: Any) -> None: @property def _schema_translate_map(self) -> Optional[SchemaTranslateMapType]: - schema_translate_map: Optional[ - SchemaTranslateMapType - ] = self._execution_options.get("schema_translate_map", None) + schema_translate_map: Optional[SchemaTranslateMapType] = ( + self._execution_options.get("schema_translate_map", None) + ) return schema_translate_map @@ -218,9 +218,9 @@ def schema_for_object(self, obj: HasSchemaAttr) -> Optional[str]: """ name = obj.schema - schema_translate_map: Optional[ - SchemaTranslateMapType - ] = self._execution_options.get("schema_translate_map", None) + schema_translate_map: Optional[SchemaTranslateMapType] = ( + self._execution_options.get("schema_translate_map", None) + ) if ( schema_translate_map @@ -251,12 +251,10 @@ def execution_options( insertmanyvalues_page_size: int = ..., schema_translate_map: Optional[SchemaTranslateMapType] = ..., **opt: Any, - ) -> Connection: - ... + ) -> Connection: ... @overload - def execution_options(self, **opt: Any) -> Connection: - ... + def execution_options(self, **opt: Any) -> Connection: ... 
def execution_options(self, **opt: Any) -> Connection: r"""Set non-SQL options for the connection which take effect @@ -1262,8 +1260,7 @@ def scalar( parameters: Optional[_CoreSingleExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> Optional[_T]: - ... + ) -> Optional[_T]: ... @overload def scalar( @@ -1272,8 +1269,7 @@ def scalar( parameters: Optional[_CoreSingleExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> Any: - ... + ) -> Any: ... def scalar( self, @@ -1311,8 +1307,7 @@ def scalars( parameters: Optional[_CoreAnyExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> ScalarResult[_T]: - ... + ) -> ScalarResult[_T]: ... @overload def scalars( @@ -1321,8 +1316,7 @@ def scalars( parameters: Optional[_CoreAnyExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> ScalarResult[Any]: - ... + ) -> ScalarResult[Any]: ... def scalars( self, @@ -1356,8 +1350,7 @@ def execute( parameters: Optional[_CoreAnyExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> CursorResult[_T]: - ... + ) -> CursorResult[_T]: ... @overload def execute( @@ -1366,8 +1359,7 @@ def execute( parameters: Optional[_CoreAnyExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> CursorResult[Any]: - ... + ) -> CursorResult[Any]: ... def execute( self, @@ -2017,9 +2009,9 @@ def _exec_insertmany_context( engine_events = self._has_events or self.engine._has_events if self.dialect._has_events: - do_execute_dispatch: Iterable[ - Any - ] = self.dialect.dispatch.do_execute + do_execute_dispatch: Iterable[Any] = ( + self.dialect.dispatch.do_execute + ) else: do_execute_dispatch = () @@ -2380,9 +2372,9 @@ def _handle_dbapi_exception_noconnection( None, cast(Exception, e), dialect.loaded_dbapi.Error, - hide_parameters=engine.hide_parameters - if engine is not None - else False, + hide_parameters=( + engine.hide_parameters if engine is not None else False + ), connection_invalidated=is_disconnect, dialect=dialect, ) @@ -2419,9 +2411,9 @@ def _handle_dbapi_exception_noconnection( break if sqlalchemy_exception and is_disconnect != ctx.is_disconnect: - sqlalchemy_exception.connection_invalidated = ( - is_disconnect - ) = ctx.is_disconnect + sqlalchemy_exception.connection_invalidated = is_disconnect = ( + ctx.is_disconnect + ) if newraise: raise newraise.with_traceback(exc_info[2]) from e @@ -3029,12 +3021,10 @@ def execution_options( insertmanyvalues_page_size: int = ..., schema_translate_map: Optional[SchemaTranslateMapType] = ..., **opt: Any, - ) -> OptionEngine: - ... + ) -> OptionEngine: ... @overload - def execution_options(self, **opt: Any) -> OptionEngine: - ... + def execution_options(self, **opt: Any) -> OptionEngine: ... def execution_options(self, **opt: Any) -> OptionEngine: """Return a new :class:`_engine.Engine` that will provide diff --git a/lib/sqlalchemy/engine/create.py b/lib/sqlalchemy/engine/create.py index 16130ca4f10..fb3c3b79c72 100644 --- a/lib/sqlalchemy/engine/create.py +++ b/lib/sqlalchemy/engine/create.py @@ -82,13 +82,11 @@ def create_engine( query_cache_size: int = ..., use_insertmanyvalues: bool = ..., **kwargs: Any, -) -> Engine: - ... +) -> Engine: ... @overload -def create_engine(url: Union[str, URL], **kwargs: Any) -> Engine: - ... +def create_engine(url: Union[str, URL], **kwargs: Any) -> Engine: ... 
@util.deprecated_params( @@ -824,13 +822,11 @@ def create_pool_from_url( timeout: float = ..., use_lifo: bool = ..., **kwargs: Any, -) -> Pool: - ... +) -> Pool: ... @overload -def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool: - ... +def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool: ... def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool: diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index a46a9af16ff..c9390a9f11d 100644 --- a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ -151,7 +151,7 @@ class CursorResultMetaData(ResultMetaData): "_translated_indexes", "_safe_for_cache", "_unpickled", - "_key_to_index" + "_key_to_index", # don't need _unique_filters support here for now. Can be added # if a need arises. ) @@ -225,9 +225,11 @@ def _splice_horizontally( { key: ( # int index should be None for ambiguous key - value[0] + offset - if value[0] is not None and key not in keymap - else None, + ( + value[0] + offset + if value[0] is not None and key not in keymap + else None + ), value[1] + offset, *value[2:], ) @@ -362,13 +364,11 @@ def __init__( ) = context.result_column_struct num_ctx_cols = len(result_columns) else: - result_columns = ( # type: ignore - cols_are_ordered - ) = ( + result_columns = cols_are_ordered = ( # type: ignore num_ctx_cols - ) = ( - ad_hoc_textual - ) = loose_column_name_matching = textual_ordered = False + ) = ad_hoc_textual = loose_column_name_matching = ( + textual_ordered + ) = False # merge cursor.description with the column info # present in the compiled structure, if any diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 8fbfc234486..d64f05cdf52 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -844,9 +844,11 @@ def _deliver_insertmanyvalues_batches( ordered_rows = [ rows_by_sentinel[ tuple( - _resolver(parameters[_spk]) # type: ignore # noqa: E501 - if _resolver - else parameters[_spk] # type: ignore # noqa: E501 + ( + _resolver(parameters[_spk]) # type: ignore # noqa: E501 + if _resolver + else parameters[_spk] # type: ignore # noqa: E501 + ) for _resolver, _spk in zip( sentinel_value_resolvers, imv.sentinel_param_keys, @@ -1453,9 +1455,11 @@ def _init_compiled( assert positiontup is not None for compiled_params in self.compiled_parameters: l_param: List[Any] = [ - flattened_processors[key](compiled_params[key]) - if key in flattened_processors - else compiled_params[key] + ( + flattened_processors[key](compiled_params[key]) + if key in flattened_processors + else compiled_params[key] + ) for key in positiontup ] core_positional_parameters.append( @@ -1476,18 +1480,20 @@ def _init_compiled( for compiled_params in self.compiled_parameters: if escaped_names: d_param = { - escaped_names.get(key, key): flattened_processors[key]( - compiled_params[key] + escaped_names.get(key, key): ( + flattened_processors[key](compiled_params[key]) + if key in flattened_processors + else compiled_params[key] ) - if key in flattened_processors - else compiled_params[key] for key in compiled_params } else: d_param = { - key: flattened_processors[key](compiled_params[key]) - if key in flattened_processors - else compiled_params[key] + key: ( + flattened_processors[key](compiled_params[key]) + if key in flattened_processors + else compiled_params[key] + ) for key in compiled_params } @@ -2149,17 +2155,21 @@ def _exec_default_clause_element(self, column, default, type_): if compiled.positional: 
parameters = self.dialect.execute_sequence_format( [ - processors[key](compiled_params[key]) # type: ignore - if key in processors - else compiled_params[key] + ( + processors[key](compiled_params[key]) # type: ignore + if key in processors + else compiled_params[key] + ) for key in compiled.positiontup or () ] ) else: parameters = { - key: processors[key](compiled_params[key]) # type: ignore - if key in processors - else compiled_params[key] + key: ( + processors[key](compiled_params[key]) # type: ignore + if key in processors + else compiled_params[key] + ) for key in compiled_params } return self._execute_scalar( diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 9060f599e01..67eb68ae949 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -118,17 +118,13 @@ class DBAPIConnection(Protocol): """ # noqa: E501 - def close(self) -> None: - ... + def close(self) -> None: ... - def commit(self) -> None: - ... + def commit(self) -> None: ... - def cursor(self) -> DBAPICursor: - ... + def cursor(self) -> DBAPICursor: ... - def rollback(self) -> None: - ... + def rollback(self) -> None: ... autocommit: bool @@ -174,53 +170,43 @@ def description( ... @property - def rowcount(self) -> int: - ... + def rowcount(self) -> int: ... arraysize: int lastrowid: int - def close(self) -> None: - ... + def close(self) -> None: ... def execute( self, operation: Any, parameters: Optional[_DBAPISingleExecuteParams] = None, - ) -> Any: - ... + ) -> Any: ... def executemany( self, operation: Any, parameters: _DBAPIMultiExecuteParams, - ) -> Any: - ... + ) -> Any: ... - def fetchone(self) -> Optional[Any]: - ... + def fetchone(self) -> Optional[Any]: ... - def fetchmany(self, size: int = ...) -> Sequence[Any]: - ... + def fetchmany(self, size: int = ...) -> Sequence[Any]: ... - def fetchall(self) -> Sequence[Any]: - ... + def fetchall(self) -> Sequence[Any]: ... - def setinputsizes(self, sizes: Sequence[Any]) -> None: - ... + def setinputsizes(self, sizes: Sequence[Any]) -> None: ... - def setoutputsize(self, size: Any, column: Any) -> None: - ... + def setoutputsize(self, size: Any, column: Any) -> None: ... - def callproc(self, procname: str, parameters: Sequence[Any] = ...) -> Any: - ... + def callproc( + self, procname: str, parameters: Sequence[Any] = ... + ) -> Any: ... - def nextset(self) -> Optional[bool]: - ... + def nextset(self) -> Optional[bool]: ... - def __getattr__(self, key: str) -> Any: - ... + def __getattr__(self, key: str) -> Any: ... _CoreSingleExecuteParams = Mapping[str, Any] @@ -1303,8 +1289,7 @@ def initialize(self, connection: Connection) -> None: if TYPE_CHECKING: - def _overrides_default(self, method_name: str) -> bool: - ... + def _overrides_default(self, method_name: str) -> bool: ... def get_columns( self, diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index f1c18cf456f..56b3a68bc65 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -116,8 +116,7 @@ def _for_freeze(self) -> ResultMetaData: @overload def _key_fallback( self, key: Any, err: Optional[Exception], raiseerr: Literal[True] = ... - ) -> NoReturn: - ... + ) -> NoReturn: ... @overload def _key_fallback( @@ -125,14 +124,12 @@ def _key_fallback( key: Any, err: Optional[Exception], raiseerr: Literal[False] = ..., - ) -> None: - ... + ) -> None: ... @overload def _key_fallback( self, key: Any, err: Optional[Exception], raiseerr: bool = ... - ) -> Optional[NoReturn]: - ... 
+ ) -> Optional[NoReturn]: ... def _key_fallback( self, key: Any, err: Optional[Exception], raiseerr: bool = True @@ -731,8 +728,7 @@ def _only_one_row( raise_for_second_row: bool, raise_for_none: Literal[True], scalar: bool, - ) -> _R: - ... + ) -> _R: ... @overload def _only_one_row( @@ -740,8 +736,7 @@ def _only_one_row( raise_for_second_row: bool, raise_for_none: bool, scalar: bool, - ) -> Optional[_R]: - ... + ) -> Optional[_R]: ... def _only_one_row( self, @@ -1129,18 +1124,15 @@ def columns(self, *col_expressions: _KeyIndexType) -> Self: return self._column_slices(col_expressions) @overload - def scalars(self: Result[Tuple[_T]]) -> ScalarResult[_T]: - ... + def scalars(self: Result[Tuple[_T]]) -> ScalarResult[_T]: ... @overload def scalars( self: Result[Tuple[_T]], index: Literal[0] - ) -> ScalarResult[_T]: - ... + ) -> ScalarResult[_T]: ... @overload - def scalars(self, index: _KeyIndexType = 0) -> ScalarResult[Any]: - ... + def scalars(self, index: _KeyIndexType = 0) -> ScalarResult[Any]: ... def scalars(self, index: _KeyIndexType = 0) -> ScalarResult[Any]: """Return a :class:`_engine.ScalarResult` filtering object which @@ -1451,12 +1443,10 @@ def one_or_none(self) -> Optional[Row[_TP]]: ) @overload - def scalar_one(self: Result[Tuple[_T]]) -> _T: - ... + def scalar_one(self: Result[Tuple[_T]]) -> _T: ... @overload - def scalar_one(self) -> Any: - ... + def scalar_one(self) -> Any: ... def scalar_one(self) -> Any: """Return exactly one scalar result or raise an exception. @@ -1476,12 +1466,10 @@ def scalar_one(self) -> Any: ) @overload - def scalar_one_or_none(self: Result[Tuple[_T]]) -> Optional[_T]: - ... + def scalar_one_or_none(self: Result[Tuple[_T]]) -> Optional[_T]: ... @overload - def scalar_one_or_none(self) -> Optional[Any]: - ... + def scalar_one_or_none(self) -> Optional[Any]: ... def scalar_one_or_none(self) -> Optional[Any]: """Return exactly one scalar result or ``None``. @@ -1534,12 +1522,10 @@ def one(self) -> Row[_TP]: ) @overload - def scalar(self: Result[Tuple[_T]]) -> Optional[_T]: - ... + def scalar(self: Result[Tuple[_T]]) -> Optional[_T]: ... @overload - def scalar(self) -> Any: - ... + def scalar(self) -> Any: ... def scalar(self) -> Any: """Fetch the first column of the first row, and close the result set. @@ -1886,11 +1872,9 @@ def all(self) -> Sequence[_R]: # noqa: A001 """ ... - def __iter__(self) -> Iterator[_R]: - ... + def __iter__(self) -> Iterator[_R]: ... - def __next__(self) -> _R: - ... + def __next__(self) -> _R: ... def first(self) -> Optional[_R]: """Fetch the first object or ``None`` if no object is present. @@ -1924,12 +1908,10 @@ def one(self) -> _R: ... @overload - def scalar_one(self: TupleResult[Tuple[_T]]) -> _T: - ... + def scalar_one(self: TupleResult[Tuple[_T]]) -> _T: ... @overload - def scalar_one(self) -> Any: - ... + def scalar_one(self) -> Any: ... def scalar_one(self) -> Any: """Return exactly one scalar result or raise an exception. @@ -1947,12 +1929,12 @@ def scalar_one(self) -> Any: ... @overload - def scalar_one_or_none(self: TupleResult[Tuple[_T]]) -> Optional[_T]: - ... + def scalar_one_or_none( + self: TupleResult[Tuple[_T]], + ) -> Optional[_T]: ... @overload - def scalar_one_or_none(self) -> Optional[Any]: - ... + def scalar_one_or_none(self) -> Optional[Any]: ... def scalar_one_or_none(self) -> Optional[Any]: """Return exactly one or no scalar result. @@ -1970,12 +1952,10 @@ def scalar_one_or_none(self) -> Optional[Any]: ... @overload - def scalar(self: TupleResult[Tuple[_T]]) -> Optional[_T]: - ... 
+ def scalar(self: TupleResult[Tuple[_T]]) -> Optional[_T]: ... @overload - def scalar(self) -> Any: - ... + def scalar(self) -> Any: ... def scalar(self) -> Any: """Fetch the first column of the first row, and close the result diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py index f6209352288..bcaffee44f2 100644 --- a/lib/sqlalchemy/engine/row.py +++ b/lib/sqlalchemy/engine/row.py @@ -213,15 +213,12 @@ def _op(self, other: Any, op: Callable[[Any, Any], bool]) -> bool: if TYPE_CHECKING: @overload - def __getitem__(self, index: int) -> Any: - ... + def __getitem__(self, index: int) -> Any: ... @overload - def __getitem__(self, index: slice) -> Sequence[Any]: - ... + def __getitem__(self, index: slice) -> Sequence[Any]: ... - def __getitem__(self, index: Union[int, slice]) -> Any: - ... + def __getitem__(self, index: Union[int, slice]) -> Any: ... def __lt__(self, other: Any) -> bool: return self._op(other, operator.lt) @@ -359,8 +356,7 @@ class RowMapping(BaseRow, typing.Mapping["_KeyType", Any]): if TYPE_CHECKING: - def __getitem__(self, key: _KeyType) -> Any: - ... + def __getitem__(self, key: _KeyType) -> Any: ... else: __getitem__ = BaseRow._get_by_key_impl_mapping diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index db4f2879c7f..1eeb73a2368 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -253,14 +253,12 @@ def _str_dict( @overload def _assert_value( val: str, - ) -> str: - ... + ) -> str: ... @overload def _assert_value( val: Sequence[str], - ) -> Union[str, Tuple[str, ...]]: - ... + ) -> Union[str, Tuple[str, ...]]: ... def _assert_value( val: Union[str, Sequence[str]], diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py index 6d6b0fe136b..ef2b334d1b0 100644 --- a/lib/sqlalchemy/event/attr.py +++ b/lib/sqlalchemy/event/attr.py @@ -391,16 +391,14 @@ def __bool__(self) -> bool: class _MutexProtocol(Protocol): - def __enter__(self) -> bool: - ... + def __enter__(self) -> bool: ... def __exit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], - ) -> Optional[bool]: - ... + ) -> Optional[bool]: ... class _CompoundListener(_InstanceLevelDispatch[_ET]): diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py index 18a34624783..1f52e2eb799 100644 --- a/lib/sqlalchemy/event/base.py +++ b/lib/sqlalchemy/event/base.py @@ -42,9 +42,9 @@ from .. import util from ..util.typing import Literal -_registrars: MutableMapping[ - str, List[Type[_HasEventsDispatch[Any]]] -] = util.defaultdict(list) +_registrars: MutableMapping[str, List[Type[_HasEventsDispatch[Any]]]] = ( + util.defaultdict(list) +) def _is_event_name(name: str) -> bool: @@ -240,8 +240,7 @@ class _HasEventsDispatch(Generic[_ET]): if typing.TYPE_CHECKING: - def __getattr__(self, name: str) -> _InstanceLevelDispatch[_ET]: - ... + def __getattr__(self, name: str) -> _InstanceLevelDispatch[_ET]: ... def __init_subclass__(cls) -> None: """Intercept new Event subclasses and create associated _Dispatch @@ -430,12 +429,10 @@ def __init__(self, events: Type[_HasEventsDispatch[_ET]]): @overload def __get__( self, obj: Literal[None], cls: Type[Any] - ) -> Type[_Dispatch[_ET]]: - ... + ) -> Type[_Dispatch[_ET]]: ... @overload - def __get__(self, obj: Any, cls: Type[Any]) -> _DispatchCommon[_ET]: - ... + def __get__(self, obj: Any, cls: Type[Any]) -> _DispatchCommon[_ET]: ... 
def __get__(self, obj: Any, cls: Type[Any]) -> Any: if obj is None: diff --git a/lib/sqlalchemy/event/legacy.py b/lib/sqlalchemy/event/legacy.py index 067b7205840..57e561c390d 100644 --- a/lib/sqlalchemy/event/legacy.py +++ b/lib/sqlalchemy/event/legacy.py @@ -147,9 +147,9 @@ def _standard_listen_example( ) text %= { - "current_since": " (arguments as of %s)" % current_since - if current_since - else "", + "current_since": ( + " (arguments as of %s)" % current_since if current_since else "" + ), "event_name": fn.__name__, "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "", "named_event_arguments": ", ".join(dispatch_collection.arg_names), @@ -177,9 +177,9 @@ def _legacy_listen_examples( % { "since": since, "event_name": fn.__name__, - "has_kw_arguments": " **kw" - if dispatch_collection.has_kw - else "", + "has_kw_arguments": ( + " **kw" if dispatch_collection.has_kw else "" + ), "named_event_arguments": ", ".join(args), "sample_target": sample_target, } diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py index c048735e21a..773620f8bbc 100644 --- a/lib/sqlalchemy/event/registry.py +++ b/lib/sqlalchemy/event/registry.py @@ -66,9 +66,9 @@ class EventTarget: "weakref.ref[_ListenerFnType]", ] -_key_to_collection: Dict[ - _EventKeyTupleType, _RefCollectionToListenerType -] = collections.defaultdict(dict) +_key_to_collection: Dict[_EventKeyTupleType, _RefCollectionToListenerType] = ( + collections.defaultdict(dict) +) """ Given an original listen() argument, can locate all listener collections and the listener fn contained diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index c4025a2b8cb..7d7eff3606c 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -571,8 +571,7 @@ def instance( connection_invalidated: bool = False, dialect: Optional[Dialect] = None, ismulti: Optional[bool] = None, - ) -> StatementError: - ... + ) -> StatementError: ... @overload @classmethod @@ -586,8 +585,7 @@ def instance( connection_invalidated: bool = False, dialect: Optional[Dialect] = None, ismulti: Optional[bool] = None, - ) -> DontWrapMixin: - ... + ) -> DontWrapMixin: ... @overload @classmethod @@ -601,8 +599,7 @@ def instance( connection_invalidated: bool = False, dialect: Optional[Dialect] = None, ismulti: Optional[bool] = None, - ) -> BaseException: - ... + ) -> BaseException: ... @classmethod def instance( diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index 9c5a0e4bd40..86043ba7992 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -254,45 +254,39 @@ class AssociationProxyExtensionType(InspectionAttrExtensionType): class _GetterProtocol(Protocol[_T_co]): - def __call__(self, instance: Any) -> _T_co: - ... + def __call__(self, instance: Any) -> _T_co: ... # mypy 0.990 we are no longer allowed to make this Protocol[_T_con] -class _SetterProtocol(Protocol): - ... +class _SetterProtocol(Protocol): ... class _PlainSetterProtocol(_SetterProtocol, Protocol[_T_con]): - def __call__(self, instance: Any, value: _T_con) -> None: - ... + def __call__(self, instance: Any, value: _T_con) -> None: ... class _DictSetterProtocol(_SetterProtocol, Protocol[_T_con]): - def __call__(self, instance: Any, key: Any, value: _T_con) -> None: - ... + def __call__(self, instance: Any, key: Any, value: _T_con) -> None: ... # mypy 0.990 we are no longer allowed to make this Protocol[_T_con] -class _CreatorProtocol(Protocol): - ... +class _CreatorProtocol(Protocol): ... 
class _PlainCreatorProtocol(_CreatorProtocol, Protocol[_T_con]): - def __call__(self, value: _T_con) -> Any: - ... + def __call__(self, value: _T_con) -> Any: ... class _KeyCreatorProtocol(_CreatorProtocol, Protocol[_T_con]): - def __call__(self, key: Any, value: Optional[_T_con]) -> Any: - ... + def __call__(self, key: Any, value: Optional[_T_con]) -> Any: ... class _LazyCollectionProtocol(Protocol[_T]): def __call__( self, - ) -> Union[MutableSet[_T], MutableMapping[Any, _T], MutableSequence[_T]]: - ... + ) -> Union[ + MutableSet[_T], MutableMapping[Any, _T], MutableSequence[_T] + ]: ... class _GetSetFactoryProtocol(Protocol): @@ -300,8 +294,7 @@ def __call__( self, collection_class: Optional[Type[Any]], assoc_instance: AssociationProxyInstance[Any], - ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]: - ... + ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]: ... class _ProxyFactoryProtocol(Protocol): @@ -311,15 +304,13 @@ def __call__( creator: _CreatorProtocol, value_attr: str, parent: AssociationProxyInstance[Any], - ) -> Any: - ... + ) -> Any: ... class _ProxyBulkSetProtocol(Protocol): def __call__( self, proxy: _AssociationCollection[Any], collection: Iterable[Any] - ) -> None: - ... + ) -> None: ... class _AssociationProxyProtocol(Protocol[_T]): @@ -337,18 +328,15 @@ class _AssociationProxyProtocol(Protocol[_T]): proxy_bulk_set: Optional[_ProxyBulkSetProtocol] @util.ro_memoized_property - def info(self) -> _InfoType: - ... + def info(self) -> _InfoType: ... def for_class( self, class_: Type[Any], obj: Optional[object] = None - ) -> AssociationProxyInstance[_T]: - ... + ) -> AssociationProxyInstance[_T]: ... def _default_getset( self, collection_class: Any - ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]: - ... + ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]: ... class AssociationProxy( @@ -419,18 +407,17 @@ def __init__( self._attribute_options = _DEFAULT_ATTRIBUTE_OPTIONS @overload - def __get__(self, instance: Literal[None], owner: Literal[None]) -> Self: - ... + def __get__( + self, instance: Literal[None], owner: Literal[None] + ) -> Self: ... @overload def __get__( self, instance: Literal[None], owner: Any - ) -> AssociationProxyInstance[_T]: - ... + ) -> AssociationProxyInstance[_T]: ... @overload - def __get__(self, instance: object, owner: Any) -> _T: - ... + def __get__(self, instance: object, owner: Any) -> _T: ... def __get__( self, instance: object, owner: Any @@ -861,12 +848,10 @@ def info(self) -> _InfoType: return self.parent.info @overload - def get(self: _Self, obj: Literal[None]) -> _Self: - ... + def get(self: _Self, obj: Literal[None]) -> _Self: ... @overload - def get(self, obj: Any) -> _T: - ... + def get(self, obj: Any) -> _T: ... def get( self, obj: Any @@ -1432,12 +1417,10 @@ def _set(self, object_: Any, value: _T) -> None: self.setter(object_, value) @overload - def __getitem__(self, index: int) -> _T: - ... + def __getitem__(self, index: int) -> _T: ... @overload - def __getitem__(self, index: slice) -> MutableSequence[_T]: - ... + def __getitem__(self, index: slice) -> MutableSequence[_T]: ... def __getitem__( self, index: Union[int, slice] @@ -1448,12 +1431,10 @@ def __getitem__( return [self._get(member) for member in self.col[index]] @overload - def __setitem__(self, index: int, value: _T) -> None: - ... + def __setitem__(self, index: int, value: _T) -> None: ... @overload - def __setitem__(self, index: slice, value: Iterable[_T]) -> None: - ... + def __setitem__(self, index: slice, value: Iterable[_T]) -> None: ... 
def __setitem__( self, index: Union[int, slice], value: Union[_T, Iterable[_T]] @@ -1492,12 +1473,10 @@ def __setitem__( self._set(self.col[i], item) @overload - def __delitem__(self, index: int) -> None: - ... + def __delitem__(self, index: int) -> None: ... @overload - def __delitem__(self, index: slice) -> None: - ... + def __delitem__(self, index: slice) -> None: ... def __delitem__(self, index: Union[slice, int]) -> None: del self.col[index] @@ -1624,8 +1603,9 @@ def __imul__(self, n: SupportsIndex) -> Self: if typing.TYPE_CHECKING: # TODO: no idea how to do this without separate "stub" - def index(self, value: Any, start: int = ..., stop: int = ...) -> int: - ... + def index( + self, value: Any, start: int = ..., stop: int = ... + ) -> int: ... else: @@ -1701,12 +1681,10 @@ def __repr__(self) -> str: return repr(dict(self)) @overload - def get(self, __key: _KT) -> Optional[_VT]: - ... + def get(self, __key: _KT) -> Optional[_VT]: ... @overload - def get(self, __key: _KT, default: Union[_VT, _T]) -> Union[_VT, _T]: - ... + def get(self, __key: _KT, default: Union[_VT, _T]) -> Union[_VT, _T]: ... def get( self, key: _KT, default: Optional[Union[_VT, _T]] = None @@ -1738,12 +1716,12 @@ def values(self) -> ValuesView[_VT]: return ValuesView(self) @overload - def pop(self, __key: _KT) -> _VT: - ... + def pop(self, __key: _KT) -> _VT: ... @overload - def pop(self, __key: _KT, default: Union[_VT, _T] = ...) -> Union[_VT, _T]: - ... + def pop( + self, __key: _KT, default: Union[_VT, _T] = ... + ) -> Union[_VT, _T]: ... def pop(self, __key: _KT, *arg: Any, **kw: Any) -> Union[_VT, _T]: member = self.col.pop(__key, *arg, **kw) @@ -1756,16 +1734,15 @@ def popitem(self) -> Tuple[_KT, _VT]: @overload def update( self, __m: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT - ) -> None: - ... + ) -> None: ... @overload - def update(self, __m: Iterable[tuple[_KT, _VT]], **kwargs: _VT) -> None: - ... + def update( + self, __m: Iterable[tuple[_KT, _VT]], **kwargs: _VT + ) -> None: ... @overload - def update(self, **kwargs: _VT) -> None: - ... + def update(self, **kwargs: _VT) -> None: ... def update(self, *a: Any, **kw: Any) -> None: up: Dict[_KT, _VT] = {} diff --git a/lib/sqlalchemy/ext/asyncio/base.py b/lib/sqlalchemy/ext/asyncio/base.py index 330651b074f..9899364d1ff 100644 --- a/lib/sqlalchemy/ext/asyncio/base.py +++ b/lib/sqlalchemy/ext/asyncio/base.py @@ -44,12 +44,10 @@ class ReversibleProxy(Generic[_PT]): __slots__ = ("__weakref__",) @overload - def _assign_proxied(self, target: _PT) -> _PT: - ... + def _assign_proxied(self, target: _PT) -> _PT: ... @overload - def _assign_proxied(self, target: None) -> None: - ... + def _assign_proxied(self, target: None) -> None: ... def _assign_proxied(self, target: Optional[_PT]) -> Optional[_PT]: if target is not None: @@ -82,15 +80,13 @@ def _retrieve_proxy_for_target( cls, target: _PT, regenerate: Literal[True] = ..., - ) -> Self: - ... + ) -> Self: ... @overload @classmethod def _retrieve_proxy_for_target( cls, target: _PT, regenerate: bool = True - ) -> Optional[Self]: - ... + ) -> Optional[Self]: ... @classmethod def _retrieve_proxy_for_target( diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index b5c8c7d76f7..3c718fad3d5 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -415,12 +415,10 @@ async def execution_options( insertmanyvalues_page_size: int = ..., schema_translate_map: Optional[SchemaTranslateMapType] = ..., **opt: Any, - ) -> AsyncConnection: - ... 
+ ) -> AsyncConnection: ... @overload - async def execution_options(self, **opt: Any) -> AsyncConnection: - ... + async def execution_options(self, **opt: Any) -> AsyncConnection: ... async def execution_options(self, **opt: Any) -> AsyncConnection: r"""Set non-SQL options for the connection which take effect @@ -518,8 +516,7 @@ def stream( parameters: Optional[_CoreAnyExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> GeneratorStartableContext[AsyncResult[_T]]: - ... + ) -> GeneratorStartableContext[AsyncResult[_T]]: ... @overload def stream( @@ -528,8 +525,7 @@ def stream( parameters: Optional[_CoreAnyExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> GeneratorStartableContext[AsyncResult[Any]]: - ... + ) -> GeneratorStartableContext[AsyncResult[Any]]: ... @asyncstartablecontext async def stream( @@ -605,8 +601,7 @@ async def execute( parameters: Optional[_CoreAnyExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> CursorResult[_T]: - ... + ) -> CursorResult[_T]: ... @overload async def execute( @@ -615,8 +610,7 @@ async def execute( parameters: Optional[_CoreAnyExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> CursorResult[Any]: - ... + ) -> CursorResult[Any]: ... async def execute( self, @@ -672,8 +666,7 @@ async def scalar( parameters: Optional[_CoreSingleExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> Optional[_T]: - ... + ) -> Optional[_T]: ... @overload async def scalar( @@ -682,8 +675,7 @@ async def scalar( parameters: Optional[_CoreSingleExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> Any: - ... + ) -> Any: ... async def scalar( self, @@ -714,8 +706,7 @@ async def scalars( parameters: Optional[_CoreAnyExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> ScalarResult[_T]: - ... + ) -> ScalarResult[_T]: ... @overload async def scalars( @@ -724,8 +715,7 @@ async def scalars( parameters: Optional[_CoreAnyExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> ScalarResult[Any]: - ... + ) -> ScalarResult[Any]: ... async def scalars( self, @@ -757,8 +747,7 @@ def stream_scalars( parameters: Optional[_CoreSingleExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> GeneratorStartableContext[AsyncScalarResult[_T]]: - ... + ) -> GeneratorStartableContext[AsyncScalarResult[_T]]: ... @overload def stream_scalars( @@ -767,8 +756,7 @@ def stream_scalars( parameters: Optional[_CoreSingleExecuteParams] = None, *, execution_options: Optional[CoreExecuteOptionsParameter] = None, - ) -> GeneratorStartableContext[AsyncScalarResult[Any]]: - ... + ) -> GeneratorStartableContext[AsyncScalarResult[Any]]: ... @asyncstartablecontext async def stream_scalars( @@ -1105,12 +1093,10 @@ def execution_options( insertmanyvalues_page_size: int = ..., schema_translate_map: Optional[SchemaTranslateMapType] = ..., **opt: Any, - ) -> AsyncEngine: - ... + ) -> AsyncEngine: ... @overload - def execution_options(self, **opt: Any) -> AsyncEngine: - ... + def execution_options(self, **opt: Any) -> AsyncEngine: ... 
def execution_options(self, **opt: Any) -> AsyncEngine: """Return a new :class:`_asyncio.AsyncEngine` that will provide @@ -1423,15 +1409,13 @@ async def __aexit__(self, type_: Any, value: Any, traceback: Any) -> None: @overload -def _get_sync_engine_or_connection(async_engine: AsyncEngine) -> Engine: - ... +def _get_sync_engine_or_connection(async_engine: AsyncEngine) -> Engine: ... @overload def _get_sync_engine_or_connection( async_engine: AsyncConnection, -) -> Connection: - ... +) -> Connection: ... def _get_sync_engine_or_connection( diff --git a/lib/sqlalchemy/ext/asyncio/result.py b/lib/sqlalchemy/ext/asyncio/result.py index 2f664bcd623..7dcbe3280e7 100644 --- a/lib/sqlalchemy/ext/asyncio/result.py +++ b/lib/sqlalchemy/ext/asyncio/result.py @@ -324,12 +324,10 @@ async def one_or_none(self) -> Optional[Row[_TP]]: return await greenlet_spawn(self._only_one_row, True, False, False) @overload - async def scalar_one(self: AsyncResult[Tuple[_T]]) -> _T: - ... + async def scalar_one(self: AsyncResult[Tuple[_T]]) -> _T: ... @overload - async def scalar_one(self) -> Any: - ... + async def scalar_one(self) -> Any: ... async def scalar_one(self) -> Any: """Return exactly one scalar result or raise an exception. @@ -349,12 +347,10 @@ async def scalar_one(self) -> Any: @overload async def scalar_one_or_none( self: AsyncResult[Tuple[_T]], - ) -> Optional[_T]: - ... + ) -> Optional[_T]: ... @overload - async def scalar_one_or_none(self) -> Optional[Any]: - ... + async def scalar_one_or_none(self) -> Optional[Any]: ... async def scalar_one_or_none(self) -> Optional[Any]: """Return exactly one scalar result or ``None``. @@ -403,12 +399,10 @@ async def one(self) -> Row[_TP]: return await greenlet_spawn(self._only_one_row, True, True, False) @overload - async def scalar(self: AsyncResult[Tuple[_T]]) -> Optional[_T]: - ... + async def scalar(self: AsyncResult[Tuple[_T]]) -> Optional[_T]: ... @overload - async def scalar(self) -> Any: - ... + async def scalar(self) -> Any: ... async def scalar(self) -> Any: """Fetch the first column of the first row, and close the result set. @@ -452,16 +446,13 @@ async def freeze(self) -> FrozenResult[_TP]: @overload def scalars( self: AsyncResult[Tuple[_T]], index: Literal[0] - ) -> AsyncScalarResult[_T]: - ... + ) -> AsyncScalarResult[_T]: ... @overload - def scalars(self: AsyncResult[Tuple[_T]]) -> AsyncScalarResult[_T]: - ... + def scalars(self: AsyncResult[Tuple[_T]]) -> AsyncScalarResult[_T]: ... @overload - def scalars(self, index: _KeyIndexType = 0) -> AsyncScalarResult[Any]: - ... + def scalars(self, index: _KeyIndexType = 0) -> AsyncScalarResult[Any]: ... def scalars(self, index: _KeyIndexType = 0) -> AsyncScalarResult[Any]: """Return an :class:`_asyncio.AsyncScalarResult` filtering object which @@ -833,11 +824,9 @@ async def all(self) -> Sequence[_R]: # noqa: A001 """ ... - async def __aiter__(self) -> AsyncIterator[_R]: - ... + async def __aiter__(self) -> AsyncIterator[_R]: ... - async def __anext__(self) -> _R: - ... + async def __anext__(self) -> _R: ... async def first(self) -> Optional[_R]: """Fetch the first object or ``None`` if no object is present. @@ -871,12 +860,10 @@ async def one(self) -> _R: ... @overload - async def scalar_one(self: AsyncTupleResult[Tuple[_T]]) -> _T: - ... + async def scalar_one(self: AsyncTupleResult[Tuple[_T]]) -> _T: ... @overload - async def scalar_one(self) -> Any: - ... + async def scalar_one(self) -> Any: ... async def scalar_one(self) -> Any: """Return exactly one scalar result or raise an exception. 
@@ -896,12 +883,10 @@ async def scalar_one(self) -> Any: @overload async def scalar_one_or_none( self: AsyncTupleResult[Tuple[_T]], - ) -> Optional[_T]: - ... + ) -> Optional[_T]: ... @overload - async def scalar_one_or_none(self) -> Optional[Any]: - ... + async def scalar_one_or_none(self) -> Optional[Any]: ... async def scalar_one_or_none(self) -> Optional[Any]: """Return exactly one or no scalar result. @@ -919,12 +904,12 @@ async def scalar_one_or_none(self) -> Optional[Any]: ... @overload - async def scalar(self: AsyncTupleResult[Tuple[_T]]) -> Optional[_T]: - ... + async def scalar( + self: AsyncTupleResult[Tuple[_T]], + ) -> Optional[_T]: ... @overload - async def scalar(self) -> Any: - ... + async def scalar(self) -> Any: ... async def scalar(self) -> Any: """Fetch the first column of the first row, and close the result diff --git a/lib/sqlalchemy/ext/asyncio/scoping.py b/lib/sqlalchemy/ext/asyncio/scoping.py index a5127b86613..e879a1654e9 100644 --- a/lib/sqlalchemy/ext/asyncio/scoping.py +++ b/lib/sqlalchemy/ext/asyncio/scoping.py @@ -536,8 +536,7 @@ async def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> Result[_T]: - ... + ) -> Result[_T]: ... @overload async def execute( @@ -549,8 +548,7 @@ async def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> CursorResult[Any]: - ... + ) -> CursorResult[Any]: ... @overload async def execute( @@ -562,8 +560,7 @@ async def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> Result[Any]: - ... + ) -> Result[Any]: ... async def execute( self, @@ -1015,8 +1012,7 @@ async def scalar( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> Optional[_T]: - ... + ) -> Optional[_T]: ... @overload async def scalar( @@ -1027,8 +1023,7 @@ async def scalar( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> Any: - ... + ) -> Any: ... async def scalar( self, @@ -1070,8 +1065,7 @@ async def scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> ScalarResult[_T]: - ... + ) -> ScalarResult[_T]: ... @overload async def scalars( @@ -1082,8 +1076,7 @@ async def scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> ScalarResult[Any]: - ... + ) -> ScalarResult[Any]: ... async def scalars( self, @@ -1213,8 +1206,7 @@ async def stream( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> AsyncResult[_T]: - ... + ) -> AsyncResult[_T]: ... @overload async def stream( @@ -1225,8 +1217,7 @@ async def stream( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> AsyncResult[Any]: - ... + ) -> AsyncResult[Any]: ... async def stream( self, @@ -1265,8 +1256,7 @@ async def stream_scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> AsyncScalarResult[_T]: - ... + ) -> AsyncScalarResult[_T]: ... 
@overload async def stream_scalars( @@ -1277,8 +1267,7 @@ async def stream_scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> AsyncScalarResult[Any]: - ... + ) -> AsyncScalarResult[Any]: ... async def stream_scalars( self, diff --git a/lib/sqlalchemy/ext/asyncio/session.py b/lib/sqlalchemy/ext/asyncio/session.py index cdca94a9abb..a9ea55e4966 100644 --- a/lib/sqlalchemy/ext/asyncio/session.py +++ b/lib/sqlalchemy/ext/asyncio/session.py @@ -398,8 +398,7 @@ async def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> Result[_T]: - ... + ) -> Result[_T]: ... @overload async def execute( @@ -411,8 +410,7 @@ async def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> CursorResult[Any]: - ... + ) -> CursorResult[Any]: ... @overload async def execute( @@ -424,8 +422,7 @@ async def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> Result[Any]: - ... + ) -> Result[Any]: ... async def execute( self, @@ -471,8 +468,7 @@ async def scalar( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> Optional[_T]: - ... + ) -> Optional[_T]: ... @overload async def scalar( @@ -483,8 +479,7 @@ async def scalar( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> Any: - ... + ) -> Any: ... async def scalar( self, @@ -528,8 +523,7 @@ async def scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> ScalarResult[_T]: - ... + ) -> ScalarResult[_T]: ... @overload async def scalars( @@ -540,8 +534,7 @@ async def scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> ScalarResult[Any]: - ... + ) -> ScalarResult[Any]: ... async def scalars( self, @@ -655,8 +648,7 @@ async def stream( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> AsyncResult[_T]: - ... + ) -> AsyncResult[_T]: ... @overload async def stream( @@ -667,8 +659,7 @@ async def stream( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> AsyncResult[Any]: - ... + ) -> AsyncResult[Any]: ... async def stream( self, @@ -710,8 +701,7 @@ async def stream_scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> AsyncScalarResult[_T]: - ... + ) -> AsyncScalarResult[_T]: ... @overload async def stream_scalars( @@ -722,8 +712,7 @@ async def stream_scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> AsyncScalarResult[Any]: - ... + ) -> AsyncScalarResult[Any]: ... async def stream_scalars( self, @@ -1686,8 +1675,7 @@ def __init__( expire_on_commit: bool = ..., info: Optional[_InfoType] = ..., **kw: Any, - ): - ... + ): ... 
@overload def __init__( @@ -1698,8 +1686,7 @@ def __init__( expire_on_commit: bool = ..., info: Optional[_InfoType] = ..., **kw: Any, - ): - ... + ): ... def __init__( self, diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py index 9247c730e7f..7d1f92534a3 100644 --- a/lib/sqlalchemy/ext/automap.py +++ b/lib/sqlalchemy/ext/automap.py @@ -715,8 +715,9 @@ def column_reflect(inspector, table, column_info): class PythonNameForTableType(Protocol): - def __call__(self, base: Type[Any], tablename: str, table: Table) -> str: - ... + def __call__( + self, base: Type[Any], tablename: str, table: Table + ) -> str: ... def classname_for_table( @@ -763,8 +764,7 @@ def __call__( local_cls: Type[Any], referred_cls: Type[Any], constraint: ForeignKeyConstraint, - ) -> str: - ... + ) -> str: ... def name_for_scalar_relationship( @@ -804,8 +804,7 @@ def __call__( local_cls: Type[Any], referred_cls: Type[Any], constraint: ForeignKeyConstraint, - ) -> str: - ... + ) -> str: ... def name_for_collection_relationship( @@ -850,8 +849,7 @@ def __call__( local_cls: Type[Any], referred_cls: Type[Any], **kw: Any, - ) -> Relationship[Any]: - ... + ) -> Relationship[Any]: ... @overload def __call__( @@ -863,8 +861,7 @@ def __call__( local_cls: Type[Any], referred_cls: Type[Any], **kw: Any, - ) -> ORMBackrefArgument: - ... + ) -> ORMBackrefArgument: ... def __call__( self, @@ -877,8 +874,7 @@ def __call__( local_cls: Type[Any], referred_cls: Type[Any], **kw: Any, - ) -> Union[ORMBackrefArgument, Relationship[Any]]: - ... + ) -> Union[ORMBackrefArgument, Relationship[Any]]: ... @overload @@ -890,8 +886,7 @@ def generate_relationship( local_cls: Type[Any], referred_cls: Type[Any], **kw: Any, -) -> Relationship[Any]: - ... +) -> Relationship[Any]: ... @overload @@ -903,8 +898,7 @@ def generate_relationship( local_cls: Type[Any], referred_cls: Type[Any], **kw: Any, -) -> ORMBackrefArgument: - ... +) -> ORMBackrefArgument: ... def generate_relationship( diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py index 4fff1bf8970..d8ee819f268 100644 --- a/lib/sqlalchemy/ext/horizontal_shard.py +++ b/lib/sqlalchemy/ext/horizontal_shard.py @@ -83,8 +83,7 @@ def __call__( mapper: Optional[Mapper[_T]], instance: Any, clause: Optional[ClauseElement], - ) -> Any: - ... + ) -> Any: ... class IdentityChooser(Protocol): @@ -97,8 +96,7 @@ def __call__( execution_options: OrmExecuteOptionsParameter, bind_arguments: _BindArguments, **kw: Any, - ) -> Any: - ... + ) -> Any: ... class ShardedQuery(Query[_T]): diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index 5a35c0a27e6..25b74d8d6e3 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -904,13 +904,11 @@ class HybridExtensionType(InspectionAttrExtensionType): class _HybridGetterType(Protocol[_T_co]): - def __call__(s, self: Any) -> _T_co: - ... + def __call__(s, self: Any) -> _T_co: ... class _HybridSetterType(Protocol[_T_con]): - def __call__(s, self: Any, value: _T_con) -> None: - ... + def __call__(s, self: Any, value: _T_con) -> None: ... class _HybridUpdaterType(Protocol[_T_con]): @@ -918,25 +916,21 @@ def __call__( s, cls: Any, value: Union[_T_con, _ColumnExpressionArgument[_T_con]], - ) -> List[Tuple[_DMLColumnArgument, Any]]: - ... + ) -> List[Tuple[_DMLColumnArgument, Any]]: ... class _HybridDeleterType(Protocol[_T_co]): - def __call__(s, self: Any) -> None: - ... + def __call__(s, self: Any) -> None: ... 
class _HybridExprCallableType(Protocol[_T_co]): def __call__( s, cls: Any - ) -> Union[_HasClauseElement[_T_co], SQLColumnExpression[_T_co]]: - ... + ) -> Union[_HasClauseElement[_T_co], SQLColumnExpression[_T_co]]: ... class _HybridComparatorCallableType(Protocol[_T]): - def __call__(self, cls: Any) -> Comparator[_T]: - ... + def __call__(self, cls: Any) -> Comparator[_T]: ... class _HybridClassLevelAccessor(QueryableAttribute[_T]): @@ -947,23 +941,24 @@ class _HybridClassLevelAccessor(QueryableAttribute[_T]): if TYPE_CHECKING: - def getter(self, fget: _HybridGetterType[_T]) -> hybrid_property[_T]: - ... + def getter( + self, fget: _HybridGetterType[_T] + ) -> hybrid_property[_T]: ... - def setter(self, fset: _HybridSetterType[_T]) -> hybrid_property[_T]: - ... + def setter( + self, fset: _HybridSetterType[_T] + ) -> hybrid_property[_T]: ... - def deleter(self, fdel: _HybridDeleterType[_T]) -> hybrid_property[_T]: - ... + def deleter( + self, fdel: _HybridDeleterType[_T] + ) -> hybrid_property[_T]: ... @property - def overrides(self) -> hybrid_property[_T]: - ... + def overrides(self) -> hybrid_property[_T]: ... def update_expression( self, meth: _HybridUpdaterType[_T] - ) -> hybrid_property[_T]: - ... + ) -> hybrid_property[_T]: ... class hybrid_method(interfaces.InspectionAttrInfo, Generic[_P, _R]): @@ -1025,14 +1020,12 @@ def inplace(self) -> Self: @overload def __get__( self, instance: Literal[None], owner: Type[object] - ) -> Callable[_P, SQLCoreOperations[_R]]: - ... + ) -> Callable[_P, SQLCoreOperations[_R]]: ... @overload def __get__( self, instance: object, owner: Type[object] - ) -> Callable[_P, _R]: - ... + ) -> Callable[_P, _R]: ... def __get__( self, instance: Optional[object], owner: Type[object] @@ -1106,18 +1099,15 @@ def value(self, value): util.update_wrapper(self, fget) @overload - def __get__(self, instance: Any, owner: Literal[None]) -> Self: - ... + def __get__(self, instance: Any, owner: Literal[None]) -> Self: ... @overload def __get__( self, instance: Literal[None], owner: Type[object] - ) -> _HybridClassLevelAccessor[_T]: - ... + ) -> _HybridClassLevelAccessor[_T]: ... @overload - def __get__(self, instance: object, owner: Type[object]) -> _T: - ... + def __get__(self, instance: object, owner: Type[object]) -> _T: ... 
def __get__( self, instance: Optional[object], owner: Optional[Type[object]] diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py index e84dde26877..5f3c71282b7 100644 --- a/lib/sqlalchemy/ext/instrumentation.py +++ b/lib/sqlalchemy/ext/instrumentation.py @@ -214,9 +214,9 @@ def dict_of(self, instance): )(instance) -orm_instrumentation._instrumentation_factory = ( - _instrumentation_factory -) = ExtendedInstrumentationRegistry() +orm_instrumentation._instrumentation_factory = _instrumentation_factory = ( + ExtendedInstrumentationRegistry() +) orm_instrumentation.instrumentation_finders = instrumentation_finders @@ -436,17 +436,15 @@ def _install_lookups(lookups): instance_dict = lookups["instance_dict"] manager_of_class = lookups["manager_of_class"] opt_manager_of_class = lookups["opt_manager_of_class"] - orm_base.instance_state = ( - attributes.instance_state - ) = orm_instrumentation.instance_state = instance_state - orm_base.instance_dict = ( - attributes.instance_dict - ) = orm_instrumentation.instance_dict = instance_dict - orm_base.manager_of_class = ( - attributes.manager_of_class - ) = orm_instrumentation.manager_of_class = manager_of_class - orm_base.opt_manager_of_class = ( - orm_util.opt_manager_of_class - ) = ( + orm_base.instance_state = attributes.instance_state = ( + orm_instrumentation.instance_state + ) = instance_state + orm_base.instance_dict = attributes.instance_dict = ( + orm_instrumentation.instance_dict + ) = instance_dict + orm_base.manager_of_class = attributes.manager_of_class = ( + orm_instrumentation.manager_of_class + ) = manager_of_class + orm_base.opt_manager_of_class = orm_util.opt_manager_of_class = ( attributes.opt_manager_of_class ) = orm_instrumentation.opt_manager_of_class = opt_manager_of_class diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index 0dc65c007b3..7da5075a177 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -800,15 +800,12 @@ def __setitem__(self, key: _KT, value: _VT) -> None: @overload def setdefault( self: MutableDict[_KT, Optional[_T]], key: _KT, value: None = None - ) -> Optional[_T]: - ... + ) -> Optional[_T]: ... @overload - def setdefault(self, key: _KT, value: _VT) -> _VT: - ... + def setdefault(self, key: _KT, value: _VT) -> _VT: ... - def setdefault(self, key: _KT, value: object = None) -> object: - ... + def setdefault(self, key: _KT, value: object = None) -> object: ... else: @@ -829,17 +826,14 @@ def update(self, *a: Any, **kw: _VT) -> None: if TYPE_CHECKING: @overload - def pop(self, __key: _KT) -> _VT: - ... + def pop(self, __key: _KT) -> _VT: ... @overload - def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: - ... + def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... def pop( self, __key: _KT, __default: _VT | _T | None = None - ) -> _VT | _T: - ... + ) -> _VT | _T: ... 
else: diff --git a/lib/sqlalchemy/ext/mypy/apply.py b/lib/sqlalchemy/ext/mypy/apply.py index 4185d29b948..eb9019453d5 100644 --- a/lib/sqlalchemy/ext/mypy/apply.py +++ b/lib/sqlalchemy/ext/mypy/apply.py @@ -161,9 +161,9 @@ def re_apply_declarative_assignments( # update the SQLAlchemyAttribute with the better # information - mapped_attr_lookup[ - stmt.lvalues[0].name - ].type = python_type_for_type + mapped_attr_lookup[stmt.lvalues[0].name].type = ( + python_type_for_type + ) update_cls_metadata = True @@ -223,9 +223,11 @@ class User(Base): lvalue.is_inferred_def = False left_node.type = api.named_type( NAMED_TYPE_SQLA_MAPPED, - [AnyType(TypeOfAny.special_form)] - if python_type_for_type is None - else [python_type_for_type], + ( + [AnyType(TypeOfAny.special_form)] + if python_type_for_type is None + else [python_type_for_type] + ), ) # so to have it skip the right side totally, we can do this: diff --git a/lib/sqlalchemy/ext/mypy/decl_class.py b/lib/sqlalchemy/ext/mypy/decl_class.py index d7dff91cbd8..3d578b346e9 100644 --- a/lib/sqlalchemy/ext/mypy/decl_class.py +++ b/lib/sqlalchemy/ext/mypy/decl_class.py @@ -58,9 +58,9 @@ def scan_declarative_assignments_and_apply_types( elif cls.fullname.startswith("builtins"): return None - mapped_attributes: Optional[ - List[util.SQLAlchemyAttribute] - ] = util.get_mapped_attributes(info, api) + mapped_attributes: Optional[List[util.SQLAlchemyAttribute]] = ( + util.get_mapped_attributes(info, api) + ) # used by assign.add_additional_orm_attributes among others util.establish_as_sqlalchemy(info) diff --git a/lib/sqlalchemy/ext/mypy/util.py b/lib/sqlalchemy/ext/mypy/util.py index 10cdb56b050..7f04c481d34 100644 --- a/lib/sqlalchemy/ext/mypy/util.py +++ b/lib/sqlalchemy/ext/mypy/util.py @@ -212,8 +212,7 @@ def add_global( @overload def get_callexpr_kwarg( callexpr: CallExpr, name: str, *, expr_types: None = ... -) -> Optional[Union[CallExpr, NameExpr]]: - ... +) -> Optional[Union[CallExpr, NameExpr]]: ... @overload @@ -222,8 +221,7 @@ def get_callexpr_kwarg( name: str, *, expr_types: Tuple[TypingType[_TArgType], ...], -) -> Optional[_TArgType]: - ... +) -> Optional[_TArgType]: ... def get_callexpr_kwarg( @@ -315,9 +313,11 @@ def unbound_to_instance( return Instance( bound_type, [ - unbound_to_instance(api, arg) - if isinstance(arg, UnboundType) - else arg + ( + unbound_to_instance(api, arg) + if isinstance(arg, UnboundType) + else arg + ) for arg in typ.args ], ) diff --git a/lib/sqlalchemy/inspection.py b/lib/sqlalchemy/inspection.py index 7123d0608db..4ee48f38517 100644 --- a/lib/sqlalchemy/inspection.py +++ b/lib/sqlalchemy/inspection.py @@ -74,8 +74,7 @@ class _InspectableTypeProtocol(Protocol[_TCov]): """ - def _sa_inspect_type(self) -> _TCov: - ... + def _sa_inspect_type(self) -> _TCov: ... class _InspectableProtocol(Protocol[_TCov]): @@ -84,35 +83,31 @@ class _InspectableProtocol(Protocol[_TCov]): """ - def _sa_inspect_instance(self) -> _TCov: - ... + def _sa_inspect_instance(self) -> _TCov: ... @overload def inspect( subject: Type[_InspectableTypeProtocol[_IN]], raiseerr: bool = True -) -> _IN: - ... +) -> _IN: ... @overload -def inspect(subject: _InspectableProtocol[_IN], raiseerr: bool = True) -> _IN: - ... +def inspect( + subject: _InspectableProtocol[_IN], raiseerr: bool = True +) -> _IN: ... @overload -def inspect(subject: Inspectable[_IN], raiseerr: bool = True) -> _IN: - ... +def inspect(subject: Inspectable[_IN], raiseerr: bool = True) -> _IN: ... @overload -def inspect(subject: Any, raiseerr: Literal[False] = ...) -> Optional[Any]: - ... 
+def inspect(subject: Any, raiseerr: Literal[False] = ...) -> Optional[Any]: ... @overload -def inspect(subject: Any, raiseerr: bool = True) -> Any: - ... +def inspect(subject: Any, raiseerr: bool = True) -> Any: ... def inspect(subject: Any, raiseerr: bool = True) -> Any: diff --git a/lib/sqlalchemy/log.py b/lib/sqlalchemy/log.py index 9046e33b75f..e6922b81af9 100644 --- a/lib/sqlalchemy/log.py +++ b/lib/sqlalchemy/log.py @@ -269,14 +269,12 @@ class echo_property: @overload def __get__( self, instance: Literal[None], owner: Type[Identified] - ) -> echo_property: - ... + ) -> echo_property: ... @overload def __get__( self, instance: Identified, owner: Type[Identified] - ) -> _EchoFlagType: - ... + ) -> _EchoFlagType: ... def __get__( self, instance: Optional[Identified], owner: Type[Identified] diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index a541c1fdf1e..f74de91c1d9 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -385,9 +385,9 @@ def orm_insert_sentinel( return mapped_column( name=name, - default=default - if default is not None - else _InsertSentinelColumnDefault(), + default=( + default if default is not None else _InsertSentinelColumnDefault() + ), _omit_from_statements=omit_from_statements, insert_sentinel=True, use_existing_column=True, @@ -559,8 +559,7 @@ def composite( info: Optional[_InfoType] = None, doc: Optional[str] = None, **__kw: Any, -) -> Composite[Any]: - ... +) -> Composite[Any]: ... @overload @@ -581,8 +580,7 @@ def composite( info: Optional[_InfoType] = None, doc: Optional[str] = None, **__kw: Any, -) -> Composite[_CC]: - ... +) -> Composite[_CC]: ... @overload @@ -603,8 +601,7 @@ def composite( info: Optional[_InfoType] = None, doc: Optional[str] = None, **__kw: Any, -) -> Composite[_CC]: - ... +) -> Composite[_CC]: ... def composite( @@ -2177,8 +2174,7 @@ def aliased( name: Optional[str] = None, flat: bool = False, adapt_on_names: bool = False, -) -> AliasedType[_O]: - ... +) -> AliasedType[_O]: ... @overload @@ -2188,8 +2184,7 @@ def aliased( name: Optional[str] = None, flat: bool = False, adapt_on_names: bool = False, -) -> AliasedClass[_O]: - ... +) -> AliasedClass[_O]: ... @overload @@ -2199,8 +2194,7 @@ def aliased( name: Optional[str] = None, flat: bool = False, adapt_on_names: bool = False, -) -> FromClause: - ... +) -> FromClause: ... def aliased( diff --git a/lib/sqlalchemy/orm/_typing.py b/lib/sqlalchemy/orm/_typing.py index fa6dd7c3c3a..f8ac0590a62 100644 --- a/lib/sqlalchemy/orm/_typing.py +++ b/lib/sqlalchemy/orm/_typing.py @@ -108,13 +108,13 @@ class _ORMAdapterProto(Protocol): """ - def __call__(self, obj: _CE, key: Optional[str] = None) -> _CE: - ... + def __call__(self, obj: _CE, key: Optional[str] = None) -> _CE: ... class _LoaderCallable(Protocol): - def __call__(self, state: InstanceState[Any], passive: PassiveFlag) -> Any: - ... + def __call__( + self, state: InstanceState[Any], passive: PassiveFlag + ) -> Any: ... def is_orm_option( @@ -138,39 +138,33 @@ def is_composite_class(obj: Any) -> bool: if TYPE_CHECKING: - def insp_is_mapper_property(obj: Any) -> TypeGuard[MapperProperty[Any]]: - ... + def insp_is_mapper_property( + obj: Any, + ) -> TypeGuard[MapperProperty[Any]]: ... - def insp_is_mapper(obj: Any) -> TypeGuard[Mapper[Any]]: - ... + def insp_is_mapper(obj: Any) -> TypeGuard[Mapper[Any]]: ... - def insp_is_aliased_class(obj: Any) -> TypeGuard[AliasedInsp[Any]]: - ... 
+ def insp_is_aliased_class(obj: Any) -> TypeGuard[AliasedInsp[Any]]: ... def insp_is_attribute( obj: InspectionAttr, - ) -> TypeGuard[QueryableAttribute[Any]]: - ... + ) -> TypeGuard[QueryableAttribute[Any]]: ... def attr_is_internal_proxy( obj: InspectionAttr, - ) -> TypeGuard[QueryableAttribute[Any]]: - ... + ) -> TypeGuard[QueryableAttribute[Any]]: ... def prop_is_relationship( prop: MapperProperty[Any], - ) -> TypeGuard[RelationshipProperty[Any]]: - ... + ) -> TypeGuard[RelationshipProperty[Any]]: ... def is_collection_impl( impl: AttributeImpl, - ) -> TypeGuard[CollectionAttributeImpl]: - ... + ) -> TypeGuard[CollectionAttributeImpl]: ... def is_has_collection_adapter( impl: AttributeImpl, - ) -> TypeGuard[HasCollectionAdapter]: - ... + ) -> TypeGuard[HasCollectionAdapter]: ... else: insp_is_mapper_property = operator.attrgetter("is_property") diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index dc9743b8b3d..d9b2d8213d1 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -542,12 +542,12 @@ def __delete__(self, instance: object) -> None: self.impl.delete(instance_state(instance), instance_dict(instance)) @overload - def __get__(self, instance: None, owner: Any) -> InstrumentedAttribute[_T]: - ... + def __get__( + self, instance: None, owner: Any + ) -> InstrumentedAttribute[_T]: ... @overload - def __get__(self, instance: object, owner: Any) -> _T: - ... + def __get__(self, instance: object, owner: Any) -> _T: ... def __get__( self, instance: Optional[object], owner: Any @@ -1538,8 +1538,7 @@ def get_collection( dict_: _InstanceDict, user_data: Literal[None] = ..., passive: Literal[PassiveFlag.PASSIVE_OFF] = ..., - ) -> CollectionAdapter: - ... + ) -> CollectionAdapter: ... @overload def get_collection( @@ -1548,8 +1547,7 @@ def get_collection( dict_: _InstanceDict, user_data: _AdaptedCollectionProtocol = ..., passive: PassiveFlag = ..., - ) -> CollectionAdapter: - ... + ) -> CollectionAdapter: ... @overload def get_collection( @@ -1560,8 +1558,7 @@ def get_collection( passive: PassiveFlag = ..., ) -> Union[ Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter - ]: - ... + ]: ... def get_collection( self, @@ -1592,8 +1589,7 @@ def set( def _is_collection_attribute_impl( impl: AttributeImpl, - ) -> TypeGuard[CollectionAttributeImpl]: - ... + ) -> TypeGuard[CollectionAttributeImpl]: ... else: _is_collection_attribute_impl = operator.attrgetter("collection") @@ -2049,8 +2045,7 @@ def get_collection( dict_: _InstanceDict, user_data: Literal[None] = ..., passive: Literal[PassiveFlag.PASSIVE_OFF] = ..., - ) -> CollectionAdapter: - ... + ) -> CollectionAdapter: ... @overload def get_collection( @@ -2059,8 +2054,7 @@ def get_collection( dict_: _InstanceDict, user_data: _AdaptedCollectionProtocol = ..., passive: PassiveFlag = ..., - ) -> CollectionAdapter: - ... + ) -> CollectionAdapter: ... @overload def get_collection( @@ -2071,8 +2065,7 @@ def get_collection( passive: PassiveFlag = PASSIVE_OFF, ) -> Union[ Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter - ]: - ... + ]: ... def get_collection( self, diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py index 50f6703b5ed..86af81cd6ef 100644 --- a/lib/sqlalchemy/orm/base.py +++ b/lib/sqlalchemy/orm/base.py @@ -308,29 +308,23 @@ def generate(fn: _F, self: _Self, *args: Any, **kw: Any) -> _Self: if TYPE_CHECKING: - def manager_of_class(cls: Type[_O]) -> ClassManager[_O]: - ... 
+ def manager_of_class(cls: Type[_O]) -> ClassManager[_O]: ... @overload - def opt_manager_of_class(cls: AliasedClass[Any]) -> None: - ... + def opt_manager_of_class(cls: AliasedClass[Any]) -> None: ... @overload def opt_manager_of_class( cls: _ExternalEntityType[_O], - ) -> Optional[ClassManager[_O]]: - ... + ) -> Optional[ClassManager[_O]]: ... def opt_manager_of_class( cls: _ExternalEntityType[_O], - ) -> Optional[ClassManager[_O]]: - ... + ) -> Optional[ClassManager[_O]]: ... - def instance_state(instance: _O) -> InstanceState[_O]: - ... + def instance_state(instance: _O) -> InstanceState[_O]: ... - def instance_dict(instance: object) -> Dict[str, Any]: - ... + def instance_dict(instance: object) -> Dict[str, Any]: ... else: # these can be replaced by sqlalchemy.ext.instrumentation @@ -512,8 +506,7 @@ def _entity_descriptor(entity: _EntityType[Any], key: str) -> Any: if TYPE_CHECKING: - def _state_mapper(state: InstanceState[_O]) -> Mapper[_O]: - ... + def _state_mapper(state: InstanceState[_O]) -> Mapper[_O]: ... else: _state_mapper = util.dottedgetter("manager.mapper") @@ -684,27 +677,25 @@ class SQLORMOperations(SQLCoreOperations[_T_co], TypingOnly): if typing.TYPE_CHECKING: - def of_type(self, class_: _EntityType[Any]) -> PropComparator[_T_co]: - ... + def of_type( + self, class_: _EntityType[Any] + ) -> PropComparator[_T_co]: ... def and_( self, *criteria: _ColumnExpressionArgument[bool] - ) -> PropComparator[bool]: - ... + ) -> PropComparator[bool]: ... def any( # noqa: A001 self, criterion: Optional[_ColumnExpressionArgument[bool]] = None, **kwargs: Any, - ) -> ColumnElement[bool]: - ... + ) -> ColumnElement[bool]: ... def has( self, criterion: Optional[_ColumnExpressionArgument[bool]] = None, **kwargs: Any, - ) -> ColumnElement[bool]: - ... + ) -> ColumnElement[bool]: ... class ORMDescriptor(Generic[_T_co], TypingOnly): @@ -718,23 +709,19 @@ class ORMDescriptor(Generic[_T_co], TypingOnly): @overload def __get__( self, instance: Any, owner: Literal[None] - ) -> ORMDescriptor[_T_co]: - ... + ) -> ORMDescriptor[_T_co]: ... @overload def __get__( self, instance: Literal[None], owner: Any - ) -> SQLCoreOperations[_T_co]: - ... + ) -> SQLCoreOperations[_T_co]: ... @overload - def __get__(self, instance: object, owner: Any) -> _T_co: - ... + def __get__(self, instance: object, owner: Any) -> _T_co: ... def __get__( self, instance: object, owner: Any - ) -> Union[ORMDescriptor[_T_co], SQLCoreOperations[_T_co], _T_co]: - ... + ) -> Union[ORMDescriptor[_T_co], SQLCoreOperations[_T_co], _T_co]: ... class _MappedAnnotationBase(Generic[_T_co], TypingOnly): @@ -820,29 +807,23 @@ class Mapped( @overload def __get__( self, instance: None, owner: Any - ) -> InstrumentedAttribute[_T_co]: - ... + ) -> InstrumentedAttribute[_T_co]: ... @overload - def __get__(self, instance: object, owner: Any) -> _T_co: - ... + def __get__(self, instance: object, owner: Any) -> _T_co: ... def __get__( self, instance: Optional[object], owner: Any - ) -> Union[InstrumentedAttribute[_T_co], _T_co]: - ... + ) -> Union[InstrumentedAttribute[_T_co], _T_co]: ... @classmethod - def _empty_constructor(cls, arg1: Any) -> Mapped[_T_co]: - ... + def _empty_constructor(cls, arg1: Any) -> Mapped[_T_co]: ... def __set__( self, instance: Any, value: Union[SQLCoreOperations[_T_co], _T_co] - ) -> None: - ... + ) -> None: ... - def __delete__(self, instance: Any) -> None: - ... + def __delete__(self, instance: Any) -> None: ... 
class _MappedAttribute(Generic[_T_co], TypingOnly): @@ -919,24 +900,20 @@ class User(Base): @overload def __get__( self, instance: None, owner: Any - ) -> InstrumentedAttribute[_T_co]: - ... + ) -> InstrumentedAttribute[_T_co]: ... @overload def __get__( self, instance: object, owner: Any - ) -> AppenderQuery[_T_co]: - ... + ) -> AppenderQuery[_T_co]: ... def __get__( self, instance: Optional[object], owner: Any - ) -> Union[InstrumentedAttribute[_T_co], AppenderQuery[_T_co]]: - ... + ) -> Union[InstrumentedAttribute[_T_co], AppenderQuery[_T_co]]: ... def __set__( self, instance: Any, value: typing.Collection[_T_co] - ) -> None: - ... + ) -> None: ... class WriteOnlyMapped(_MappedAnnotationBase[_T_co]): @@ -975,21 +952,19 @@ class User(Base): @overload def __get__( self, instance: None, owner: Any - ) -> InstrumentedAttribute[_T_co]: - ... + ) -> InstrumentedAttribute[_T_co]: ... @overload def __get__( self, instance: object, owner: Any - ) -> WriteOnlyCollection[_T_co]: - ... + ) -> WriteOnlyCollection[_T_co]: ... def __get__( self, instance: Optional[object], owner: Any - ) -> Union[InstrumentedAttribute[_T_co], WriteOnlyCollection[_T_co]]: - ... + ) -> Union[ + InstrumentedAttribute[_T_co], WriteOnlyCollection[_T_co] + ]: ... def __set__( self, instance: Any, value: typing.Collection[_T_co] - ) -> None: - ... + ) -> None: ... diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py index 3f558d2d405..5d2558d9530 100644 --- a/lib/sqlalchemy/orm/bulk_persistence.py +++ b/lib/sqlalchemy/orm/bulk_persistence.py @@ -81,8 +81,7 @@ def _bulk_insert( render_nulls: bool, use_orm_insert_stmt: Literal[None] = ..., execution_options: Optional[OrmExecuteOptionsParameter] = ..., -) -> None: - ... +) -> None: ... @overload @@ -95,8 +94,7 @@ def _bulk_insert( render_nulls: bool, use_orm_insert_stmt: Optional[dml.Insert] = ..., execution_options: Optional[OrmExecuteOptionsParameter] = ..., -) -> cursor.CursorResult[Any]: - ... +) -> cursor.CursorResult[Any]: ... def _bulk_insert( @@ -236,8 +234,7 @@ def _bulk_update( update_changed_only: bool, use_orm_update_stmt: Literal[None] = ..., enable_check_rowcount: bool = True, -) -> None: - ... +) -> None: ... @overload @@ -249,8 +246,7 @@ def _bulk_update( update_changed_only: bool, use_orm_update_stmt: Optional[dml.Update] = ..., enable_check_rowcount: bool = True, -) -> _result.Result[Any]: - ... +) -> _result.Result[Any]: ... 
def _bulk_update( @@ -377,14 +373,16 @@ def _get_orm_crud_kv_pairs( if desc is NO_VALUE: yield ( coercions.expect(roles.DMLColumnRole, k), - coercions.expect( - roles.ExpressionElementRole, - v, - type_=sqltypes.NullType(), - is_crud=True, - ) - if needs_to_be_cacheable - else v, + ( + coercions.expect( + roles.ExpressionElementRole, + v, + type_=sqltypes.NullType(), + is_crud=True, + ) + if needs_to_be_cacheable + else v + ), ) else: yield from core_get_crud_kv_pairs( @@ -405,13 +403,15 @@ def _get_orm_crud_kv_pairs( else: yield ( k, - v - if not needs_to_be_cacheable - else coercions.expect( - roles.ExpressionElementRole, - v, - type_=sqltypes.NullType(), - is_crud=True, + ( + v + if not needs_to_be_cacheable + else coercions.expect( + roles.ExpressionElementRole, + v, + type_=sqltypes.NullType(), + is_crud=True, + ) ), ) @@ -528,9 +528,9 @@ def _setup_orm_returning( fs = fs.execution_options(**orm_level_statement._execution_options) fs = fs.options(*orm_level_statement._with_options) self.select_statement = fs - self.from_statement_ctx = ( - fsc - ) = ORMFromStatementCompileState.create_for_statement(fs, compiler) + self.from_statement_ctx = fsc = ( + ORMFromStatementCompileState.create_for_statement(fs, compiler) + ) fsc.setup_dml_returning_compile_state(dml_mapper) dml_level_statement = dml_level_statement._generate() diff --git a/lib/sqlalchemy/orm/clsregistry.py b/lib/sqlalchemy/orm/clsregistry.py index 2cce129cbfe..26113d8b24d 100644 --- a/lib/sqlalchemy/orm/clsregistry.py +++ b/lib/sqlalchemy/orm/clsregistry.py @@ -83,9 +83,9 @@ def add_class( _ModuleMarker, decl_class_registry["_sa_module_registry"] ) except KeyError: - decl_class_registry[ - "_sa_module_registry" - ] = root_module = _ModuleMarker("_sa_module_registry", None) + decl_class_registry["_sa_module_registry"] = root_module = ( + _ModuleMarker("_sa_module_registry", None) + ) tokens = cls.__module__.split(".") @@ -542,9 +542,7 @@ def __call__(self) -> Any: _fallback_dict: Mapping[str, Any] = None # type: ignore -def _resolver( - cls: Type[Any], prop: RelationshipProperty[Any] -) -> Tuple[ +def _resolver(cls: Type[Any], prop: RelationshipProperty[Any]) -> Tuple[ Callable[[str], Callable[[], Union[Type[Any], Table, _ModNS]]], Callable[[str, bool], _class_resolver], ]: diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py index 534a1b64861..6fefd787a82 100644 --- a/lib/sqlalchemy/orm/collections.py +++ b/lib/sqlalchemy/orm/collections.py @@ -167,8 +167,7 @@ def shift(self): class _CollectionConverterProtocol(Protocol): - def __call__(self, collection: _COL) -> _COL: - ... + def __call__(self, collection: _COL) -> _COL: ... 
class _AdaptedCollectionProtocol(Protocol): @@ -548,9 +547,9 @@ def _reset_empty(self) -> None: self.empty ), "This collection adapter is not in the 'empty' state" self.empty = False - self.owner_state.dict[ - self._key - ] = self.owner_state._empty_collections.pop(self._key) + self.owner_state.dict[self._key] = ( + self.owner_state._empty_collections.pop(self._key) + ) def _refuse_empty(self) -> NoReturn: raise sa_exc.InvalidRequestError( @@ -1554,14 +1553,14 @@ class InstrumentedDict(Dict[_KT, _VT]): """An instrumented version of the built-in dict.""" -__canned_instrumentation: util.immutabledict[ - Any, _CollectionFactoryType -] = util.immutabledict( - { - list: InstrumentedList, - set: InstrumentedSet, - dict: InstrumentedDict, - } +__canned_instrumentation: util.immutabledict[Any, _CollectionFactoryType] = ( + util.immutabledict( + { + list: InstrumentedList, + set: InstrumentedSet, + dict: InstrumentedDict, + } + ) ) __interfaces: util.immutabledict[ diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index b4178253185..3056016e729 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -432,8 +432,7 @@ def create_for_statement( statement: Union[Select, FromStatement], compiler: Optional[SQLCompiler], **kw: Any, - ) -> ORMCompileState: - ... + ) -> ORMCompileState: ... def _append_dedupe_col_collection(self, obj, col_collection): dedupe = self.dedupe_columns @@ -517,14 +516,14 @@ def orm_pre_session_exec( and len(statement._compile_options._current_path) > 10 and execution_options.get("compiled_cache", True) is not None ): - execution_options: util.immutabledict[ - str, Any - ] = execution_options.union( - { - "compiled_cache": None, - "_cache_disable_reason": "excess depth for " - "ORM loader options", - } + execution_options: util.immutabledict[str, Any] = ( + execution_options.union( + { + "compiled_cache": None, + "_cache_disable_reason": "excess depth for " + "ORM loader options", + } + ) ) bind_arguments["clause"] = statement @@ -750,9 +749,11 @@ def create_for_statement( self.statement = statement self._label_convention = self._column_naming_convention( - statement._label_style - if not statement._is_textual and not statement.is_dml - else LABEL_STYLE_NONE, + ( + statement._label_style + if not statement._is_textual and not statement.is_dml + else LABEL_STYLE_NONE + ), self.use_legacy_query_style, ) @@ -798,9 +799,9 @@ def create_for_statement( for entity in self._entities: entity.setup_compile_state(self) - compiler._ordered_columns = ( - compiler._textual_ordered_columns - ) = False + compiler._ordered_columns = compiler._textual_ordered_columns = ( + False + ) # enable looser result column matching. this is shown to be # needed by test_query.py::TextTest @@ -1367,11 +1368,15 @@ def all_selected_columns(cls, statement): def get_columns_clause_froms(cls, statement): return cls._normalize_froms( itertools.chain.from_iterable( - element._from_objects - if "parententity" not in element._annotations - else [ - element._annotations["parententity"].__clause_element__() - ] + ( + element._from_objects + if "parententity" not in element._annotations + else [ + element._annotations[ + "parententity" + ].__clause_element__() + ] + ) for element in statement._raw_columns ) ) @@ -1500,9 +1505,11 @@ def _compound_eager_statement(self): # the original expressions outside of the label references # in order to have them render. 
unwrapped_order_by = [ - elem.element - if isinstance(elem, sql.elements._label_reference) - else elem + ( + elem.element + if isinstance(elem, sql.elements._label_reference) + else elem + ) for elem in self.order_by ] @@ -2421,9 +2428,12 @@ def _column_descriptions( "type": ent.type, "aliased": getattr(insp_ent, "is_aliased_class", False), "expr": ent.expr, - "entity": getattr(insp_ent, "entity", None) - if ent.entity_zero is not None and not insp_ent.is_clause_element - else None, + "entity": ( + getattr(insp_ent, "entity", None) + if ent.entity_zero is not None + and not insp_ent.is_clause_element + else None + ), } for ent, insp_ent in [ (_ent, _ent.entity_zero) for _ent in ctx._entities diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index 719d21a188c..09128ea8fb4 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -313,17 +313,13 @@ def __init__( self, fn: Callable[..., _T], cascading: bool = False, - ): - ... + ): ... - def __get__(self, instance: Optional[object], owner: Any) -> _T: - ... + def __get__(self, instance: Optional[object], owner: Any) -> _T: ... - def __set__(self, instance: Any, value: Any) -> None: - ... + def __set__(self, instance: Any, value: Any) -> None: ... - def __delete__(self, instance: Any) -> None: - ... + def __delete__(self, instance: Any) -> None: ... def __call__(self, fn: Callable[..., _TT]) -> _declared_directive[_TT]: # extensive fooling of mypy underway... @@ -428,14 +424,11 @@ def __init__( self, fn: _DeclaredAttrDecorated[_T], cascading: bool = False, - ): - ... + ): ... - def __set__(self, instance: Any, value: Any) -> None: - ... + def __set__(self, instance: Any, value: Any) -> None: ... - def __delete__(self, instance: Any) -> None: - ... + def __delete__(self, instance: Any) -> None: ... # this is the Mapped[] API where at class descriptor get time we want # the type checker to see InstrumentedAttribute[_T]. However the @@ -444,17 +437,14 @@ def __delete__(self, instance: Any) -> None: @overload def __get__( self, instance: None, owner: Any - ) -> InstrumentedAttribute[_T]: - ... + ) -> InstrumentedAttribute[_T]: ... @overload - def __get__(self, instance: object, owner: Any) -> _T: - ... + def __get__(self, instance: object, owner: Any) -> _T: ... def __get__( self, instance: Optional[object], owner: Any - ) -> Union[InstrumentedAttribute[_T], _T]: - ... + ) -> Union[InstrumentedAttribute[_T], _T]: ... @hybridmethod def _stateful(cls, **kw: Any) -> _stateful_declared_attr[_T]: @@ -620,9 +610,9 @@ def __init_subclass__( for k, v in apply_dc_transforms.items() } else: - cls._sa_apply_dc_transforms = ( - current_transforms - ) = apply_dc_transforms + cls._sa_apply_dc_transforms = current_transforms = ( + apply_dc_transforms + ) super().__init_subclass__(**kw) @@ -753,11 +743,9 @@ def __init__(self, id=None, name=None): if typing.TYPE_CHECKING: - def _sa_inspect_type(self) -> Mapper[Self]: - ... + def _sa_inspect_type(self) -> Mapper[Self]: ... - def _sa_inspect_instance(self) -> InstanceState[Self]: - ... + def _sa_inspect_instance(self) -> InstanceState[Self]: ... _sa_registry: ClassVar[_RegistryType] @@ -838,8 +826,7 @@ def _sa_inspect_instance(self) -> InstanceState[Self]: """ - def __init__(self, **kw: Any): - ... + def __init__(self, **kw: Any): ... def __init_subclass__(cls, **kw: Any) -> None: if DeclarativeBase in cls.__bases__: @@ -924,11 +911,9 @@ class DeclarativeBaseNoMeta( if typing.TYPE_CHECKING: - def _sa_inspect_type(self) -> Mapper[Self]: - ... 
+ def _sa_inspect_type(self) -> Mapper[Self]: ... - def _sa_inspect_instance(self) -> InstanceState[Self]: - ... + def _sa_inspect_instance(self) -> InstanceState[Self]: ... __tablename__: Any """String name to assign to the generated @@ -963,8 +948,7 @@ def _sa_inspect_instance(self) -> InstanceState[Self]: """ - def __init__(self, **kw: Any): - ... + def __init__(self, **kw: Any): ... def __init_subclass__(cls, **kw: Any) -> None: if DeclarativeBaseNoMeta in cls.__bases__: @@ -1585,8 +1569,7 @@ def __class_getitem__(cls: Type[_T], key: Any) -> Type[_T]: ), ) @overload - def mapped_as_dataclass(self, __cls: Type[_O]) -> Type[_O]: - ... + def mapped_as_dataclass(self, __cls: Type[_O]) -> Type[_O]: ... @overload def mapped_as_dataclass( @@ -1601,8 +1584,7 @@ def mapped_as_dataclass( match_args: Union[_NoArg, bool] = ..., kw_only: Union[_NoArg, bool] = ..., dataclass_callable: Union[_NoArg, Callable[..., Type[Any]]] = ..., - ) -> Callable[[Type[_O]], Type[_O]]: - ... + ) -> Callable[[Type[_O]], Type[_O]]: ... def mapped_as_dataclass( self, diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index 3786cfe4a8f..96530c3ac47 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -98,8 +98,7 @@ class MappedClassProtocol(Protocol[_O]): __mapper__: Mapper[_O] __table__: FromClause - def __call__(self, **kw: Any) -> _O: - ... + def __call__(self, **kw: Any) -> _O: ... class _DeclMappedClassProtocol(MappedClassProtocol[_O], Protocol): @@ -111,11 +110,9 @@ class _DeclMappedClassProtocol(MappedClassProtocol[_O], Protocol): _sa_apply_dc_transforms: Optional[_DataclassArguments] - def __declare_first__(self) -> None: - ... + def __declare_first__(self) -> None: ... - def __declare_last__(self) -> None: - ... + def __declare_last__(self) -> None: ... 
class _DataclassArguments(TypedDict): @@ -908,9 +905,9 @@ def _mapper_args_fn() -> Dict[str, Any]: "@declared_attr.cascading; " "skipping" % (name, cls) ) - collected_attributes[name] = column_copies[ - obj - ] = ret = obj.__get__(obj, cls) + collected_attributes[name] = column_copies[obj] = ( + ret + ) = obj.__get__(obj, cls) setattr(cls, name, ret) else: if is_dataclass_field: @@ -947,9 +944,9 @@ def _mapper_args_fn() -> Dict[str, Any]: ): ret = ret.descriptor - collected_attributes[name] = column_copies[ - obj - ] = ret + collected_attributes[name] = column_copies[obj] = ( + ret + ) if ( isinstance(ret, (Column, MapperProperty)) diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py index 9bdd92428e2..71c06fbeb19 100644 --- a/lib/sqlalchemy/orm/dependency.py +++ b/lib/sqlalchemy/orm/dependency.py @@ -167,9 +167,11 @@ def per_state_flush_actions(self, uow, states, isdelete): sum_ = state.manager[self.key].impl.get_all_pending( state, state.dict, - self._passive_delete_flag - if isdelete - else attributes.PASSIVE_NO_INITIALIZE, + ( + self._passive_delete_flag + if isdelete + else attributes.PASSIVE_NO_INITIALIZE + ), ) if not sum_: diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py index a70f0b3ec37..a3650f5f001 100644 --- a/lib/sqlalchemy/orm/descriptor_props.py +++ b/lib/sqlalchemy/orm/descriptor_props.py @@ -419,13 +419,13 @@ def _init_accessor(self) -> None: and self.composite_class not in _composite_getters ): if self._generated_composite_accessor is not None: - _composite_getters[ - self.composite_class - ] = self._generated_composite_accessor + _composite_getters[self.composite_class] = ( + self._generated_composite_accessor + ) elif hasattr(self.composite_class, "__composite_values__"): - _composite_getters[ - self.composite_class - ] = lambda obj: obj.__composite_values__() + _composite_getters[self.composite_class] = ( + lambda obj: obj.__composite_values__() + ) @util.preload_module("sqlalchemy.orm.properties") @util.preload_module("sqlalchemy.orm.decl_base") diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py index d5db03a19db..7496e5c30da 100644 --- a/lib/sqlalchemy/orm/dynamic.py +++ b/lib/sqlalchemy/orm/dynamic.py @@ -172,8 +172,7 @@ def _iter(self) -> Union[result.ScalarResult[_T], result.Result[_T]]: if TYPE_CHECKING: - def __iter__(self) -> Iterator[_T]: - ... + def __iter__(self) -> Iterator[_T]: ... def __getitem__(self, index: Any) -> Union[_T, List[_T]]: sess = self.session diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index b5c4f94a72a..2233d7ee156 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -729,9 +729,9 @@ def populate( class _InstanceEventsHold(_EventsHold[_ET]): - all_holds: weakref.WeakKeyDictionary[ - Any, Any - ] = weakref.WeakKeyDictionary() + all_holds: weakref.WeakKeyDictionary[Any, Any] = ( + weakref.WeakKeyDictionary() + ) def resolve(self, class_: Type[_O]) -> Optional[ClassManager[_O]]: return instrumentation.opt_manager_of_class(class_) diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py index 9360e4a3435..e9fe843360c 100644 --- a/lib/sqlalchemy/orm/instrumentation.py +++ b/lib/sqlalchemy/orm/instrumentation.py @@ -85,13 +85,11 @@ def __call__( state: state.InstanceState[Any], toload: Set[str], passive: base.PassiveFlag, - ) -> None: - ... + ) -> None: ... class _ManagerFactory(Protocol): - def __call__(self, class_: Type[_O]) -> ClassManager[_O]: - ... 
+ def __call__(self, class_: Type[_O]) -> ClassManager[_O]: ... class ClassManager( diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index 3d9018257e8..68a6f645317 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -735,6 +735,7 @@ class SomeMappedClass(Base): :attr:`.TypeEngine.comparator_factory` """ + __slots__ = "prop", "_parententity", "_adapt_to_entity" __visit_name__ = "orm_prop_comparator" @@ -838,13 +839,11 @@ def _of_type_op(a: Any, class_: Any) -> Any: def operate( self, op: OperatorType, *other: Any, **kwargs: Any - ) -> ColumnElement[Any]: - ... + ) -> ColumnElement[Any]: ... def reverse_operate( self, op: OperatorType, other: Any, **kwargs: Any - ) -> ColumnElement[Any]: - ... + ) -> ColumnElement[Any]: ... def of_type(self, class_: _EntityType[Any]) -> PropComparator[_T_co]: r"""Redefine this object in terms of a polymorphic subclass, diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py index 1de71f9c71c..4e2cb8250fc 100644 --- a/lib/sqlalchemy/orm/loading.py +++ b/lib/sqlalchemy/orm/loading.py @@ -181,20 +181,22 @@ def go(obj): return go unique_filters = [ - _no_unique - if context.yield_per - else _not_hashable( - ent.column.type, # type: ignore - legacy=context.load_options._legacy_uniquing, - uncertain=ent._null_column_type, - ) - if ( - not ent.use_id_for_hash - and (ent._non_hashable_value or ent._null_column_type) + ( + _no_unique + if context.yield_per + else ( + _not_hashable( + ent.column.type, # type: ignore + legacy=context.load_options._legacy_uniquing, + uncertain=ent._null_column_type, + ) + if ( + not ent.use_id_for_hash + and (ent._non_hashable_value or ent._null_column_type) + ) + else id if ent.use_id_for_hash else None + ) ) - else id - if ent.use_id_for_hash - else None for ent in context.compile_state._entities ] diff --git a/lib/sqlalchemy/orm/mapped_collection.py b/lib/sqlalchemy/orm/mapped_collection.py index 24ac0cc1b95..13c6b689e1d 100644 --- a/lib/sqlalchemy/orm/mapped_collection.py +++ b/lib/sqlalchemy/orm/mapped_collection.py @@ -117,9 +117,7 @@ def __reduce__( return self.__class__, (self.colkeys,) @classmethod - def _reduce_from_cols( - cls, cols: Sequence[ColumnElement[_KT]] - ) -> Tuple[ + def _reduce_from_cols(cls, cols: Sequence[ColumnElement[_KT]]) -> Tuple[ Type[_SerializableColumnGetterV2[_KT]], Tuple[Sequence[Tuple[Optional[str], Optional[str]]]], ]: diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index 9b8ddb10770..0caed0e2fd0 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -132,9 +132,9 @@ ] -_mapper_registries: weakref.WeakKeyDictionary[ - _RegistryType, bool -] = weakref.WeakKeyDictionary() +_mapper_registries: weakref.WeakKeyDictionary[_RegistryType, bool] = ( + weakref.WeakKeyDictionary() +) def _all_registries() -> Set[registry]: @@ -1606,9 +1606,11 @@ def _configure_pks(self) -> None: if self._primary_key_argument: coerced_pk_arg = [ - self._str_arg_to_mapped_col("primary_key", c) - if isinstance(c, str) - else c + ( + self._str_arg_to_mapped_col("primary_key", c) + if isinstance(c, str) + else c + ) for c in ( coercions.expect( roles.DDLConstraintColumnRole, @@ -2465,9 +2467,11 @@ def __str__(self) -> str: return "Mapper[%s%s(%s)]" % ( self.class_.__name__, self.non_primary and " (non-primary)" or "", - self.local_table.description - if self.local_table is not None - else self.persist_selectable.description, + ( + self.local_table.description + if self.local_table is not None + 
else self.persist_selectable.description + ), ) def _is_orphan(self, state: InstanceState[_O]) -> bool: diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py index c97afe7e613..76484b3e68f 100644 --- a/lib/sqlalchemy/orm/path_registry.py +++ b/lib/sqlalchemy/orm/path_registry.py @@ -45,11 +45,9 @@ from ..util.typing import _LiteralStar from ..util.typing import TypeGuard - def is_root(path: PathRegistry) -> TypeGuard[RootRegistry]: - ... + def is_root(path: PathRegistry) -> TypeGuard[RootRegistry]: ... - def is_entity(path: PathRegistry) -> TypeGuard[AbstractEntityRegistry]: - ... + def is_entity(path: PathRegistry) -> TypeGuard[AbstractEntityRegistry]: ... else: is_root = operator.attrgetter("is_root") @@ -185,26 +183,21 @@ def __hash__(self) -> int: return id(self) @overload - def __getitem__(self, entity: _StrPathToken) -> TokenRegistry: - ... + def __getitem__(self, entity: _StrPathToken) -> TokenRegistry: ... @overload - def __getitem__(self, entity: int) -> _PathElementType: - ... + def __getitem__(self, entity: int) -> _PathElementType: ... @overload - def __getitem__(self, entity: slice) -> _PathRepresentation: - ... + def __getitem__(self, entity: slice) -> _PathRepresentation: ... @overload def __getitem__( self, entity: _InternalEntityType[Any] - ) -> AbstractEntityRegistry: - ... + ) -> AbstractEntityRegistry: ... @overload - def __getitem__(self, entity: MapperProperty[Any]) -> PropRegistry: - ... + def __getitem__(self, entity: MapperProperty[Any]) -> PropRegistry: ... def __getitem__( self, @@ -320,13 +313,11 @@ def deserialize(cls, path: _SerializedPath) -> PathRegistry: @overload @classmethod - def per_mapper(cls, mapper: Mapper[Any]) -> CachingEntityRegistry: - ... + def per_mapper(cls, mapper: Mapper[Any]) -> CachingEntityRegistry: ... @overload @classmethod - def per_mapper(cls, mapper: AliasedInsp[Any]) -> SlotsEntityRegistry: - ... + def per_mapper(cls, mapper: AliasedInsp[Any]) -> SlotsEntityRegistry: ... @classmethod def per_mapper( @@ -808,11 +799,9 @@ def _getitem(self, entity: Any) -> Any: def path_is_entity( path: PathRegistry, - ) -> TypeGuard[AbstractEntityRegistry]: - ... + ) -> TypeGuard[AbstractEntityRegistry]: ... - def path_is_property(path: PathRegistry) -> TypeGuard[PropRegistry]: - ... + def path_is_property(path: PathRegistry) -> TypeGuard[PropRegistry]: ... 
else: path_is_entity = operator.attrgetter("is_entity") diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index 0c2529d5d13..abe69bf4684 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -140,11 +140,13 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols): state_dict, sub_mapper, connection, - mapper._get_committed_state_attr_by_column( - state, state_dict, mapper.version_id_col - ) - if mapper.version_id_col is not None - else None, + ( + mapper._get_committed_state_attr_by_column( + state, state_dict, mapper.version_id_col + ) + if mapper.version_id_col is not None + else None + ), ) for state, state_dict, sub_mapper, connection in states_to_update if table in sub_mapper._pks_by_table @@ -703,10 +705,10 @@ def _collect_delete_commands( params = {} for col in mapper._pks_by_table[table]: - params[ - col.key - ] = value = mapper._get_committed_state_attr_by_column( - state, state_dict, col + params[col.key] = value = ( + mapper._get_committed_state_attr_by_column( + state, state_dict, col + ) ) if value is None: raise orm_exc.FlushError( @@ -934,9 +936,11 @@ def update_stmt(existing_stmt=None): c.context.compiled_parameters[0], value_params, True, - c.returned_defaults - if not c.context.executemany - else None, + ( + c.returned_defaults + if not c.context.executemany + else None + ), ) if check_rowcount: @@ -1069,9 +1073,11 @@ def _emit_insert_statements( last_inserted_params, value_params, False, - result.returned_defaults - if not result.context.executemany - else None, + ( + result.returned_defaults + if not result.context.executemany + else None + ), ) else: _postfetch_bulk_save(mapper_rec, state_dict, table) @@ -1261,9 +1267,11 @@ def _emit_insert_statements( result.context.compiled_parameters[0], value_params, False, - result.returned_defaults - if not result.context.executemany - else None, + ( + result.returned_defaults + if not result.context.executemany + else None + ), ) else: _postfetch_bulk_save(mapper_rec, state_dict, table) diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index 6e2e73dc46f..7a5eb8625b2 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -429,8 +429,7 @@ def _orm_annotate_column(self, column: _NC) -> _NC: if TYPE_CHECKING: - def __clause_element__(self) -> NamedColumn[_PT]: - ... + def __clause_element__(self) -> NamedColumn[_PT]: ... def _memoized_method___clause_element__( self, @@ -636,9 +635,11 @@ def columns_to_assign(self) -> List[Tuple[Column[Any], int]]: return [ ( self.column, - self._sort_order - if self._sort_order is not _NoArg.NO_ARG - else 0, + ( + self._sort_order + if self._sort_order is not _NoArg.NO_ARG + else 0 + ), ) ] diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index a13e23fc192..77bce788483 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -166,7 +166,6 @@ class Query( Executable, Generic[_T], ): - """ORM-level SQL construction object. .. legacy:: The ORM :class:`.Query` object is a legacy construct @@ -205,9 +204,9 @@ class Query( _memoized_select_entities = () - _compile_options: Union[ - Type[CacheableOptions], CacheableOptions - ] = ORMCompileState.default_compile_options + _compile_options: Union[Type[CacheableOptions], CacheableOptions] = ( + ORMCompileState.default_compile_options + ) _with_options: Tuple[ExecutableOption, ...] 
load_options = QueryContext.default_load_options + { @@ -734,18 +733,15 @@ def label(self, name: Optional[str]) -> Label[Any]: @overload def as_scalar( self: Query[Tuple[_MAYBE_ENTITY]], - ) -> ScalarSelect[_MAYBE_ENTITY]: - ... + ) -> ScalarSelect[_MAYBE_ENTITY]: ... @overload def as_scalar( self: Query[Tuple[_NOT_ENTITY]], - ) -> ScalarSelect[_NOT_ENTITY]: - ... + ) -> ScalarSelect[_NOT_ENTITY]: ... @overload - def as_scalar(self) -> ScalarSelect[Any]: - ... + def as_scalar(self) -> ScalarSelect[Any]: ... @util.deprecated( "1.4", @@ -763,18 +759,15 @@ def as_scalar(self) -> ScalarSelect[Any]: @overload def scalar_subquery( self: Query[Tuple[_MAYBE_ENTITY]], - ) -> ScalarSelect[Any]: - ... + ) -> ScalarSelect[Any]: ... @overload def scalar_subquery( self: Query[Tuple[_NOT_ENTITY]], - ) -> ScalarSelect[_NOT_ENTITY]: - ... + ) -> ScalarSelect[_NOT_ENTITY]: ... @overload - def scalar_subquery(self) -> ScalarSelect[Any]: - ... + def scalar_subquery(self) -> ScalarSelect[Any]: ... def scalar_subquery(self) -> ScalarSelect[Any]: """Return the full SELECT statement represented by this @@ -822,14 +815,12 @@ def __clause_element__(self) -> Union[Select[_T], FromStatement[_T]]: @overload def only_return_tuples( self: Query[_O], value: Literal[True] - ) -> RowReturningQuery[Tuple[_O]]: - ... + ) -> RowReturningQuery[Tuple[_O]]: ... @overload def only_return_tuples( self: Query[_O], value: Literal[False] - ) -> Query[_O]: - ... + ) -> Query[_O]: ... @_generative def only_return_tuples(self, value: bool) -> Query[Any]: @@ -1475,15 +1466,13 @@ def value(self, column: _ColumnExpressionArgument[Any]) -> Any: return None @overload - def with_entities(self, _entity: _EntityType[_O]) -> Query[_O]: - ... + def with_entities(self, _entity: _EntityType[_O]) -> Query[_O]: ... @overload def with_entities( self, _colexpr: roles.TypedColumnsClauseRole[_T], - ) -> RowReturningQuery[Tuple[_T]]: - ... + ) -> RowReturningQuery[Tuple[_T]]: ... # START OVERLOADED FUNCTIONS self.with_entities RowReturningQuery 2-8 @@ -1493,14 +1482,12 @@ def with_entities( @overload def with_entities( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] - ) -> RowReturningQuery[Tuple[_T0, _T1]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1]]: ... @overload def with_entities( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: ... @overload def with_entities( @@ -1509,8 +1496,7 @@ def with_entities( __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: ... @overload def with_entities( @@ -1520,8 +1506,7 @@ def with_entities( __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... @overload def with_entities( @@ -1532,8 +1517,7 @@ def with_entities( __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... @overload def with_entities( @@ -1545,8 +1529,7 @@ def with_entities( __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... 
@overload def with_entities( @@ -1559,16 +1542,14 @@ def with_entities( __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], __ent7: _TCCA[_T7], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... # END OVERLOADED FUNCTIONS self.with_entities @overload def with_entities( self, *entities: _ColumnsClauseArgument[Any] - ) -> Query[Any]: - ... + ) -> Query[Any]: ... @_generative def with_entities( @@ -1730,12 +1711,10 @@ def execution_options( populate_existing: bool = False, autoflush: bool = False, **opt: Any, - ) -> Self: - ... + ) -> Self: ... @overload - def execution_options(self, **opt: Any) -> Self: - ... + def execution_options(self, **opt: Any) -> Self: ... @_generative def execution_options(self, **kwargs: Any) -> Self: diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index bcdd79cb75a..db76bd912f7 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -1307,9 +1307,11 @@ def _go() -> Any: state, dict_, column, - passive=PassiveFlag.PASSIVE_OFF - if state.persistent - else PassiveFlag.PASSIVE_NO_FETCH ^ PassiveFlag.INIT_OK, + passive=( + PassiveFlag.PASSIVE_OFF + if state.persistent + else PassiveFlag.PASSIVE_NO_FETCH ^ PassiveFlag.INIT_OK + ), ) if current_value is LoaderCallableStatus.NEVER_SET: @@ -1999,9 +2001,11 @@ def _check_cascade_settings(self, cascade: CascadeOptions) -> None: "the single_parent=True flag." % { "rel": self, - "direction": "many-to-one" - if self.direction is MANYTOONE - else "many-to-many", + "direction": ( + "many-to-one" + if self.direction is MANYTOONE + else "many-to-many" + ), "clsname": self.parent.class_.__name__, "relatedcls": self.mapper.class_.__name__, }, @@ -3052,9 +3056,9 @@ def _deannotate_pairs( def _setup_pairs(self) -> None: sync_pairs: _MutableColumnPairs = [] - lrp: util.OrderedSet[ - Tuple[ColumnElement[Any], ColumnElement[Any]] - ] = util.OrderedSet([]) + lrp: util.OrderedSet[Tuple[ColumnElement[Any], ColumnElement[Any]]] = ( + util.OrderedSet([]) + ) secondary_sync_pairs: _MutableColumnPairs = [] def go( @@ -3131,9 +3135,9 @@ def _warn_for_conflicting_sync_targets(self) -> None: # level configuration that benefits from this warning. if to_ not in self._track_overlapping_sync_targets: - self._track_overlapping_sync_targets[ - to_ - ] = weakref.WeakKeyDictionary({self.prop: from_}) + self._track_overlapping_sync_targets[to_] = ( + weakref.WeakKeyDictionary({self.prop: from_}) + ) else: other_props = [] prop_to_from = self._track_overlapping_sync_targets[to_] @@ -3366,9 +3370,7 @@ def mark_unrelated_columns_as_ok_to_adapt( dest_selectable, ) - def create_lazy_clause( - self, reverse_direction: bool = False - ) -> Tuple[ + def create_lazy_clause(self, reverse_direction: bool = False) -> Tuple[ ColumnElement[bool], Dict[str, ColumnElement[Any]], Dict[ColumnElement[Any], ColumnElement[Any]], diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py index ff8271fafdb..819616ae85f 100644 --- a/lib/sqlalchemy/orm/scoping.py +++ b/lib/sqlalchemy/orm/scoping.py @@ -86,8 +86,7 @@ class QueryPropertyDescriptor(Protocol): """ - def __get__(self, instance: Any, owner: Type[_T]) -> Query[_T]: - ... + def __get__(self, instance: Any, owner: Type[_T]) -> Query[_T]: ... 
_O = TypeVar("_O", bound=object) @@ -682,8 +681,7 @@ def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> Result[_T]: - ... + ) -> Result[_T]: ... @overload def execute( @@ -695,8 +693,7 @@ def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> CursorResult[Any]: - ... + ) -> CursorResult[Any]: ... @overload def execute( @@ -708,8 +705,7 @@ def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> Result[Any]: - ... + ) -> Result[Any]: ... def execute( self, @@ -1574,14 +1570,12 @@ def merge( return self._proxied.merge(instance, load=load, options=options) @overload - def query(self, _entity: _EntityType[_O]) -> Query[_O]: - ... + def query(self, _entity: _EntityType[_O]) -> Query[_O]: ... @overload def query( self, _colexpr: TypedColumnsClauseRole[_T] - ) -> RowReturningQuery[Tuple[_T]]: - ... + ) -> RowReturningQuery[Tuple[_T]]: ... # START OVERLOADED FUNCTIONS self.query RowReturningQuery 2-8 @@ -1591,14 +1585,12 @@ def query( @overload def query( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] - ) -> RowReturningQuery[Tuple[_T0, _T1]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1]]: ... @overload def query( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: ... @overload def query( @@ -1607,8 +1599,7 @@ def query( __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: ... @overload def query( @@ -1618,8 +1609,7 @@ def query( __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... @overload def query( @@ -1630,8 +1620,7 @@ def query( __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... @overload def query( @@ -1643,8 +1632,7 @@ def query( __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def query( @@ -1657,16 +1645,14 @@ def query( __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], __ent7: _TCCA[_T7], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... # END OVERLOADED FUNCTIONS self.query @overload def query( self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any - ) -> Query[Any]: - ... + ) -> Query[Any]: ... def query( self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any @@ -1818,8 +1804,7 @@ def scalar( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> Optional[_T]: - ... + ) -> Optional[_T]: ... @overload def scalar( @@ -1830,8 +1815,7 @@ def scalar( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> Any: - ... + ) -> Any: ... 
def scalar( self, @@ -1873,8 +1857,7 @@ def scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> ScalarResult[_T]: - ... + ) -> ScalarResult[_T]: ... @overload def scalars( @@ -1885,8 +1868,7 @@ def scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> ScalarResult[Any]: - ... + ) -> ScalarResult[Any]: ... def scalars( self, diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index e80a8af6e29..3eba5aaf411 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -146,9 +146,9 @@ "object_session", ] -_sessions: weakref.WeakValueDictionary[ - int, Session -] = weakref.WeakValueDictionary() +_sessions: weakref.WeakValueDictionary[int, Session] = ( + weakref.WeakValueDictionary() +) """Weak-referencing dictionary of :class:`.Session` objects. """ @@ -188,8 +188,7 @@ def __call__( mapper: Optional[Mapper[Any]] = None, instance: Optional[object] = None, **kw: Any, - ) -> Connection: - ... + ) -> Connection: ... def _state_session(state: InstanceState[Any]) -> Optional[Session]: @@ -1000,9 +999,11 @@ def connection( def _begin(self, nested: bool = False) -> SessionTransaction: return SessionTransaction( self.session, - SessionTransactionOrigin.BEGIN_NESTED - if nested - else SessionTransactionOrigin.SUBTRANSACTION, + ( + SessionTransactionOrigin.BEGIN_NESTED + if nested + else SessionTransactionOrigin.SUBTRANSACTION + ), self, ) @@ -1819,9 +1820,11 @@ def _autobegin_t(self, begin: bool = False) -> SessionTransaction: ) trans = SessionTransaction( self, - SessionTransactionOrigin.BEGIN - if begin - else SessionTransactionOrigin.AUTOBEGIN, + ( + SessionTransactionOrigin.BEGIN + if begin + else SessionTransactionOrigin.AUTOBEGIN + ), ) assert self._transaction is trans return trans @@ -2057,8 +2060,7 @@ def _execute_internal( _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, _scalar_result: Literal[True] = ..., - ) -> Any: - ... + ) -> Any: ... @overload def _execute_internal( @@ -2071,8 +2073,7 @@ def _execute_internal( _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, _scalar_result: bool = ..., - ) -> Result[Any]: - ... + ) -> Result[Any]: ... def _execute_internal( self, @@ -2215,8 +2216,7 @@ def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> Result[_T]: - ... + ) -> Result[_T]: ... @overload def execute( @@ -2228,8 +2228,7 @@ def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> CursorResult[Any]: - ... + ) -> CursorResult[Any]: ... @overload def execute( @@ -2241,8 +2240,7 @@ def execute( bind_arguments: Optional[_BindArguments] = None, _parent_execute_state: Optional[Any] = None, _add_event: Optional[Any] = None, - ) -> Result[Any]: - ... + ) -> Result[Any]: ... def execute( self, @@ -2323,8 +2321,7 @@ def scalar( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> Optional[_T]: - ... + ) -> Optional[_T]: ... @overload def scalar( @@ -2335,8 +2332,7 @@ def scalar( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> Any: - ... + ) -> Any: ... 
def scalar( self, @@ -2373,8 +2369,7 @@ def scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> ScalarResult[_T]: - ... + ) -> ScalarResult[_T]: ... @overload def scalars( @@ -2385,8 +2380,7 @@ def scalars( execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT, bind_arguments: Optional[_BindArguments] = None, **kw: Any, - ) -> ScalarResult[Any]: - ... + ) -> ScalarResult[Any]: ... def scalars( self, @@ -2795,14 +2789,12 @@ def get_bind( ) @overload - def query(self, _entity: _EntityType[_O]) -> Query[_O]: - ... + def query(self, _entity: _EntityType[_O]) -> Query[_O]: ... @overload def query( self, _colexpr: TypedColumnsClauseRole[_T] - ) -> RowReturningQuery[Tuple[_T]]: - ... + ) -> RowReturningQuery[Tuple[_T]]: ... # START OVERLOADED FUNCTIONS self.query RowReturningQuery 2-8 @@ -2812,14 +2804,12 @@ def query( @overload def query( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] - ) -> RowReturningQuery[Tuple[_T0, _T1]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1]]: ... @overload def query( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: ... @overload def query( @@ -2828,8 +2818,7 @@ def query( __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: ... @overload def query( @@ -2839,8 +2828,7 @@ def query( __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... @overload def query( @@ -2851,8 +2839,7 @@ def query( __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... @overload def query( @@ -2864,8 +2851,7 @@ def query( __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def query( @@ -2878,16 +2864,14 @@ def query( __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], __ent7: _TCCA[_T7], - ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: - ... + ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... # END OVERLOADED FUNCTIONS self.query @overload def query( self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any - ) -> Query[Any]: - ... + ) -> Query[Any]: ... def query( self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any @@ -3768,9 +3752,9 @@ def _get_impl( if correct_keys: primary_key_identity = dict(primary_key_identity) for k in correct_keys: - primary_key_identity[ - pk_synonyms[k] - ] = primary_key_identity[k] + primary_key_identity[pk_synonyms[k]] = ( + primary_key_identity[k] + ) try: primary_key_identity = list( @@ -4988,8 +4972,7 @@ def __init__( expire_on_commit: bool = ..., info: Optional[_InfoType] = ..., **kw: Any, - ): - ... + ): ... @overload def __init__( @@ -5000,8 +4983,7 @@ def __init__( expire_on_commit: bool = ..., info: Optional[_InfoType] = ..., **kw: Any, - ): - ... + ): ... 
def __init__( self, diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py index e628a682c6c..03b81f90405 100644 --- a/lib/sqlalchemy/orm/state.py +++ b/lib/sqlalchemy/orm/state.py @@ -78,8 +78,7 @@ class _InstanceDictProto(Protocol): - def __call__(self) -> Optional[IdentityMap]: - ... + def __call__(self) -> Optional[IdentityMap]: ... class _InstallLoaderCallableProto(Protocol[_O]): @@ -94,8 +93,7 @@ class _InstallLoaderCallableProto(Protocol[_O]): def __call__( self, state: InstanceState[_O], dict_: _InstanceDict, row: Row[Any] - ) -> None: - ... + ) -> None: ... @inspection._self_inspects diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index d7671e07941..e38a05f0613 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -1195,9 +1195,11 @@ def create_row_processor( key, self, loadopt, - loadopt._generate_extra_criteria(context) - if loadopt._extra_criteria - else None, + ( + loadopt._generate_extra_criteria(context) + if loadopt._extra_criteria + else None + ), ), key, ) @@ -1672,9 +1674,11 @@ def _apply_joins( elif ltj > 2: middle = [ ( - orm_util.AliasedClass(item[0]) - if not inspect(item[0]).is_aliased_class - else item[0].entity, + ( + orm_util.AliasedClass(item[0]) + if not inspect(item[0]).is_aliased_class + else item[0].entity + ), item[1], ) for item in to_join[1:-1] @@ -2328,9 +2332,11 @@ def _generate_row_adapter( to_adapt = orm_util.AliasedClass( self.mapper, - alias=alt_selectable._anonymous_fromclause(flat=True) - if alt_selectable is not None - else None, + alias=( + alt_selectable._anonymous_fromclause(flat=True) + if alt_selectable is not None + else None + ), flat=True, use_mapper_path=True, ) diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index 7de0e31d4d3..5f3ac9bf455 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -320,9 +320,11 @@ def joinedload( loader = self._set_relationship_strategy( attr, {"lazy": "joined"}, - opts={"innerjoin": innerjoin} - if innerjoin is not None - else util.EMPTY_DICT, + opts=( + {"innerjoin": innerjoin} + if innerjoin is not None + else util.EMPTY_DICT + ), ) return loader @@ -777,12 +779,10 @@ def selectin_polymorphic(self, classes: Iterable[Type[Any]]) -> Self: return self @overload - def _coerce_strat(self, strategy: _StrategySpec) -> _StrategyKey: - ... + def _coerce_strat(self, strategy: _StrategySpec) -> _StrategyKey: ... @overload - def _coerce_strat(self, strategy: Literal[None]) -> None: - ... + def _coerce_strat(self, strategy: Literal[None]) -> None: ... def _coerce_strat( self, strategy: Optional[_StrategySpec] @@ -2081,9 +2081,9 @@ def __getstate__(self): d["_extra_criteria"] = () if self._path_with_polymorphic_path: - d[ - "_path_with_polymorphic_path" - ] = self._path_with_polymorphic_path.serialize() + d["_path_with_polymorphic_path"] = ( + self._path_with_polymorphic_path.serialize() + ) if self._of_type: if self._of_type.is_aliased_class: diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index a6186df7f28..c6102098a6a 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -163,8 +163,7 @@ def __call__( *, str_cleanup_fn: Optional[Callable[[str, str], str]] = None, include_generic: bool = False, - ) -> Type[Any]: - ... + ) -> Type[Any]: ... 
de_stringify_annotation = cast( @@ -180,8 +179,7 @@ def __call__( originating_module: str, *, str_cleanup_fn: Optional[Callable[[str, str], str]] = None, - ) -> Type[Any]: - ... + ) -> Type[Any]: ... de_stringify_union_elements = cast( @@ -191,8 +189,7 @@ def __call__( class _EvalNameOnly(Protocol): - def __call__(self, name: str, module_name: str) -> Any: - ... + def __call__(self, name: str, module_name: str) -> Any: ... eval_name_only = cast(_EvalNameOnly, _de_stringify_partial(_eval_name_only)) @@ -755,12 +752,16 @@ def __init__( insp, alias, name, - with_polymorphic_mappers - if with_polymorphic_mappers - else mapper.with_polymorphic_mappers, - with_polymorphic_discriminator - if with_polymorphic_discriminator is not None - else mapper.polymorphic_on, + ( + with_polymorphic_mappers + if with_polymorphic_mappers + else mapper.with_polymorphic_mappers + ), + ( + with_polymorphic_discriminator + if with_polymorphic_discriminator is not None + else mapper.polymorphic_on + ), base_alias, use_mapper_path, adapt_on_names, @@ -971,9 +972,9 @@ def __init__( self._weak_entity = weakref.ref(entity) self.mapper = mapper - self.selectable = ( - self.persist_selectable - ) = self.local_table = selectable + self.selectable = self.persist_selectable = self.local_table = ( + selectable + ) self.name = name self.polymorphic_on = polymorphic_on self._base_alias = weakref.ref(_base_alias or self) @@ -1229,8 +1230,7 @@ def _orm_adapt_element( self, obj: _CE, key: Optional[str] = None, - ) -> _CE: - ... + ) -> _CE: ... else: _orm_adapt_element = _adapt_element diff --git a/lib/sqlalchemy/orm/writeonly.py b/lib/sqlalchemy/orm/writeonly.py index 2f54fc9a86f..5680cc70ec5 100644 --- a/lib/sqlalchemy/orm/writeonly.py +++ b/lib/sqlalchemy/orm/writeonly.py @@ -196,8 +196,7 @@ def get_collection( dict_: _InstanceDict, user_data: Literal[None] = ..., passive: Literal[PassiveFlag.PASSIVE_OFF] = ..., - ) -> CollectionAdapter: - ... + ) -> CollectionAdapter: ... @overload def get_collection( @@ -206,8 +205,7 @@ def get_collection( dict_: _InstanceDict, user_data: _AdaptedCollectionProtocol = ..., passive: PassiveFlag = ..., - ) -> CollectionAdapter: - ... + ) -> CollectionAdapter: ... @overload def get_collection( @@ -218,8 +216,7 @@ def get_collection( passive: PassiveFlag = ..., ) -> Union[ Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter - ]: - ... + ]: ... def get_collection( self, diff --git a/lib/sqlalchemy/pool/base.py b/lib/sqlalchemy/pool/base.py index 18e0171989e..98d202789d6 100644 --- a/lib/sqlalchemy/pool/base.py +++ b/lib/sqlalchemy/pool/base.py @@ -147,17 +147,14 @@ class _AsyncConnDialect(_ConnDialect): class _CreatorFnType(Protocol): - def __call__(self) -> DBAPIConnection: - ... + def __call__(self) -> DBAPIConnection: ... class _CreatorWRecFnType(Protocol): - def __call__(self, rec: ConnectionPoolEntry) -> DBAPIConnection: - ... + def __call__(self, rec: ConnectionPoolEntry) -> DBAPIConnection: ... class Pool(log.Identified, event.EventTarget): - """Abstract base class for connection pools.""" dispatch: dispatcher[Pool] @@ -633,7 +630,6 @@ def close(self) -> None: class _ConnectionRecord(ConnectionPoolEntry): - """Maintains a position in a connection pool which references a pooled connection. 
@@ -729,11 +725,13 @@ def checkout(cls, pool: Pool) -> _ConnectionFairy: rec.fairy_ref = ref = weakref.ref( fairy, - lambda ref: _finalize_fairy( - None, rec, pool, ref, echo, transaction_was_reset=False - ) - if _finalize_fairy is not None - else None, + lambda ref: ( + _finalize_fairy( + None, rec, pool, ref, echo, transaction_was_reset=False + ) + if _finalize_fairy is not None + else None + ), ) _strong_ref_connection_records[ref] = rec if echo: @@ -1074,14 +1072,11 @@ class PoolProxiedConnection(ManagesConnection): if typing.TYPE_CHECKING: - def commit(self) -> None: - ... + def commit(self) -> None: ... - def cursor(self) -> DBAPICursor: - ... + def cursor(self) -> DBAPICursor: ... - def rollback(self) -> None: - ... + def rollback(self) -> None: ... @property def is_valid(self) -> bool: @@ -1189,7 +1184,6 @@ def __getattr__(self, key: Any) -> Any: class _ConnectionFairy(PoolProxiedConnection): - """Proxies a DBAPI connection and provides return-on-dereference support. diff --git a/lib/sqlalchemy/pool/impl.py b/lib/sqlalchemy/pool/impl.py index 5bb2dd7778d..18f9414ca64 100644 --- a/lib/sqlalchemy/pool/impl.py +++ b/lib/sqlalchemy/pool/impl.py @@ -43,7 +43,6 @@ class QueuePool(Pool): - """A :class:`_pool.Pool` that imposes a limit on the number of open connections. @@ -55,9 +54,9 @@ class QueuePool(Pool): _is_asyncio = False # type: ignore[assignment] - _queue_class: Type[ - sqla_queue.QueueCommon[ConnectionPoolEntry] - ] = sqla_queue.Queue + _queue_class: Type[sqla_queue.QueueCommon[ConnectionPoolEntry]] = ( + sqla_queue.Queue + ) _pool: sqla_queue.QueueCommon[ConnectionPoolEntry] @@ -250,9 +249,9 @@ def checkedout(self) -> int: class AsyncAdaptedQueuePool(QueuePool): _is_asyncio = True # type: ignore[assignment] - _queue_class: Type[ - sqla_queue.QueueCommon[ConnectionPoolEntry] - ] = sqla_queue.AsyncAdaptedQueue + _queue_class: Type[sqla_queue.QueueCommon[ConnectionPoolEntry]] = ( + sqla_queue.AsyncAdaptedQueue + ) _dialect = _AsyncConnDialect() @@ -262,7 +261,6 @@ class FallbackAsyncAdaptedQueuePool(AsyncAdaptedQueuePool): class NullPool(Pool): - """A Pool which does not pool connections. Instead it literally opens and closes the underlying DB-API connection @@ -302,7 +300,6 @@ def dispose(self) -> None: class SingletonThreadPool(Pool): - """A Pool that maintains one connection per thread. Maintains one connection per each thread, never moving a connection to a @@ -422,7 +419,6 @@ def connect(self) -> PoolProxiedConnection: class StaticPool(Pool): - """A Pool of exactly one connection, used for all requests. Reconnect-related functions such as ``recycle`` and connection @@ -486,7 +482,6 @@ def _do_get(self) -> ConnectionPoolEntry: class AssertionPool(Pool): - """A :class:`_pool.Pool` that allows at most one checked out connection at any given time. diff --git a/lib/sqlalchemy/sql/_elements_constructors.py b/lib/sqlalchemy/sql/_elements_constructors.py index 9dd2a58a1b8..27bac59e126 100644 --- a/lib/sqlalchemy/sql/_elements_constructors.py +++ b/lib/sqlalchemy/sql/_elements_constructors.py @@ -437,13 +437,11 @@ def outparam( @overload -def not_(clause: BinaryExpression[_T]) -> BinaryExpression[_T]: - ... +def not_(clause: BinaryExpression[_T]) -> BinaryExpression[_T]: ... @overload -def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]: - ... +def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]: ... 
def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]: diff --git a/lib/sqlalchemy/sql/_selectable_constructors.py b/lib/sqlalchemy/sql/_selectable_constructors.py index c1e0fa9dc40..c2b5008c679 100644 --- a/lib/sqlalchemy/sql/_selectable_constructors.py +++ b/lib/sqlalchemy/sql/_selectable_constructors.py @@ -330,20 +330,19 @@ def outerjoin( @overload -def select(__ent0: _TCCA[_T0]) -> Select[Tuple[_T0]]: - ... +def select(__ent0: _TCCA[_T0]) -> Select[Tuple[_T0]]: ... @overload -def select(__ent0: _TCCA[_T0], __ent1: _TCCA[_T1]) -> Select[Tuple[_T0, _T1]]: - ... +def select( + __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] +) -> Select[Tuple[_T0, _T1]]: ... @overload def select( __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] -) -> Select[Tuple[_T0, _T1, _T2]]: - ... +) -> Select[Tuple[_T0, _T1, _T2]]: ... @overload @@ -352,8 +351,7 @@ def select( __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], -) -> Select[Tuple[_T0, _T1, _T2, _T3]]: - ... +) -> Select[Tuple[_T0, _T1, _T2, _T3]]: ... @overload @@ -363,8 +361,7 @@ def select( __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], -) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4]]: - ... +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... @overload @@ -375,8 +372,7 @@ def select( __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], -) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: - ... +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... @overload @@ -388,8 +384,7 @@ def select( __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], -) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: - ... +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload @@ -402,8 +397,7 @@ def select( __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], __ent7: _TCCA[_T7], -) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: - ... +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... @overload @@ -417,8 +411,7 @@ def select( __ent6: _TCCA[_T6], __ent7: _TCCA[_T7], __ent8: _TCCA[_T8], -) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8]]: - ... +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8]]: ... @overload @@ -433,16 +426,16 @@ def select( __ent7: _TCCA[_T7], __ent8: _TCCA[_T8], __ent9: _TCCA[_T9], -) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9]]: - ... +) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9]]: ... # END OVERLOADED FUNCTIONS select @overload -def select(*entities: _ColumnsClauseArgument[Any], **__kw: Any) -> Select[Any]: - ... +def select( + *entities: _ColumnsClauseArgument[Any], **__kw: Any +) -> Select[Any]: ... def select(*entities: _ColumnsClauseArgument[Any], **__kw: Any) -> Select[Any]: diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index 024507ec798..ea9cbe1f482 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -83,15 +83,13 @@ class _HasClauseElement(Protocol, Generic[_T_co]): """indicates a class that has a __clause_element__() method""" - def __clause_element__(self) -> roles.ExpressionElementRole[_T_co]: - ... + def __clause_element__(self) -> roles.ExpressionElementRole[_T_co]: ... class _CoreAdapterProto(Protocol): """protocol for the ClauseAdapter/ColumnAdapter.traverse() method.""" - def __call__(self, obj: _CE) -> _CE: - ... + def __call__(self, obj: _CE) -> _CE: ... 
# match column types that are not ORM entities @@ -288,56 +286,47 @@ def __call__(self, obj: _CE) -> _CE: if TYPE_CHECKING: - def is_sql_compiler(c: Compiled) -> TypeGuard[SQLCompiler]: - ... + def is_sql_compiler(c: Compiled) -> TypeGuard[SQLCompiler]: ... - def is_ddl_compiler(c: Compiled) -> TypeGuard[DDLCompiler]: - ... + def is_ddl_compiler(c: Compiled) -> TypeGuard[DDLCompiler]: ... - def is_named_from_clause(t: FromClauseRole) -> TypeGuard[NamedFromClause]: - ... + def is_named_from_clause( + t: FromClauseRole, + ) -> TypeGuard[NamedFromClause]: ... - def is_column_element(c: ClauseElement) -> TypeGuard[ColumnElement[Any]]: - ... + def is_column_element( + c: ClauseElement, + ) -> TypeGuard[ColumnElement[Any]]: ... def is_keyed_column_element( c: ClauseElement, - ) -> TypeGuard[KeyedColumnElement[Any]]: - ... + ) -> TypeGuard[KeyedColumnElement[Any]]: ... - def is_text_clause(c: ClauseElement) -> TypeGuard[TextClause]: - ... + def is_text_clause(c: ClauseElement) -> TypeGuard[TextClause]: ... - def is_from_clause(c: ClauseElement) -> TypeGuard[FromClause]: - ... + def is_from_clause(c: ClauseElement) -> TypeGuard[FromClause]: ... - def is_tuple_type(t: TypeEngine[Any]) -> TypeGuard[TupleType]: - ... + def is_tuple_type(t: TypeEngine[Any]) -> TypeGuard[TupleType]: ... - def is_table_value_type(t: TypeEngine[Any]) -> TypeGuard[TableValueType]: - ... + def is_table_value_type( + t: TypeEngine[Any], + ) -> TypeGuard[TableValueType]: ... - def is_selectable(t: Any) -> TypeGuard[Selectable]: - ... + def is_selectable(t: Any) -> TypeGuard[Selectable]: ... def is_select_base( t: Union[Executable, ReturnsRows] - ) -> TypeGuard[SelectBase]: - ... + ) -> TypeGuard[SelectBase]: ... def is_select_statement( t: Union[Executable, ReturnsRows] - ) -> TypeGuard[Select[Any]]: - ... + ) -> TypeGuard[Select[Any]]: ... - def is_table(t: FromClause) -> TypeGuard[TableClause]: - ... + def is_table(t: FromClause) -> TypeGuard[TableClause]: ... - def is_subquery(t: FromClause) -> TypeGuard[Subquery]: - ... + def is_subquery(t: FromClause) -> TypeGuard[Subquery]: ... - def is_dml(c: ClauseElement) -> TypeGuard[UpdateBase]: - ... + def is_dml(c: ClauseElement) -> TypeGuard[UpdateBase]: ... else: is_sql_compiler = operator.attrgetter("is_sql") @@ -388,20 +377,17 @@ def _unexpected_kw(methname: str, kw: Dict[str, Any]) -> NoReturn: @overload def Nullable( val: "SQLCoreOperations[_T]", -) -> "SQLCoreOperations[Optional[_T]]": - ... +) -> "SQLCoreOperations[Optional[_T]]": ... @overload def Nullable( val: roles.ExpressionElementRole[_T], -) -> roles.ExpressionElementRole[Optional[_T]]: - ... +) -> roles.ExpressionElementRole[Optional[_T]]: ... @overload -def Nullable(val: Type[_T]) -> Type[Optional[_T]]: - ... +def Nullable(val: Type[_T]) -> Type[Optional[_T]]: ... def Nullable( @@ -425,25 +411,21 @@ def Nullable( @overload def NotNullable( val: "SQLCoreOperations[Optional[_T]]", -) -> "SQLCoreOperations[_T]": - ... +) -> "SQLCoreOperations[_T]": ... @overload def NotNullable( val: roles.ExpressionElementRole[Optional[_T]], -) -> roles.ExpressionElementRole[_T]: - ... +) -> roles.ExpressionElementRole[_T]: ... @overload -def NotNullable(val: Type[Optional[_T]]) -> Type[_T]: - ... +def NotNullable(val: Type[Optional[_T]]) -> Type[_T]: ... @overload -def NotNullable(val: Optional[Type[_T]]) -> Type[_T]: - ... +def NotNullable(val: Optional[Type[_T]]) -> Type[_T]: ... 
def NotNullable( diff --git a/lib/sqlalchemy/sql/annotation.py b/lib/sqlalchemy/sql/annotation.py index 14e48bd2b8c..db382b874b6 100644 --- a/lib/sqlalchemy/sql/annotation.py +++ b/lib/sqlalchemy/sql/annotation.py @@ -67,16 +67,14 @@ def _deannotate( self, values: Literal[None] = ..., clone: bool = ..., - ) -> Self: - ... + ) -> Self: ... @overload def _deannotate( self, values: Sequence[str] = ..., clone: bool = ..., - ) -> SupportsAnnotations: - ... + ) -> SupportsAnnotations: ... def _deannotate( self, @@ -99,9 +97,11 @@ def _gen_annotations_cache_key( tuple( ( key, - value._gen_cache_key(anon_map, []) - if isinstance(value, HasCacheKey) - else value, + ( + value._gen_cache_key(anon_map, []) + if isinstance(value, HasCacheKey) + else value + ), ) for key, value in [ (key, self._annotations[key]) @@ -119,8 +119,7 @@ class SupportsWrappingAnnotations(SupportsAnnotations): if TYPE_CHECKING: @util.ro_non_memoized_property - def entity_namespace(self) -> _EntityNamespace: - ... + def entity_namespace(self) -> _EntityNamespace: ... def _annotate(self, values: _AnnotationDict) -> Self: """return a copy of this ClauseElement with annotations @@ -141,16 +140,14 @@ def _deannotate( self, values: Literal[None] = ..., clone: bool = ..., - ) -> Self: - ... + ) -> Self: ... @overload def _deannotate( self, values: Sequence[str] = ..., clone: bool = ..., - ) -> SupportsAnnotations: - ... + ) -> SupportsAnnotations: ... def _deannotate( self, @@ -214,16 +211,14 @@ def _deannotate( self, values: Literal[None] = ..., clone: bool = ..., - ) -> Self: - ... + ) -> Self: ... @overload def _deannotate( self, values: Sequence[str] = ..., clone: bool = ..., - ) -> SupportsAnnotations: - ... + ) -> SupportsAnnotations: ... def _deannotate( self, @@ -316,16 +311,14 @@ def _deannotate( self, values: Literal[None] = ..., clone: bool = ..., - ) -> Self: - ... + ) -> Self: ... @overload def _deannotate( self, values: Sequence[str] = ..., clone: bool = ..., - ) -> Annotated: - ... + ) -> Annotated: ... def _deannotate( self, @@ -395,9 +388,9 @@ def entity_namespace(self) -> _EntityNamespace: # so that the resulting objects are pickleable; additionally, other # decisions can be made up front about the type of object being annotated # just once per class rather than per-instance. -annotated_classes: Dict[ - Type[SupportsWrappingAnnotations], Type[Annotated] -] = {} +annotated_classes: Dict[Type[SupportsWrappingAnnotations], Type[Annotated]] = ( + {} +) _SA = TypeVar("_SA", bound="SupportsAnnotations") @@ -487,15 +480,13 @@ def clone(elem: SupportsAnnotations, **kw: Any) -> SupportsAnnotations: @overload def _deep_deannotate( element: Literal[None], values: Optional[Sequence[str]] = None -) -> Literal[None]: - ... +) -> Literal[None]: ... @overload def _deep_deannotate( element: _SA, values: Optional[Sequence[str]] = None -) -> _SA: - ... +) -> _SA: ... def _deep_deannotate( diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index f8e3e73883a..cea0f177df6 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -155,14 +155,12 @@ def _from_column_default( class _EntityNamespace(Protocol): - def __getattr__(self, key: str) -> SQLCoreOperations[Any]: - ... + def __getattr__(self, key: str) -> SQLCoreOperations[Any]: ... class _HasEntityNamespace(Protocol): @util.ro_non_memoized_property - def entity_namespace(self) -> _EntityNamespace: - ... + def entity_namespace(self) -> _EntityNamespace: ... 
def _is_has_entity_namespace(element: Any) -> TypeGuard[_HasEntityNamespace]: @@ -261,8 +259,7 @@ def _select_iterables( class _GenerativeType(compat_typing.Protocol): - def _generate(self) -> Self: - ... + def _generate(self) -> Self: ... def _generative(fn: _Fn) -> _Fn: @@ -801,14 +798,11 @@ def __add__(self, other): if TYPE_CHECKING: - def __getattr__(self, key: str) -> Any: - ... + def __getattr__(self, key: str) -> Any: ... - def __setattr__(self, key: str, value: Any) -> None: - ... + def __setattr__(self, key: str, value: Any) -> None: ... - def __delattr__(self, key: str) -> None: - ... + def __delattr__(self, key: str) -> None: ... class Options(metaclass=_MetaOptions): @@ -966,14 +960,11 @@ def from_execution_options( if TYPE_CHECKING: - def __getattr__(self, key: str) -> Any: - ... + def __getattr__(self, key: str) -> Any: ... - def __setattr__(self, key: str, value: Any) -> None: - ... + def __setattr__(self, key: str, value: Any) -> None: ... - def __delattr__(self, key: str) -> None: - ... + def __delattr__(self, key: str) -> None: ... class CacheableOptions(Options, HasCacheKey): @@ -1058,24 +1049,21 @@ def _compile_w_cache( **kw: Any, ) -> Tuple[ Compiled, Optional[Sequence[BindParameter[Any]]], CacheStats - ]: - ... + ]: ... def _execute_on_connection( self, connection: Connection, distilled_params: _CoreMultiExecuteParams, execution_options: CoreExecuteOptionsParameter, - ) -> CursorResult[Any]: - ... + ) -> CursorResult[Any]: ... def _execute_on_scalar( self, connection: Connection, distilled_params: _CoreMultiExecuteParams, execution_options: CoreExecuteOptionsParameter, - ) -> Any: - ... + ) -> Any: ... @util.ro_non_memoized_property def _all_selected_columns(self): @@ -1180,12 +1168,10 @@ def execution_options( is_delete_using: bool = ..., is_update_from: bool = ..., **opt: Any, - ) -> Self: - ... + ) -> Self: ... @overload - def execution_options(self, **opt: Any) -> Self: - ... + def execution_options(self, **opt: Any) -> Self: ... @_generative def execution_options(self, **kw: Any) -> Self: @@ -1591,20 +1577,17 @@ def __iter__(self) -> Iterator[_COL_co]: return iter([col for _, col, _ in self._collection]) @overload - def __getitem__(self, key: Union[str, int]) -> _COL_co: - ... + def __getitem__(self, key: Union[str, int]) -> _COL_co: ... @overload def __getitem__( self, key: Tuple[Union[str, int], ...] - ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: - ... + ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: ... @overload def __getitem__( self, key: slice - ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: - ... + ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: ... def __getitem__( self, key: Union[str, int, slice, Tuple[Union[str, int], ...]] diff --git a/lib/sqlalchemy/sql/cache_key.py b/lib/sqlalchemy/sql/cache_key.py index 640a27de2d7..b4b6b23fa80 100644 --- a/lib/sqlalchemy/sql/cache_key.py +++ b/lib/sqlalchemy/sql/cache_key.py @@ -44,8 +44,7 @@ class _CacheKeyTraversalDispatchType(Protocol): def __call__( s, self: HasCacheKey, visitor: _CacheKeyTraversal - ) -> _CacheKeyTraversalDispatchTypeReturn: - ... + ) -> _CacheKeyTraversalDispatchTypeReturn: ... 
class CacheConst(enum.Enum): @@ -303,11 +302,13 @@ def _gen_cache_key( result += ( attrname, obj["compile_state_plugin"], - obj["plugin_subject"]._gen_cache_key( - anon_map, bindparams - ) - if obj["plugin_subject"] - else None, + ( + obj["plugin_subject"]._gen_cache_key( + anon_map, bindparams + ) + if obj["plugin_subject"] + else None + ), ) elif meth is InternalTraversal.dp_annotations_key: # obj is here is the _annotations dict. Table uses @@ -619,9 +620,9 @@ class _CacheKeyTraversal(HasTraversalDispatch): InternalTraversal.dp_memoized_select_entities ) - visit_string = ( - visit_boolean - ) = visit_operator = visit_plain_obj = CACHE_IN_PLACE + visit_string = visit_boolean = visit_operator = visit_plain_obj = ( + CACHE_IN_PLACE + ) visit_statement_hint_list = CACHE_IN_PLACE visit_type = STATIC_CACHE_KEY visit_anon_name = ANON_NAME @@ -668,9 +669,11 @@ def visit_multi( ) -> Tuple[Any, ...]: return ( attrname, - obj._gen_cache_key(anon_map, bindparams) - if isinstance(obj, HasCacheKey) - else obj, + ( + obj._gen_cache_key(anon_map, bindparams) + if isinstance(obj, HasCacheKey) + else obj + ), ) def visit_multi_list( @@ -684,9 +687,11 @@ def visit_multi_list( return ( attrname, tuple( - elem._gen_cache_key(anon_map, bindparams) - if isinstance(elem, HasCacheKey) - else elem + ( + elem._gen_cache_key(anon_map, bindparams) + if isinstance(elem, HasCacheKey) + else elem + ) for elem in obj ), ) @@ -847,12 +852,16 @@ def visit_setup_join_tuple( return tuple( ( target._gen_cache_key(anon_map, bindparams), - onclause._gen_cache_key(anon_map, bindparams) - if onclause is not None - else None, - from_._gen_cache_key(anon_map, bindparams) - if from_ is not None - else None, + ( + onclause._gen_cache_key(anon_map, bindparams) + if onclause is not None + else None + ), + ( + from_._gen_cache_key(anon_map, bindparams) + if from_ is not None + else None + ), tuple([(key, flags[key]) for key in sorted(flags)]), ) for (target, onclause, from_, flags) in obj @@ -946,9 +955,11 @@ def visit_string_multi_dict( tuple( ( key, - value._gen_cache_key(anon_map, bindparams) - if isinstance(value, HasCacheKey) - else value, + ( + value._gen_cache_key(anon_map, bindparams) + if isinstance(value, HasCacheKey) + else value + ), ) for key, value in [(key, obj[key]) for key in sorted(obj)] ), @@ -994,9 +1005,11 @@ def visit_dml_ordered_values( attrname, tuple( ( - key._gen_cache_key(anon_map, bindparams) - if hasattr(key, "__clause_element__") - else key, + ( + key._gen_cache_key(anon_map, bindparams) + if hasattr(key, "__clause_element__") + else key + ), value._gen_cache_key(anon_map, bindparams), ) for key, value in obj @@ -1017,9 +1030,11 @@ def visit_dml_values( attrname, tuple( ( - k._gen_cache_key(anon_map, bindparams) - if hasattr(k, "__clause_element__") - else k, + ( + k._gen_cache_key(anon_map, bindparams) + if hasattr(k, "__clause_element__") + else k + ), obj[k]._gen_cache_key(anon_map, bindparams), ) for k in obj diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index 3d33924d894..22d60915522 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -165,8 +165,7 @@ def expect( role: Type[roles.TruncatedLabelRole], element: Any, **kw: Any, -) -> str: - ... +) -> str: ... @overload @@ -176,8 +175,7 @@ def expect( *, as_key: Literal[True] = ..., **kw: Any, -) -> str: - ... +) -> str: ... @overload @@ -185,8 +183,7 @@ def expect( role: Type[roles.LiteralValueRole], element: Any, **kw: Any, -) -> BindParameter[Any]: - ... +) -> BindParameter[Any]: ... 
@overload @@ -194,8 +191,7 @@ def expect( role: Type[roles.DDLReferredColumnRole], element: Any, **kw: Any, -) -> Column[Any]: - ... +) -> Column[Any]: ... @overload @@ -203,8 +199,7 @@ def expect( role: Type[roles.DDLConstraintColumnRole], element: Any, **kw: Any, -) -> Union[Column[Any], str]: - ... +) -> Union[Column[Any], str]: ... @overload @@ -212,8 +207,7 @@ def expect( role: Type[roles.StatementOptionRole], element: Any, **kw: Any, -) -> DQLDMLClauseElement: - ... +) -> DQLDMLClauseElement: ... @overload @@ -221,8 +215,7 @@ def expect( role: Type[roles.LabeledColumnExprRole[Any]], element: _ColumnExpressionArgument[_T], **kw: Any, -) -> NamedColumn[_T]: - ... +) -> NamedColumn[_T]: ... @overload @@ -234,8 +227,7 @@ def expect( ], element: _ColumnExpressionArgument[_T], **kw: Any, -) -> ColumnElement[_T]: - ... +) -> ColumnElement[_T]: ... @overload @@ -249,8 +241,7 @@ def expect( ], element: Any, **kw: Any, -) -> ColumnElement[Any]: - ... +) -> ColumnElement[Any]: ... @overload @@ -258,8 +249,7 @@ def expect( role: Type[roles.DMLTableRole], element: _DMLTableArgument, **kw: Any, -) -> _DMLTableElement: - ... +) -> _DMLTableElement: ... @overload @@ -267,8 +257,7 @@ def expect( role: Type[roles.HasCTERole], element: HasCTE, **kw: Any, -) -> HasCTE: - ... +) -> HasCTE: ... @overload @@ -276,8 +265,7 @@ def expect( role: Type[roles.SelectStatementRole], element: SelectBase, **kw: Any, -) -> SelectBase: - ... +) -> SelectBase: ... @overload @@ -285,8 +273,7 @@ def expect( role: Type[roles.FromClauseRole], element: _FromClauseArgument, **kw: Any, -) -> FromClause: - ... +) -> FromClause: ... @overload @@ -296,8 +283,7 @@ def expect( *, explicit_subquery: Literal[True] = ..., **kw: Any, -) -> Subquery: - ... +) -> Subquery: ... @overload @@ -305,8 +291,7 @@ def expect( role: Type[roles.ColumnsClauseRole], element: _ColumnsClauseArgument[Any], **kw: Any, -) -> _ColumnsClauseElement: - ... +) -> _ColumnsClauseElement: ... @overload @@ -314,8 +299,7 @@ def expect( role: Type[roles.JoinTargetRole], element: _JoinTargetProtocol, **kw: Any, -) -> _JoinTargetProtocol: - ... +) -> _JoinTargetProtocol: ... # catchall for not-yet-implemented overloads @@ -324,8 +308,7 @@ def expect( role: Type[_SR], element: Any, **kw: Any, -) -> Any: - ... +) -> Any: ... def expect( @@ -870,9 +853,11 @@ def _literal_coercion(self, element, expr, operator, **kw): if non_literal_expressions: return elements.ClauseList( *[ - non_literal_expressions[o] - if o in non_literal_expressions - else expr._bind_param(operator, o) + ( + non_literal_expressions[o] + if o in non_literal_expressions + else expr._bind_param(operator, o) + ) for o in element ] ) @@ -1150,9 +1135,9 @@ def _text_coercion(self, element, argname=None): % { "column": util.ellipses_string(element), "argname": "for argument %s" % (argname,) if argname else "", - "literal_column": "literal_column" - if guess_is_literal - else "column", + "literal_column": ( + "literal_column" if guess_is_literal else "column" + ), } ) diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 6c82bab8316..4f393f7f6ef 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -382,8 +382,7 @@ def __call__( name: str, objects: Sequence[Any], type_: TypeEngine[Any], - ) -> None: - ... + ) -> None: ... # integer indexes into ResultColumnsEntry used by cursor.py. @@ -737,7 +736,6 @@ def warn(self, stmt_type="SELECT"): class Compiled: - """Represent a compiled SQL or DDL expression. 
The ``__str__`` method of the ``Compiled`` object should produce @@ -967,7 +965,6 @@ def visit_unsupported_compilation( class _CompileLabel( roles.BinaryElementRole[Any], elements.CompilerColumnElement ): - """lightweight label object which acts as an expression.Label.""" __visit_name__ = "label" @@ -1037,19 +1034,19 @@ class SQLCompiler(Compiled): extract_map = EXTRACT_MAP - bindname_escape_characters: ClassVar[ - Mapping[str, str] - ] = util.immutabledict( - { - "%": "P", - "(": "A", - ")": "Z", - ":": "C", - ".": "_", - "[": "_", - "]": "_", - " ": "_", - } + bindname_escape_characters: ClassVar[Mapping[str, str]] = ( + util.immutabledict( + { + "%": "P", + "(": "A", + ")": "Z", + ":": "C", + ".": "_", + "[": "_", + "]": "_", + " ": "_", + } + ) ) """A mapping (e.g. dict or similar) containing a lookup of characters keyed to replacement characters which will be applied to all @@ -1789,11 +1786,15 @@ def _bind_processors( for key, value in ( ( self.bind_names[bindparam], - bindparam.type._cached_bind_processor(self.dialect) - if not bindparam.type._is_tuple_type - else tuple( - elem_type._cached_bind_processor(self.dialect) - for elem_type in cast(TupleType, bindparam.type).types + ( + bindparam.type._cached_bind_processor(self.dialect) + if not bindparam.type._is_tuple_type + else tuple( + elem_type._cached_bind_processor(self.dialect) + for elem_type in cast( + TupleType, bindparam.type + ).types + ) ), ) for bindparam in self.bind_names @@ -2099,11 +2100,11 @@ def _process_parameters_for_postcompile( if parameter in self.literal_execute_params: if escaped_name not in replacement_expressions: - replacement_expressions[ - escaped_name - ] = self.render_literal_bindparam( - parameter, - render_literal_value=parameters.pop(escaped_name), + replacement_expressions[escaped_name] = ( + self.render_literal_bindparam( + parameter, + render_literal_value=parameters.pop(escaped_name), + ) ) continue @@ -2312,12 +2313,14 @@ def get(lastrowid, parameters): else: return row_fn( ( - autoinc_getter(lastrowid, parameters) - if autoinc_getter is not None - else lastrowid + ( + autoinc_getter(lastrowid, parameters) + if autoinc_getter is not None + else lastrowid + ) + if col is autoinc_col + else getter(parameters) ) - if col is autoinc_col - else getter(parameters) for getter, col in getters ) @@ -2347,11 +2350,15 @@ def _inserted_primary_key_from_returning_getter(self): getters = cast( "List[Tuple[Callable[[Any], Any], bool]]", [ - (operator.itemgetter(ret[col]), True) - if col in ret - else ( - operator.methodcaller("get", param_key_getter(col), None), - False, + ( + (operator.itemgetter(ret[col]), True) + if col in ret + else ( + operator.methodcaller( + "get", param_key_getter(col), None + ), + False, + ) ) for col in table.primary_key ], @@ -2420,9 +2427,9 @@ def visit_label_reference( resolve_dict[order_by_elem.name] ) ): - kwargs[ - "render_label_as_label" - ] = element.element._order_by_label_element + kwargs["render_label_as_label"] = ( + element.element._order_by_label_element + ) return self.process( element.element, within_columns_clause=within_columns_clause, @@ -2668,9 +2675,9 @@ def visit_textual_select( ) if populate_result_map: - self._ordered_columns = ( - self._textual_ordered_columns - ) = taf.positional + self._ordered_columns = self._textual_ordered_columns = ( + taf.positional + ) # enable looser result column matching when the SQL text links to # Column objects by name only @@ -2797,24 +2804,44 @@ def visit_cast(self, cast, **kwargs): def _format_frame_clause(self, range_, 
**kw): return "%s AND %s" % ( - "UNBOUNDED PRECEDING" - if range_[0] is elements.RANGE_UNBOUNDED - else "CURRENT ROW" - if range_[0] is elements.RANGE_CURRENT - else "%s PRECEDING" - % (self.process(elements.literal(abs(range_[0])), **kw),) - if range_[0] < 0 - else "%s FOLLOWING" - % (self.process(elements.literal(range_[0]), **kw),), - "UNBOUNDED FOLLOWING" - if range_[1] is elements.RANGE_UNBOUNDED - else "CURRENT ROW" - if range_[1] is elements.RANGE_CURRENT - else "%s PRECEDING" - % (self.process(elements.literal(abs(range_[1])), **kw),) - if range_[1] < 0 - else "%s FOLLOWING" - % (self.process(elements.literal(range_[1]), **kw),), + ( + "UNBOUNDED PRECEDING" + if range_[0] is elements.RANGE_UNBOUNDED + else ( + "CURRENT ROW" + if range_[0] is elements.RANGE_CURRENT + else ( + "%s PRECEDING" + % ( + self.process( + elements.literal(abs(range_[0])), **kw + ), + ) + if range_[0] < 0 + else "%s FOLLOWING" + % (self.process(elements.literal(range_[0]), **kw),) + ) + ) + ), + ( + "UNBOUNDED FOLLOWING" + if range_[1] is elements.RANGE_UNBOUNDED + else ( + "CURRENT ROW" + if range_[1] is elements.RANGE_CURRENT + else ( + "%s PRECEDING" + % ( + self.process( + elements.literal(abs(range_[1])), **kw + ), + ) + if range_[1] < 0 + else "%s FOLLOWING" + % (self.process(elements.literal(range_[1]), **kw),) + ) + ) + ), ) def visit_over(self, over, **kwargs): @@ -3055,9 +3082,12 @@ def visit_truediv_binary(self, binary, operator, **kw): + self.process( elements.Cast( binary.right, - binary.right.type - if binary.right.type._type_affinity is sqltypes.Numeric - else sqltypes.Numeric(), + ( + binary.right.type + if binary.right.type._type_affinity + is sqltypes.Numeric + else sqltypes.Numeric() + ), ), **kw, ) @@ -4212,12 +4242,14 @@ def visit_alias( "%s%s" % ( self.preparer.quote(col.name), - " %s" - % self.dialect.type_compiler_instance.process( - col.type, **kwargs - ) - if alias._render_derived_w_types - else "", + ( + " %s" + % self.dialect.type_compiler_instance.process( + col.type, **kwargs + ) + if alias._render_derived_w_types + else "" + ), ) for col in alias.c ) @@ -4609,9 +4641,9 @@ def visit_select( compile_state = select_stmt._compile_state_factory( select_stmt, self, **kwargs ) - kwargs[ - "ambiguous_table_name_map" - ] = compile_state._ambiguous_table_name_map + kwargs["ambiguous_table_name_map"] = ( + compile_state._ambiguous_table_name_map + ) select_stmt = compile_state.statement @@ -5854,9 +5886,9 @@ def visit_insert( insert_stmt._post_values_clause is not None ), sentinel_columns=add_sentinel_cols, - num_sentinel_columns=len(add_sentinel_cols) - if add_sentinel_cols - else 0, + num_sentinel_columns=( + len(add_sentinel_cols) if add_sentinel_cols else 0 + ), implicit_sentinel=implicit_sentinel, ) elif compile_state._has_multi_parameters: @@ -5950,9 +5982,9 @@ def visit_insert( insert_stmt._post_values_clause is not None ), sentinel_columns=add_sentinel_cols, - num_sentinel_columns=len(add_sentinel_cols) - if add_sentinel_cols - else 0, + num_sentinel_columns=( + len(add_sentinel_cols) if add_sentinel_cols else 0 + ), sentinel_param_keys=named_sentinel_params, implicit_sentinel=implicit_sentinel, embed_values_counter=embed_sentinel_value, @@ -6437,8 +6469,7 @@ def __init__( schema_translate_map: Optional[SchemaTranslateMapType] = ..., render_schema_translate: bool = ..., compile_kwargs: Mapping[str, Any] = ..., - ): - ... + ): ... 
@util.memoized_property def sql_compiler(self): @@ -7166,17 +7197,14 @@ def visit_user_defined(self, type_, **kw): class _SchemaForObjectCallable(Protocol): - def __call__(self, obj: Any) -> str: - ... + def __call__(self, obj: Any) -> str: ... class _BindNameForColProtocol(Protocol): - def __call__(self, col: ColumnClause[Any]) -> str: - ... + def __call__(self, col: ColumnClause[Any]) -> str: ... class IdentifierPreparer: - """Handle quoting and case-folding of identifiers based on options.""" reserved_words = RESERVED_WORDS diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py index fc6f51de1cc..499a19d97cc 100644 --- a/lib/sqlalchemy/sql/crud.py +++ b/lib/sqlalchemy/sql/crud.py @@ -394,8 +394,7 @@ def _create_bind_param( required: bool = False, name: Optional[str] = None, **kw: Any, -) -> str: - ... +) -> str: ... @overload @@ -404,8 +403,7 @@ def _create_bind_param( col: ColumnElement[Any], value: Any, **kw: Any, -) -> str: - ... +) -> str: ... def _create_bind_param( @@ -859,10 +857,12 @@ def _append_param_parameter( c, value, required=value is REQUIRED, - name=_col_bind_name(c) - if not _compile_state_isinsert(compile_state) - or not compile_state._has_multi_parameters - else "%s_m0" % _col_bind_name(c), + name=( + _col_bind_name(c) + if not _compile_state_isinsert(compile_state) + or not compile_state._has_multi_parameters + else "%s_m0" % _col_bind_name(c) + ), accumulate_bind_names=accumulated_bind_names, **kw, ) @@ -884,10 +884,12 @@ def _append_param_parameter( compiler, c, value, - name=_col_bind_name(c) - if not _compile_state_isinsert(compile_state) - or not compile_state._has_multi_parameters - else "%s_m0" % _col_bind_name(c), + name=( + _col_bind_name(c) + if not _compile_state_isinsert(compile_state) + or not compile_state._has_multi_parameters + else "%s_m0" % _col_bind_name(c) + ), accumulate_bind_names=accumulated_bind_names, **kw, ) @@ -1213,8 +1215,7 @@ def _create_insert_prefetch_bind_param( c: ColumnElement[Any], process: Literal[True] = ..., **kw: Any, -) -> str: - ... +) -> str: ... @overload @@ -1223,8 +1224,7 @@ def _create_insert_prefetch_bind_param( c: ColumnElement[Any], process: Literal[False], **kw: Any, -) -> elements.BindParameter[Any]: - ... +) -> elements.BindParameter[Any]: ... def _create_insert_prefetch_bind_param( @@ -1247,8 +1247,7 @@ def _create_update_prefetch_bind_param( c: ColumnElement[Any], process: Literal[True] = ..., **kw: Any, -) -> str: - ... +) -> str: ... @overload @@ -1257,8 +1256,7 @@ def _create_update_prefetch_bind_param( c: ColumnElement[Any], process: Literal[False], **kw: Any, -) -> elements.BindParameter[Any]: - ... +) -> elements.BindParameter[Any]: ... def _create_update_prefetch_bind_param( diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index 7fcd7e9f8d8..d9e3f673a21 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -95,8 +95,7 @@ def __call__( dialect: Dialect, compiler: Optional[DDLCompiler] = ..., checkfirst: bool, - ) -> bool: - ... + ) -> bool: ... 
class DDLIf(typing.NamedTuple): @@ -1021,10 +1020,12 @@ def visit_metadata(self, metadata): reversed( sort_tables_and_constraints( unsorted_tables, - filter_fn=lambda constraint: False - if not self.dialect.supports_alter - or constraint.name is None - else None, + filter_fn=lambda constraint: ( + False + if not self.dialect.supports_alter + or constraint.name is None + else None + ), ) ) ) diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py index 072acafed30..5bf8d582e53 100644 --- a/lib/sqlalchemy/sql/default_comparator.py +++ b/lib/sqlalchemy/sql/default_comparator.py @@ -296,9 +296,11 @@ def _match_impl( operator=operators.match_op, ), result_type=type_api.MATCHTYPE, - negate_op=operators.not_match_op - if op is operators.match_op - else operators.match_op, + negate_op=( + operators.not_match_op + if op is operators.match_op + else operators.match_op + ), **kw, ) @@ -340,9 +342,11 @@ def _between_impl( group=False, ), op, - negate=operators.not_between_op - if op is operators.between_op - else operators.between_op, + negate=( + operators.not_between_op + if op is operators.between_op + else operators.between_op + ), modifiers=kw, ) diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index e51e0a59e9c..1151d61ad80 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -91,14 +91,11 @@ from .selectable import Select from .selectable import Selectable - def isupdate(dml: DMLState) -> TypeGuard[UpdateDMLState]: - ... + def isupdate(dml: DMLState) -> TypeGuard[UpdateDMLState]: ... - def isdelete(dml: DMLState) -> TypeGuard[DeleteDMLState]: - ... + def isdelete(dml: DMLState) -> TypeGuard[DeleteDMLState]: ... - def isinsert(dml: DMLState) -> TypeGuard[InsertDMLState]: - ... + def isinsert(dml: DMLState) -> TypeGuard[InsertDMLState]: ... else: isupdate = operator.attrgetter("isupdate") @@ -137,9 +134,11 @@ def __init__( @classmethod def get_entity_description(cls, statement: UpdateBase) -> Dict[str, Any]: return { - "name": statement.table.name - if is_named_from_clause(statement.table) - else None, + "name": ( + statement.table.name + if is_named_from_clause(statement.table) + else None + ), "table": statement.table, } @@ -163,8 +162,7 @@ def dml_table(self) -> _DMLTableElement: if TYPE_CHECKING: @classmethod - def get_plugin_class(cls, statement: Executable) -> Type[DMLState]: - ... + def get_plugin_class(cls, statement: Executable) -> Type[DMLState]: ... 
@classmethod def _get_multi_crud_kv_pairs( @@ -190,13 +188,15 @@ def _get_crud_kv_pairs( return [ ( coercions.expect(roles.DMLColumnRole, k), - v - if not needs_to_be_cacheable - else coercions.expect( - roles.ExpressionElementRole, - v, - type_=NullType(), - is_crud=True, + ( + v + if not needs_to_be_cacheable + else coercions.expect( + roles.ExpressionElementRole, + v, + type_=NullType(), + is_crud=True, + ) ), ) for k, v in kv_iterator @@ -306,12 +306,14 @@ def _process_values(self, statement: ValuesBase) -> None: def _process_multi_values(self, statement: ValuesBase) -> None: for parameters in statement._multi_values: multi_parameters: List[MutableMapping[_DMLColumnElement, Any]] = [ - { - c.key: value - for c, value in zip(statement.table.c, parameter_set) - } - if isinstance(parameter_set, collections_abc.Sequence) - else parameter_set + ( + { + c.key: value + for c, value in zip(statement.table.c, parameter_set) + } + if isinstance(parameter_set, collections_abc.Sequence) + else parameter_set + ) for parameter_set in parameters ] @@ -396,9 +398,9 @@ class UpdateBase( __visit_name__ = "update_base" - _hints: util.immutabledict[ - Tuple[_DMLTableElement, str], str - ] = util.EMPTY_DICT + _hints: util.immutabledict[Tuple[_DMLTableElement, str], str] = ( + util.EMPTY_DICT + ) named_with_column = False _label_style: SelectLabelStyle = ( @@ -407,9 +409,9 @@ class UpdateBase( table: _DMLTableElement _return_defaults = False - _return_defaults_columns: Optional[ - Tuple[_ColumnsClauseElement, ...] - ] = None + _return_defaults_columns: Optional[Tuple[_ColumnsClauseElement, ...]] = ( + None + ) _supplemental_returning: Optional[Tuple[_ColumnsClauseElement, ...]] = None _returning: Tuple[_ColumnsClauseElement, ...] = () @@ -1295,8 +1297,7 @@ def from_select( @overload def returning( self, __ent0: _TCCA[_T0], *, sort_by_parameter_order: bool = False - ) -> ReturningInsert[Tuple[_T0]]: - ... + ) -> ReturningInsert[Tuple[_T0]]: ... @overload def returning( @@ -1305,8 +1306,7 @@ def returning( __ent1: _TCCA[_T1], *, sort_by_parameter_order: bool = False, - ) -> ReturningInsert[Tuple[_T0, _T1]]: - ... + ) -> ReturningInsert[Tuple[_T0, _T1]]: ... @overload def returning( @@ -1316,8 +1316,7 @@ def returning( __ent2: _TCCA[_T2], *, sort_by_parameter_order: bool = False, - ) -> ReturningInsert[Tuple[_T0, _T1, _T2]]: - ... + ) -> ReturningInsert[Tuple[_T0, _T1, _T2]]: ... @overload def returning( @@ -1328,8 +1327,7 @@ def returning( __ent3: _TCCA[_T3], *, sort_by_parameter_order: bool = False, - ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3]]: - ... + ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3]]: ... @overload def returning( @@ -1341,8 +1339,7 @@ def returning( __ent4: _TCCA[_T4], *, sort_by_parameter_order: bool = False, - ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4]]: - ... + ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... @overload def returning( @@ -1355,8 +1352,7 @@ def returning( __ent5: _TCCA[_T5], *, sort_by_parameter_order: bool = False, - ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: - ... + ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... @overload def returning( @@ -1370,8 +1366,7 @@ def returning( __ent6: _TCCA[_T6], *, sort_by_parameter_order: bool = False, - ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: - ... + ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... 
@overload def returning( @@ -1386,8 +1381,9 @@ def returning( __ent7: _TCCA[_T7], *, sort_by_parameter_order: bool = False, - ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: - ... + ) -> ReturningInsert[ + Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7] + ]: ... # END OVERLOADED FUNCTIONS self.returning @@ -1397,16 +1393,14 @@ def returning( *cols: _ColumnsClauseArgument[Any], sort_by_parameter_order: bool = False, **__kw: Any, - ) -> ReturningInsert[Any]: - ... + ) -> ReturningInsert[Any]: ... def returning( self, *cols: _ColumnsClauseArgument[Any], sort_by_parameter_order: bool = False, **__kw: Any, - ) -> ReturningInsert[Any]: - ... + ) -> ReturningInsert[Any]: ... class ReturningInsert(Insert, TypedReturnsRows[_TP]): @@ -1596,20 +1590,19 @@ def inline(self) -> Self: # statically generated** by tools/generate_tuple_map_overloads.py @overload - def returning(self, __ent0: _TCCA[_T0]) -> ReturningUpdate[Tuple[_T0]]: - ... + def returning( + self, __ent0: _TCCA[_T0] + ) -> ReturningUpdate[Tuple[_T0]]: ... @overload def returning( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] - ) -> ReturningUpdate[Tuple[_T0, _T1]]: - ... + ) -> ReturningUpdate[Tuple[_T0, _T1]]: ... @overload def returning( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] - ) -> ReturningUpdate[Tuple[_T0, _T1, _T2]]: - ... + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2]]: ... @overload def returning( @@ -1618,8 +1611,7 @@ def returning( __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], - ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3]]: - ... + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3]]: ... @overload def returning( @@ -1629,8 +1621,7 @@ def returning( __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], - ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4]]: - ... + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... @overload def returning( @@ -1641,8 +1632,7 @@ def returning( __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], - ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: - ... + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... @overload def returning( @@ -1654,8 +1644,7 @@ def returning( __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], - ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: - ... + ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def returning( @@ -1668,21 +1657,20 @@ def returning( __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], __ent7: _TCCA[_T7], - ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: - ... + ) -> ReturningUpdate[ + Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7] + ]: ... # END OVERLOADED FUNCTIONS self.returning @overload def returning( self, *cols: _ColumnsClauseArgument[Any], **__kw: Any - ) -> ReturningUpdate[Any]: - ... + ) -> ReturningUpdate[Any]: ... def returning( self, *cols: _ColumnsClauseArgument[Any], **__kw: Any - ) -> ReturningUpdate[Any]: - ... + ) -> ReturningUpdate[Any]: ... class ReturningUpdate(Update, TypedReturnsRows[_TP]): @@ -1734,20 +1722,19 @@ def __init__(self, table: _DMLTableArgument): # statically generated** by tools/generate_tuple_map_overloads.py @overload - def returning(self, __ent0: _TCCA[_T0]) -> ReturningDelete[Tuple[_T0]]: - ... + def returning( + self, __ent0: _TCCA[_T0] + ) -> ReturningDelete[Tuple[_T0]]: ... @overload def returning( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] - ) -> ReturningDelete[Tuple[_T0, _T1]]: - ... + ) -> ReturningDelete[Tuple[_T0, _T1]]: ... 
@overload def returning( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] - ) -> ReturningDelete[Tuple[_T0, _T1, _T2]]: - ... + ) -> ReturningDelete[Tuple[_T0, _T1, _T2]]: ... @overload def returning( @@ -1756,8 +1743,7 @@ def returning( __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], - ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3]]: - ... + ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3]]: ... @overload def returning( @@ -1767,8 +1753,7 @@ def returning( __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], - ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4]]: - ... + ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... @overload def returning( @@ -1779,8 +1764,7 @@ def returning( __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], - ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: - ... + ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... @overload def returning( @@ -1792,8 +1776,7 @@ def returning( __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], - ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: - ... + ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def returning( @@ -1806,21 +1789,20 @@ def returning( __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], __ent7: _TCCA[_T7], - ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: - ... + ) -> ReturningDelete[ + Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7] + ]: ... # END OVERLOADED FUNCTIONS self.returning @overload def returning( self, *cols: _ColumnsClauseArgument[Any], **__kw: Any - ) -> ReturningDelete[Any]: - ... + ) -> ReturningDelete[Any]: ... def returning( self, *cols: _ColumnsClauseArgument[Any], **__kw: Any - ) -> ReturningDelete[Any]: - ... + ) -> ReturningDelete[Any]: ... class ReturningDelete(Update, TypedReturnsRows[_TP]): diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 92a040ca0bb..e8131994b6b 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -130,8 +130,7 @@ def literal( value: Any, type_: _TypeEngineArgument[_T], literal_execute: bool = False, -) -> BindParameter[_T]: - ... +) -> BindParameter[_T]: ... @overload @@ -139,8 +138,7 @@ def literal( value: _T, type_: None = None, literal_execute: bool = False, -) -> BindParameter[_T]: - ... +) -> BindParameter[_T]: ... @overload @@ -148,8 +146,7 @@ def literal( value: Any, type_: Optional[_TypeEngineArgument[Any]] = None, literal_execute: bool = False, -) -> BindParameter[Any]: - ... +) -> BindParameter[Any]: ... def literal( @@ -388,8 +385,7 @@ def _order_by_label_element(self) -> Optional[Label[Any]]: def get_children( self, *, omit_attrs: typing_Tuple[str, ...] = ..., **kw: Any - ) -> Iterable[ClauseElement]: - ... + ) -> Iterable[ClauseElement]: ... @util.ro_non_memoized_property def _from_objects(self) -> List[FromClause]: @@ -782,8 +778,7 @@ def compile( # noqa: A001 bind: Optional[Union[Engine, Connection]] = None, dialect: Optional[Dialect] = None, **kw: Any, - ) -> SQLCompiler: - ... + ) -> SQLCompiler: ... class CompilerColumnElement( @@ -816,18 +811,15 @@ class SQLCoreOperations(Generic[_T_co], ColumnOperators, TypingOnly): if typing.TYPE_CHECKING: @util.non_memoized_property - def _propagate_attrs(self) -> _PropagateAttrsType: - ... + def _propagate_attrs(self) -> _PropagateAttrsType: ... def operate( self, op: OperatorType, *other: Any, **kwargs: Any - ) -> ColumnElement[Any]: - ... + ) -> ColumnElement[Any]: ... 
def reverse_operate( self, op: OperatorType, other: Any, **kwargs: Any - ) -> ColumnElement[Any]: - ... + ) -> ColumnElement[Any]: ... @overload def op( @@ -838,8 +830,7 @@ def op( *, return_type: _TypeEngineArgument[_OPT], python_impl: Optional[Callable[..., Any]] = None, - ) -> Callable[[Any], BinaryExpression[_OPT]]: - ... + ) -> Callable[[Any], BinaryExpression[_OPT]]: ... @overload def op( @@ -849,8 +840,7 @@ def op( is_comparison: bool = ..., return_type: Optional[_TypeEngineArgument[Any]] = ..., python_impl: Optional[Callable[..., Any]] = ..., - ) -> Callable[[Any], BinaryExpression[Any]]: - ... + ) -> Callable[[Any], BinaryExpression[Any]]: ... def op( self, @@ -859,38 +849,30 @@ def op( is_comparison: bool = False, return_type: Optional[_TypeEngineArgument[Any]] = None, python_impl: Optional[Callable[..., Any]] = None, - ) -> Callable[[Any], BinaryExpression[Any]]: - ... + ) -> Callable[[Any], BinaryExpression[Any]]: ... def bool_op( self, opstring: str, precedence: int = 0, python_impl: Optional[Callable[..., Any]] = None, - ) -> Callable[[Any], BinaryExpression[bool]]: - ... + ) -> Callable[[Any], BinaryExpression[bool]]: ... - def __and__(self, other: Any) -> BooleanClauseList: - ... + def __and__(self, other: Any) -> BooleanClauseList: ... - def __or__(self, other: Any) -> BooleanClauseList: - ... + def __or__(self, other: Any) -> BooleanClauseList: ... - def __invert__(self) -> ColumnElement[_T_co]: - ... + def __invert__(self) -> ColumnElement[_T_co]: ... - def __lt__(self, other: Any) -> ColumnElement[bool]: - ... + def __lt__(self, other: Any) -> ColumnElement[bool]: ... - def __le__(self, other: Any) -> ColumnElement[bool]: - ... + def __le__(self, other: Any) -> ColumnElement[bool]: ... # declare also that this class has an hash method otherwise # it may be assumed to be None by type checkers since the # object defines __eq__ and python sets it to None in that case: # https://docs.python.org/3/reference/datamodel.html#object.__hash__ - def __hash__(self) -> int: - ... + def __hash__(self) -> int: ... def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 ... @@ -898,226 +880,172 @@ def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] def __ne__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 ... - def is_distinct_from(self, other: Any) -> ColumnElement[bool]: - ... + def is_distinct_from(self, other: Any) -> ColumnElement[bool]: ... - def is_not_distinct_from(self, other: Any) -> ColumnElement[bool]: - ... + def is_not_distinct_from(self, other: Any) -> ColumnElement[bool]: ... - def __gt__(self, other: Any) -> ColumnElement[bool]: - ... + def __gt__(self, other: Any) -> ColumnElement[bool]: ... - def __ge__(self, other: Any) -> ColumnElement[bool]: - ... + def __ge__(self, other: Any) -> ColumnElement[bool]: ... - def __neg__(self) -> UnaryExpression[_T_co]: - ... + def __neg__(self) -> UnaryExpression[_T_co]: ... - def __contains__(self, other: Any) -> ColumnElement[bool]: - ... + def __contains__(self, other: Any) -> ColumnElement[bool]: ... - def __getitem__(self, index: Any) -> ColumnElement[Any]: - ... + def __getitem__(self, index: Any) -> ColumnElement[Any]: ... @overload - def __lshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: - ... + def __lshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: ... @overload - def __lshift__(self, other: Any) -> ColumnElement[Any]: - ... + def __lshift__(self, other: Any) -> ColumnElement[Any]: ... 
- def __lshift__(self, other: Any) -> ColumnElement[Any]: - ... + def __lshift__(self, other: Any) -> ColumnElement[Any]: ... @overload - def __rshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: - ... + def __rshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: ... @overload - def __rshift__(self, other: Any) -> ColumnElement[Any]: - ... + def __rshift__(self, other: Any) -> ColumnElement[Any]: ... - def __rshift__(self, other: Any) -> ColumnElement[Any]: - ... + def __rshift__(self, other: Any) -> ColumnElement[Any]: ... @overload - def concat(self: _SQO[str], other: Any) -> ColumnElement[str]: - ... + def concat(self: _SQO[str], other: Any) -> ColumnElement[str]: ... @overload - def concat(self, other: Any) -> ColumnElement[Any]: - ... + def concat(self, other: Any) -> ColumnElement[Any]: ... - def concat(self, other: Any) -> ColumnElement[Any]: - ... + def concat(self, other: Any) -> ColumnElement[Any]: ... def like( self, other: Any, escape: Optional[str] = None - ) -> BinaryExpression[bool]: - ... + ) -> BinaryExpression[bool]: ... def ilike( self, other: Any, escape: Optional[str] = None - ) -> BinaryExpression[bool]: - ... + ) -> BinaryExpression[bool]: ... - def bitwise_xor(self, other: Any) -> BinaryExpression[Any]: - ... + def bitwise_xor(self, other: Any) -> BinaryExpression[Any]: ... - def bitwise_or(self, other: Any) -> BinaryExpression[Any]: - ... + def bitwise_or(self, other: Any) -> BinaryExpression[Any]: ... - def bitwise_and(self, other: Any) -> BinaryExpression[Any]: - ... + def bitwise_and(self, other: Any) -> BinaryExpression[Any]: ... - def bitwise_not(self) -> UnaryExpression[_T_co]: - ... + def bitwise_not(self) -> UnaryExpression[_T_co]: ... - def bitwise_lshift(self, other: Any) -> BinaryExpression[Any]: - ... + def bitwise_lshift(self, other: Any) -> BinaryExpression[Any]: ... - def bitwise_rshift(self, other: Any) -> BinaryExpression[Any]: - ... + def bitwise_rshift(self, other: Any) -> BinaryExpression[Any]: ... def in_( self, other: Union[ Iterable[Any], BindParameter[Any], roles.InElementRole ], - ) -> BinaryExpression[bool]: - ... + ) -> BinaryExpression[bool]: ... def not_in( self, other: Union[ Iterable[Any], BindParameter[Any], roles.InElementRole ], - ) -> BinaryExpression[bool]: - ... + ) -> BinaryExpression[bool]: ... def notin_( self, other: Union[ Iterable[Any], BindParameter[Any], roles.InElementRole ], - ) -> BinaryExpression[bool]: - ... + ) -> BinaryExpression[bool]: ... def not_like( self, other: Any, escape: Optional[str] = None - ) -> BinaryExpression[bool]: - ... + ) -> BinaryExpression[bool]: ... def notlike( self, other: Any, escape: Optional[str] = None - ) -> BinaryExpression[bool]: - ... + ) -> BinaryExpression[bool]: ... def not_ilike( self, other: Any, escape: Optional[str] = None - ) -> BinaryExpression[bool]: - ... + ) -> BinaryExpression[bool]: ... def notilike( self, other: Any, escape: Optional[str] = None - ) -> BinaryExpression[bool]: - ... + ) -> BinaryExpression[bool]: ... - def is_(self, other: Any) -> BinaryExpression[bool]: - ... + def is_(self, other: Any) -> BinaryExpression[bool]: ... - def is_not(self, other: Any) -> BinaryExpression[bool]: - ... + def is_not(self, other: Any) -> BinaryExpression[bool]: ... - def isnot(self, other: Any) -> BinaryExpression[bool]: - ... + def isnot(self, other: Any) -> BinaryExpression[bool]: ... def startswith( self, other: Any, escape: Optional[str] = None, autoescape: bool = False, - ) -> ColumnElement[bool]: - ... + ) -> ColumnElement[bool]: ... 
def istartswith( self, other: Any, escape: Optional[str] = None, autoescape: bool = False, - ) -> ColumnElement[bool]: - ... + ) -> ColumnElement[bool]: ... def endswith( self, other: Any, escape: Optional[str] = None, autoescape: bool = False, - ) -> ColumnElement[bool]: - ... + ) -> ColumnElement[bool]: ... def iendswith( self, other: Any, escape: Optional[str] = None, autoescape: bool = False, - ) -> ColumnElement[bool]: - ... + ) -> ColumnElement[bool]: ... - def contains(self, other: Any, **kw: Any) -> ColumnElement[bool]: - ... + def contains(self, other: Any, **kw: Any) -> ColumnElement[bool]: ... - def icontains(self, other: Any, **kw: Any) -> ColumnElement[bool]: - ... + def icontains(self, other: Any, **kw: Any) -> ColumnElement[bool]: ... - def match(self, other: Any, **kwargs: Any) -> ColumnElement[bool]: - ... + def match(self, other: Any, **kwargs: Any) -> ColumnElement[bool]: ... def regexp_match( self, pattern: Any, flags: Optional[str] = None - ) -> ColumnElement[bool]: - ... + ) -> ColumnElement[bool]: ... def regexp_replace( self, pattern: Any, replacement: Any, flags: Optional[str] = None - ) -> ColumnElement[str]: - ... + ) -> ColumnElement[str]: ... - def desc(self) -> UnaryExpression[_T_co]: - ... + def desc(self) -> UnaryExpression[_T_co]: ... - def asc(self) -> UnaryExpression[_T_co]: - ... + def asc(self) -> UnaryExpression[_T_co]: ... - def nulls_first(self) -> UnaryExpression[_T_co]: - ... + def nulls_first(self) -> UnaryExpression[_T_co]: ... - def nullsfirst(self) -> UnaryExpression[_T_co]: - ... + def nullsfirst(self) -> UnaryExpression[_T_co]: ... - def nulls_last(self) -> UnaryExpression[_T_co]: - ... + def nulls_last(self) -> UnaryExpression[_T_co]: ... - def nullslast(self) -> UnaryExpression[_T_co]: - ... + def nullslast(self) -> UnaryExpression[_T_co]: ... - def collate(self, collation: str) -> CollationClause: - ... + def collate(self, collation: str) -> CollationClause: ... def between( self, cleft: Any, cright: Any, symmetric: bool = False - ) -> BinaryExpression[bool]: - ... + ) -> BinaryExpression[bool]: ... - def distinct(self: _SQO[_T_co]) -> UnaryExpression[_T_co]: - ... + def distinct(self: _SQO[_T_co]) -> UnaryExpression[_T_co]: ... - def any_(self) -> CollectionAggregate[Any]: - ... + def any_(self) -> CollectionAggregate[Any]: ... - def all_(self) -> CollectionAggregate[Any]: - ... + def all_(self) -> CollectionAggregate[Any]: ... # numeric overloads. These need more tweaking # in particular they all need to have a variant for Optiona[_T] @@ -1128,159 +1056,126 @@ def all_(self) -> CollectionAggregate[Any]: def __add__( self: _SQO[_NMT], other: Any, - ) -> ColumnElement[_NMT]: - ... + ) -> ColumnElement[_NMT]: ... @overload def __add__( self: _SQO[str], other: Any, - ) -> ColumnElement[str]: - ... + ) -> ColumnElement[str]: ... - def __add__(self, other: Any) -> ColumnElement[Any]: - ... + def __add__(self, other: Any) -> ColumnElement[Any]: ... @overload - def __radd__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: - ... + def __radd__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ... @overload - def __radd__(self: _SQO[str], other: Any) -> ColumnElement[str]: - ... + def __radd__(self: _SQO[str], other: Any) -> ColumnElement[str]: ... - def __radd__(self, other: Any) -> ColumnElement[Any]: - ... + def __radd__(self, other: Any) -> ColumnElement[Any]: ... @overload def __sub__( self: _SQO[_NMT], other: Any, - ) -> ColumnElement[_NMT]: - ... + ) -> ColumnElement[_NMT]: ... 
@overload - def __sub__(self, other: Any) -> ColumnElement[Any]: - ... + def __sub__(self, other: Any) -> ColumnElement[Any]: ... - def __sub__(self, other: Any) -> ColumnElement[Any]: - ... + def __sub__(self, other: Any) -> ColumnElement[Any]: ... @overload def __rsub__( self: _SQO[_NMT], other: Any, - ) -> ColumnElement[_NMT]: - ... + ) -> ColumnElement[_NMT]: ... @overload - def __rsub__(self, other: Any) -> ColumnElement[Any]: - ... + def __rsub__(self, other: Any) -> ColumnElement[Any]: ... - def __rsub__(self, other: Any) -> ColumnElement[Any]: - ... + def __rsub__(self, other: Any) -> ColumnElement[Any]: ... @overload def __mul__( self: _SQO[_NMT], other: Any, - ) -> ColumnElement[_NMT]: - ... + ) -> ColumnElement[_NMT]: ... @overload - def __mul__(self, other: Any) -> ColumnElement[Any]: - ... + def __mul__(self, other: Any) -> ColumnElement[Any]: ... - def __mul__(self, other: Any) -> ColumnElement[Any]: - ... + def __mul__(self, other: Any) -> ColumnElement[Any]: ... @overload def __rmul__( self: _SQO[_NMT], other: Any, - ) -> ColumnElement[_NMT]: - ... + ) -> ColumnElement[_NMT]: ... @overload - def __rmul__(self, other: Any) -> ColumnElement[Any]: - ... + def __rmul__(self, other: Any) -> ColumnElement[Any]: ... - def __rmul__(self, other: Any) -> ColumnElement[Any]: - ... + def __rmul__(self, other: Any) -> ColumnElement[Any]: ... @overload - def __mod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: - ... + def __mod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ... @overload - def __mod__(self, other: Any) -> ColumnElement[Any]: - ... + def __mod__(self, other: Any) -> ColumnElement[Any]: ... - def __mod__(self, other: Any) -> ColumnElement[Any]: - ... + def __mod__(self, other: Any) -> ColumnElement[Any]: ... @overload - def __rmod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: - ... + def __rmod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ... @overload - def __rmod__(self, other: Any) -> ColumnElement[Any]: - ... + def __rmod__(self, other: Any) -> ColumnElement[Any]: ... - def __rmod__(self, other: Any) -> ColumnElement[Any]: - ... + def __rmod__(self, other: Any) -> ColumnElement[Any]: ... @overload def __truediv__( self: _SQO[int], other: Any - ) -> ColumnElement[_NUMERIC]: - ... + ) -> ColumnElement[_NUMERIC]: ... @overload - def __truediv__(self: _SQO[_NT], other: Any) -> ColumnElement[_NT]: - ... + def __truediv__(self: _SQO[_NT], other: Any) -> ColumnElement[_NT]: ... @overload - def __truediv__(self, other: Any) -> ColumnElement[Any]: - ... + def __truediv__(self, other: Any) -> ColumnElement[Any]: ... - def __truediv__(self, other: Any) -> ColumnElement[Any]: - ... + def __truediv__(self, other: Any) -> ColumnElement[Any]: ... @overload def __rtruediv__( self: _SQO[_NMT], other: Any - ) -> ColumnElement[_NUMERIC]: - ... + ) -> ColumnElement[_NUMERIC]: ... @overload - def __rtruediv__(self, other: Any) -> ColumnElement[Any]: - ... + def __rtruediv__(self, other: Any) -> ColumnElement[Any]: ... - def __rtruediv__(self, other: Any) -> ColumnElement[Any]: - ... + def __rtruediv__(self, other: Any) -> ColumnElement[Any]: ... @overload - def __floordiv__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: - ... + def __floordiv__( + self: _SQO[_NMT], other: Any + ) -> ColumnElement[_NMT]: ... @overload - def __floordiv__(self, other: Any) -> ColumnElement[Any]: - ... + def __floordiv__(self, other: Any) -> ColumnElement[Any]: ... - def __floordiv__(self, other: Any) -> ColumnElement[Any]: - ... 
+ def __floordiv__(self, other: Any) -> ColumnElement[Any]: ... @overload - def __rfloordiv__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: - ... + def __rfloordiv__( + self: _SQO[_NMT], other: Any + ) -> ColumnElement[_NMT]: ... @overload - def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: - ... + def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: ... - def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: - ... + def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: ... class SQLColumnExpression( @@ -1532,14 +1427,12 @@ def _non_anon_label(self) -> Optional[str]: @overload def self_group( self: ColumnElement[_T], against: Optional[OperatorType] = None - ) -> ColumnElement[_T]: - ... + ) -> ColumnElement[_T]: ... @overload def self_group( self: ColumnElement[Any], against: Optional[OperatorType] = None - ) -> ColumnElement[Any]: - ... + ) -> ColumnElement[Any]: ... def self_group( self, against: Optional[OperatorType] = None @@ -1555,12 +1448,10 @@ def self_group( return self @overload - def _negate(self: ColumnElement[bool]) -> ColumnElement[bool]: - ... + def _negate(self: ColumnElement[bool]) -> ColumnElement[bool]: ... @overload - def _negate(self: ColumnElement[_T]) -> ColumnElement[_T]: - ... + def _negate(self: ColumnElement[_T]) -> ColumnElement[_T]: ... def _negate(self) -> ColumnElement[Any]: if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity: @@ -1764,9 +1655,11 @@ def _make_proxy( assert key is not None co: ColumnClause[_T] = ColumnClause( - coercions.expect(roles.TruncatedLabelRole, name) - if name_is_truncatable - else name, + ( + coercions.expect(roles.TruncatedLabelRole, name) + if name_is_truncatable + else name + ), type_=getattr(self, "type", None), _selectable=selectable, ) @@ -2078,9 +1971,12 @@ def __init__( if unique: self.key = _anonymous_label.safe_construct( id(self), - key - if key is not None and not isinstance(key, _anonymous_label) - else "param", + ( + key + if key is not None + and not isinstance(key, _anonymous_label) + else "param" + ), sanitize_key=True, ) self._key_is_anon = True @@ -2141,13 +2037,13 @@ def __init__( check_value = value[0] else: check_value = value - cast( - "BindParameter[typing_Tuple[Any, ...]]", self - ).type = type_._resolve_values_to_types(check_value) + cast("BindParameter[typing_Tuple[Any, ...]]", self).type = ( + type_._resolve_values_to_types(check_value) + ) else: - cast( - "BindParameter[typing_Tuple[Any, ...]]", self - ).type = type_ + cast("BindParameter[typing_Tuple[Any, ...]]", self).type = ( + type_ + ) else: self.type = type_ @@ -2651,9 +2547,11 @@ def columns( ] positional_input_cols = [ - ColumnClause(col.key, types.pop(col.key)) - if col.key in types - else col + ( + ColumnClause(col.key, types.pop(col.key)) + if col.key in types + else col + ) for col in input_cols ] keyed_input_cols: List[NamedColumn[Any]] = [ @@ -3165,9 +3063,11 @@ def _construct( # which will link elements against the operator. flattened_clauses = itertools.chain.from_iterable( - (c for c in to_flat._flattened_operator_clauses) - if getattr(to_flat, "operator", None) is operator - else (to_flat,) + ( + (c for c in to_flat._flattened_operator_clauses) + if getattr(to_flat, "operator", None) is operator + else (to_flat,) + ) for to_flat in convert_clauses ) @@ -4025,8 +3925,7 @@ def __bool__(self): def __invert__( self: BinaryExpression[_T], - ) -> BinaryExpression[_T]: - ... + ) -> BinaryExpression[_T]: ... 
@util.ro_non_memoized_property def _from_objects(self) -> List[FromClause]: @@ -4592,9 +4491,11 @@ def _make_proxy( **kw: Any, ) -> typing_Tuple[str, ColumnClause[_T]]: c = ColumnClause( - coercions.expect(roles.TruncatedLabelRole, name or self.name) - if name_is_truncatable - else (name or self.name), + ( + coercions.expect(roles.TruncatedLabelRole, name or self.name) + if name_is_truncatable + else (name or self.name) + ), type_=self.type, _selectable=selectable, is_literal=False, @@ -5022,9 +4923,11 @@ def _make_proxy( ) ) c = self._constructor( - coercions.expect(roles.TruncatedLabelRole, name or self.name) - if name_is_truncatable - else (name or self.name), + ( + coercions.expect(roles.TruncatedLabelRole, name or self.name) + if name_is_truncatable + else (name or self.name) + ), type_=self.type, _selectable=selectable, is_literal=is_literal, @@ -5167,13 +5070,11 @@ class quoted_name(util.MemoizedSlots, str): @overload @classmethod - def construct(cls, value: str, quote: Optional[bool]) -> quoted_name: - ... + def construct(cls, value: str, quote: Optional[bool]) -> quoted_name: ... @overload @classmethod - def construct(cls, value: None, quote: Optional[bool]) -> None: - ... + def construct(cls, value: None, quote: Optional[bool]) -> None: ... @classmethod def construct( diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index 1ea68b87e60..afb2b1d9b99 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -84,9 +84,9 @@ _T = TypeVar("_T", bound=Any) _S = TypeVar("_S", bound=Any) -_registry: util.defaultdict[ - str, Dict[str, Type[Function[Any]]] -] = util.defaultdict(dict) +_registry: util.defaultdict[str, Dict[str, Type[Function[Any]]]] = ( + util.defaultdict(dict) +) def register_function( @@ -486,16 +486,14 @@ def within_group( return WithinGroup(self, *order_by) @overload - def filter(self) -> Self: - ... + def filter(self) -> Self: ... @overload def filter( self, __criterion0: _ColumnExpressionArgument[bool], *criterion: _ColumnExpressionArgument[bool], - ) -> FunctionFilter[_T]: - ... + ) -> FunctionFilter[_T]: ... def filter( self, *criterion: _ColumnExpressionArgument[bool] @@ -945,12 +943,10 @@ def __getattr__(self, name: str) -> _FunctionGenerator: @overload def __call__( self, *c: Any, type_: _TypeEngineArgument[_T], **kwargs: Any - ) -> Function[_T]: - ... + ) -> Function[_T]: ... @overload - def __call__(self, *c: Any, **kwargs: Any) -> Function[Any]: - ... + def __call__(self, *c: Any, **kwargs: Any) -> Function[Any]: ... def __call__(self, *c: Any, **kwargs: Any) -> Function[Any]: o = self.opts.copy() @@ -981,24 +977,19 @@ def __call__(self, *c: Any, **kwargs: Any) -> Function[Any]: # statically generated** by tools/generate_sql_functions.py @property - def aggregate_strings(self) -> Type[aggregate_strings]: - ... + def aggregate_strings(self) -> Type[aggregate_strings]: ... @property - def ansifunction(self) -> Type[AnsiFunction[Any]]: - ... + def ansifunction(self) -> Type[AnsiFunction[Any]]: ... @property - def array_agg(self) -> Type[array_agg[Any]]: - ... + def array_agg(self) -> Type[array_agg[Any]]: ... @property - def cast(self) -> Type[Cast[Any]]: - ... + def cast(self) -> Type[Cast[Any]]: ... @property - def char_length(self) -> Type[char_length]: - ... + def char_length(self) -> Type[char_length]: ... # set ColumnElement[_T] as a separate overload, to appease mypy # which seems to not want to accept _T from _ColumnExpressionArgument. 
@@ -1011,8 +1002,7 @@ def coalesce( col: ColumnElement[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> coalesce[_T]: - ... + ) -> coalesce[_T]: ... @overload def coalesce( @@ -1020,8 +1010,7 @@ def coalesce( col: _ColumnExpressionArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> coalesce[_T]: - ... + ) -> coalesce[_T]: ... @overload def coalesce( @@ -1029,68 +1018,53 @@ def coalesce( col: _ColumnExpressionOrLiteralArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> coalesce[_T]: - ... + ) -> coalesce[_T]: ... def coalesce( self, col: _ColumnExpressionOrLiteralArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> coalesce[_T]: - ... + ) -> coalesce[_T]: ... @property - def concat(self) -> Type[concat]: - ... + def concat(self) -> Type[concat]: ... @property - def count(self) -> Type[count]: - ... + def count(self) -> Type[count]: ... @property - def cube(self) -> Type[cube[Any]]: - ... + def cube(self) -> Type[cube[Any]]: ... @property - def cume_dist(self) -> Type[cume_dist]: - ... + def cume_dist(self) -> Type[cume_dist]: ... @property - def current_date(self) -> Type[current_date]: - ... + def current_date(self) -> Type[current_date]: ... @property - def current_time(self) -> Type[current_time]: - ... + def current_time(self) -> Type[current_time]: ... @property - def current_timestamp(self) -> Type[current_timestamp]: - ... + def current_timestamp(self) -> Type[current_timestamp]: ... @property - def current_user(self) -> Type[current_user]: - ... + def current_user(self) -> Type[current_user]: ... @property - def dense_rank(self) -> Type[dense_rank]: - ... + def dense_rank(self) -> Type[dense_rank]: ... @property - def extract(self) -> Type[Extract]: - ... + def extract(self) -> Type[Extract]: ... @property - def grouping_sets(self) -> Type[grouping_sets[Any]]: - ... + def grouping_sets(self) -> Type[grouping_sets[Any]]: ... @property - def localtime(self) -> Type[localtime]: - ... + def localtime(self) -> Type[localtime]: ... @property - def localtimestamp(self) -> Type[localtimestamp]: - ... + def localtimestamp(self) -> Type[localtimestamp]: ... # set ColumnElement[_T] as a separate overload, to appease mypy # which seems to not want to accept _T from _ColumnExpressionArgument. @@ -1103,8 +1077,7 @@ def max( # noqa: A001 col: ColumnElement[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> max[_T]: - ... + ) -> max[_T]: ... @overload def max( # noqa: A001 @@ -1112,8 +1085,7 @@ def max( # noqa: A001 col: _ColumnExpressionArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> max[_T]: - ... + ) -> max[_T]: ... @overload def max( # noqa: A001 @@ -1121,16 +1093,14 @@ def max( # noqa: A001 col: _ColumnExpressionOrLiteralArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> max[_T]: - ... + ) -> max[_T]: ... def max( # noqa: A001 self, col: _ColumnExpressionOrLiteralArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> max[_T]: - ... + ) -> max[_T]: ... # set ColumnElement[_T] as a separate overload, to appease mypy # which seems to not want to accept _T from _ColumnExpressionArgument. @@ -1143,8 +1113,7 @@ def min( # noqa: A001 col: ColumnElement[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> min[_T]: - ... + ) -> min[_T]: ... 
@overload def min( # noqa: A001 @@ -1152,8 +1121,7 @@ def min( # noqa: A001 col: _ColumnExpressionArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> min[_T]: - ... + ) -> min[_T]: ... @overload def min( # noqa: A001 @@ -1161,60 +1129,47 @@ def min( # noqa: A001 col: _ColumnExpressionOrLiteralArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> min[_T]: - ... + ) -> min[_T]: ... def min( # noqa: A001 self, col: _ColumnExpressionOrLiteralArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> min[_T]: - ... + ) -> min[_T]: ... @property - def mode(self) -> Type[mode[Any]]: - ... + def mode(self) -> Type[mode[Any]]: ... @property - def next_value(self) -> Type[next_value]: - ... + def next_value(self) -> Type[next_value]: ... @property - def now(self) -> Type[now]: - ... + def now(self) -> Type[now]: ... @property - def orderedsetagg(self) -> Type[OrderedSetAgg[Any]]: - ... + def orderedsetagg(self) -> Type[OrderedSetAgg[Any]]: ... @property - def percent_rank(self) -> Type[percent_rank]: - ... + def percent_rank(self) -> Type[percent_rank]: ... @property - def percentile_cont(self) -> Type[percentile_cont[Any]]: - ... + def percentile_cont(self) -> Type[percentile_cont[Any]]: ... @property - def percentile_disc(self) -> Type[percentile_disc[Any]]: - ... + def percentile_disc(self) -> Type[percentile_disc[Any]]: ... @property - def random(self) -> Type[random]: - ... + def random(self) -> Type[random]: ... @property - def rank(self) -> Type[rank]: - ... + def rank(self) -> Type[rank]: ... @property - def rollup(self) -> Type[rollup[Any]]: - ... + def rollup(self) -> Type[rollup[Any]]: ... @property - def session_user(self) -> Type[session_user]: - ... + def session_user(self) -> Type[session_user]: ... # set ColumnElement[_T] as a separate overload, to appease mypy # which seems to not want to accept _T from _ColumnExpressionArgument. @@ -1227,8 +1182,7 @@ def sum( # noqa: A001 col: ColumnElement[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> sum[_T]: - ... + ) -> sum[_T]: ... @overload def sum( # noqa: A001 @@ -1236,8 +1190,7 @@ def sum( # noqa: A001 col: _ColumnExpressionArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> sum[_T]: - ... + ) -> sum[_T]: ... @overload def sum( # noqa: A001 @@ -1245,24 +1198,20 @@ def sum( # noqa: A001 col: _ColumnExpressionOrLiteralArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> sum[_T]: - ... + ) -> sum[_T]: ... def sum( # noqa: A001 self, col: _ColumnExpressionOrLiteralArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ) -> sum[_T]: - ... + ) -> sum[_T]: ... @property - def sysdate(self) -> Type[sysdate]: - ... + def sysdate(self) -> Type[sysdate]: ... @property - def user(self) -> Type[user]: - ... + def user(self) -> Type[user]: ... # END GENERATED FUNCTION ACCESSORS @@ -1342,8 +1291,7 @@ def __init__( *clauses: _ColumnExpressionOrLiteralArgument[_T], type_: None = ..., packagenames: Optional[Tuple[str, ...]] = ..., - ): - ... + ): ... @overload def __init__( @@ -1352,8 +1300,7 @@ def __init__( *clauses: _ColumnExpressionOrLiteralArgument[Any], type_: _TypeEngineArgument[_T] = ..., packagenames: Optional[Tuple[str, ...]] = ..., - ): - ... + ): ... def __init__( self, @@ -1632,8 +1579,7 @@ def __init__( col: ColumnElement[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ): - ... + ): ... 
@overload def __init__( @@ -1641,8 +1587,7 @@ def __init__( col: _ColumnExpressionArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ): - ... + ): ... @overload def __init__( @@ -1650,8 +1595,7 @@ def __init__( col: _ColumnExpressionOrLiteralArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ): - ... + ): ... def __init__( self, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any @@ -1771,6 +1715,7 @@ class count(GenericFunction[int]): """ + type = sqltypes.Integer() inherit_cache = True @@ -2023,6 +1968,7 @@ class cube(GenericFunction[_T]): .. versionadded:: 1.2 """ + _has_args = True inherit_cache = True @@ -2040,6 +1986,7 @@ class rollup(GenericFunction[_T]): .. versionadded:: 1.2 """ + _has_args = True inherit_cache = True @@ -2073,6 +2020,7 @@ class grouping_sets(GenericFunction[_T]): .. versionadded:: 1.2 """ + _has_args = True inherit_cache = True diff --git a/lib/sqlalchemy/sql/lambdas.py b/lib/sqlalchemy/sql/lambdas.py index a53ebae7973..726fa2411f8 100644 --- a/lib/sqlalchemy/sql/lambdas.py +++ b/lib/sqlalchemy/sql/lambdas.py @@ -407,9 +407,9 @@ def _gen_cache_key(self, anon_map, bindparams): while parent is not None: assert parent.closure_cache_key is not CacheConst.NO_CACHE - parent_closure_cache_key: Tuple[ - Any, ... - ] = parent.closure_cache_key + parent_closure_cache_key: Tuple[Any, ...] = ( + parent.closure_cache_key + ) cache_key = ( (parent.fn.__code__,) + parent_closure_cache_key + cache_key @@ -535,8 +535,7 @@ def __init__( role: Type[SQLRole], opts: Union[Type[LambdaOptions], LambdaOptions] = LambdaOptions, apply_propagate_attrs: Optional[ClauseElement] = None, - ): - ... + ): ... def __add__( self, other: _StmtLambdaElementType[Any] @@ -737,9 +736,9 @@ class AnalyzedCode: "closure_trackers", "build_py_wrappers", ) - _fns: weakref.WeakKeyDictionary[ - CodeType, AnalyzedCode - ] = weakref.WeakKeyDictionary() + _fns: weakref.WeakKeyDictionary[CodeType, AnalyzedCode] = ( + weakref.WeakKeyDictionary() + ) _generation_mutex = threading.RLock() @@ -1184,12 +1183,12 @@ def _instrument_and_run_function(self, lambda_element): # rewrite the original fn. things that look like they will # become bound parameters are wrapped in a PyWrapper. - self.tracker_instrumented_fn = ( - tracker_instrumented_fn - ) = self._rewrite_code_obj( - fn, - [new_closure[name] for name in fn.__code__.co_freevars], - new_globals, + self.tracker_instrumented_fn = tracker_instrumented_fn = ( + self._rewrite_code_obj( + fn, + [new_closure[name] for name in fn.__code__.co_freevars], + new_globals, + ) ) # now invoke the function. This will give us a new SQL diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index b177e01ef1e..9fb096e470c 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -77,8 +77,7 @@ def __call__( right: Optional[Any] = None, *other: Any, **kwargs: Any, - ) -> ColumnElement[Any]: - ... + ) -> ColumnElement[Any]: ... @overload def __call__( @@ -87,8 +86,7 @@ def __call__( right: Optional[Any] = None, *other: Any, **kwargs: Any, - ) -> Operators: - ... + ) -> Operators: ... def __call__( self, @@ -96,8 +94,7 @@ def __call__( right: Optional[Any] = None, *other: Any, **kwargs: Any, - ) -> Operators: - ... + ) -> Operators: ... add = cast(OperatorType, _uncast_add) @@ -466,8 +463,7 @@ def __call__( right: Optional[Any] = None, *other: Any, **kwargs: Any, - ) -> ColumnElement[Any]: - ... + ) -> ColumnElement[Any]: ... 
@overload def __call__( @@ -476,8 +472,7 @@ def __call__( right: Optional[Any] = None, *other: Any, **kwargs: Any, - ) -> Operators: - ... + ) -> Operators: ... def __call__( self, @@ -545,13 +540,11 @@ def eq(a, b): def operate( self, op: OperatorType, *other: Any, **kwargs: Any - ) -> ColumnOperators: - ... + ) -> ColumnOperators: ... def reverse_operate( self, op: OperatorType, other: Any, **kwargs: Any - ) -> ColumnOperators: - ... + ) -> ColumnOperators: ... def __lt__(self, other: Any) -> ColumnOperators: """Implement the ``<`` operator. @@ -574,8 +567,7 @@ def __le__(self, other: Any) -> ColumnOperators: # https://docs.python.org/3/reference/datamodel.html#object.__hash__ if TYPE_CHECKING: - def __hash__(self) -> int: - ... + def __hash__(self) -> int: ... else: __hash__ = Operators.__hash__ @@ -623,8 +615,7 @@ def is_not_distinct_from(self, other: Any) -> ColumnOperators: # deprecated 1.4; see #5435 if TYPE_CHECKING: - def isnot_distinct_from(self, other: Any) -> ColumnOperators: - ... + def isnot_distinct_from(self, other: Any) -> ColumnOperators: ... else: isnot_distinct_from = is_not_distinct_from @@ -964,8 +955,7 @@ def not_in(self, other: Any) -> ColumnOperators: # deprecated 1.4; see #5429 if TYPE_CHECKING: - def notin_(self, other: Any) -> ColumnOperators: - ... + def notin_(self, other: Any) -> ColumnOperators: ... else: notin_ = not_in @@ -994,8 +984,7 @@ def not_like( def notlike( self, other: Any, escape: Optional[str] = None - ) -> ColumnOperators: - ... + ) -> ColumnOperators: ... else: notlike = not_like @@ -1024,8 +1013,7 @@ def not_ilike( def notilike( self, other: Any, escape: Optional[str] = None - ) -> ColumnOperators: - ... + ) -> ColumnOperators: ... else: notilike = not_ilike @@ -1063,8 +1051,7 @@ def is_not(self, other: Any) -> ColumnOperators: # deprecated 1.4; see #5429 if TYPE_CHECKING: - def isnot(self, other: Any) -> ColumnOperators: - ... + def isnot(self, other: Any) -> ColumnOperators: ... else: isnot = is_not @@ -1728,8 +1715,7 @@ def nulls_first(self) -> ColumnOperators: # deprecated 1.4; see #5435 if TYPE_CHECKING: - def nullsfirst(self) -> ColumnOperators: - ... + def nullsfirst(self) -> ColumnOperators: ... else: nullsfirst = nulls_first @@ -1747,8 +1733,7 @@ def nulls_last(self) -> ColumnOperators: # deprecated 1.4; see #5429 if TYPE_CHECKING: - def nullslast(self) -> ColumnOperators: - ... + def nullslast(self) -> ColumnOperators: ... else: nullslast = nulls_last @@ -1968,8 +1953,7 @@ def is_true(a: Any) -> Any: if TYPE_CHECKING: @_operator_fn - def istrue(a: Any) -> Any: - ... + def istrue(a: Any) -> Any: ... else: istrue = is_true @@ -1984,8 +1968,7 @@ def is_false(a: Any) -> Any: if TYPE_CHECKING: @_operator_fn - def isfalse(a: Any) -> Any: - ... + def isfalse(a: Any) -> Any: ... else: isfalse = is_false @@ -2007,8 +1990,7 @@ def is_not_distinct_from(a: Any, b: Any) -> Any: if TYPE_CHECKING: @_operator_fn - def isnot_distinct_from(a: Any, b: Any) -> Any: - ... + def isnot_distinct_from(a: Any, b: Any) -> Any: ... else: isnot_distinct_from = is_not_distinct_from @@ -2030,8 +2012,7 @@ def is_not(a: Any, b: Any) -> Any: if TYPE_CHECKING: @_operator_fn - def isnot(a: Any, b: Any) -> Any: - ... + def isnot(a: Any, b: Any) -> Any: ... else: isnot = is_not @@ -2063,8 +2044,7 @@ def not_like_op(a: Any, b: Any, escape: Optional[str] = None) -> Any: if TYPE_CHECKING: @_operator_fn - def notlike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any: - ... + def notlike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any: ... 
else: notlike_op = not_like_op @@ -2086,8 +2066,7 @@ def not_ilike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any: if TYPE_CHECKING: @_operator_fn - def notilike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any: - ... + def notilike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any: ... else: notilike_op = not_ilike_op @@ -2109,8 +2088,9 @@ def not_between_op(a: Any, b: Any, c: Any, symmetric: bool = False) -> Any: if TYPE_CHECKING: @_operator_fn - def notbetween_op(a: Any, b: Any, c: Any, symmetric: bool = False) -> Any: - ... + def notbetween_op( + a: Any, b: Any, c: Any, symmetric: bool = False + ) -> Any: ... else: notbetween_op = not_between_op @@ -2132,8 +2112,7 @@ def not_in_op(a: Any, b: Any) -> Any: if TYPE_CHECKING: @_operator_fn - def notin_op(a: Any, b: Any) -> Any: - ... + def notin_op(a: Any, b: Any) -> Any: ... else: notin_op = not_in_op @@ -2198,8 +2177,7 @@ def not_startswith_op( @_operator_fn def notstartswith_op( a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False - ) -> Any: - ... + ) -> Any: ... else: notstartswith_op = not_startswith_op @@ -2243,8 +2221,7 @@ def not_endswith_op( @_operator_fn def notendswith_op( a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False - ) -> Any: - ... + ) -> Any: ... else: notendswith_op = not_endswith_op @@ -2288,8 +2265,7 @@ def not_contains_op( @_operator_fn def notcontains_op( a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False - ) -> Any: - ... + ) -> Any: ... else: notcontains_op = not_contains_op @@ -2346,8 +2322,7 @@ def not_match_op(a: Any, b: Any, **kw: Any) -> Any: if TYPE_CHECKING: @_operator_fn - def notmatch_op(a: Any, b: Any, **kw: Any) -> Any: - ... + def notmatch_op(a: Any, b: Any, **kw: Any) -> Any: ... else: notmatch_op = not_match_op @@ -2392,8 +2367,7 @@ def nulls_first_op(a: Any) -> Any: if TYPE_CHECKING: @_operator_fn - def nullsfirst_op(a: Any) -> Any: - ... + def nullsfirst_op(a: Any) -> Any: ... else: nullsfirst_op = nulls_first_op @@ -2408,8 +2382,7 @@ def nulls_last_op(a: Any) -> Any: if TYPE_CHECKING: @_operator_fn - def nullslast_op(a: Any) -> Any: - ... + def nullslast_op(a: Any) -> Any: ... else: nullslast_op = nulls_last_op diff --git a/lib/sqlalchemy/sql/roles.py b/lib/sqlalchemy/sql/roles.py index 42c561cb4b7..ae70ac3a5bc 100644 --- a/lib/sqlalchemy/sql/roles.py +++ b/lib/sqlalchemy/sql/roles.py @@ -227,8 +227,7 @@ class AnonymizedFromClauseRole(StrictFromClauseRole): def _anonymous_fromclause( self, *, name: Optional[str] = None, flat: bool = False - ) -> FromClause: - ... + ) -> FromClause: ... class ReturnsRowsRole(SQLRole): @@ -246,8 +245,7 @@ class StatementRole(SQLRole): if TYPE_CHECKING: @util.memoized_property - def _propagate_attrs(self) -> _PropagateAttrsType: - ... + def _propagate_attrs(self) -> _PropagateAttrsType: ... 
else: _propagate_attrs = util.EMPTY_DICT diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index fcde05721cf..96e350447a8 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -159,15 +159,15 @@ class SchemaConst(Enum): """ -RETAIN_SCHEMA: Final[ - Literal[SchemaConst.RETAIN_SCHEMA] -] = SchemaConst.RETAIN_SCHEMA -BLANK_SCHEMA: Final[ - Literal[SchemaConst.BLANK_SCHEMA] -] = SchemaConst.BLANK_SCHEMA -NULL_UNSPECIFIED: Final[ - Literal[SchemaConst.NULL_UNSPECIFIED] -] = SchemaConst.NULL_UNSPECIFIED +RETAIN_SCHEMA: Final[Literal[SchemaConst.RETAIN_SCHEMA]] = ( + SchemaConst.RETAIN_SCHEMA +) +BLANK_SCHEMA: Final[Literal[SchemaConst.BLANK_SCHEMA]] = ( + SchemaConst.BLANK_SCHEMA +) +NULL_UNSPECIFIED: Final[Literal[SchemaConst.NULL_UNSPECIFIED]] = ( + SchemaConst.NULL_UNSPECIFIED +) def _get_table_key(name: str, schema: Optional[str]) -> str: @@ -344,12 +344,10 @@ class Table( if TYPE_CHECKING: @util.ro_non_memoized_property - def primary_key(self) -> PrimaryKeyConstraint: - ... + def primary_key(self) -> PrimaryKeyConstraint: ... @util.ro_non_memoized_property - def foreign_keys(self) -> Set[ForeignKey]: - ... + def foreign_keys(self) -> Set[ForeignKey]: ... _columns: DedupeColumnCollection[Column[Any]] @@ -401,18 +399,15 @@ def foreign_keys(self) -> Set[ForeignKey]: if TYPE_CHECKING: @util.ro_non_memoized_property - def columns(self) -> ReadOnlyColumnCollection[str, Column[Any]]: - ... + def columns(self) -> ReadOnlyColumnCollection[str, Column[Any]]: ... @util.ro_non_memoized_property def exported_columns( self, - ) -> ReadOnlyColumnCollection[str, Column[Any]]: - ... + ) -> ReadOnlyColumnCollection[str, Column[Any]]: ... @util.ro_non_memoized_property - def c(self) -> ReadOnlyColumnCollection[str, Column[Any]]: - ... + def c(self) -> ReadOnlyColumnCollection[str, Column[Any]]: ... 
def _gen_cache_key( self, anon_map: anon_map, bindparams: List[BindParameter[Any]] @@ -2463,9 +2458,9 @@ def _copy(self, **kw: Any) -> Column[Any]: dialect_option_key, dialect_option_value, ) in dialect_options.items(): - column_kwargs[ - dialect_name + "_" + dialect_option_key - ] = dialect_option_value + column_kwargs[dialect_name + "_" + dialect_option_key] = ( + dialect_option_value + ) server_default = self.server_default server_onupdate = self.server_onupdate @@ -2636,19 +2631,23 @@ def _make_proxy( ) try: c = self._constructor( - coercions.expect( - roles.TruncatedLabelRole, name if name else self.name - ) - if name_is_truncatable - else (name or self.name), + ( + coercions.expect( + roles.TruncatedLabelRole, name if name else self.name + ) + if name_is_truncatable + else (name or self.name) + ), self.type, # this may actually be ._proxy_key when the key is incoming key=key if key else name if name else self.key, primary_key=self.primary_key, nullable=self.nullable, - _proxies=list(compound_select_cols) - if compound_select_cols - else [self], + _proxies=( + list(compound_select_cols) + if compound_select_cols + else [self] + ), *fk, ) except TypeError as err: @@ -2713,9 +2712,9 @@ def insert_sentinel( return Column( name=name, type_=type_api.INTEGERTYPE if type_ is None else type_, - default=default - if default is not None - else _InsertSentinelColumnDefault(), + default=( + default if default is not None else _InsertSentinelColumnDefault() + ), _omit_from_statements=omit_from_statements, insert_sentinel=True, ) @@ -3160,14 +3159,14 @@ def column(self) -> Column[Any]: return self._resolve_column() @overload - def _resolve_column(self, *, raiseerr: Literal[True] = ...) -> Column[Any]: - ... + def _resolve_column( + self, *, raiseerr: Literal[True] = ... + ) -> Column[Any]: ... @overload def _resolve_column( self, *, raiseerr: bool = ... - ) -> Optional[Column[Any]]: - ... + ) -> Optional[Column[Any]]: ... def _resolve_column( self, *, raiseerr: bool = True @@ -3284,18 +3283,15 @@ def _set_table(self, column: Column[Any], table: Table) -> None: def default_is_sequence( obj: Optional[DefaultGenerator], - ) -> TypeGuard[Sequence]: - ... + ) -> TypeGuard[Sequence]: ... def default_is_clause_element( obj: Optional[DefaultGenerator], - ) -> TypeGuard[ColumnElementColumnDefault]: - ... + ) -> TypeGuard[ColumnElementColumnDefault]: ... def default_is_scalar( obj: Optional[DefaultGenerator], - ) -> TypeGuard[ScalarElementColumnDefault]: - ... + ) -> TypeGuard[ScalarElementColumnDefault]: ... else: default_is_sequence = operator.attrgetter("is_sequence") @@ -3395,21 +3391,18 @@ class ColumnDefault(DefaultGenerator, ABC): @overload def __new__( cls, arg: Callable[..., Any], for_update: bool = ... - ) -> CallableColumnDefault: - ... + ) -> CallableColumnDefault: ... @overload def __new__( cls, arg: ColumnElement[Any], for_update: bool = ... - ) -> ColumnElementColumnDefault: - ... + ) -> ColumnElementColumnDefault: ... # if I return ScalarElementColumnDefault here, which is what's actually # returned, mypy complains that # overloads overlap w/ incompatible return types. @overload - def __new__(cls, arg: object, for_update: bool = ...) -> ColumnDefault: - ... + def __new__(cls, arg: object, for_update: bool = ...) -> ColumnDefault: ... def __new__( cls, arg: Any = None, for_update: bool = False @@ -3551,8 +3544,7 @@ def _arg_is_typed(self) -> bool: class _CallableColumnDefaultProtocol(Protocol): - def __call__(self, context: ExecutionContext) -> Any: - ... 
+ def __call__(self, context: ExecutionContext) -> Any: ... class CallableColumnDefault(ColumnDefault): @@ -4185,8 +4177,7 @@ class ColumnCollectionMixin: def _set_parent_with_dispatch( self, parent: SchemaEventTarget, **kw: Any - ) -> None: - ... + ) -> None: ... def __init__( self, @@ -4399,9 +4390,9 @@ def _copy( dialect_option_key, dialect_option_value, ) in dialect_options.items(): - constraint_kwargs[ - dialect_name + "_" + dialect_option_key - ] = dialect_option_value + constraint_kwargs[dialect_name + "_" + dialect_option_key] = ( + dialect_option_value + ) assert isinstance(self.parent, Table) c = self.__class__( @@ -4822,10 +4813,12 @@ def _copy( [ x._get_colspec( schema=schema, - table_name=target_table.name - if target_table is not None - and x._table_key() == x.parent.table.key - else None, + table_name=( + target_table.name + if target_table is not None + and x._table_key() == x.parent.table.key + else None + ), _is_copy=True, ) for x in self.elements @@ -5489,9 +5482,9 @@ def __init__( self.info = info self._schemas: Set[str] = set() self._sequences: Dict[str, Sequence] = {} - self._fk_memos: Dict[ - Tuple[str, Optional[str]], List[ForeignKey] - ] = collections.defaultdict(list) + self._fk_memos: Dict[Tuple[str, Optional[str]], List[ForeignKey]] = ( + collections.defaultdict(list) + ) tables: util.FacadeDict[str, Table] """A dictionary of :class:`_schema.Table` diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index ee33d46616a..65978f6646c 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -154,12 +154,10 @@ class _JoinTargetProtocol(Protocol): @util.ro_non_memoized_property - def _from_objects(self) -> List[FromClause]: - ... + def _from_objects(self) -> List[FromClause]: ... @util.ro_non_memoized_property - def entity_namespace(self) -> _EntityNamespace: - ... + def entity_namespace(self) -> _EntityNamespace: ... _JoinTargetElement = Union["FromClause", _JoinTargetProtocol] @@ -465,9 +463,9 @@ def suffix_with( class HasHints: - _hints: util.immutabledict[ - Tuple[FromClause, str], str - ] = util.immutabledict() + _hints: util.immutabledict[Tuple[FromClause, str], str] = ( + util.immutabledict() + ) _statement_hints: Tuple[Tuple[str, str], ...] = () _has_hints_traverse_internals: _TraverseInternalsType = [ @@ -988,8 +986,7 @@ def _anonymous_fromclause( def self_group( self, against: Optional[OperatorType] = None - ) -> Union[FromGrouping, Self]: - ... + ) -> Union[FromGrouping, Self]: ... 
class NamedFromClause(FromClause): @@ -2256,9 +2253,9 @@ def _generate_columns_plus_names( repeated = False if not c._render_label_in_columns_clause: - effective_name = ( - required_label_name - ) = fallback_label_name = None + effective_name = required_label_name = fallback_label_name = ( + None + ) elif label_style_none: if TYPE_CHECKING: assert is_column_element(c) @@ -2270,9 +2267,9 @@ def _generate_columns_plus_names( assert is_column_element(c) if table_qualified: - required_label_name = ( - effective_name - ) = fallback_label_name = c._tq_label + required_label_name = effective_name = ( + fallback_label_name + ) = c._tq_label else: effective_name = fallback_label_name = c._non_anon_label required_label_name = None @@ -2303,9 +2300,9 @@ def _generate_columns_plus_names( else: fallback_label_name = c._anon_name_label else: - required_label_name = ( - effective_name - ) = fallback_label_name = expr_label + required_label_name = effective_name = ( + fallback_label_name + ) = expr_label if effective_name is not None: if TYPE_CHECKING: @@ -2319,13 +2316,13 @@ def _generate_columns_plus_names( # different column under the same name. apply # disambiguating label if table_qualified: - required_label_name = ( - fallback_label_name - ) = c._anon_tq_label + required_label_name = fallback_label_name = ( + c._anon_tq_label + ) else: - required_label_name = ( - fallback_label_name - ) = c._anon_name_label + required_label_name = fallback_label_name = ( + c._anon_name_label + ) if anon_for_dupe_key and required_label_name in names: # here, c._anon_tq_label is definitely unique to @@ -2340,14 +2337,14 @@ def _generate_columns_plus_names( # subsequent occurrences of the column so that the # original stays non-ambiguous if table_qualified: - required_label_name = ( - fallback_label_name - ) = c._dedupe_anon_tq_label_idx(dedupe_hash) + required_label_name = fallback_label_name = ( + c._dedupe_anon_tq_label_idx(dedupe_hash) + ) dedupe_hash += 1 else: - required_label_name = ( - fallback_label_name - ) = c._dedupe_anon_label_idx(dedupe_hash) + required_label_name = fallback_label_name = ( + c._dedupe_anon_label_idx(dedupe_hash) + ) dedupe_hash += 1 repeated = True else: @@ -2356,14 +2353,14 @@ def _generate_columns_plus_names( # same column under the same name. apply the "dedupe" # label so that the original stays non-ambiguous if table_qualified: - required_label_name = ( - fallback_label_name - ) = c._dedupe_anon_tq_label_idx(dedupe_hash) + required_label_name = fallback_label_name = ( + c._dedupe_anon_tq_label_idx(dedupe_hash) + ) dedupe_hash += 1 else: - required_label_name = ( - fallback_label_name - ) = c._dedupe_anon_label_idx(dedupe_hash) + required_label_name = fallback_label_name = ( + c._dedupe_anon_label_idx(dedupe_hash) + ) dedupe_hash += 1 repeated = True else: @@ -2980,12 +2977,12 @@ def __init__(self, name: str, *columns: ColumnClause[Any], **kw: Any): if TYPE_CHECKING: @util.ro_non_memoized_property - def columns(self) -> ReadOnlyColumnCollection[str, ColumnClause[Any]]: - ... + def columns( + self, + ) -> ReadOnlyColumnCollection[str, ColumnClause[Any]]: ... @util.ro_non_memoized_property - def c(self) -> ReadOnlyColumnCollection[str, ColumnClause[Any]]: - ... + def c(self) -> ReadOnlyColumnCollection[str, ColumnClause[Any]]: ... def __str__(self) -> str: if self.schema is not None: @@ -3692,8 +3689,7 @@ def self_group(self, against: Optional[OperatorType] = None) -> Self: if TYPE_CHECKING: - def _ungroup(self) -> _SB: - ... + def _ungroup(self) -> _SB: ... 
# def _generate_columns_plus_names( # self, anon_for_dupe_key: bool @@ -3913,14 +3909,12 @@ def _offset_or_limit_clause( @overload def _offset_or_limit_clause_asint( self, clause: ColumnElement[Any], attrname: str - ) -> NoReturn: - ... + ) -> NoReturn: ... @overload def _offset_or_limit_clause_asint( self, clause: Optional[_OffsetLimitParam], attrname: str - ) -> Optional[int]: - ... + ) -> Optional[int]: ... def _offset_or_limit_clause_asint( self, clause: Optional[ColumnElement[Any]], attrname: str @@ -4485,8 +4479,9 @@ class default_select_compile_options(CacheableOptions): if TYPE_CHECKING: @classmethod - def get_plugin_class(cls, statement: Executable) -> Type[SelectState]: - ... + def get_plugin_class( + cls, statement: Executable + ) -> Type[SelectState]: ... def __init__( self, @@ -5177,21 +5172,17 @@ def _filter_by_zero( @overload def scalar_subquery( self: Select[Tuple[_MAYBE_ENTITY]], - ) -> ScalarSelect[Any]: - ... + ) -> ScalarSelect[Any]: ... @overload def scalar_subquery( self: Select[Tuple[_NOT_ENTITY]], - ) -> ScalarSelect[_NOT_ENTITY]: - ... + ) -> ScalarSelect[_NOT_ENTITY]: ... @overload - def scalar_subquery(self) -> ScalarSelect[Any]: - ... + def scalar_subquery(self) -> ScalarSelect[Any]: ... - def scalar_subquery(self) -> ScalarSelect[Any]: - ... + def scalar_subquery(self) -> ScalarSelect[Any]: ... def filter_by(self, **kwargs: Any) -> Self: r"""apply the given filtering criterion as a WHERE clause @@ -5770,20 +5761,17 @@ def reduce_columns(self, only_synonyms: bool = True) -> Select[Any]: # statically generated** by tools/generate_sel_v1_overloads.py @overload - def with_only_columns(self, __ent0: _TCCA[_T0]) -> Select[Tuple[_T0]]: - ... + def with_only_columns(self, __ent0: _TCCA[_T0]) -> Select[Tuple[_T0]]: ... @overload def with_only_columns( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] - ) -> Select[Tuple[_T0, _T1]]: - ... + ) -> Select[Tuple[_T0, _T1]]: ... @overload def with_only_columns( self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] - ) -> Select[Tuple[_T0, _T1, _T2]]: - ... + ) -> Select[Tuple[_T0, _T1, _T2]]: ... @overload def with_only_columns( @@ -5792,8 +5780,7 @@ def with_only_columns( __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], - ) -> Select[Tuple[_T0, _T1, _T2, _T3]]: - ... + ) -> Select[Tuple[_T0, _T1, _T2, _T3]]: ... @overload def with_only_columns( @@ -5803,8 +5790,7 @@ def with_only_columns( __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], - ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4]]: - ... + ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... @overload def with_only_columns( @@ -5815,8 +5801,7 @@ def with_only_columns( __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], - ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: - ... + ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... @overload def with_only_columns( @@ -5828,8 +5813,7 @@ def with_only_columns( __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], - ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: - ... + ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def with_only_columns( @@ -5842,8 +5826,7 @@ def with_only_columns( __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], __ent7: _TCCA[_T7], - ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: - ... + ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... 
# END OVERLOADED FUNCTIONS self.with_only_columns @@ -5853,8 +5836,7 @@ def with_only_columns( *entities: _ColumnsClauseArgument[Any], maintain_column_froms: bool = False, **__kw: Any, - ) -> Select[Any]: - ... + ) -> Select[Any]: ... @_generative def with_only_columns( @@ -6521,14 +6503,12 @@ def where(self, crit: _ColumnExpressionArgument[bool]) -> Self: @overload def self_group( self: ScalarSelect[Any], against: Optional[OperatorType] = None - ) -> ScalarSelect[Any]: - ... + ) -> ScalarSelect[Any]: ... @overload def self_group( self: ColumnElement[Any], against: Optional[OperatorType] = None - ) -> ColumnElement[Any]: - ... + ) -> ColumnElement[Any]: ... def self_group( self, against: Optional[OperatorType] = None @@ -6537,8 +6517,7 @@ def self_group( if TYPE_CHECKING: - def _ungroup(self) -> Select[Any]: - ... + def _ungroup(self) -> Select[Any]: ... @_generative def correlate( diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index 0963e8ed200..2ba63cdbbf1 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -80,7 +80,6 @@ class HasExpressionLookup(TypeEngineMixin): - """Mixin expression adaptations based on lookup tables. These rules are currently used by the numeric, integer and date types @@ -119,7 +118,6 @@ def _adapt_expression( class Concatenable(TypeEngineMixin): - """A mixin that marks a type as supporting 'concatenation', typically strings.""" @@ -168,7 +166,6 @@ def __getitem__(self, index): class String(Concatenable, TypeEngine[str]): - """The base for all string and character types. In SQL, corresponds to VARCHAR. @@ -255,7 +252,6 @@ def get_dbapi_type(self, dbapi): class Text(String): - """A variably sized string type. In SQL, usually corresponds to CLOB or TEXT. In general, TEXT objects @@ -268,7 +264,6 @@ class Text(String): class Unicode(String): - """A variable length Unicode string type. The :class:`.Unicode` type is a :class:`.String` subclass that assumes @@ -322,7 +317,6 @@ def __init__(self, length=None, **kwargs): class UnicodeText(Text): - """An unbounded-length Unicode string type. See :class:`.Unicode` for details on the unicode @@ -347,7 +341,6 @@ def __init__(self, length=None, **kwargs): class Integer(HasExpressionLookup, TypeEngine[int]): - """A type for ``int`` integers.""" __visit_name__ = "integer" @@ -355,8 +348,7 @@ class Integer(HasExpressionLookup, TypeEngine[int]): if TYPE_CHECKING: @util.ro_memoized_property - def _type_affinity(self) -> Type[Integer]: - ... + def _type_affinity(self) -> Type[Integer]: ... def get_dbapi_type(self, dbapi): return dbapi.NUMBER @@ -397,7 +389,6 @@ def _expression_adaptations(self): class SmallInteger(Integer): - """A type for smaller ``int`` integers. Typically generates a ``SMALLINT`` in DDL, and otherwise acts like @@ -409,7 +400,6 @@ class SmallInteger(Integer): class BigInteger(Integer): - """A type for bigger ``int`` integers. Typically generates a ``BIGINT`` in DDL, and otherwise acts like @@ -424,7 +414,6 @@ class BigInteger(Integer): class Numeric(HasExpressionLookup, TypeEngine[_N]): - """Base for non-integer numeric types, such as ``NUMERIC``, ``FLOAT``, ``DECIMAL``, and other variants. @@ -461,8 +450,7 @@ class Numeric(HasExpressionLookup, TypeEngine[_N]): if TYPE_CHECKING: @util.ro_memoized_property - def _type_affinity(self) -> Type[Numeric[_N]]: - ... + def _type_affinity(self) -> Type[Numeric[_N]]: ... 
_default_decimal_return_scale = 10 @@ -473,8 +461,7 @@ def __init__( scale: Optional[int] = ..., decimal_return_scale: Optional[int] = ..., asdecimal: Literal[True] = ..., - ): - ... + ): ... @overload def __init__( @@ -483,8 +470,7 @@ def __init__( scale: Optional[int] = ..., decimal_return_scale: Optional[int] = ..., asdecimal: Literal[False] = ..., - ): - ... + ): ... def __init__( self, @@ -580,9 +566,11 @@ def result_processor(self, dialect, coltype): # we're a "numeric", DBAPI returns floats, convert. return processors.to_decimal_processor_factory( decimal.Decimal, - self.scale - if self.scale is not None - else self._default_decimal_return_scale, + ( + self.scale + if self.scale is not None + else self._default_decimal_return_scale + ), ) else: if dialect.supports_native_decimal: @@ -635,8 +623,7 @@ def __init__( precision: Optional[int] = ..., asdecimal: Literal[False] = ..., decimal_return_scale: Optional[int] = ..., - ): - ... + ): ... @overload def __init__( @@ -644,8 +631,7 @@ def __init__( precision: Optional[int] = ..., asdecimal: Literal[True] = ..., decimal_return_scale: Optional[int] = ..., - ): - ... + ): ... def __init__( self: Float[_N], @@ -753,7 +739,6 @@ def process(value): class DateTime( _RenderISO8601NoT, HasExpressionLookup, TypeEngine[dt.datetime] ): - """A type for ``datetime.datetime()`` objects. Date and time types return objects from the Python ``datetime`` @@ -817,7 +802,6 @@ def _expression_adaptations(self): class Date(_RenderISO8601NoT, HasExpressionLookup, TypeEngine[dt.date]): - """A type for ``datetime.date()`` objects.""" __visit_name__ = "date" @@ -858,7 +842,6 @@ def _expression_adaptations(self): class Time(_RenderISO8601NoT, HasExpressionLookup, TypeEngine[dt.time]): - """A type for ``datetime.time()`` objects.""" __visit_name__ = "time" @@ -895,7 +878,6 @@ def literal_processor(self, dialect): class _Binary(TypeEngine[bytes]): - """Define base behavior for binary types.""" def __init__(self, length: Optional[int] = None): @@ -959,7 +941,6 @@ def get_dbapi_type(self, dbapi): class LargeBinary(_Binary): - """A type for large binary byte data. The :class:`.LargeBinary` type corresponds to a large and/or unlengthed @@ -983,7 +964,6 @@ def __init__(self, length: Optional[int] = None): class SchemaType(SchemaEventTarget, TypeEngineMixin): - """Add capabilities to a type which allow for schema-level DDL to be associated with a type. @@ -1121,12 +1101,12 @@ def copy(self, **kw): ) @overload - def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: - ... + def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ... @overload - def adapt(self, cls: Type[TypeEngineMixin], **kw: Any) -> TypeEngine[Any]: - ... + def adapt( + self, cls: Type[TypeEngineMixin], **kw: Any + ) -> TypeEngine[Any]: ... def adapt( self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any @@ -1886,7 +1866,6 @@ def compare_values(self, x, y): class Boolean(SchemaType, Emulated, TypeEngine[bool]): - """A bool datatype. :class:`.Boolean` typically uses BOOLEAN or SMALLINT on the DDL side, @@ -2044,7 +2023,6 @@ def _type_affinity(self) -> Type[Interval]: class Interval(Emulated, _AbstractInterval, TypeDecorator[dt.timedelta]): - """A type for ``datetime.timedelta()`` objects. The Interval type deals with ``datetime.timedelta`` objects. 
In @@ -2545,9 +2523,11 @@ def _setup_getitem(self, index): index, expr=self.expr, operator=operators.json_getitem_op, - bindparam_type=JSON.JSONIntIndexType - if isinstance(index, int) - else JSON.JSONStrIndexType, + bindparam_type=( + JSON.JSONIntIndexType + if isinstance(index, int) + else JSON.JSONStrIndexType + ), ) operator = operators.json_getitem_op @@ -2869,7 +2849,6 @@ class Comparator( Indexable.Comparator[Sequence[Any]], Concatenable.Comparator[Sequence[Any]], ): - """Define comparison operations for :class:`_types.ARRAY`. More operators are available on the dialect-specific form @@ -3144,14 +3123,16 @@ def _apply_item_processor(self, arr, itemproc, dim, collection_callable): return collection_callable(arr) else: return collection_callable( - self._apply_item_processor( - x, - itemproc, - dim - 1 if dim is not None else None, - collection_callable, + ( + self._apply_item_processor( + x, + itemproc, + dim - 1 if dim is not None else None, + collection_callable, + ) + if x is not None + else None ) - if x is not None - else None for x in arr ) @@ -3202,7 +3183,6 @@ def result_processor(self, dialect, coltype): class REAL(Float[_N]): - """The SQL REAL type. .. seealso:: @@ -3215,7 +3195,6 @@ class REAL(Float[_N]): class FLOAT(Float[_N]): - """The SQL FLOAT type. .. seealso:: @@ -3256,7 +3235,6 @@ class DOUBLE_PRECISION(Double[_N]): class NUMERIC(Numeric[_N]): - """The SQL NUMERIC type. .. seealso:: @@ -3269,7 +3247,6 @@ class NUMERIC(Numeric[_N]): class DECIMAL(Numeric[_N]): - """The SQL DECIMAL type. .. seealso:: @@ -3282,7 +3259,6 @@ class DECIMAL(Numeric[_N]): class INTEGER(Integer): - """The SQL INT or INTEGER type. .. seealso:: @@ -3298,7 +3274,6 @@ class INTEGER(Integer): class SMALLINT(SmallInteger): - """The SQL SMALLINT type. .. seealso:: @@ -3311,7 +3286,6 @@ class SMALLINT(SmallInteger): class BIGINT(BigInteger): - """The SQL BIGINT type. .. seealso:: @@ -3324,7 +3298,6 @@ class BIGINT(BigInteger): class TIMESTAMP(DateTime): - """The SQL TIMESTAMP type. :class:`_types.TIMESTAMP` datatypes have support for timezone @@ -3354,35 +3327,30 @@ def get_dbapi_type(self, dbapi): class DATETIME(DateTime): - """The SQL DATETIME type.""" __visit_name__ = "DATETIME" class DATE(Date): - """The SQL DATE type.""" __visit_name__ = "DATE" class TIME(Time): - """The SQL TIME type.""" __visit_name__ = "TIME" class TEXT(Text): - """The SQL TEXT type.""" __visit_name__ = "TEXT" class CLOB(Text): - """The CLOB type. This type is found in Oracle and Informix. @@ -3392,63 +3360,54 @@ class CLOB(Text): class VARCHAR(String): - """The SQL VARCHAR type.""" __visit_name__ = "VARCHAR" class NVARCHAR(Unicode): - """The SQL NVARCHAR type.""" __visit_name__ = "NVARCHAR" class CHAR(String): - """The SQL CHAR type.""" __visit_name__ = "CHAR" class NCHAR(Unicode): - """The SQL NCHAR type.""" __visit_name__ = "NCHAR" class BLOB(LargeBinary): - """The SQL BLOB type.""" __visit_name__ = "BLOB" class BINARY(_Binary): - """The SQL BINARY type.""" __visit_name__ = "BINARY" class VARBINARY(_Binary): - """The SQL VARBINARY type.""" __visit_name__ = "VARBINARY" class BOOLEAN(Boolean): - """The SQL BOOLEAN type.""" __visit_name__ = "BOOLEAN" class NullType(TypeEngine[None]): - """An unknown type. :class:`.NullType` is used as a default type for those cases where @@ -3533,7 +3492,6 @@ class MatchType(Boolean): class Uuid(Emulated, TypeEngine[_UUID_RETURN]): - """Represent a database agnostic UUID datatype. 
For backends that have no "native" UUID datatype, the value will @@ -3593,16 +3551,14 @@ def __init__( self: Uuid[_python_UUID], as_uuid: Literal[True] = ..., native_uuid: bool = ..., - ): - ... + ): ... @overload def __init__( self: Uuid[str], as_uuid: Literal[False] = ..., native_uuid: bool = ..., - ): - ... + ): ... def __init__(self, as_uuid: bool = True, native_uuid: bool = True): """Construct a :class:`_sqltypes.Uuid` type. @@ -3725,7 +3681,6 @@ def process(value): class UUID(Uuid[_UUID_RETURN], type_api.NativeForEmulated): - """Represent the SQL UUID type. This is the SQL-native form of the :class:`_types.Uuid` database agnostic @@ -3749,12 +3704,10 @@ class UUID(Uuid[_UUID_RETURN], type_api.NativeForEmulated): __visit_name__ = "UUID" @overload - def __init__(self: UUID[_python_UUID], as_uuid: Literal[True] = ...): - ... + def __init__(self: UUID[_python_UUID], as_uuid: Literal[True] = ...): ... @overload - def __init__(self: UUID[str], as_uuid: Literal[False] = ...): - ... + def __init__(self: UUID[str], as_uuid: Literal[False] = ...): ... def __init__(self, as_uuid: bool = True): """Construct a :class:`_sqltypes.UUID` type. diff --git a/lib/sqlalchemy/sql/traversals.py b/lib/sqlalchemy/sql/traversals.py index 6c44d52175e..3ca3caf9e2c 100644 --- a/lib/sqlalchemy/sql/traversals.py +++ b/lib/sqlalchemy/sql/traversals.py @@ -80,16 +80,13 @@ class HasShallowCopy(HasTraverseInternals): if typing.TYPE_CHECKING: - def _generated_shallow_copy_traversal(self, other: Self) -> None: - ... + def _generated_shallow_copy_traversal(self, other: Self) -> None: ... def _generated_shallow_from_dict_traversal( self, d: Dict[str, Any] - ) -> None: - ... + ) -> None: ... - def _generated_shallow_to_dict_traversal(self) -> Dict[str, Any]: - ... + def _generated_shallow_to_dict_traversal(self) -> Dict[str, Any]: ... @classmethod def _generate_shallow_copy( @@ -312,9 +309,11 @@ def visit_dml_ordered_values( # sequence of 2-tuples return [ ( - clone(key, **kw) - if hasattr(key, "__clause_element__") - else key, + ( + clone(key, **kw) + if hasattr(key, "__clause_element__") + else key + ), clone(value, **kw), ) for key, value in element @@ -336,9 +335,11 @@ def visit_dml_multi_values( def copy(elem): if isinstance(elem, (list, tuple)): return [ - clone(value, **kw) - if hasattr(value, "__clause_element__") - else value + ( + clone(value, **kw) + if hasattr(value, "__clause_element__") + else value + ) for value in elem ] elif isinstance(elem, dict): diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 4143a15a886..414b91ab4da 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -83,23 +83,19 @@ class _NoValueInList(Enum): class _LiteralProcessorType(Protocol[_T_co]): - def __call__(self, value: Any) -> str: - ... + def __call__(self, value: Any) -> str: ... class _BindProcessorType(Protocol[_T_con]): - def __call__(self, value: Optional[_T_con]) -> Any: - ... + def __call__(self, value: Optional[_T_con]) -> Any: ... class _ResultProcessorType(Protocol[_T_co]): - def __call__(self, value: Any) -> Optional[_T_co]: - ... + def __call__(self, value: Any) -> Optional[_T_co]: ... class _SentinelProcessorType(Protocol[_T_co]): - def __call__(self, value: Any) -> Optional[_T_co]: - ... + def __call__(self, value: Any) -> Optional[_T_co]: ... 
class _BaseTypeMemoDict(TypedDict): @@ -115,8 +111,9 @@ class _TypeMemoDict(_BaseTypeMemoDict, total=False): class _ComparatorFactory(Protocol[_T]): - def __call__(self, expr: ColumnElement[_T]) -> TypeEngine.Comparator[_T]: - ... + def __call__( + self, expr: ColumnElement[_T] + ) -> TypeEngine.Comparator[_T]: ... class TypeEngine(Visitable, Generic[_T]): @@ -300,9 +297,9 @@ def _adapt_expression( """ - _variant_mapping: util.immutabledict[ - str, TypeEngine[Any] - ] = util.EMPTY_DICT + _variant_mapping: util.immutabledict[str, TypeEngine[Any]] = ( + util.EMPTY_DICT + ) def evaluates_none(self) -> Self: """Return a copy of this type which has the @@ -1002,9 +999,11 @@ def _static_cache_key( return (self.__class__,) + tuple( ( k, - self.__dict__[k]._static_cache_key - if isinstance(self.__dict__[k], TypeEngine) - else self.__dict__[k], + ( + self.__dict__[k]._static_cache_key + if isinstance(self.__dict__[k], TypeEngine) + else self.__dict__[k] + ), ) for k in names if k in self.__dict__ @@ -1013,12 +1012,12 @@ def _static_cache_key( ) @overload - def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: - ... + def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ... @overload - def adapt(self, cls: Type[TypeEngineMixin], **kw: Any) -> TypeEngine[Any]: - ... + def adapt( + self, cls: Type[TypeEngineMixin], **kw: Any + ) -> TypeEngine[Any]: ... def adapt( self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any @@ -1111,26 +1110,21 @@ class TypeEngineMixin: @util.memoized_property def _static_cache_key( self, - ) -> Union[CacheConst, Tuple[Any, ...]]: - ... + ) -> Union[CacheConst, Tuple[Any, ...]]: ... @overload - def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: - ... + def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ... @overload def adapt( self, cls: Type[TypeEngineMixin], **kw: Any - ) -> TypeEngine[Any]: - ... + ) -> TypeEngine[Any]: ... def adapt( self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any - ) -> TypeEngine[Any]: - ... + ) -> TypeEngine[Any]: ... - def dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]: - ... + def dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]: ... class ExternalType(TypeEngineMixin): @@ -1432,12 +1426,12 @@ def adapt_to_emulated( return super().adapt(impltype, **kw) @overload - def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: - ... + def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ... @overload - def adapt(self, cls: Type[TypeEngineMixin], **kw: Any) -> TypeEngine[Any]: - ... + def adapt( + self, cls: Type[TypeEngineMixin], **kw: Any + ) -> TypeEngine[Any]: ... def adapt( self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any @@ -2283,13 +2277,13 @@ def __init__(self, *arg: Any, **kw: Any): @overload -def to_instance(typeobj: Union[Type[_TE], _TE], *arg: Any, **kw: Any) -> _TE: - ... +def to_instance( + typeobj: Union[Type[_TE], _TE], *arg: Any, **kw: Any +) -> _TE: ... @overload -def to_instance(typeobj: None, *arg: Any, **kw: Any) -> TypeEngine[None]: - ... +def to_instance(typeobj: None, *arg: Any, **kw: Any) -> TypeEngine[None]: ... 
def to_instance( diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index 617bf56a6a4..4a35bb217ea 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -350,9 +350,9 @@ def find_tables( ] = _visitors["lateral"] = tables.append if include_crud: - _visitors["insert"] = _visitors["update"] = _visitors[ - "delete" - ] = lambda ent: tables.append(ent.table) + _visitors["insert"] = _visitors["update"] = _visitors["delete"] = ( + lambda ent: tables.append(ent.table) + ) if check_columns: @@ -878,8 +878,7 @@ def reduce_columns( columns: Iterable[ColumnElement[Any]], *clauses: Optional[ClauseElement], **kw: bool, -) -> Sequence[ColumnElement[Any]]: - ... +) -> Sequence[ColumnElement[Any]]: ... @overload @@ -887,8 +886,7 @@ def reduce_columns( columns: _SelectIterable, *clauses: Optional[ClauseElement], **kw: bool, -) -> Sequence[Union[ColumnElement[Any], TextClause]]: - ... +) -> Sequence[Union[ColumnElement[Any], TextClause]]: ... def reduce_columns( @@ -1099,8 +1097,7 @@ def __init__( if TYPE_CHECKING: @overload - def traverse(self, obj: Literal[None]) -> None: - ... + def traverse(self, obj: Literal[None]) -> None: ... # note this specializes the ReplacingExternalTraversal.traverse() # method to state @@ -1111,13 +1108,11 @@ def traverse(self, obj: Literal[None]) -> None: # FromClause but Mypy is not accepting those as compatible with # the base ReplacingExternalTraversal @overload - def traverse(self, obj: _ET) -> _ET: - ... + def traverse(self, obj: _ET) -> _ET: ... def traverse( self, obj: Optional[ExternallyTraversible] - ) -> Optional[ExternallyTraversible]: - ... + ) -> Optional[ExternallyTraversible]: ... def _corresponding_column( self, col, require_embedded, _seen=util.EMPTY_SET @@ -1219,23 +1214,18 @@ def replace( class _ColumnLookup(Protocol): @overload - def __getitem__(self, key: None) -> None: - ... + def __getitem__(self, key: None) -> None: ... @overload - def __getitem__(self, key: ColumnClause[Any]) -> ColumnClause[Any]: - ... + def __getitem__(self, key: ColumnClause[Any]) -> ColumnClause[Any]: ... @overload - def __getitem__(self, key: ColumnElement[Any]) -> ColumnElement[Any]: - ... + def __getitem__(self, key: ColumnElement[Any]) -> ColumnElement[Any]: ... @overload - def __getitem__(self, key: _ET) -> _ET: - ... + def __getitem__(self, key: _ET) -> _ET: ... - def __getitem__(self, key: Any) -> Any: - ... + def __getitem__(self, key: Any) -> Any: ... class ColumnAdapter(ClauseAdapter): @@ -1333,12 +1323,10 @@ def wrap(self, adapter): return ac @overload - def traverse(self, obj: Literal[None]) -> None: - ... + def traverse(self, obj: Literal[None]) -> None: ... @overload - def traverse(self, obj: _ET) -> _ET: - ... + def traverse(self, obj: _ET) -> _ET: ... def traverse( self, obj: Optional[ExternallyTraversible] @@ -1353,8 +1341,7 @@ def chain(self, visitor: ExternalTraversal) -> ColumnAdapter: if TYPE_CHECKING: @property - def visitor_iterator(self) -> Iterator[ColumnAdapter]: - ... + def visitor_iterator(self) -> Iterator[ColumnAdapter]: ... adapt_clause = traverse adapt_list = ClauseAdapter.copy_and_process diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py index 2f06ae71a06..d1cd7a939f6 100644 --- a/lib/sqlalchemy/sql/visitors.py +++ b/lib/sqlalchemy/sql/visitors.py @@ -72,8 +72,7 @@ class _CompilerDispatchType(Protocol): - def __call__(_self, self: Visitable, visitor: Any, **kw: Any) -> Any: - ... + def __call__(_self, self: Visitable, visitor: Any, **kw: Any) -> Any: ... 
class Visitable: @@ -100,8 +99,7 @@ class Visitable: if typing.TYPE_CHECKING: - def _compiler_dispatch(self, visitor: Any, **kw: Any) -> str: - ... + def _compiler_dispatch(self, visitor: Any, **kw: Any) -> str: ... def __init_subclass__(cls) -> None: if "__visit_name__" in cls.__dict__: @@ -493,8 +491,7 @@ def get_children( class _InternalTraversalDispatchType(Protocol): - def __call__(s, self: object, visitor: HasTraversalDispatch) -> Any: - ... + def __call__(s, self: object, visitor: HasTraversalDispatch) -> Any: ... class HasTraversalDispatch: @@ -602,13 +599,11 @@ class ExternallyTraversible(HasTraverseInternals, Visitable): if typing.TYPE_CHECKING: - def _annotate(self, values: _AnnotationDict) -> Self: - ... + def _annotate(self, values: _AnnotationDict) -> Self: ... def get_children( self, *, omit_attrs: Tuple[str, ...] = (), **kw: Any - ) -> Iterable[ExternallyTraversible]: - ... + ) -> Iterable[ExternallyTraversible]: ... def _clone(self, **kw: Any) -> Self: """clone this element""" @@ -638,13 +633,11 @@ def _copy_internals( class _CloneCallableType(Protocol): - def __call__(self, element: _ET, **kw: Any) -> _ET: - ... + def __call__(self, element: _ET, **kw: Any) -> _ET: ... class _TraverseTransformCallableType(Protocol[_ET]): - def __call__(self, element: _ET, **kw: Any) -> Optional[_ET]: - ... + def __call__(self, element: _ET, **kw: Any) -> Optional[_ET]: ... _ExtT = TypeVar("_ExtT", bound="ExternalTraversal") @@ -680,12 +673,12 @@ def iterate( return iterate(obj, self.__traverse_options__) @overload - def traverse(self, obj: Literal[None]) -> None: - ... + def traverse(self, obj: Literal[None]) -> None: ... @overload - def traverse(self, obj: ExternallyTraversible) -> ExternallyTraversible: - ... + def traverse( + self, obj: ExternallyTraversible + ) -> ExternallyTraversible: ... def traverse( self, obj: Optional[ExternallyTraversible] @@ -746,12 +739,12 @@ def copy_and_process( return [self.traverse(x) for x in list_] @overload - def traverse(self, obj: Literal[None]) -> None: - ... + def traverse(self, obj: Literal[None]) -> None: ... @overload - def traverse(self, obj: ExternallyTraversible) -> ExternallyTraversible: - ... + def traverse( + self, obj: ExternallyTraversible + ) -> ExternallyTraversible: ... def traverse( self, obj: Optional[ExternallyTraversible] @@ -786,12 +779,12 @@ def replace( return None @overload - def traverse(self, obj: Literal[None]) -> None: - ... + def traverse(self, obj: Literal[None]) -> None: ... @overload - def traverse(self, obj: ExternallyTraversible) -> ExternallyTraversible: - ... + def traverse( + self, obj: ExternallyTraversible + ) -> ExternallyTraversible: ... def traverse( self, obj: Optional[ExternallyTraversible] @@ -866,8 +859,7 @@ def traverse_using( iterator: Iterable[ExternallyTraversible], obj: Literal[None], visitors: Mapping[str, _TraverseCallableType[Any]], -) -> None: - ... +) -> None: ... @overload @@ -875,8 +867,7 @@ def traverse_using( iterator: Iterable[ExternallyTraversible], obj: ExternallyTraversible, visitors: Mapping[str, _TraverseCallableType[Any]], -) -> ExternallyTraversible: - ... +) -> ExternallyTraversible: ... def traverse_using( @@ -920,8 +911,7 @@ def traverse( obj: Literal[None], opts: Mapping[str, Any], visitors: Mapping[str, _TraverseCallableType[Any]], -) -> None: - ... +) -> None: ... @overload @@ -929,8 +919,7 @@ def traverse( obj: ExternallyTraversible, opts: Mapping[str, Any], visitors: Mapping[str, _TraverseCallableType[Any]], -) -> ExternallyTraversible: - ... +) -> ExternallyTraversible: ... 
def traverse( @@ -975,8 +964,7 @@ def cloned_traverse( obj: Literal[None], opts: Mapping[str, Any], visitors: Mapping[str, _TraverseCallableType[Any]], -) -> None: - ... +) -> None: ... # a bit of controversy here, as the clone of the lead element @@ -988,8 +976,7 @@ def cloned_traverse( obj: _ET, opts: Mapping[str, Any], visitors: Mapping[str, _TraverseCallableType[Any]], -) -> _ET: - ... +) -> _ET: ... def cloned_traverse( @@ -1088,8 +1075,7 @@ def replacement_traverse( obj: Literal[None], opts: Mapping[str, Any], replace: _TraverseTransformCallableType[Any], -) -> None: - ... +) -> None: ... @overload @@ -1097,8 +1083,7 @@ def replacement_traverse( obj: _CE, opts: Mapping[str, Any], replace: _TraverseTransformCallableType[Any], -) -> _CE: - ... +) -> _CE: ... @overload @@ -1106,8 +1091,7 @@ def replacement_traverse( obj: ExternallyTraversible, opts: Mapping[str, Any], replace: _TraverseTransformCallableType[Any], -) -> ExternallyTraversible: - ... +) -> ExternallyTraversible: ... def replacement_traverse( diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py index e061f269a85..ae4d335a960 100644 --- a/lib/sqlalchemy/testing/assertsql.py +++ b/lib/sqlalchemy/testing/assertsql.py @@ -88,9 +88,9 @@ def _compile_dialect(self, execute_observed): dialect.supports_default_metavalue = True if self.enable_returning: - dialect.insert_returning = ( - dialect.update_returning - ) = dialect.delete_returning = True + dialect.insert_returning = dialect.update_returning = ( + dialect.delete_returning + ) = True dialect.use_insertmanyvalues = True dialect.supports_multivalues_insert = True dialect.update_returning_multifrom = True diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py index ed24851df04..e2623ead58d 100644 --- a/lib/sqlalchemy/testing/config.py +++ b/lib/sqlalchemy/testing/config.py @@ -177,8 +177,7 @@ def __init__(self, case, argname, case_names): if typing.TYPE_CHECKING: - def __getattr__(self, key: str) -> bool: - ... + def __getattr__(self, key: str) -> bool: ... @property def name(self): @@ -269,9 +268,11 @@ def go(self, request): else: argname = argname_or_fn cases_plus_limitations = [ - entry - if (isinstance(entry, tuple) and len(entry) == 2) - else (entry, None) + ( + entry + if (isinstance(entry, tuple) and len(entry) == 2) + else (entry, None) + ) for entry in cases ] @@ -280,9 +281,11 @@ def go(self, request): ) return combinations( *[ - (variation._name, variation, limitation) - if limitation is not None - else (variation._name, variation) + ( + (variation._name, variation, limitation) + if limitation is not None + else (variation._name, variation) + ) for variation, (case, limitation) in zip( variations, cases_plus_limitations ) diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py index 65b182b9e07..e055d99d26f 100644 --- a/lib/sqlalchemy/testing/engines.py +++ b/lib/sqlalchemy/testing/engines.py @@ -289,8 +289,7 @@ def testing_engine( options: Optional[Dict[str, Any]] = None, asyncio: Literal[False] = False, transfer_staticpool: bool = False, -) -> Engine: - ... +) -> Engine: ... @typing.overload @@ -299,8 +298,7 @@ def testing_engine( options: Optional[Dict[str, Any]] = None, asyncio: Literal[True] = True, transfer_staticpool: bool = False, -) -> AsyncEngine: - ... +) -> AsyncEngine: ... 
def testing_engine( diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py index 7dca583f8ec..addc4b75940 100644 --- a/lib/sqlalchemy/testing/exclusions.py +++ b/lib/sqlalchemy/testing/exclusions.py @@ -205,12 +205,12 @@ def _format_description(self, config, negate=False): if negate: bool_ = not negate return self.description % { - "driver": config.db.url.get_driver_name() - if config - else "", - "database": config.db.url.get_backend_name() - if config - else "", + "driver": ( + config.db.url.get_driver_name() if config else "" + ), + "database": ( + config.db.url.get_backend_name() if config else "" + ), "doesnt_support": "doesn't support" if bool_ else "does support", "does_support": "does support" if bool_ else "doesn't support", } diff --git a/lib/sqlalchemy/testing/fixtures/mypy.py b/lib/sqlalchemy/testing/fixtures/mypy.py index 730c7bdc234..149df9f7d49 100644 --- a/lib/sqlalchemy/testing/fixtures/mypy.py +++ b/lib/sqlalchemy/testing/fixtures/mypy.py @@ -86,9 +86,11 @@ def run(path, use_plugin=False, use_cachedir=None): "--config-file", os.path.join( use_cachedir, - "sqla_mypy_config.cfg" - if use_plugin - else "plain_mypy_config.cfg", + ( + "sqla_mypy_config.cfg" + if use_plugin + else "plain_mypy_config.cfg" + ), ), ] @@ -208,9 +210,11 @@ def _collect_messages(self, path): # skip first character which could be capitalized # "List item x not found" type of message expected_msg = expected_msg[0] + re.sub( - r"\b(List|Tuple|Dict|Set)\b" - if is_type - else r"\b(List|Tuple|Dict|Set|Type)\b", + ( + r"\b(List|Tuple|Dict|Set)\b" + if is_type + else r"\b(List|Tuple|Dict|Set|Type)\b" + ), lambda m: m.group(1).lower(), expected_msg[1:], ) diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py index 2752813515a..b63e06359c5 100644 --- a/lib/sqlalchemy/testing/plugin/pytestplugin.py +++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py @@ -669,9 +669,9 @@ def mark_base_test_class(self): "i": lambda obj: obj, "r": repr, "s": str, - "n": lambda obj: obj.__name__ - if hasattr(obj, "__name__") - else type(obj).__name__, + "n": lambda obj: ( + obj.__name__ if hasattr(obj, "__name__") else type(obj).__name__ + ), } def combinations(self, *arg_sets, **kw): diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index cc30945cab6..8de60e43dc4 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -492,9 +492,11 @@ def test_insert_w_floats( t.c.value, sort_by_parameter_order=bool(sort_by_parameter_order), ), - [{"value": value} for i in range(10)] - if multiple_rows - else {"value": value}, + ( + [{"value": value} for i in range(10)] + if multiple_rows + else {"value": value} + ), ) if multiple_rows: @@ -596,9 +598,11 @@ def test_imv_returning_datatypes( t.c.value, sort_by_parameter_order=bool(sort_by_parameter_order), ), - [{"value": value} for i in range(10)] - if multiple_rows - else {"value": value}, + ( + [{"value": value} for i in range(10)] + if multiple_rows + else {"value": value} + ), ) if multiple_rows: diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index f0d4dca1c26..f257d2fcbc8 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -1090,9 +1090,9 @@ def fk( "referred_columns": ref_col, "name": name, "options": mock.ANY, - "referred_schema": ref_schema - if ref_schema is not None - 
else tt(), + "referred_schema": ( + ref_schema if ref_schema is not None else tt() + ), "referred_table": ref_table, "comment": comment, } diff --git a/lib/sqlalchemy/testing/suite/test_update_delete.py b/lib/sqlalchemy/testing/suite/test_update_delete.py index a46d8fad87e..fd4757f9a4a 100644 --- a/lib/sqlalchemy/testing/suite/test_update_delete.py +++ b/lib/sqlalchemy/testing/suite/test_update_delete.py @@ -93,9 +93,11 @@ def test_update_returning(self, connection, criteria): eq_( connection.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (2, "d2_new"), (3, "d3")] - if criteria.rows - else [(1, "d1"), (2, "d2"), (3, "d3")], + ( + [(1, "d1"), (2, "d2_new"), (3, "d3")] + if criteria.rows + else [(1, "d1"), (2, "d2"), (3, "d3")] + ), ) @testing.variation("criteria", ["rows", "norows", "emptyin"]) @@ -126,9 +128,11 @@ def test_delete_returning(self, connection, criteria): eq_( connection.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (3, "d3")] - if criteria.rows - else [(1, "d1"), (2, "d2"), (3, "d3")], + ( + [(1, "d1"), (3, "d3")] + if criteria.rows + else [(1, "d1"), (2, "d2"), (3, "d3")] + ), ) diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index 615577b742a..e3a8ad834a5 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -227,12 +227,10 @@ def update(self, value: Dict[str, _T]) -> None: self._data.update(value) @overload - def get(self, key: str) -> Optional[_T]: - ... + def get(self, key: str) -> Optional[_T]: ... @overload - def get(self, key: str, default: Union[_DT, _T]) -> Union[_DT, _T]: - ... + def get(self, key: str, default: Union[_DT, _T]) -> Union[_DT, _T]: ... def get( self, key: str, default: Optional[Union[_DT, _T]] = None @@ -520,12 +518,10 @@ def _inc_counter(self): return self._counter @overload - def get(self, key: _KT) -> Optional[_VT]: - ... + def get(self, key: _KT) -> Optional[_VT]: ... @overload - def get(self, key: _KT, default: Union[_VT, _T]) -> Union[_VT, _T]: - ... + def get(self, key: _KT, default: Union[_VT, _T]) -> Union[_VT, _T]: ... def get( self, key: _KT, default: Optional[Union[_VT, _T]] = None @@ -587,13 +583,11 @@ def _manage_size(self) -> None: class _CreateFuncType(Protocol[_T_co]): - def __call__(self) -> _T_co: - ... + def __call__(self) -> _T_co: ... class _ScopeFuncType(Protocol): - def __call__(self) -> Any: - ... + def __call__(self) -> Any: ... class ScopedRegistry(Generic[_T]): diff --git a/lib/sqlalchemy/util/_concurrency_py3k.py b/lib/sqlalchemy/util/_concurrency_py3k.py index 82a349409b0..42ceb8122ee 100644 --- a/lib/sqlalchemy/util/_concurrency_py3k.py +++ b/lib/sqlalchemy/util/_concurrency_py3k.py @@ -33,8 +33,7 @@ class greenlet(Protocol): dead: bool gr_context: Optional[Context] - def __init__(self, fn: Callable[..., Any], driver: greenlet): - ... + def __init__(self, fn: Callable[..., Any], driver: greenlet): ... def throw(self, *arg: Any) -> Any: return None @@ -42,8 +41,7 @@ def throw(self, *arg: Any) -> Any: def switch(self, value: Any) -> Any: return None - def getcurrent() -> greenlet: - ... + def getcurrent() -> greenlet: ... else: from greenlet import getcurrent @@ -85,8 +83,7 @@ def __init__(self, fn: Callable[..., Any], driver: greenlet): def iscoroutine( awaitable: Awaitable[_T_co], - ) -> TypeGuard[Coroutine[Any, Any, _T_co]]: - ... + ) -> TypeGuard[Coroutine[Any, Any, _T_co]]: ... 
else: iscoroutine = asyncio.iscoroutine diff --git a/lib/sqlalchemy/util/_py_collections.py b/lib/sqlalchemy/util/_py_collections.py index 745620d92a2..dfb9af2e13d 100644 --- a/lib/sqlalchemy/util/_py_collections.py +++ b/lib/sqlalchemy/util/_py_collections.py @@ -59,11 +59,9 @@ def __setattr__(self, key: str, value: Any) -> NoReturn: class ImmutableDictBase(ReadOnlyContainer, Dict[_KT, _VT]): if TYPE_CHECKING: - def __new__(cls, *args: Any) -> Self: - ... + def __new__(cls, *args: Any) -> Self: ... - def __init__(cls, *args: Any): - ... + def __init__(cls, *args: Any): ... def _readonly(self, *arg: Any, **kw: Any) -> NoReturn: self._immutable() diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 0a8d87b1880..b122a3b35b3 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -411,15 +411,13 @@ def get_cls_kwargs( *, _set: Optional[Set[str]] = None, raiseerr: Literal[True] = ..., -) -> Set[str]: - ... +) -> Set[str]: ... @overload def get_cls_kwargs( cls: type, *, _set: Optional[Set[str]] = None, raiseerr: bool = False -) -> Optional[Set[str]]: - ... +) -> Optional[Set[str]]: ... def get_cls_kwargs( @@ -1092,23 +1090,19 @@ def __init__(self, fget: Callable[..., _T_co], doc: Optional[str] = None): self.__name__ = fget.__name__ @overload - def __get__(self: _GFD, obj: None, cls: Any) -> _GFD: - ... + def __get__(self: _GFD, obj: None, cls: Any) -> _GFD: ... @overload - def __get__(self, obj: object, cls: Any) -> _T_co: - ... + def __get__(self, obj: object, cls: Any) -> _T_co: ... def __get__(self: _GFD, obj: Any, cls: Any) -> Union[_GFD, _T_co]: raise NotImplementedError() if TYPE_CHECKING: - def __set__(self, instance: Any, value: Any) -> None: - ... + def __set__(self, instance: Any, value: Any) -> None: ... - def __delete__(self, instance: Any) -> None: - ... + def __delete__(self, instance: Any) -> None: ... def _reset(self, obj: Any) -> None: raise NotImplementedError() @@ -1247,12 +1241,10 @@ def __init__(self, fget: Callable[..., _T], doc: Optional[str] = None): self.__name__ = fget.__name__ @overload - def __get__(self: _MA, obj: None, cls: Any) -> _MA: - ... + def __get__(self: _MA, obj: None, cls: Any) -> _MA: ... @overload - def __get__(self, obj: Any, cls: Any) -> _T: - ... + def __get__(self, obj: Any, cls: Any) -> _T: ... def __get__(self, obj, cls): if obj is None: diff --git a/lib/sqlalchemy/util/queue.py b/lib/sqlalchemy/util/queue.py index 08ee9ead8e1..99a68a3177a 100644 --- a/lib/sqlalchemy/util/queue.py +++ b/lib/sqlalchemy/util/queue.py @@ -57,8 +57,7 @@ class QueueCommon(Generic[_T]): maxsize: int use_lifo: bool - def __init__(self, maxsize: int = 0, use_lifo: bool = False): - ... + def __init__(self, maxsize: int = 0, use_lifo: bool = False): ... def empty(self) -> bool: raise NotImplementedError() @@ -242,8 +241,7 @@ class AsyncAdaptedQueue(QueueCommon[_T]): if typing.TYPE_CHECKING: @staticmethod - def await_(coroutine: Awaitable[Any]) -> _T: - ... + def await_(coroutine: Awaitable[Any]) -> _T: ... else: await_ = staticmethod(await_only) diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index 743e071f251..a7d2a340e7d 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -112,11 +112,9 @@ class GenericProtocol(Protocol[_T]): # copied from TypeShed, required in order to implement # MutableMapping.update() class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]): - def keys(self) -> Iterable[_KT]: - ... + def keys(self) -> Iterable[_KT]: ... 
- def __getitem__(self, __k: _KT) -> _VT_co: - ... + def __getitem__(self, __k: _KT) -> _VT_co: ... # work around https://github.com/microsoft/pyright/issues/3025 @@ -342,20 +340,17 @@ def is_fwd_ref( @overload -def de_optionalize_union_types(type_: str) -> str: - ... +def de_optionalize_union_types(type_: str) -> str: ... @overload -def de_optionalize_union_types(type_: Type[Any]) -> Type[Any]: - ... +def de_optionalize_union_types(type_: Type[Any]) -> Type[Any]: ... @overload def de_optionalize_union_types( type_: _AnnotationScanType, -) -> _AnnotationScanType: - ... +) -> _AnnotationScanType: ... def de_optionalize_union_types( @@ -499,14 +494,11 @@ def _get_type_name(type_: Type[Any]) -> str: class DescriptorProto(Protocol): - def __get__(self, instance: object, owner: Any) -> Any: - ... + def __get__(self, instance: object, owner: Any) -> Any: ... - def __set__(self, instance: Any, value: Any) -> None: - ... + def __set__(self, instance: Any, value: Any) -> None: ... - def __delete__(self, instance: Any) -> None: - ... + def __delete__(self, instance: Any) -> None: ... _DESC = TypeVar("_DESC", bound=DescriptorProto) @@ -525,14 +517,11 @@ class DescriptorReference(Generic[_DESC]): if TYPE_CHECKING: - def __get__(self, instance: object, owner: Any) -> _DESC: - ... + def __get__(self, instance: object, owner: Any) -> _DESC: ... - def __set__(self, instance: Any, value: _DESC) -> None: - ... + def __set__(self, instance: Any, value: _DESC) -> None: ... - def __delete__(self, instance: Any) -> None: - ... + def __delete__(self, instance: Any) -> None: ... _DESC_co = TypeVar("_DESC_co", bound=DescriptorProto, covariant=True) @@ -548,14 +537,11 @@ class RODescriptorReference(Generic[_DESC_co]): if TYPE_CHECKING: - def __get__(self, instance: object, owner: Any) -> _DESC_co: - ... + def __get__(self, instance: object, owner: Any) -> _DESC_co: ... - def __set__(self, instance: Any, value: Any) -> NoReturn: - ... + def __set__(self, instance: Any, value: Any) -> NoReturn: ... - def __delete__(self, instance: Any) -> NoReturn: - ... + def __delete__(self, instance: Any) -> NoReturn: ... _FN = TypeVar("_FN", bound=Optional[Callable[..., Any]]) @@ -572,14 +558,11 @@ class CallableReference(Generic[_FN]): if TYPE_CHECKING: - def __get__(self, instance: object, owner: Any) -> _FN: - ... + def __get__(self, instance: object, owner: Any) -> _FN: ... - def __set__(self, instance: Any, value: _FN) -> None: - ... + def __set__(self, instance: Any, value: _FN) -> None: ... - def __delete__(self, instance: Any) -> None: - ... + def __delete__(self, instance: Any) -> None: ... # $def ro_descriptor_reference(fn: Callable[]) diff --git a/setup.cfg b/setup.cfg index 45151ef4d4c..d51e4d854cc 100644 --- a/setup.cfg +++ b/setup.cfg @@ -107,7 +107,7 @@ enable-extensions = G ignore = A003, D, - E203,E305,E711,E712,E721,E722,E741, + E203,E305,E701,E704,E711,E712,E721,E722,E741, N801,N802,N806, RST304,RST303,RST299,RST399, W503,W504,W601 diff --git a/test/aaa_profiling/test_orm.py b/test/aaa_profiling/test_orm.py index 3a5a200d805..8bf2bfa1803 100644 --- a/test/aaa_profiling/test_orm.py +++ b/test/aaa_profiling/test_orm.py @@ -142,7 +142,6 @@ def go2(): class LoadManyToOneFromIdentityTest(fixtures.MappedTest): - """test overhead associated with many-to-one fetches. 
Prior to the refactor of LoadLazyAttribute and diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py index 74867ccbe21..b5ea40b120e 100644 --- a/test/dialect/mssql/test_compiler.py +++ b/test/dialect/mssql/test_compiler.py @@ -702,9 +702,9 @@ def test_schema_single_token_bracketed( select(tbl), "SELECT %(name)s.test.id FROM %(name)s.test" % {"name": rendered_schema}, - schema_translate_map={None: schemaname} - if use_schema_translate - else None, + schema_translate_map=( + {None: schemaname} if use_schema_translate else None + ), render_schema_translate=True if use_schema_translate else False, ) @@ -777,16 +777,20 @@ def test_force_schema_quoted_name_w_dot_case_sensitive( "test", metadata, Column("id", Integer, primary_key=True), - schema=quoted_name("Foo.dbo", True) - if not use_schema_translate - else None, + schema=( + quoted_name("Foo.dbo", True) + if not use_schema_translate + else None + ), ) self.assert_compile( select(tbl), "SELECT [Foo.dbo].test.id FROM [Foo.dbo].test", - schema_translate_map={None: quoted_name("Foo.dbo", True)} - if use_schema_translate - else None, + schema_translate_map=( + {None: quoted_name("Foo.dbo", True)} + if use_schema_translate + else None + ), render_schema_translate=True if use_schema_translate else False, ) @@ -804,9 +808,9 @@ def test_force_schema_quoted_w_dot_case_sensitive( self.assert_compile( select(tbl), "SELECT [Foo.dbo].test.id FROM [Foo.dbo].test", - schema_translate_map={None: "[Foo.dbo]"} - if use_schema_translate - else None, + schema_translate_map=( + {None: "[Foo.dbo]"} if use_schema_translate else None + ), render_schema_translate=True if use_schema_translate else False, ) @@ -824,9 +828,9 @@ def test_schema_autosplit_w_dot_case_insensitive( self.assert_compile( select(tbl), "SELECT foo.dbo.test.id FROM foo.dbo.test", - schema_translate_map={None: "foo.dbo"} - if use_schema_translate - else None, + schema_translate_map=( + {None: "foo.dbo"} if use_schema_translate else None + ), render_schema_translate=True if use_schema_translate else False, ) @@ -842,9 +846,9 @@ def test_schema_autosplit_w_dot_case_sensitive(self, use_schema_translate): self.assert_compile( select(tbl), "SELECT [Foo].dbo.test.id FROM [Foo].dbo.test", - schema_translate_map={None: "Foo.dbo"} - if use_schema_translate - else None, + schema_translate_map=( + {None: "Foo.dbo"} if use_schema_translate else None + ), render_schema_translate=True if use_schema_translate else False, ) diff --git a/test/dialect/mssql/test_reflection.py b/test/dialect/mssql/test_reflection.py index ae2b7662ef5..7222ba47ae3 100644 --- a/test/dialect/mssql/test_reflection.py +++ b/test/dialect/mssql/test_reflection.py @@ -1028,10 +1028,13 @@ def define_tables(cls, metadata): for i in range(col_num) ], ) - cls.view_str = ( - view_str - ) = "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % ( - ",".join("long_named_column_number_%d" % i for i in range(col_num)) + cls.view_str = view_str = ( + "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" + % ( + ",".join( + "long_named_column_number_%d" % i for i in range(col_num) + ) + ) ) assert len(view_str) > 4000 diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py index b2e05d951d0..05b4b685427 100644 --- a/test/dialect/mysql/test_compiler.py +++ b/test/dialect/mysql/test_compiler.py @@ -567,7 +567,6 @@ def test_groupby_rollup(self): class SQLTest(fixtures.TestBase, AssertsCompiledSQL): - """Tests MySQL-dialect specific compilation.""" __dialect__ = mysql.dialect() diff 
--git a/test/dialect/mysql/test_for_update.py b/test/dialect/mysql/test_for_update.py index 5717a32997c..0895a098d1f 100644 --- a/test/dialect/mysql/test_for_update.py +++ b/test/dialect/mysql/test_for_update.py @@ -3,6 +3,7 @@ See #4246 """ + import contextlib from sqlalchemy import Column diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index f890b7ba9ce..005e60eaa14 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -3228,7 +3228,6 @@ def test_quote_raw_string_col(self): class DistinctOnTest(fixtures.MappedTest, AssertsCompiledSQL): - """Test 'DISTINCT' with SQL expression language and orm.Query with an emphasis on PG's 'DISTINCT ON' syntax. @@ -3382,7 +3381,6 @@ def test_distinct_on_subquery_named(self): class FullTextSearchTest(fixtures.TestBase, AssertsCompiledSQL): - """Tests for full text searching""" __dialect__ = postgresql.dialect() diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py index db2d5e73dc6..919842a49c4 100644 --- a/test/dialect/postgresql/test_dialect.py +++ b/test/dialect/postgresql/test_dialect.py @@ -1219,9 +1219,9 @@ def test_readonly_flag_engine(self, testing_engine, pre_ping): def test_autocommit_pre_ping(self, testing_engine, autocommit): engine = testing_engine( options={ - "isolation_level": "AUTOCOMMIT" - if autocommit - else "SERIALIZABLE", + "isolation_level": ( + "AUTOCOMMIT" if autocommit else "SERIALIZABLE" + ), "pool_pre_ping": True, } ) @@ -1239,9 +1239,9 @@ def test_asyncpg_transactional_ping(self, testing_engine, autocommit): engine = testing_engine( options={ - "isolation_level": "AUTOCOMMIT" - if autocommit - else "SERIALIZABLE", + "isolation_level": ( + "AUTOCOMMIT" if autocommit else "SERIALIZABLE" + ), "pool_pre_ping": True, } ) diff --git a/test/dialect/postgresql/test_query.py b/test/dialect/postgresql/test_query.py index 8d8d9a7ec9d..9822b3e60b9 100644 --- a/test/dialect/postgresql/test_query.py +++ b/test/dialect/postgresql/test_query.py @@ -1238,7 +1238,6 @@ def test_tuple_containment(self, connection): class ExtractTest(fixtures.TablesTest): - """The rationale behind this test is that for many years we've had a system of embedding type casts into the expressions rendered by visit_extract() on the postgreql platform. The reason for this cast is not clear. diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index 0a98ef5045f..2088436eebf 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -1155,7 +1155,7 @@ def process_result_value(self, value, dialect): "one", "two", "three", - native_enum=True # make sure this is True because + native_enum=True, # make sure this is True because # it should *not* take effect due to # the variant ).with_variant( @@ -3234,7 +3234,6 @@ def test_bit_compile(self, type_, expected): class SpecialTypesTest(fixtures.TablesTest, ComparesTables): - """test DDL and reflection of PG-specific types""" __only_on__ = ("postgresql >= 8.3.0",) @@ -3325,7 +3324,6 @@ def test_bit_reflection(self, metadata, connection): class UUIDTest(fixtures.TestBase): - """Test postgresql-specific UUID cases. 
See also generic UUID tests in testing/suite/test_types @@ -3969,9 +3967,11 @@ def test_data_str(self, fn, op): self._test_clause( fn(self.col, self._data_str()), f"data_table.range {op} %(range_1)s", - self.col.type - if op in self._not_compare_op - else sqltypes.BOOLEANTYPE, + ( + self.col.type + if op in self._not_compare_op + else sqltypes.BOOLEANTYPE + ), ) @testing.combinations(*_all_fns, id_="as") @@ -3979,9 +3979,11 @@ def test_data_obj(self, fn, op): self._test_clause( fn(self.col, self._data_obj()), f"data_table.range {op} %(range_1)s::{self._col_str}", - self.col.type - if op in self._not_compare_op - else sqltypes.BOOLEANTYPE, + ( + self.col.type + if op in self._not_compare_op + else sqltypes.BOOLEANTYPE + ), ) @testing.combinations(*_comparisons, id_="as") @@ -3989,9 +3991,11 @@ def test_data_str_any(self, fn, op): self._test_clause( fn(self.col, any_(array([self._data_str()]))), f"data_table.range {op} ANY (ARRAY[%(param_1)s])", - self.col.type - if op in self._not_compare_op - else sqltypes.BOOLEANTYPE, + ( + self.col.type + if op in self._not_compare_op + else sqltypes.BOOLEANTYPE + ), ) def test_where_is_null(self): @@ -6279,9 +6283,11 @@ def test_imv_returning_datatypes( t.c.value, sort_by_parameter_order=bool(sort_by_parameter_order), ), - [{"value": value} for i in range(10)] - if multiple_rows - else {"value": value}, + ( + [{"value": value} for i in range(10)] + if multiple_rows + else {"value": value} + ), ) if multiple_rows: diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index d6e444bb301..d5ff0fc19de 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -1,4 +1,5 @@ """SQLite-specific tests.""" + import datetime import json import os @@ -912,7 +913,6 @@ def test_col_targeting_union(self, connection): class SQLTest(fixtures.TestBase, AssertsCompiledSQL): - """Tests SQLite-dialect specific compilation.""" __dialect__ = sqlite.dialect() @@ -1314,7 +1314,6 @@ def test_on_conflict_clause_primary_key_constraint(self): class InsertTest(fixtures.TestBase, AssertsExecutionResults): - """Tests inserts and autoincrement.""" __only_on__ = "sqlite" @@ -2508,7 +2507,6 @@ def test_constraint_cols( class SavepointTest(fixtures.TablesTest): - """test that savepoints work when we use the correct event setup""" __only_on__ = "sqlite" diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index 6080f3dc6d0..4618dfff8d5 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -3654,12 +3654,12 @@ def mock_the_cursor(cursor, *arg): arg[-1].get_result_proxy = Mock(return_value=Mock(context=arg[-1])) return retval - m1.real_do_execute.side_effect = ( - m1.do_execute.side_effect - ) = mock_the_cursor - m1.real_do_executemany.side_effect = ( - m1.do_executemany.side_effect - ) = mock_the_cursor + m1.real_do_execute.side_effect = m1.do_execute.side_effect = ( + mock_the_cursor + ) + m1.real_do_executemany.side_effect = m1.do_executemany.side_effect = ( + mock_the_cursor + ) m1.real_do_execute_no_params.side_effect = ( m1.do_execute_no_params.side_effect ) = mock_the_cursor diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py index a7883efa2fd..e1515a23a86 100644 --- a/test/engine/test_reconnect.py +++ b/test/engine/test_reconnect.py @@ -1581,9 +1581,9 @@ def _run_with_retries(fn, context, cursor, statement, *arg, **kw): connection.rollback() time.sleep(retry_interval) - context.cursor = ( - cursor - ) = connection.connection.cursor() + context.cursor = cursor = ( + 
connection.connection.cursor() + ) else: raise else: diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py index d6d059cbef9..e21881b3334 100644 --- a/test/ext/declarative/test_inheritance.py +++ b/test/ext/declarative/test_inheritance.py @@ -934,22 +934,25 @@ class ActualDocument(ContactDocument): self.assert_compile( session.query(Document), - "SELECT pjoin.id AS pjoin_id, pjoin.doctype AS pjoin_doctype, " - "pjoin.type AS pjoin_type, pjoin.send_method AS pjoin_send_method " - "FROM " - "(SELECT actual_documents.id AS id, " - "actual_documents.send_method AS send_method, " - "actual_documents.doctype AS doctype, " - "'actual' AS type FROM actual_documents) AS pjoin" - if use_strict_attrs - else "SELECT pjoin.id AS pjoin_id, pjoin.send_method AS " - "pjoin_send_method, pjoin.doctype AS pjoin_doctype, " - "pjoin.type AS pjoin_type " - "FROM " - "(SELECT actual_documents.id AS id, " - "actual_documents.send_method AS send_method, " - "actual_documents.doctype AS doctype, " - "'actual' AS type FROM actual_documents) AS pjoin", + ( + "SELECT pjoin.id AS pjoin_id, pjoin.doctype AS pjoin_doctype, " + "pjoin.type AS pjoin_type, " + "pjoin.send_method AS pjoin_send_method " + "FROM " + "(SELECT actual_documents.id AS id, " + "actual_documents.send_method AS send_method, " + "actual_documents.doctype AS doctype, " + "'actual' AS type FROM actual_documents) AS pjoin" + if use_strict_attrs + else "SELECT pjoin.id AS pjoin_id, pjoin.send_method AS " + "pjoin_send_method, pjoin.doctype AS pjoin_doctype, " + "pjoin.type AS pjoin_type " + "FROM " + "(SELECT actual_documents.id AS id, " + "actual_documents.send_method AS send_method, " + "actual_documents.doctype AS doctype, " + "'actual' AS type FROM actual_documents) AS pjoin" + ), ) @testing.combinations(True, False) diff --git a/test/ext/mypy/plugin_files/mapped_attr_assign.py b/test/ext/mypy/plugin_files/mapped_attr_assign.py index 06bc24d9eb0..c7244c27a61 100644 --- a/test/ext/mypy/plugin_files/mapped_attr_assign.py +++ b/test/ext/mypy/plugin_files/mapped_attr_assign.py @@ -3,6 +3,7 @@ """ + from typing import Optional from sqlalchemy import Column diff --git a/test/ext/mypy/plugin_files/typing_err3.py b/test/ext/mypy/plugin_files/typing_err3.py index cbdbf009a0e..146b96b2a73 100644 --- a/test/ext/mypy/plugin_files/typing_err3.py +++ b/test/ext/mypy/plugin_files/typing_err3.py @@ -2,6 +2,7 @@ type checked. 
""" + from typing import List from sqlalchemy import Column diff --git a/test/ext/test_associationproxy.py b/test/ext/test_associationproxy.py index 87812c9ac63..7e2b31a9b5b 100644 --- a/test/ext/test_associationproxy.py +++ b/test/ext/test_associationproxy.py @@ -3830,11 +3830,11 @@ class User(decl_base): id: Mapped[int] = mapped_column(primary_key=True) - user_keyword_associations: Mapped[ - List[UserKeywordAssociation] - ] = relationship( - back_populates="user", - cascade="all, delete-orphan", + user_keyword_associations: Mapped[List[UserKeywordAssociation]] = ( + relationship( + back_populates="user", + cascade="all, delete-orphan", + ) ) keywords: AssociationProxy[list[str]] = association_proxy( @@ -3886,12 +3886,12 @@ class User(dc_decl_base): primary_key=True, repr=True, init=False ) - user_keyword_associations: Mapped[ - List[UserKeywordAssociation] - ] = relationship( - back_populates="user", - cascade="all, delete-orphan", - init=False, + user_keyword_associations: Mapped[List[UserKeywordAssociation]] = ( + relationship( + back_populates="user", + cascade="all, delete-orphan", + init=False, + ) ) if embed_in_field: diff --git a/test/ext/test_automap.py b/test/ext/test_automap.py index c84bc1c78eb..a3ba1189b3d 100644 --- a/test/ext/test_automap.py +++ b/test/ext/test_automap.py @@ -667,11 +667,14 @@ def _make_tables(self, e): m, Column("id", Integer, primary_key=True), Column("data", String(50)), - Column( - "t_%d_id" % (i - 1), ForeignKey("table_%d.id" % (i - 1)) - ) - if i > 4 - else None, + ( + Column( + "t_%d_id" % (i - 1), + ForeignKey("table_%d.id" % (i - 1)), + ) + if i > 4 + else None + ), ) m.drop_all(e) m.create_all(e) diff --git a/test/ext/test_compiler.py b/test/ext/test_compiler.py index aa03dabc903..707e02dac10 100644 --- a/test/ext/test_compiler.py +++ b/test/ext/test_compiler.py @@ -209,9 +209,11 @@ def sqlite_my_function(element, compiler, **kw): self.assert_compile( stmt, - "SELECT my_function(t1.q) AS my_function_1 FROM t1" - if named - else "SELECT my_function(t1.q) AS anon_1 FROM t1", + ( + "SELECT my_function(t1.q) AS my_function_1 FROM t1" + if named + else "SELECT my_function(t1.q) AS anon_1 FROM t1" + ), dialect="sqlite", ) diff --git a/test/ext/test_extendedattr.py b/test/ext/test_extendedattr.py index dd5b7158296..41637c358e5 100644 --- a/test/ext/test_extendedattr.py +++ b/test/ext/test_extendedattr.py @@ -760,7 +760,6 @@ class C: class ExtendedEventsTest(_ExtBase, fixtures.ORMTest): - """Allow custom Events implementations.""" @modifies_instrumentation_finders diff --git a/test/orm/declarative/test_abs_import_only.py b/test/orm/declarative/test_abs_import_only.py index e1447364e66..287240575c8 100644 --- a/test/orm/declarative/test_abs_import_only.py +++ b/test/orm/declarative/test_abs_import_only.py @@ -64,9 +64,9 @@ class Foo(decl_base): if construct.Mapped: bars: orm.Mapped[typing.List[Bar]] = orm.relationship() elif construct.WriteOnlyMapped: - bars: orm.WriteOnlyMapped[ - typing.List[Bar] - ] = orm.relationship() + bars: orm.WriteOnlyMapped[typing.List[Bar]] = ( + orm.relationship() + ) elif construct.DynamicMapped: bars: orm.DynamicMapped[typing.List[Bar]] = orm.relationship() else: diff --git a/test/orm/declarative/test_dc_transforms.py b/test/orm/declarative/test_dc_transforms.py index cbe08f30e17..8408f696176 100644 --- a/test/orm/declarative/test_dc_transforms.py +++ b/test/orm/declarative/test_dc_transforms.py @@ -179,9 +179,9 @@ class GenericSetting( JSON, init=True, default_factory=lambda: {} ) - new_instance: GenericSetting[ # noqa: F841 
- Dict[str, Any] - ] = GenericSetting(key="x", value={"foo": "bar"}) + new_instance: GenericSetting[Dict[str, Any]] = ( # noqa: F841 + GenericSetting(key="x", value={"foo": "bar"}) + ) def test_no_anno_doesnt_go_into_dc( self, dc_decl_base: Type[MappedAsDataclass] diff --git a/test/orm/declarative/test_inheritance.py b/test/orm/declarative/test_inheritance.py index c5b908cd822..1b633d1bcf0 100644 --- a/test/orm/declarative/test_inheritance.py +++ b/test/orm/declarative/test_inheritance.py @@ -1067,7 +1067,6 @@ class Person(decl_base): target_id = Column(Integer, primary_key=True) class Engineer(Person): - """single table inheritance""" if decl_type.legacy: @@ -1084,7 +1083,6 @@ def target_id(cls): ) class Manager(Person): - """single table inheritance""" if decl_type.legacy: @@ -1468,7 +1466,6 @@ class A(a_1): class OverlapColPrecedenceTest(DeclarativeTestBase): - """test #1892 cases when declarative does column precedence.""" def _run_test(self, Engineer, e_id, p_id): diff --git a/test/orm/declarative/test_mixin.py b/test/orm/declarative/test_mixin.py index 900133df593..32f737484e2 100644 --- a/test/orm/declarative/test_mixin.py +++ b/test/orm/declarative/test_mixin.py @@ -672,11 +672,9 @@ def target(cls): return relationship("Other") class Engineer(Mixin, Person): - """single table inheritance""" class Manager(Mixin, Person): - """single table inheritance""" class Other(Base): diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index e2a442767ae..39ce5051bab 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -1517,20 +1517,20 @@ class User(Base): data: Mapped[Union[float, Decimal]] = mapped_column() reverse_data: Mapped[Union[Decimal, float]] = mapped_column() - optional_data: Mapped[ - Optional[Union[float, Decimal]] - ] = mapped_column() + optional_data: Mapped[Optional[Union[float, Decimal]]] = ( + mapped_column() + ) # use Optional directly - reverse_optional_data: Mapped[ - Optional[Union[Decimal, float]] - ] = mapped_column() + reverse_optional_data: Mapped[Optional[Union[Decimal, float]]] = ( + mapped_column() + ) # use Union with None, same as Optional but presents differently # (Optional object with __origin__ Union vs. 
Union) - reverse_u_optional_data: Mapped[ - Union[Decimal, float, None] - ] = mapped_column() + reverse_u_optional_data: Mapped[Union[Decimal, float, None]] = ( + mapped_column() + ) float_data: Mapped[float] = mapped_column() decimal_data: Mapped[Decimal] = mapped_column() @@ -1538,14 +1538,14 @@ class User(Base): if compat.py310: pep604_data: Mapped[float | Decimal] = mapped_column() pep604_reverse: Mapped[Decimal | float] = mapped_column() - pep604_optional: Mapped[ - Decimal | float | None - ] = mapped_column() + pep604_optional: Mapped[Decimal | float | None] = ( + mapped_column() + ) pep604_data_fwd: Mapped["float | Decimal"] = mapped_column() pep604_reverse_fwd: Mapped["Decimal | float"] = mapped_column() - pep604_optional_fwd: Mapped[ - "Decimal | float | None" - ] = mapped_column() + pep604_optional_fwd: Mapped["Decimal | float | None"] = ( + mapped_column() + ) is_(User.__table__.c.data.type, our_type) is_false(User.__table__.c.data.nullable) @@ -2508,9 +2508,9 @@ class A(decl_base): collection_class=list ) elif datatype.collections_mutable_sequence: - bs: Mapped[ - collections.abc.MutableSequence[B] - ] = relationship(collection_class=list) + bs: Mapped[collections.abc.MutableSequence[B]] = ( + relationship(collection_class=list) + ) else: datatype.fail() @@ -2537,15 +2537,15 @@ class A(decl_base): if datatype.typing_sequence: bs: Mapped[typing.Sequence[B]] = relationship() elif datatype.collections_sequence: - bs: Mapped[ - collections.abc.Sequence[B] - ] = relationship() + bs: Mapped[collections.abc.Sequence[B]] = ( + relationship() + ) elif datatype.typing_mutable_sequence: bs: Mapped[typing.MutableSequence[B]] = relationship() elif datatype.collections_mutable_sequence: - bs: Mapped[ - collections.abc.MutableSequence[B] - ] = relationship() + bs: Mapped[collections.abc.MutableSequence[B]] = ( + relationship() + ) else: datatype.fail() diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 0f1bb452d52..c61dceea1ff 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -1508,20 +1508,20 @@ class User(Base): data: Mapped[Union[float, Decimal]] = mapped_column() reverse_data: Mapped[Union[Decimal, float]] = mapped_column() - optional_data: Mapped[ - Optional[Union[float, Decimal]] - ] = mapped_column() + optional_data: Mapped[Optional[Union[float, Decimal]]] = ( + mapped_column() + ) # use Optional directly - reverse_optional_data: Mapped[ - Optional[Union[Decimal, float]] - ] = mapped_column() + reverse_optional_data: Mapped[Optional[Union[Decimal, float]]] = ( + mapped_column() + ) # use Union with None, same as Optional but presents differently # (Optional object with __origin__ Union vs. 
Union) - reverse_u_optional_data: Mapped[ - Union[Decimal, float, None] - ] = mapped_column() + reverse_u_optional_data: Mapped[Union[Decimal, float, None]] = ( + mapped_column() + ) float_data: Mapped[float] = mapped_column() decimal_data: Mapped[Decimal] = mapped_column() @@ -1529,14 +1529,14 @@ class User(Base): if compat.py310: pep604_data: Mapped[float | Decimal] = mapped_column() pep604_reverse: Mapped[Decimal | float] = mapped_column() - pep604_optional: Mapped[ - Decimal | float | None - ] = mapped_column() + pep604_optional: Mapped[Decimal | float | None] = ( + mapped_column() + ) pep604_data_fwd: Mapped["float | Decimal"] = mapped_column() pep604_reverse_fwd: Mapped["Decimal | float"] = mapped_column() - pep604_optional_fwd: Mapped[ - "Decimal | float | None" - ] = mapped_column() + pep604_optional_fwd: Mapped["Decimal | float | None"] = ( + mapped_column() + ) is_(User.__table__.c.data.type, our_type) is_false(User.__table__.c.data.nullable) @@ -2499,9 +2499,9 @@ class A(decl_base): collection_class=list ) elif datatype.collections_mutable_sequence: - bs: Mapped[ - collections.abc.MutableSequence[B] - ] = relationship(collection_class=list) + bs: Mapped[collections.abc.MutableSequence[B]] = ( + relationship(collection_class=list) + ) else: datatype.fail() @@ -2528,15 +2528,15 @@ class A(decl_base): if datatype.typing_sequence: bs: Mapped[typing.Sequence[B]] = relationship() elif datatype.collections_sequence: - bs: Mapped[ - collections.abc.Sequence[B] - ] = relationship() + bs: Mapped[collections.abc.Sequence[B]] = ( + relationship() + ) elif datatype.typing_mutable_sequence: bs: Mapped[typing.MutableSequence[B]] = relationship() elif datatype.collections_mutable_sequence: - bs: Mapped[ - collections.abc.MutableSequence[B] - ] = relationship() + bs: Mapped[collections.abc.MutableSequence[B]] = ( + relationship() + ) else: datatype.fail() diff --git a/test/orm/inheritance/test_assorted_poly.py b/test/orm/inheritance/test_assorted_poly.py index 0f9a623bdac..49d90f6c437 100644 --- a/test/orm/inheritance/test_assorted_poly.py +++ b/test/orm/inheritance/test_assorted_poly.py @@ -2476,9 +2476,9 @@ class Retailer(Customer): __mapper_args__ = { "polymorphic_identity": "retailer", - "polymorphic_load": "inline" - if use_poly_on_retailer - else None, + "polymorphic_load": ( + "inline" if use_poly_on_retailer else None + ), } return Customer, Store, Retailer diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py index abd6c86b570..a76f563f818 100644 --- a/test/orm/inheritance/test_basic.py +++ b/test/orm/inheritance/test_basic.py @@ -1933,7 +1933,7 @@ def test_refresh_column(self): # a.id is not included in the SELECT list "SELECT b.data FROM a JOIN b ON a.id = b.id " "WHERE a.id = :pk_1", - [{"pk_1": pk}] + [{"pk_1": pk}], # if we used load_scalar_attributes(), it would look like # this # "SELECT b.data AS b_data FROM b WHERE :param_1 = b.id", diff --git a/test/orm/inheritance/test_relationship.py b/test/orm/inheritance/test_relationship.py index daaf937b912..be42dc60904 100644 --- a/test/orm/inheritance/test_relationship.py +++ b/test/orm/inheritance/test_relationship.py @@ -2896,9 +2896,11 @@ def test_query_auto(self, autoalias): m1 = aliased(Manager, flat=True) q = sess.query(Engineer, m1).join(Engineer.manager.of_type(m1)) - with _aliased_join_warning( - r"Manager\(managers\)" - ) if autoalias else nullcontext(): + with ( + _aliased_join_warning(r"Manager\(managers\)") + if autoalias + else nullcontext() + ): self.assert_compile( q, "SELECT engineers.id 
AS " diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py index 52f3cf9c9f7..f45194f29c5 100644 --- a/test/orm/inheritance/test_single.py +++ b/test/orm/inheritance/test_single.py @@ -1909,9 +1909,11 @@ def test_single_inh_subclass_join_joined_inh_subclass(self, autoalias): e1 = aliased(Engineer, flat=True) q = s.query(Boss).join(e1, e1.manager_id == Boss.id) - with _aliased_join_warning( - r"Mapper\[Engineer\(engineer\)\]" - ) if autoalias else nullcontext(): + with ( + _aliased_join_warning(r"Mapper\[Engineer\(engineer\)\]") + if autoalias + else nullcontext() + ): self.assert_compile( q, "SELECT manager.id AS manager_id, employee.id AS employee_id, " @@ -1974,9 +1976,11 @@ def test_joined_inh_subclass_join_single_inh_subclass(self, autoalias): b1 = aliased(Boss, flat=True) q = s.query(Engineer).join(b1, Engineer.manager_id == b1.id) - with _aliased_join_warning( - r"Mapper\[Boss\(manager\)\]" - ) if autoalias else nullcontext(): + with ( + _aliased_join_warning(r"Mapper\[Boss\(manager\)\]") + if autoalias + else nullcontext() + ): self.assert_compile( q, "SELECT engineer.id AS engineer_id, " diff --git a/test/orm/test_assorted_eager.py b/test/orm/test_assorted_eager.py index 677f8f20736..f14cdda5b66 100644 --- a/test/orm/test_assorted_eager.py +++ b/test/orm/test_assorted_eager.py @@ -6,6 +6,7 @@ be cleaned up and modernized. """ + import datetime import sqlalchemy as sa diff --git a/test/orm/test_composites.py b/test/orm/test_composites.py index ded2c25db79..f9a1ba38659 100644 --- a/test/orm/test_composites.py +++ b/test/orm/test_composites.py @@ -411,11 +411,11 @@ def test_bulk_insert_heterogeneous(self, type_): assert_data = [ { "start": d["start"] if "start" in d else None, - "end": d["end"] - if "end" in d - else Point(d["x2"], d["y2"]) - if "x2" in d - else None, + "end": ( + d["end"] + if "end" in d + else Point(d["x2"], d["y2"]) if "x2" in d else None + ), "graph_id": d["graph_id"], } for d in data @@ -916,9 +916,11 @@ def test_event_listener_no_value_to_set( mock.call( e1, Point(5, 6), - LoaderCallableStatus.NO_VALUE - if not active_history - else None, + ( + LoaderCallableStatus.NO_VALUE + if not active_history + else None + ), Edge.start.impl, ) ], @@ -965,9 +967,11 @@ def test_event_listener_set_to_new( mock.call( e1, Point(7, 8), - LoaderCallableStatus.NO_VALUE - if not active_history - else Point(5, 6), + ( + LoaderCallableStatus.NO_VALUE + if not active_history + else Point(5, 6) + ), Edge.start.impl, ) ], @@ -1019,9 +1023,11 @@ def test_event_listener_set_to_deleted( [ mock.call( e1, - LoaderCallableStatus.NO_VALUE - if not active_history - else Point(5, 6), + ( + LoaderCallableStatus.NO_VALUE + if not active_history + else Point(5, 6) + ), Edge.start.impl, ) ], diff --git a/test/orm/test_cycles.py b/test/orm/test_cycles.py index 7f0f504b569..cffde9bdab9 100644 --- a/test/orm/test_cycles.py +++ b/test/orm/test_cycles.py @@ -5,6 +5,7 @@ T1/T2. """ + from itertools import count from sqlalchemy import bindparam diff --git a/test/orm/test_deprecations.py b/test/orm/test_deprecations.py index 23248349cd2..f943d8dfe42 100644 --- a/test/orm/test_deprecations.py +++ b/test/orm/test_deprecations.py @@ -2269,11 +2269,13 @@ def _test(self, bound_session, session_present, expect_bound): eq_ignore_whitespace( str(q), - "SELECT users.id AS users_id, users.name AS users_name " - "FROM users WHERE users.id = ?" 
- if expect_bound - else "SELECT users.id AS users_id, users.name AS users_name " - "FROM users WHERE users.id = :id_1", + ( + "SELECT users.id AS users_id, users.name AS users_name " + "FROM users WHERE users.id = ?" + if expect_bound + else "SELECT users.id AS users_id, users.name AS users_name " + "FROM users WHERE users.id = :id_1" + ), ) def test_query_bound_session(self): @@ -2307,7 +2309,6 @@ def go(): class RequirementsTest(fixtures.MappedTest): - """Tests the contract for user classes.""" @classmethod diff --git a/test/orm/test_dynamic.py b/test/orm/test_dynamic.py index 83f3101f209..cce3f8c18a8 100644 --- a/test/orm/test_dynamic.py +++ b/test/orm/test_dynamic.py @@ -1444,9 +1444,11 @@ def test_delete_cascade( addresses_args={ "order_by": addresses.c.id, "backref": "user", - "cascade": "save-update" - if not delete_cascade_configured - else "all, delete", + "cascade": ( + "save-update" + if not delete_cascade_configured + else "all, delete" + ), } ) @@ -1519,9 +1521,11 @@ class A(decl_base): data: Mapped[str] bs: WriteOnlyMapped["B"] = relationship( # noqa: F821 passive_deletes=passive_deletes, - cascade="all, delete-orphan" - if cascade_deletes - else "save-update, merge", + cascade=( + "all, delete-orphan" + if cascade_deletes + else "save-update, merge" + ), order_by="B.id", ) @@ -1986,9 +1990,11 @@ def _assert_history(self, obj, compare, compare_passive=None): attributes.get_history( obj, attrname, - PassiveFlag.PASSIVE_NO_FETCH - if self.lazy == "write_only" - else PassiveFlag.PASSIVE_OFF, + ( + PassiveFlag.PASSIVE_NO_FETCH + if self.lazy == "write_only" + else PassiveFlag.PASSIVE_OFF + ), ), compare, ) diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py index b1b6e86b794..2e762c2d3cb 100644 --- a/test/orm/test_eager_relations.py +++ b/test/orm/test_eager_relations.py @@ -3697,7 +3697,6 @@ def test_joined_across(self): class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL): - """test #2188""" __dialect__ = "default" @@ -3892,7 +3891,6 @@ def test_standalone_negated(self): class LoadOnExistingTest(_fixtures.FixtureTest): - """test that loaders from a base Query fully populate.""" run_inserts = "once" @@ -5309,7 +5307,6 @@ def go(): class CorrelatedSubqueryTest(fixtures.MappedTest): - """tests for #946, #947, #948. The "users" table is joined to "stuff", and the relationship @@ -6633,7 +6630,6 @@ def go(): class SecondaryOptionsTest(fixtures.MappedTest): - """test that the contains_eager() option doesn't bleed into a secondary load.""" diff --git a/test/orm/test_events.py b/test/orm/test_events.py index 56d16dfcd76..02e00fe9479 100644 --- a/test/orm/test_events.py +++ b/test/orm/test_events.py @@ -390,9 +390,9 @@ def do_orm_execute(ctx): is_orm_statement=ctx.is_orm_statement, is_relationship_load=ctx.is_relationship_load, is_column_load=ctx.is_column_load, - lazy_loaded_from=ctx.lazy_loaded_from - if ctx.is_select - else None, + lazy_loaded_from=( + ctx.lazy_loaded_from if ctx.is_select else None + ), ) return canary @@ -1545,9 +1545,11 @@ def _combinations(fn): ( lambda session: session, "loaded_as_persistent", - lambda session, instance: instance.unloaded - if instance.__class__.__name__ == "A" - else None, + lambda session, instance: ( + instance.unloaded + if instance.__class__.__name__ == "A" + else None + ), ), argnames="target, event_name, fn", )(fn) @@ -1669,7 +1671,6 @@ class C(B): class DeferredMapperEventsTest(RemoveORMEventsGlobally, _fixtures.FixtureTest): - """ "test event listeners against unmapped classes. 
This incurs special logic. Note if we ever do the "remove" case, diff --git a/test/orm/test_hasparent.py b/test/orm/test_hasparent.py index 8f61c11970d..72c90b6d5c9 100644 --- a/test/orm/test_hasparent.py +++ b/test/orm/test_hasparent.py @@ -1,4 +1,5 @@ """test the current state of the hasparent() flag.""" + from sqlalchemy import ForeignKey from sqlalchemy import Integer from sqlalchemy import testing diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py index 4ab9617123c..64c86853d27 100644 --- a/test/orm/test_lazy_relations.py +++ b/test/orm/test_lazy_relations.py @@ -993,7 +993,6 @@ def go(): class GetterStateTest(_fixtures.FixtureTest): - """test lazyloader on non-existent attribute returns expected attribute symbols, maintain expected state""" @@ -1080,11 +1079,13 @@ def _u_ad_fixture(self, populate_user, dont_use_get=False): properties={ "user": relationship( User, - primaryjoin=and_( - users.c.id == addresses.c.user_id, users.c.id != 27 - ) - if dont_use_get - else None, + primaryjoin=( + and_( + users.c.id == addresses.c.user_id, users.c.id != 27 + ) + if dont_use_get + else None + ), back_populates="addresses", ) }, diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py index f90803d6e4d..f93c18d2161 100644 --- a/test/orm/test_mapper.py +++ b/test/orm/test_mapper.py @@ -2555,7 +2555,6 @@ class B(OldStyle, NewStyle): class RequirementsTest(fixtures.MappedTest): - """Tests the contract for user classes.""" @classmethod diff --git a/test/orm/test_merge.py b/test/orm/test_merge.py index 0c8e2651cdb..c313c4b33da 100644 --- a/test/orm/test_merge.py +++ b/test/orm/test_merge.py @@ -1476,9 +1476,7 @@ def test_relationship_population_maintained( CountStatements( 0 if load.noload - else 1 - if merge_persistent.merge_persistent - else 2 + else 1 if merge_persistent.merge_persistent else 2 ) ) diff --git a/test/orm/test_options.py b/test/orm/test_options.py index 7c96539583f..9362d52470e 100644 --- a/test/orm/test_options.py +++ b/test/orm/test_options.py @@ -976,9 +976,11 @@ def test_wrong_type_in_option_cls(self, first_element): Keyword = self.classes.Keyword self._assert_eager_with_entity_exception( [Item], - lambda: (joinedload(Keyword),) - if first_element - else (Load(Item).joinedload(Keyword),), + lambda: ( + (joinedload(Keyword),) + if first_element + else (Load(Item).joinedload(Keyword),) + ), "expected ORM mapped attribute for loader " "strategy argument", ) @@ -990,9 +992,11 @@ def test_wrong_type_in_option_any_random_type(self, rando, first_element): Item = self.classes.Item self._assert_eager_with_entity_exception( [Item], - lambda: (joinedload(rando),) - if first_element - else (Load(Item).joinedload(rando)), + lambda: ( + (joinedload(rando),) + if first_element + else (Load(Item).joinedload(rando)) + ), "expected ORM mapped attribute for loader strategy argument", ) @@ -1002,9 +1006,11 @@ def test_wrong_type_in_option_descriptor(self, first_element): self._assert_eager_with_entity_exception( [OrderWProp], - lambda: (joinedload(OrderWProp.some_attr),) - if first_element - else (Load(OrderWProp).joinedload(OrderWProp.some_attr),), + lambda: ( + (joinedload(OrderWProp.some_attr),) + if first_element + else (Load(OrderWProp).joinedload(OrderWProp.some_attr),) + ), "expected ORM mapped attribute for loader strategy argument", ) diff --git a/test/orm/test_relationship_criteria.py b/test/orm/test_relationship_criteria.py index aebdf6922ae..69279f60044 100644 --- a/test/orm/test_relationship_criteria.py +++ b/test/orm/test_relationship_criteria.py @@ 
-1908,9 +1908,11 @@ def go(value): eq_( result.scalars().unique().all(), - self._user_minus_edwood(*user_address_fixture) - if value == "ed@wood.com" - else self._user_minus_edlala(*user_address_fixture), + ( + self._user_minus_edwood(*user_address_fixture) + if value == "ed@wood.com" + else self._user_minus_edlala(*user_address_fixture) + ), ) asserter.assert_( @@ -1976,9 +1978,11 @@ def go(value): eq_( result.scalars().unique().all(), - self._user_minus_edwood(*user_address_fixture) - if value == "ed@wood.com" - else self._user_minus_edlala(*user_address_fixture), + ( + self._user_minus_edwood(*user_address_fixture) + if value == "ed@wood.com" + else self._user_minus_edlala(*user_address_fixture) + ), ) asserter.assert_( @@ -2033,9 +2037,11 @@ def go(value): eq_( result.scalars().unique().all(), - self._user_minus_edwood(*user_address_fixture) - if value == "ed@wood.com" - else self._user_minus_edlala(*user_address_fixture), + ( + self._user_minus_edwood(*user_address_fixture) + if value == "ed@wood.com" + else self._user_minus_edlala(*user_address_fixture) + ), ) asserter.assert_( @@ -2129,9 +2135,11 @@ def go(value): eq_( result, - self._user_minus_edwood(*user_address_fixture) - if value == "ed@wood.com" - else self._user_minus_edlala(*user_address_fixture), + ( + self._user_minus_edwood(*user_address_fixture) + if value == "ed@wood.com" + else self._user_minus_edlala(*user_address_fixture) + ), ) @testing.combinations((True,), (False,), argnames="use_compiled_cache") @@ -2237,9 +2245,11 @@ def go(value): eq_( result.scalars().unique().all(), - self._user_minus_edwood(*user_address_fixture) - if value == "ed@wood.com" - else self._user_minus_edlala(*user_address_fixture), + ( + self._user_minus_edwood(*user_address_fixture) + if value == "ed@wood.com" + else self._user_minus_edlala(*user_address_fixture) + ), ) asserter.assert_( @@ -2309,9 +2319,11 @@ def go(value): eq_( result.scalars().unique().all(), - self._user_minus_edwood(*user_address_fixture) - if value == "ed@wood.com" - else self._user_minus_edlala(*user_address_fixture), + ( + self._user_minus_edwood(*user_address_fixture) + if value == "ed@wood.com" + else self._user_minus_edlala(*user_address_fixture) + ), ) asserter.assert_( diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py index 969196ad8ca..c42ec112ebc 100644 --- a/test/orm/test_relationships.py +++ b/test/orm/test_relationships.py @@ -183,7 +183,6 @@ def _assert_raises_no_local_remote(self, fn, relname, *arg, **kw): class DependencyTwoParentTest(fixtures.MappedTest): - """Test flush() when a mapper is dependent on multiple relationships""" run_setup_mappers = "once" @@ -430,7 +429,6 @@ def test_collection_relationship_overrides_fk(self): class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL): - """Tests the ultimate join condition, a single column that points to itself, e.g. within a SQL function or similar. The test is against a materialized path setup. @@ -1022,7 +1020,6 @@ def test_works_two(self): class CompositeSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL): - """Tests a composite FK where, in the relationship(), one col points to itself in the same table. 
@@ -1506,7 +1503,6 @@ def test_joins_fully(self): class SynonymsAsFKsTest(fixtures.MappedTest): - """Syncrules on foreign keys that are also primary""" @classmethod @@ -1578,7 +1574,6 @@ def test_synonym_fk(self): class FKsAsPksTest(fixtures.MappedTest): - """Syncrules on foreign keys that are also primary""" @classmethod @@ -1863,7 +1858,6 @@ def test_delete_manual_BtoA(self): class UniqueColReferenceSwitchTest(fixtures.MappedTest): - """test a relationship based on a primary join against a unique non-pk column""" @@ -1928,7 +1922,6 @@ def test_switch_parent(self): class RelationshipToSelectableTest(fixtures.MappedTest): - """Test a map to a select that relates to a map to the table.""" @classmethod @@ -2022,7 +2015,6 @@ class LineItem(BasicEntity): class FKEquatedToConstantTest(fixtures.MappedTest): - """test a relationship with a non-column entity in the primary join, is not viewonly, and also has the non-column's clause mentioned in the foreign keys list. @@ -2159,7 +2151,6 @@ def test_backref(self): class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest): - """test ambiguous joins due to FKs on both sides treated as self-referential. @@ -2254,7 +2245,6 @@ def test_mapping(self): class ManualBackrefTest(_fixtures.FixtureTest): - """Test explicit relationships that are backrefs to each other.""" run_inserts = None @@ -2393,7 +2383,6 @@ def test_back_propagates_not_relationship(self): class NoLoadBackPopulates(_fixtures.FixtureTest): - """test the noload stratgegy which unlike others doesn't use lazyloader to set up instrumentation""" @@ -2640,7 +2629,6 @@ def teardown_test(self): class TypeMatchTest(fixtures.MappedTest): - """test errors raised when trying to add items whose type is not handled by a relationship""" @@ -2908,7 +2896,6 @@ class T2(BasicEntity): class CustomOperatorTest(fixtures.MappedTest, AssertsCompiledSQL): - """test op() in conjunction with join conditions""" run_create_tables = run_deletes = None @@ -3186,7 +3173,6 @@ class B(ComparableEntity): class ViewOnlyOverlappingNames(fixtures.MappedTest): - """'viewonly' mappings with overlapping PK column names.""" @classmethod @@ -3442,7 +3428,6 @@ def rel(): class ViewOnlyUniqueNames(fixtures.MappedTest): - """'viewonly' mappings with unique PK column names.""" @classmethod @@ -3544,7 +3529,6 @@ class C3(BasicEntity): class ViewOnlyLocalRemoteM2M(fixtures.TestBase): - """test that local-remote is correctly determined for m2m""" def test_local_remote(self, registry): @@ -3583,7 +3567,6 @@ class B: class ViewOnlyNonEquijoin(fixtures.MappedTest): - """'viewonly' mappings based on non-equijoins.""" @classmethod @@ -3645,7 +3628,6 @@ class Bar(ComparableEntity): class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest): - """'viewonly' mappings that contain the same 'remote' column twice""" @classmethod @@ -3719,7 +3701,6 @@ class Bar(ComparableEntity): class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest): - """'viewonly' mappings that contain the same 'local' column twice""" @classmethod @@ -3794,7 +3775,6 @@ class Bar(ComparableEntity): class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest): - """'viewonly' mappings with a complex join condition.""" @classmethod @@ -3996,7 +3976,6 @@ def go(): class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest): - """test a complex annotation using between(). 
Using declarative here as an integration test for the local() @@ -4613,7 +4592,6 @@ class B(Base): class SecondaryNestedJoinTest( fixtures.MappedTest, AssertsCompiledSQL, testing.AssertsExecutionResults ): - """test support for a relationship where the 'secondary' table is a compound join(). @@ -6381,7 +6359,6 @@ def go(): class RelationDeprecationTest(fixtures.MappedTest): - """test usage of the old 'relation' function.""" run_inserts = "once" diff --git a/test/orm/test_selectable.py b/test/orm/test_selectable.py index 3a7029110e4..d4ea0e29195 100644 --- a/test/orm/test_selectable.py +++ b/test/orm/test_selectable.py @@ -1,4 +1,5 @@ """Generic mapping to Select statements""" + import sqlalchemy as sa from sqlalchemy import column from sqlalchemy import Integer diff --git a/test/orm/test_transaction.py b/test/orm/test_transaction.py index d6f22622ea6..e502a888330 100644 --- a/test/orm/test_transaction.py +++ b/test/orm/test_transaction.py @@ -1285,7 +1285,6 @@ def test_concurrent_commit_persistent(self): class CleanSavepointTest(FixtureTest): - """test the behavior for [ticket:2452] - rollback on begin_nested() only expires objects tracked as being modified in that transaction. @@ -2625,12 +2624,14 @@ class A: self.session = Session( self.connection, - join_transaction_mode="create_savepoint" - if ( - self.join_mode.create_savepoint - or self.join_mode.create_savepoint_w_savepoint - ) - else "conditional_savepoint", + join_transaction_mode=( + "create_savepoint" + if ( + self.join_mode.create_savepoint + or self.join_mode.create_savepoint_w_savepoint + ) + else "conditional_savepoint" + ), ) def teardown_session(self): diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py index 0937c354f98..3b3175e10ec 100644 --- a/test/orm/test_unitofwork.py +++ b/test/orm/test_unitofwork.py @@ -1149,9 +1149,9 @@ def test_insert(self, eager_defaults): mp = self.mapper_registry.map_imperatively( Hoho, default_t, - eager_defaults="auto" - if eager_defaults.auto - else bool(eager_defaults), + eager_defaults=( + "auto" if eager_defaults.auto else bool(eager_defaults) + ), ) h1 = Hoho(hoho=althohoval) diff --git a/test/orm/test_unitofworkv2.py b/test/orm/test_unitofworkv2.py index 1a5b697b8ef..e01220d1150 100644 --- a/test/orm/test_unitofworkv2.py +++ b/test/orm/test_unitofworkv2.py @@ -2171,7 +2171,6 @@ class T(ComparableEntity): class LoadersUsingCommittedTest(UOWTest): - """Test that events which occur within a flush() get the same attribute loading behavior as on the outside of the flush, and that the unit of work itself uses the @@ -2260,7 +2259,6 @@ def _test_before_update_o2m(self, passive_updates): Address, User = self.classes.Address, self.classes.User class AvoidReferencialError(Exception): - """the test here would require ON UPDATE CASCADE on FKs for the flush to fully succeed; this exception is used to cancel the flush before we get that far. 
diff --git a/test/perf/many_table_reflection.py b/test/perf/many_table_reflection.py index d65c272430a..4fa768a74e2 100644 --- a/test/perf/many_table_reflection.py +++ b/test/perf/many_table_reflection.py @@ -41,9 +41,9 @@ def generate_table(meta: sa.MetaData, min_cols, max_cols, dialect_name): f"table_{table_num}_col_{i + 1}", *args, primary_key=i == 0, - comment=f"primary key of table_{table_num}" - if i == 0 - else None, + comment=( + f"primary key of table_{table_num}" if i == 0 else None + ), index=random.random() > 0.97 and i > 0, unique=random.random() > 0.97 and i > 0, ) diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index d6bc098964c..5756bb6927c 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -7525,7 +7525,6 @@ def test_val_and_null(self): class ResultMapTest(fixtures.TestBase): - """test the behavior of the 'entry stack' and the determination when the result_map needs to be populated. @@ -7740,9 +7739,9 @@ def test_select_wraps_for_translate_ambiguity(self): with mock.patch.object( dialect.statement_compiler, "translate_select_structure", - lambda self, to_translate, **kw: wrapped_again - if to_translate is stmt - else to_translate, + lambda self, to_translate, **kw: ( + wrapped_again if to_translate is stmt else to_translate + ), ): compiled = stmt.compile(dialect=dialect) @@ -7799,9 +7798,9 @@ def test_select_wraps_for_translate_ambiguity_dupe_cols(self): with mock.patch.object( dialect.statement_compiler, "translate_select_structure", - lambda self, to_translate, **kw: wrapped_again - if to_translate is stmt - else to_translate, + lambda self, to_translate, **kw: ( + wrapped_again if to_translate is stmt else to_translate + ), ): compiled = stmt.compile(dialect=dialect) diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py index 23ac87a2148..0b665b84da6 100644 --- a/test/sql/test_cte.py +++ b/test/sql/test_cte.py @@ -613,7 +613,7 @@ def test_order_by_group_by_label_w_scalar_subquery( stmt, "WITH anon_1 AS (SELECT test.a AS b FROM test %s b) " "SELECT (SELECT anon_1.b FROM anon_1) AS c" - % ("ORDER BY" if order_by == "order_by" else "GROUP BY") + % ("ORDER BY" if order_by == "order_by" else "GROUP BY"), # prior to the fix, the use_object version came out as: # "WITH anon_1 AS (SELECT test.a AS b FROM test " # "ORDER BY test.a) " diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py index bbfb3b07782..bcfdfcdb9c9 100644 --- a/test/sql/test_defaults.py +++ b/test/sql/test_defaults.py @@ -1234,7 +1234,6 @@ def test_col_w_nonoptional_sequence_non_autoinc_no_firing( class SpecialTypePKTest(fixtures.TestBase): - """test process_result_value in conjunction with primary key columns. 
Also tests that "autoincrement" checks are against diff --git a/test/sql/test_external_traversal.py b/test/sql/test_external_traversal.py index e474e75d756..0204d6e6fcb 100644 --- a/test/sql/test_external_traversal.py +++ b/test/sql/test_external_traversal.py @@ -54,7 +54,6 @@ class TraversalTest( fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL ): - """test ClauseVisitor's traversal, particularly its ability to copy and modify a ClauseElement in place.""" @@ -362,7 +361,6 @@ class CustomObj(Column): class BinaryEndpointTraversalTest(fixtures.TestBase): - """test the special binary product visit""" def _assert_traversal(self, expr, expected): @@ -443,7 +441,6 @@ def test_subquery(self): class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): - """test copy-in-place behavior of various ClauseElements.""" __dialect__ = "default" @@ -2716,7 +2713,6 @@ def test_splice_2(self): class SelectTest(fixtures.TestBase, AssertsCompiledSQL): - """tests the generative capability of Select""" __dialect__ = "default" @@ -2811,7 +2807,6 @@ def _NOTYET_test_execution_options_in_text(self): class ValuesBaseTest(fixtures.TestBase, AssertsCompiledSQL): - """Tests the generative capability of Insert, Update""" __dialect__ = "default" diff --git a/test/sql/test_insert_exec.py b/test/sql/test_insert_exec.py index e9eda0e5bd2..4c6c5407b5a 100644 --- a/test/sql/test_insert_exec.py +++ b/test/sql/test_insert_exec.py @@ -472,7 +472,6 @@ def test_no_inserted_pk_on_returning( class TableInsertTest(fixtures.TablesTest): - """test for consistent insert behavior across dialects regarding the inline() method, values() method, lower-case 't' tables. @@ -1766,9 +1765,11 @@ def test_no_sentinel_on_non_int_ss_function( Column( "id", Uuid(), - server_default=func.gen_random_uuid() - if default_type.server_side - else None, + server_default=( + func.gen_random_uuid() + if default_type.server_side + else None + ), default=uuid.uuid4 if default_type.client_side else None, primary_key=True, insert_sentinel=bool(add_insert_sentinel), diff --git a/test/sql/test_lambdas.py b/test/sql/test_lambdas.py index eed861fe17b..627310d8f17 100644 --- a/test/sql/test_lambdas.py +++ b/test/sql/test_lambdas.py @@ -413,9 +413,11 @@ def run_my_statement(parameter, add_criteria=False): stmt = lambda_stmt(lambda: select(tab)) stmt = stmt.add_criteria( - lambda s: s.where(tab.c.col > parameter) - if add_criteria - else s.where(tab.c.col == parameter), + lambda s: ( + s.where(tab.c.col > parameter) + if add_criteria + else s.where(tab.c.col == parameter) + ), ) stmt += lambda s: s.order_by(tab.c.id) @@ -437,9 +439,11 @@ def run_my_statement(parameter, add_criteria=False): stmt = lambda_stmt(lambda: select(tab)) stmt = stmt.add_criteria( - lambda s: s.where(tab.c.col > parameter) - if add_criteria - else s.where(tab.c.col == parameter), + lambda s: ( + s.where(tab.c.col > parameter) + if add_criteria + else s.where(tab.c.col == parameter) + ), track_on=[add_criteria], ) @@ -1945,9 +1949,9 @@ def test_detect_change_in_binds_tracking_negative(self): # lambda produces either "t1 IN vv" or "t2 IN qq" based on the # argument. 
will not produce a consistent cache key elem = lambdas.DeferredLambdaElement( - lambda tab: tab.c.q.in_(vv) - if tab.name == "t1" - else tab.c.q.in_(qq), + lambda tab: ( + tab.c.q.in_(vv) if tab.name == "t1" else tab.c.q.in_(qq) + ), roles.WhereHavingRole, lambda_args=(t1,), opts=lambdas.LambdaOptions(track_closure_variables=False), diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index aa3cec3dad3..3592bc6f006 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -4146,7 +4146,6 @@ def test_pickle_ck_binary_annotated_col(self, no_pickle_annotated): class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase): - """Test Column() construction.""" __dialect__ = "default" @@ -4562,7 +4561,6 @@ def test_dont_merge_column( class ColumnDefaultsTest(fixtures.TestBase): - """test assignment of default fixures to columns""" def _fixture(self, *arg, **kw): @@ -5792,9 +5790,11 @@ def test_fk_ref_local_referent_has_no_type(self, col_has_type): "b", metadata, Column("id", Integer, primary_key=True), - Column("aid", ForeignKey("a.id")) - if not col_has_type - else Column("aid", Integer, ForeignKey("a.id")), + ( + Column("aid", ForeignKey("a.id")) + if not col_has_type + else Column("aid", Integer, ForeignKey("a.id")) + ), ) fks = list( c for c in b.constraints if isinstance(c, ForeignKeyConstraint) diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py index 640e70a0a65..c0b5cb47d66 100644 --- a/test/sql/test_operators.py +++ b/test/sql/test_operators.py @@ -483,19 +483,24 @@ def test_associatives(self, op, reverse, negate): if negate: self.assert_compile( select(~expr), - f"SELECT NOT (t.q{opstring}t.p{opstring}{exprs}) " - "AS anon_1 FROM t" - if not reverse - else f"SELECT NOT ({exprs}{opstring}t.q{opstring}t.p) " - "AS anon_1 FROM t", + ( + f"SELECT NOT (t.q{opstring}t.p{opstring}{exprs}) " + "AS anon_1 FROM t" + if not reverse + else f"SELECT NOT ({exprs}{opstring}t.q{opstring}t.p) " + "AS anon_1 FROM t" + ), ) else: self.assert_compile( select(expr), - f"SELECT t.q{opstring}t.p{opstring}{exprs} AS anon_1 FROM t" - if not reverse - else f"SELECT {exprs}{opstring}t.q{opstring}t.p " - f"AS anon_1 FROM t", + ( + f"SELECT t.q{opstring}t.p{opstring}{exprs} " + "AS anon_1 FROM t" + if not reverse + else f"SELECT {exprs}{opstring}t.q{opstring}t.p " + "AS anon_1 FROM t" + ), ) @testing.combinations( @@ -565,9 +570,11 @@ def test_non_associatives(self, op, reverse, negate): self.assert_compile( select(~expr), - f"SELECT {str_expr} AS anon_1 FROM t" - if not reverse - else f"SELECT {str_expr} AS anon_1 FROM t", + ( + f"SELECT {str_expr} AS anon_1 FROM t" + if not reverse + else f"SELECT {str_expr} AS anon_1 FROM t" + ), ) else: if reverse: @@ -583,9 +590,11 @@ def test_non_associatives(self, op, reverse, negate): self.assert_compile( select(expr), - f"SELECT {str_expr} AS anon_1 FROM t" - if not reverse - else f"SELECT {str_expr} AS anon_1 FROM t", + ( + f"SELECT {str_expr} AS anon_1 FROM t" + if not reverse + else f"SELECT {str_expr} AS anon_1 FROM t" + ), ) @@ -650,9 +659,11 @@ def test_modulus(self, modulus, paramstyle): col = column("somecol", modulus()) self.assert_compile( col.modulus(), - "somecol %%" - if paramstyle in ("format", "pyformat") - else "somecol %", + ( + "somecol %%" + if paramstyle in ("format", "pyformat") + else "somecol %" + ), dialect=default.DefaultDialect(paramstyle=paramstyle), ) @@ -667,9 +678,11 @@ def test_modulus_prefix(self, modulus, paramstyle): col = column("somecol", modulus()) self.assert_compile( 
col.modulus_prefix(), - "%% somecol" - if paramstyle in ("format", "pyformat") - else "% somecol", + ( + "%% somecol" + if paramstyle in ("format", "pyformat") + else "% somecol" + ), dialect=default.DefaultDialect(paramstyle=paramstyle), ) @@ -1272,7 +1285,6 @@ def _adapt_expression(self, op, other_comparator): class BooleanEvalTest(fixtures.TestBase, testing.AssertsCompiledSQL): - """test standalone booleans being wrapped in an AsBoolean, as well as true/false compilation.""" @@ -1433,7 +1445,6 @@ def test_twelve(self): class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL): - """test interaction of and_()/or_() with boolean , null constants""" __dialect__ = default.DefaultDialect(supports_native_boolean=True) diff --git a/test/sql/test_query.py b/test/sql/test_query.py index 54943897e11..5d7788fcf1c 100644 --- a/test/sql/test_query.py +++ b/test/sql/test_query.py @@ -1076,7 +1076,6 @@ def test_select_distinct_limit_offset(self, connection): class CompoundTest(fixtures.TablesTest): - """test compound statements like UNION, INTERSECT, particularly their ability to nest on different databases.""" @@ -1463,7 +1462,6 @@ def test_composite_alias(self, connection): class JoinTest(fixtures.TablesTest): - """Tests join execution. The compiled SQL emitted by the dialect might be ANSI joins or diff --git a/test/sql/test_quote.py b/test/sql/test_quote.py index 08c9c4207ef..51382b19b4a 100644 --- a/test/sql/test_quote.py +++ b/test/sql/test_quote.py @@ -858,7 +858,6 @@ def test_quote_flag_propagate_anon_label(self): class PreparerTest(fixtures.TestBase): - """Test the db-agnostic quoting services of IdentifierPreparer.""" def test_unformat(self): diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py index a5d1befa206..be1f57121b5 100644 --- a/test/sql/test_resultset.py +++ b/test/sql/test_resultset.py @@ -1301,11 +1301,15 @@ def test_label_against_star( stmt = select( *[ - text("*") - if colname == "*" - else users.c.user_name.label("name_label") - if colname == "name_label" - else users.c[colname] + ( + text("*") + if colname == "*" + else ( + users.c.user_name.label("name_label") + if colname == "name_label" + else users.c[colname] + ) + ) for colname in cols ] ) diff --git a/test/sql/test_returning.py b/test/sql/test_returning.py index 4d55c435db1..6cccd01d4a9 100644 --- a/test/sql/test_returning.py +++ b/test/sql/test_returning.py @@ -690,7 +690,6 @@ def test_insert(self, connection): class KeyReturningTest(fixtures.TablesTest, AssertsExecutionResults): - """test returning() works with columns that define 'key'.""" __requires__ = ("insert_returning",) @@ -1561,9 +1560,11 @@ def test_upsert_data_w_defaults(self, connection, update_cols): config, t1, (t1.c.id, t1.c.insdef, t1.c.data), - set_lambda=(lambda excluded: {"data": excluded.data + " excluded"}) - if update_cols - else None, + set_lambda=( + (lambda excluded: {"data": excluded.data + " excluded"}) + if update_cols + else None + ), ) upserted_rows = connection.execute( diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py index d3b7b47841f..0c0c23b8700 100644 --- a/test/sql/test_selectable.py +++ b/test/sql/test_selectable.py @@ -1,4 +1,5 @@ """Test various algorithmic properties of selectables.""" + from itertools import zip_longest from sqlalchemy import and_ @@ -1962,7 +1963,6 @@ def test_fk_join(self): class AnonLabelTest(fixtures.TestBase): - """Test behaviors fixed by [ticket:2168].""" def test_anon_labels_named_column(self): diff --git a/test/sql/test_text.py b/test/sql/test_text.py index 
de40c8f4298..301ad9ffdf8 100644 --- a/test/sql/test_text.py +++ b/test/sql/test_text.py @@ -71,7 +71,6 @@ def test_text_adds_to_result_map(self): class SelectCompositionTest(fixtures.TestBase, AssertsCompiledSQL): - """test the usage of text() implicit within the select() construct when strings are passed.""" diff --git a/test/sql/test_types.py b/test/sql/test_types.py index eb91d9c4cdf..76249f56174 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -1417,9 +1417,11 @@ def col_to_bind(col): # on the way in here eq_( conn.execute(new_stmt).fetchall(), - [("x", "BIND_INxBIND_OUT")] - if coerce_fn is type_coerce - else [("x", "xBIND_OUT")], + ( + [("x", "BIND_INxBIND_OUT")] + if coerce_fn is type_coerce + else [("x", "xBIND_OUT")] + ), ) def test_cast_bind(self, connection): @@ -1441,9 +1443,11 @@ def _test_bind(self, coerce_fn, conn): eq_( conn.execute(stmt).fetchall(), - [("x", "BIND_INxBIND_OUT")] - if coerce_fn is type_coerce - else [("x", "xBIND_OUT")], + ( + [("x", "BIND_INxBIND_OUT")] + if coerce_fn is type_coerce + else [("x", "xBIND_OUT")] + ), ) def test_cast_existing_typed(self, connection): @@ -3876,7 +3880,6 @@ def get_col_spec(self, **kw): class NumericRawSQLTest(fixtures.TestBase): - """Test what DBAPIs and dialects return without any typing information supplied at the SQLA level. @@ -4007,7 +4010,6 @@ def test_integer_literal_processor(self): class BooleanTest( fixtures.TablesTest, AssertsExecutionResults, AssertsCompiledSQL ): - """test edge cases for booleans. Note that the main boolean test suite is now in testing/suite/test_types.py diff --git a/test/typing/plain_files/ext/asyncio/async_sessionmaker.py b/test/typing/plain_files/ext/asyncio/async_sessionmaker.py index 664ff0411df..d9997141a10 100644 --- a/test/typing/plain_files/ext/asyncio/async_sessionmaker.py +++ b/test/typing/plain_files/ext/asyncio/async_sessionmaker.py @@ -2,6 +2,7 @@ for asynchronous ORM use. """ + from __future__ import annotations import asyncio diff --git a/test/typing/plain_files/orm/issue_9340.py b/test/typing/plain_files/orm/issue_9340.py index 72dc72df1ec..a4fe8c08831 100644 --- a/test/typing/plain_files/orm/issue_9340.py +++ b/test/typing/plain_files/orm/issue_9340.py @@ -10,8 +10,7 @@ from sqlalchemy.orm import with_polymorphic -class Base(DeclarativeBase): - ... +class Base(DeclarativeBase): ... class Message(Base): diff --git a/test/typing/plain_files/orm/mapped_covariant.py b/test/typing/plain_files/orm/mapped_covariant.py index 1a17ee3848b..9f964021b31 100644 --- a/test/typing/plain_files/orm/mapped_covariant.py +++ b/test/typing/plain_files/orm/mapped_covariant.py @@ -24,8 +24,7 @@ class ChildProtocol(Protocol): # Read-only for simplicity, mutable protocol members are complicated, # see https://mypy.readthedocs.io/en/latest/common_issues.html#covariant-subtyping-of-mutable-protocol-members-is-rejected @property - def parent(self) -> Mapped[ParentProtocol]: - ... + def parent(self) -> Mapped[ParentProtocol]: ... def get_parent_name(child: ChildProtocol) -> str: diff --git a/test/typing/plain_files/orm/relationship.py b/test/typing/plain_files/orm/relationship.py index d0ab35249d1..6bfe19cc4e8 100644 --- a/test/typing/plain_files/orm/relationship.py +++ b/test/typing/plain_files/orm/relationship.py @@ -1,6 +1,7 @@ """this suite experiments with other kinds of relationship syntaxes. 
""" + from __future__ import annotations import typing diff --git a/test/typing/plain_files/orm/trad_relationship_uselist.py b/test/typing/plain_files/orm/trad_relationship_uselist.py index 8d7d7e71a2e..9282181f01b 100644 --- a/test/typing/plain_files/orm/trad_relationship_uselist.py +++ b/test/typing/plain_files/orm/trad_relationship_uselist.py @@ -2,6 +2,7 @@ """ + import typing from typing import cast from typing import Dict diff --git a/test/typing/plain_files/orm/traditional_relationship.py b/test/typing/plain_files/orm/traditional_relationship.py index 02afc7c8012..bd6bada528c 100644 --- a/test/typing/plain_files/orm/traditional_relationship.py +++ b/test/typing/plain_files/orm/traditional_relationship.py @@ -5,6 +5,7 @@ if no uselists are present. """ + import typing from typing import List from typing import Set diff --git a/test/typing/plain_files/sql/common_sql_element.py b/test/typing/plain_files/sql/common_sql_element.py index 57aae8fac81..bc9faca96b4 100644 --- a/test/typing/plain_files/sql/common_sql_element.py +++ b/test/typing/plain_files/sql/common_sql_element.py @@ -6,7 +6,6 @@ """ - from __future__ import annotations from sqlalchemy import asc diff --git a/tox.ini b/tox.ini index 7919ef338dd..cd91a51a7ea 100644 --- a/tox.ini +++ b/tox.ini @@ -227,7 +227,7 @@ deps= # in case it requires a version pin pydocstyle pygments - black==23.3.0 + black==24.1.1 slotscheck>=0.17.0 # required by generate_tuple_map_overloads From 33393ab4219ca72f0a7b586c44855a6e2321b130 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 5 Feb 2024 16:06:28 -0500 Subject: [PATCH 091/544] run postfetch_post_update for version_id_col even if delete Fixed issue where using :meth:`_orm.Session.delete` along with the :paramref:`_orm.Mapper.version_id_col` feature would fail to use the correct version identifier in the case that an additional UPDATE were emitted against the target object as a result of the use of :paramref:`_orm.relationship.post_update` on the object. The issue is similar to :ticket:`10800` just fixed in version 2.0.25 for the case of updates alone. Fixes: #10967 Change-Id: I959e9a2cc3e750e86e8de7b12b28ee1e819ed6d8 (cherry picked from commit 367e0e27a2e6930c66f2f98fbe477f9b1f06e2ca) --- doc/build/changelog/unreleased_20/10967.rst | 11 ++++++ lib/sqlalchemy/orm/persistence.py | 25 ++++++++---- test/orm/test_versioning.py | 44 +++++++++++++++++++-- 3 files changed, 69 insertions(+), 11 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10967.rst diff --git a/doc/build/changelog/unreleased_20/10967.rst b/doc/build/changelog/unreleased_20/10967.rst new file mode 100644 index 00000000000..b0ed4d1bc06 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10967.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, orm + :tickets: 10967 + + Fixed issue where using :meth:`_orm.Session.delete` along with the + :paramref:`_orm.Mapper.version_id_col` feature would fail to use the + correct version identifier in the case that an additional UPDATE were + emitted against the target object as a result of the use of + :paramref:`_orm.relationship.post_update` on the object. The issue is + similar to :ticket:`10800` just fixed in version 2.0.25 for the case of + updates alone. 
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index 0c2529d5d13..a455957c3f1 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -1570,16 +1570,25 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, states): def _postfetch_post_update( mapper, uowtransaction, table, state, dict_, result, params ): - if uowtransaction.is_deleted(state): - return - - prefetch_cols = result.context.compiled.prefetch - postfetch_cols = result.context.compiled.postfetch - - if ( + needs_version_id = ( mapper.version_id_col is not None and mapper.version_id_col in mapper._cols_by_table[table] - ): + ) + + if not uowtransaction.is_deleted(state): + # post updating after a regular INSERT or UPDATE, do a full postfetch + prefetch_cols = result.context.compiled.prefetch + postfetch_cols = result.context.compiled.postfetch + elif needs_version_id: + # post updating before a DELETE with a version_id_col, need to + # postfetch just version_id_col + prefetch_cols = postfetch_cols = () + else: + # post updating before a DELETE without a version_id_col, + # don't need to postfetch + return + + if needs_version_id: prefetch_cols = list(prefetch_cols) + [mapper.version_id_col] refresh_flush = bool(mapper.class_manager.dispatch.refresh_flush) diff --git a/test/orm/test_versioning.py b/test/orm/test_versioning.py index a0325059a81..1cf3140a56c 100644 --- a/test/orm/test_versioning.py +++ b/test/orm/test_versioning.py @@ -2032,8 +2032,6 @@ def test_round_trip(self, fixture_session): class PostUpdateVersioningTest(fixtures.DeclarativeMappedTest): - """test for #10800""" - @classmethod def setup_classes(cls): Base = cls.DeclarativeBasic @@ -2063,7 +2061,8 @@ class Parent(Base): "version_id_col": version_id, } - def test_bumped_version_id(self): + def test_bumped_version_id_on_update(self): + """test for #10800""" User, Parent = self.classes("User", "Parent") session = fixture_session() @@ -2115,3 +2114,42 @@ def test_bumped_version_id(self): ], ), ) + + def test_bumped_version_id_on_delete(self): + """test for #10967""" + + User, Parent = self.classes("User", "Parent") + + session = fixture_session() + u1 = User(id=1) + p1 = Parent(id=1, updated_by=u1) + session.add(u1) + session.add(p1) + + session.flush() + + session.delete(p1) + + with self.sql_execution_asserter(testing.db) as asserter: + session.commit() + + asserter.assert_( + CompiledSQL( + "UPDATE parent SET version_id=:version_id, " + "updated_by_id=:updated_by_id WHERE parent.id = :parent_id " + "AND parent.version_id = :parent_version_id", + [ + { + "version_id": 2, + "updated_by_id": None, + "parent_id": 1, + "parent_version_id": 1, + } + ], + ), + CompiledSQL( + "DELETE FROM parent WHERE parent.id = :id AND " + "parent.version_id = :version_id", + [{"id": 1, "version_id": 2}], + ), + ) From e766aa473f983dfbf926246ec14265220aa97103 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 5 Feb 2024 12:02:19 -0500 Subject: [PATCH 092/544] add additional IMV UUID tests, fix pymssql case Fixed an issue regarding the use of the :class:`.Uuid` datatype with the :paramref:`.Uuid.as_uuid` parameter set to False, when using the pymssql dialect. ORM-optimized INSERT statements (e.g. the "insertmanyvalues" feature) would not correctly align primary key UUID values for bulk INSERT statements, resulting in errors. This change also adds a small degree of generalization to the Uuid datatype by adding the native/non-native compilation conditional to the base compiler. 
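Purely for illustration (this sketch is not part of the patch), a mapping that exercises the path in question is roughly as follows; the ``Record`` model name, the client-side string-UUID default and the in-memory SQLite URL standing in for a real ``mssql+pymssql://`` connection are all assumptions::

    import uuid

    from sqlalchemy import create_engine
    from sqlalchemy import Uuid
    from sqlalchemy.orm import DeclarativeBase
    from sqlalchemy.orm import Mapped
    from sqlalchemy.orm import mapped_column
    from sqlalchemy.orm import Session


    class Base(DeclarativeBase):
        pass


    class Record(Base):
        __tablename__ = "record"

        # as_uuid=False: plain string values go in and come back out
        id: Mapped[str] = mapped_column(
            Uuid(as_uuid=False),
            primary_key=True,
            default=lambda: str(uuid.uuid4()),
        )
        data: Mapped[str]


    # a real reproduction would target mssql+pymssql://...; SQLite is a
    # stand-in so the sketch runs anywhere
    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        # a multi-row ORM INSERT; where the dialect uses RETURNING for
        # this (the "insertmanyvalues" feature), the returned rows must be
        # matched back up against these client-generated UUID strings
        session.add_all([Record(data=f"row {i}") for i in range(10)])
        session.commit()
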
Patch is originally part of Ib920871102b9b64f2cba9697f5cb72b6263e4ed8 which is implementing native UUID for mariadb in 2.1 only. Change-Id: I96cbec5c0ece312b345206aa5a5db2ffcf732d41 (cherry picked from commit 9b1c9d5d2e2f9a1e83cf80ca5cd834de213e59ea) --- .../unreleased_20/uuid_imv_fixes.rst | 20 ++++++++ lib/sqlalchemy/dialects/mssql/base.py | 50 ++++++++----------- lib/sqlalchemy/sql/compiler.py | 9 +++- lib/sqlalchemy/sql/sqltypes.py | 25 ++++++++++ lib/sqlalchemy/testing/requirements.py | 5 +- lib/sqlalchemy/testing/suite/test_insert.py | 6 +++ test/sql/test_insert_exec.py | 18 ++++++- 7 files changed, 98 insertions(+), 35 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/uuid_imv_fixes.rst diff --git a/doc/build/changelog/unreleased_20/uuid_imv_fixes.rst b/doc/build/changelog/unreleased_20/uuid_imv_fixes.rst new file mode 100644 index 00000000000..79aa132b21e --- /dev/null +++ b/doc/build/changelog/unreleased_20/uuid_imv_fixes.rst @@ -0,0 +1,20 @@ +.. change:: + :tags: bug, mssql + + Fixed an issue regarding the use of the :class:`.Uuid` datatype with the + :paramref:`.Uuid.as_uuid` parameter set to False, when using the pymssql + dialect. ORM-optimized INSERT statements (e.g. the "insertmanyvalues" + feature) would not correctly align primary key UUID values for bulk INSERT + statements, resulting in errors. Similar issues were fixed for the + PostgreSQL drivers as well. + + +.. change:: + :tags: bug, postgresql + + Fixed an issue regarding the use of the :class:`.Uuid` datatype with the + :paramref:`.Uuid.as_uuid` parameter set to False, when using the pymssql + dialect. ORM-optimized INSERT statements (e.g. the "insertmanyvalues" + feature) would not correctly align primary key UUID values for bulk INSERT + statements, resulting in errors. Similar issues were fixed for the + pymssql driver as well. diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index e015dccdc99..83327899fa9 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -1426,7 +1426,6 @@ class ROWVERSION(TIMESTAMP): class NTEXT(sqltypes.UnicodeText): - """MSSQL NTEXT type, for variable-length unicode text up to 2^30 characters.""" @@ -1557,36 +1556,26 @@ def process(value): return process def _sentinel_value_resolver(self, dialect): - """Return a callable that will receive the uuid object or string - as it is normally passed to the DB in the parameter set, after - bind_processor() is called. Convert this value to match - what it would be as coming back from an INSERT..OUTPUT inserted. + if not self.native_uuid: + # dealing entirely with strings going in and out of + # CHAR(32) + return None - for the UUID type, there are four varieties of settings so here - we seek to convert to the string or UUID representation that comes - back from the driver. - - """ - character_based_uuid = ( - not dialect.supports_native_uuid or not self.native_uuid - ) + # true if we expect the returned UUID values to be strings + # pymssql sends UUID objects back, pyodbc sends strings, + # however pyodbc converts them to uppercase coming back, so + # need special logic here + character_based_uuid = not dialect.supports_native_uuid if character_based_uuid: - if self.native_uuid: - # for pyodbc, uuid.uuid() objects are accepted for incoming - # data, as well as strings. but the driver will always return - # uppercase strings in result sets. 
- def process(value): - return str(value).upper() - - else: - - def process(value): - return str(value) + # we sent UUID objects in all cases, see bind_processor() + def process(uuid_value): + return str(uuid_value).upper() return process + elif not self.as_uuid: + return _python_UUID else: - # for pymssql, we get uuid.uuid() objects back. return None @@ -2483,10 +2472,12 @@ def _render_json_extract_from_binary(self, binary, operator, **kw): type_expression = "ELSE CAST(JSON_VALUE(%s, %s) AS %s)" % ( self.process(binary.left, **kw), self.process(binary.right, **kw), - "FLOAT" - if isinstance(binary.type, sqltypes.Float) - else "NUMERIC(%s, %s)" - % (binary.type.precision, binary.type.scale), + ( + "FLOAT" + if isinstance(binary.type, sqltypes.Float) + else "NUMERIC(%s, %s)" + % (binary.type.precision, binary.type.scale) + ), ) elif binary.type._type_affinity is sqltypes.Boolean: # the NULL handling is particularly weird with boolean, so @@ -2522,7 +2513,6 @@ def visit_sequence(self, seq, **kw): class MSSQLStrictCompiler(MSSQLCompiler): - """A subclass of MSSQLCompiler which disables the usage of bind parameters where not allowed natively by MS-SQL. diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 6c82bab8316..e148ff60522 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -5747,7 +5747,6 @@ def visit_insert( returning_cols = self.implicit_returning or insert_stmt._returning if returning_cols: add_sentinel_cols = crud_params_struct.use_sentinel_columns - if add_sentinel_cols is not None: assert use_insertmanyvalues @@ -7052,6 +7051,9 @@ def visit_NVARCHAR(self, type_, **kw): def visit_TEXT(self, type_, **kw): return self._render_string_type(type_, "TEXT") + def visit_UUID(self, type_, **kw): + return "UUID" + def visit_BLOB(self, type_, **kw): return "BLOB" @@ -7065,7 +7067,10 @@ def visit_BOOLEAN(self, type_, **kw): return "BOOLEAN" def visit_uuid(self, type_, **kw): - return self._render_string_type(type_, "CHAR", length_override=32) + if not type_.native_uuid or not self.dialect.supports_native_uuid: + return self._render_string_type(type_, "CHAR", length_override=32) + else: + return self.visit_UUID(type_, **kw) def visit_large_binary(self, type_, **kw): return self.visit_BLOB(type_, **kw) diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index 0963e8ed200..b359fe97bd9 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -3723,6 +3723,31 @@ def process(value): return process + def _sentinel_value_resolver(self, dialect): + """For the "insertmanyvalues" feature only, return a callable that + will receive the uuid object or string + as it is normally passed to the DB in the parameter set, after + bind_processor() is called. Convert this value to match + what it would be as coming back from a RETURNING or similar + statement for the given backend. + + Individual dialects and drivers may need their own implementations + based on how their UUID types send data and how the drivers behave + (e.g. 
pyodbc) + + """ + if not self.native_uuid or not dialect.supports_native_uuid: + # dealing entirely with strings going in and out of + # CHAR(32) + return None + + elif self.as_uuid: + # we sent UUID objects and we are getting UUID objects back + return None + else: + # we sent strings and we are getting UUID objects back + return _python_UUID + class UUID(Uuid[_UUID_RETURN], type_api.NativeForEmulated): diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index 31aac741d48..c5dc52be885 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -62,7 +62,10 @@ def index_ddl_if_exists(self): def uuid_data_type(self): """Return databases that support the UUID datatype.""" - return exclusions.closed() + return exclusions.skip_if( + lambda config: not config.db.dialect.supports_native_uuid, + "backend does not have a UUID datatype", + ) @property def foreign_keys(self): diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index cc30945cab6..09e94733651 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -551,6 +551,12 @@ def test_insert_w_floats( uuid.uuid4(), testing.requires.uuid_data_type, ), + ( + "generic_native_uuid_str", + Uuid(as_uuid=False, native_uuid=True), + str(uuid.uuid4()), + testing.requires.uuid_data_type, + ), ("UUID", UUID(), uuid.uuid4(), testing.requires.uuid_data_type), ( "LargeBinary1", diff --git a/test/sql/test_insert_exec.py b/test/sql/test_insert_exec.py index e9eda0e5bd2..b60c5cfec9a 100644 --- a/test/sql/test_insert_exec.py +++ b/test/sql/test_insert_exec.py @@ -1445,6 +1445,7 @@ def test_invalid_identities( (ARRAY(Integer()), testing.requires.array_type), DateTime(), Uuid(), + Uuid(native_uuid=False), argnames="datatype", ) def test_inserts_w_all_nulls( @@ -1987,6 +1988,8 @@ def test_sentinel_col_configurations( "return_type", ["include_sentinel", "default_only", "return_defaults"] ) @testing.variation("add_sentinel_flag_to_col", [True, False]) + @testing.variation("native_uuid", [True, False]) + @testing.variation("as_uuid", [True, False]) def test_sentinel_on_non_autoinc_primary_key( self, metadata, @@ -1995,8 +1998,13 @@ def test_sentinel_on_non_autoinc_primary_key( sort_by_parameter_order, randomize_returning, add_sentinel_flag_to_col, + native_uuid, + as_uuid, ): uuids = [uuid.uuid4() for i in range(10)] + if not as_uuid: + uuids = [str(u) for u in uuids] + _some_uuids = iter(uuids) t1 = Table( @@ -2004,7 +2012,7 @@ def test_sentinel_on_non_autoinc_primary_key( metadata, Column( "id", - Uuid(), + Uuid(native_uuid=bool(native_uuid), as_uuid=bool(as_uuid)), default=functools.partial(next, _some_uuids), primary_key=True, insert_sentinel=bool(add_sentinel_flag_to_col), @@ -2096,6 +2104,8 @@ def test_sentinel_on_non_autoinc_primary_key( else: return_type.fail() + @testing.variation("native_uuid", [True, False]) + @testing.variation("as_uuid", [True, False]) def test_client_composite_pk( self, metadata, @@ -2103,15 +2113,19 @@ def test_client_composite_pk( randomize_returning, sort_by_parameter_order, warn_for_downgrades, + native_uuid, + as_uuid, ): uuids = [uuid.uuid4() for i in range(10)] + if not as_uuid: + uuids = [str(u) for u in uuids] t1 = Table( "data", metadata, Column( "id1", - Uuid(), + Uuid(as_uuid=bool(as_uuid), native_uuid=bool(native_uuid)), default=functools.partial(next, iter(uuids)), primary_key=True, ), From 153f287b9949462ec29d66fc9b329d0144a6ca7c Mon Sep 17 
00:00:00 2001 From: Mike Bayer Date: Thu, 18 Jan 2024 12:47:02 -0500 Subject: [PATCH 093/544] include cls locals in annotation evaluate Fixed issue where it was not possible to use a type (such as an enum) within a :class:`_orm.Mapped` container type if that type were declared locally within the class body. The scope of locals used for the eval now includes that of the class body itself. In addition, the expression within :class:`_orm.Mapped` may also refer to the class name itself, if used as a string or with future annotations mode. Fixes: #10899 Change-Id: Id4d07499558e457e63b483ff44c0972d9265409d (cherry picked from commit e9a05cf88811c4c4ca51b8103539a7727630d2f0) --- doc/build/changelog/unreleased_20/10899.rst | 10 +++ lib/sqlalchemy/util/typing.py | 16 +++- .../declarative/test_tm_future_annotations.py | 83 +++++++++++++++++++ .../test_tm_future_annotations_sync.py | 40 +++++++++ test/orm/declarative/test_typed_mapping.py | 40 +++++++++ 5 files changed, 187 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10899.rst diff --git a/doc/build/changelog/unreleased_20/10899.rst b/doc/build/changelog/unreleased_20/10899.rst new file mode 100644 index 00000000000..692381323ee --- /dev/null +++ b/doc/build/changelog/unreleased_20/10899.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, orm + :tickets: 10899 + + Fixed issue where it was not possible to use a type (such as an enum) + within a :class:`_orm.Mapped` container type if that type were declared + locally within the class body. The scope of locals used for the eval now + includes that of the class body itself. In addition, the expression within + :class:`_orm.Mapped` may also refer to the class name itself, if used as a + string or with future annotations mode. diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index a7d2a340e7d..2d9e2250a8b 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -154,7 +154,7 @@ def de_stringify_annotation( annotation = str_cleanup_fn(annotation, originating_module) annotation = eval_expression( - annotation, originating_module, locals_=locals_ + annotation, originating_module, locals_=locals_, in_class=cls ) if ( @@ -207,6 +207,7 @@ def eval_expression( module_name: str, *, locals_: Optional[Mapping[str, Any]] = None, + in_class: Optional[Type[Any]] = None, ) -> Any: try: base_globals: Dict[str, Any] = sys.modules[module_name].__dict__ @@ -217,7 +218,18 @@ def eval_expression( ) from ke try: - annotation = eval(expression, base_globals, locals_) + if in_class is not None: + cls_namespace = dict(in_class.__dict__) + cls_namespace.setdefault(in_class.__name__, in_class) + + # see #10899. We want the locals/globals to take precedence + # over the class namespace in this context, even though this + # is not the usual way variables would resolve. 
+ cls_namespace.update(base_globals) + + annotation = eval(expression, cls_namespace, locals_) + else: + annotation = eval(expression, base_globals, locals_) except Exception as err: raise NameError( f"Could not de-stringify annotation {expression!r}" diff --git a/test/orm/declarative/test_tm_future_annotations.py b/test/orm/declarative/test_tm_future_annotations.py index 833518a4275..e3b5df0ad48 100644 --- a/test/orm/declarative/test_tm_future_annotations.py +++ b/test/orm/declarative/test_tm_future_annotations.py @@ -8,6 +8,7 @@ from __future__ import annotations +import enum from typing import ClassVar from typing import Dict from typing import List @@ -29,8 +30,11 @@ from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.orm import relationship +from sqlalchemy.sql import sqltypes +from sqlalchemy.testing import eq_ from sqlalchemy.testing import expect_raises_message from sqlalchemy.testing import is_ +from sqlalchemy.testing import is_true from .test_typed_mapping import expect_annotation_syntax_error from .test_typed_mapping import MappedColumnTest as _MappedColumnTest from .test_typed_mapping import RelationshipLHSTest as _RelationshipLHSTest @@ -112,6 +116,85 @@ class Foo(decl_base): select(Foo), "SELECT foo.id, foo.data, foo.data2 FROM foo" ) + def test_type_favors_outer(self, decl_base): + """test #10899, that we maintain favoring outer names vs. inner. + this is for backwards compatibility as well as what people + usually expect regarding the names of attributes in the class. + + """ + + class User(decl_base): + __tablename__ = "user" + + id: Mapped[int] = mapped_column(primary_key=True) + uuid: Mapped[uuid.UUID] = mapped_column() + + is_true(isinstance(User.__table__.c.uuid.type, sqltypes.Uuid)) + + def test_type_inline_cls_qualified(self, decl_base): + """test #10899, where we test that we can refer to the class name + directly to refer to class-bound elements. + + """ + + class User(decl_base): + __tablename__ = "user" + + class Role(enum.Enum): + admin = "admin" + user = "user" + + id: Mapped[int] = mapped_column(primary_key=True) + role: Mapped[User.Role] + + is_true(isinstance(User.__table__.c.role.type, sqltypes.Enum)) + eq_(User.__table__.c.role.type.length, 5) + is_(User.__table__.c.role.type.enum_class, User.Role) + + def test_type_inline_disambiguate(self, decl_base): + """test #10899, where we test that we can refer to an inner name + that's not in conflict directly without qualification. + + """ + + class User(decl_base): + __tablename__ = "user" + + class Role(enum.Enum): + admin = "admin" + user = "user" + + id: Mapped[int] = mapped_column(primary_key=True) + role: Mapped[Role] + + is_true(isinstance(User.__table__.c.role.type, sqltypes.Enum)) + eq_(User.__table__.c.role.type.length, 5) + is_(User.__table__.c.role.type.enum_class, User.Role) + eq_(User.__table__.c.role.type.name, "role") # and not 'enum' + + def test_type_inner_can_be_qualified(self, decl_base): + """test #10899, same test as that of Role, using it to qualify against + a global variable with the same name. 
+ + """ + + global SomeGlobalName + SomeGlobalName = None + + class User(decl_base): + __tablename__ = "user" + + class SomeGlobalName(enum.Enum): + admin = "admin" + user = "user" + + id: Mapped[int] = mapped_column(primary_key=True) + role: Mapped[User.SomeGlobalName] + + is_true(isinstance(User.__table__.c.role.type, sqltypes.Enum)) + eq_(User.__table__.c.role.type.length, 5) + is_(User.__table__.c.role.type.enum_class, User.SomeGlobalName) + def test_indirect_mapped_name_local_level(self, decl_base): """test #8759. diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index 39ce5051bab..9b55827d499 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -192,6 +192,46 @@ class Foo(decl_base): else: eq_(Foo.__table__.c.data.default.arg, 5) + def test_type_inline_declaration(self, decl_base): + """test #10899""" + + class User(decl_base): + __tablename__ = "user" + + class Role(enum.Enum): + admin = "admin" + user = "user" + + id: Mapped[int] = mapped_column(primary_key=True) + role: Mapped[Role] + + is_true(isinstance(User.__table__.c.role.type, Enum)) + eq_(User.__table__.c.role.type.length, 5) + is_(User.__table__.c.role.type.enum_class, User.Role) + eq_(User.__table__.c.role.type.name, "role") # and not 'enum' + + def test_type_uses_inner_when_present(self, decl_base): + """test #10899, that we use inner name when appropriate""" + + class Role(enum.Enum): + foo = "foo" + bar = "bar" + + class User(decl_base): + __tablename__ = "user" + + class Role(enum.Enum): + admin = "admin" + user = "user" + + id: Mapped[int] = mapped_column(primary_key=True) + role: Mapped[Role] + + is_true(isinstance(User.__table__.c.role.type, Enum)) + eq_(User.__table__.c.role.type.length, 5) + is_(User.__table__.c.role.type.enum_class, User.Role) + eq_(User.__table__.c.role.type.name, "role") # and not 'enum' + def test_legacy_declarative_base(self): typ = VARCHAR(50) Base = declarative_base(type_annotation_map={str: typ}) diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index c61dceea1ff..ba8ab455ca1 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -183,6 +183,46 @@ class Foo(decl_base): else: eq_(Foo.__table__.c.data.default.arg, 5) + def test_type_inline_declaration(self, decl_base): + """test #10899""" + + class User(decl_base): + __tablename__ = "user" + + class Role(enum.Enum): + admin = "admin" + user = "user" + + id: Mapped[int] = mapped_column(primary_key=True) + role: Mapped[Role] + + is_true(isinstance(User.__table__.c.role.type, Enum)) + eq_(User.__table__.c.role.type.length, 5) + is_(User.__table__.c.role.type.enum_class, User.Role) + eq_(User.__table__.c.role.type.name, "role") # and not 'enum' + + def test_type_uses_inner_when_present(self, decl_base): + """test #10899, that we use inner name when appropriate""" + + class Role(enum.Enum): + foo = "foo" + bar = "bar" + + class User(decl_base): + __tablename__ = "user" + + class Role(enum.Enum): + admin = "admin" + user = "user" + + id: Mapped[int] = mapped_column(primary_key=True) + role: Mapped[Role] + + is_true(isinstance(User.__table__.c.role.type, Enum)) + eq_(User.__table__.c.role.type.length, 5) + is_(User.__table__.c.role.type.enum_class, User.Role) + eq_(User.__table__.c.role.type.name, "role") # and not 'enum' + def test_legacy_declarative_base(self): typ = VARCHAR(50) Base = 
declarative_base(type_annotation_map={str: typ}) From 00ba18d6fa9d52f1cfb710b8581aef9cd5f75c25 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 6 Feb 2024 18:51:14 +0100 Subject: [PATCH 094/544] update .git-blame-ignore-revs to exclude black update commit Change-Id: Ia713fe6e880c3eba50386bcc1b04ca518160d7d5 --- .git-blame-ignore-revs | 1 + 1 file changed, 1 insertion(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index ec34535f218..b7b845c510d 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -8,3 +8,4 @@ 1e1a38e7801f410f244e4bbb44ec795ae152e04e # initial blackification 1e278de4cc9a4181e0747640a960e80efcea1ca9 # follow up mass style changes 058c230cea83811c3bebdd8259988c5c501f4f7e # Update black to v23.3.0 and flake8 to v6 +573d004e5f210a199d8b25335c71f973fee21a4b # Update black to 24.1.1 From 22e9e2216952cb82988abec18f2730daa99f7dd4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Feb 2024 18:53:28 +0100 Subject: [PATCH 095/544] Bump pypa/cibuildwheel from 2.16.2 to 2.16.5 (#10947) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.2 to 2.16.5. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.16.2...v2.16.5) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cherry picked from commit af1f9a4f3b246a396231004744ef5705b0f0a845) --- .github/workflows/create-wheels.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index 4e242599fe4..7fd142d225f 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -73,7 +73,7 @@ jobs: - name: Build compiled wheels if: ${{ matrix.wheel_mode == 'compiled' }} - uses: pypa/cibuildwheel@v2.16.2 + uses: pypa/cibuildwheel@v2.16.5 env: CIBW_ARCHS_LINUX: ${{ matrix.linux_archs }} CIBW_BUILD: ${{ matrix.python }} From ccaadd5f6fa23e1e78503e0a67bce5cb127de9e6 Mon Sep 17 00:00:00 2001 From: Artem Smirnov Date: Tue, 6 Feb 2024 19:55:41 +0200 Subject: [PATCH 096/544] Add bullets (#10973) * Add bullets * Fix as suggested (cherry picked from commit 70ee72b2cb1dbfa37f29628737ab7dccf2de0fa3) --- doc/build/orm/inheritance.rst | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst index 3764270d8c4..7a19de9ae42 100644 --- a/doc/build/orm/inheritance.rst +++ b/doc/build/orm/inheritance.rst @@ -3,12 +3,13 @@ Mapping Class Inheritance Hierarchies ===================================== -SQLAlchemy supports three forms of inheritance: **single table inheritance**, -where several types of classes are represented by a single table, **concrete -table inheritance**, where each type of class is represented by independent -tables, and **joined table inheritance**, where the class hierarchy is broken -up among dependent tables, each class represented by its own table that only -includes those attributes local to that class. 
+SQLAlchemy supports three forms of inheritance: + +* **single table inheritance** – several types of classes are represented by a single table; + +* **concrete table inheritance** – each type of class is represented by independent tables; + +* **joined table inheritance** – the class hierarchy is broken up among dependent tables. Each class represented by its own table that only includes those attributes local to that class. The most common forms of inheritance are single and joined table, while concrete inheritance presents more configurational challenges. From aa702df78f629c6dcf619cb70232fbfe22ffd489 Mon Sep 17 00:00:00 2001 From: Umer Zia Date: Tue, 6 Feb 2024 18:56:09 +0100 Subject: [PATCH 097/544] Improve formatting of data_select.rst (#10931) (cherry picked from commit 3a4e9063e47e660c2d49ba6e62d7f647a1b6e76a) --- doc/build/tutorial/data_select.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/tutorial/data_select.rst b/doc/build/tutorial/data_select.rst index c3732d5aa31..42b484de8e4 100644 --- a/doc/build/tutorial/data_select.rst +++ b/doc/build/tutorial/data_select.rst @@ -1124,7 +1124,7 @@ When using :meth:`_expression.Select.lateral`, the behavior of UNION, UNION ALL and other set operations ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -In SQL,SELECT statements can be merged together using the UNION or UNION ALL +In SQL, SELECT statements can be merged together using the UNION or UNION ALL SQL operation, which produces the set of all rows produced by one or more statements together. Other set operations such as INTERSECT [ALL] and EXCEPT [ALL] are also possible. From 5e96a4cabb268a03816e25b603c17c33d719915d Mon Sep 17 00:00:00 2001 From: whysage <67018871+whysage@users.noreply.github.com> Date: Tue, 6 Feb 2024 19:57:21 +0200 Subject: [PATCH 098/544] Fix mariadb run tests doc (#10848) (cherry picked from commit ffdbd326bbea8d7d68e08285c50d0da351ebf95a) --- README.unittests.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.unittests.rst b/README.unittests.rst index 57c6d42dad6..2ce4f0fff12 100644 --- a/README.unittests.rst +++ b/README.unittests.rst @@ -280,7 +280,7 @@ intended for production use! # configure the database sleep 20 - docker exec -ti mariadb mysql -u root -ppassword -w -e "CREATE DATABASE test_schema CHARSET utf8mb4; GRANT ALL ON test_schema.* TO scott;" + docker exec -ti mariadb mariadb -u root -ppassword -w -e "CREATE DATABASE test_schema CHARSET utf8mb4; GRANT ALL ON test_schema.* TO scott;" # To stop the container. It will also remove it. 
docker stop mariadb From eb2cf783855477a6d0808a318b1ce039b28e52a0 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 6 Feb 2024 19:44:47 +0100 Subject: [PATCH 099/544] remove unnecessary string concat in same line manually update the files to remove literal string concat on the same line, since black does not seem to be making progress in handling these Change-Id: I3c651374c5f3db5b8bc0c700328d67ca03743b7b (cherry picked from commit 3fbbe8d67b8b193dcf715905392b1c8f33e68f35) --- doc/build/changelog/migration_11.rst | 2 +- doc/build/orm/join_conditions.rst | 6 ++--- lib/sqlalchemy/dialects/oracle/base.py | 2 +- lib/sqlalchemy/engine/cursor.py | 16 ++++++------ lib/sqlalchemy/ext/associationproxy.py | 4 +-- lib/sqlalchemy/inspection.py | 6 ++--- lib/sqlalchemy/orm/interfaces.py | 2 +- lib/sqlalchemy/orm/relationships.py | 2 +- lib/sqlalchemy/orm/strategies.py | 4 +-- lib/sqlalchemy/orm/util.py | 4 +-- lib/sqlalchemy/sql/compiler.py | 4 +-- lib/sqlalchemy/sql/default_comparator.py | 2 +- lib/sqlalchemy/sql/schema.py | 2 +- lib/sqlalchemy/testing/plugin/plugin_base.py | 2 +- lib/sqlalchemy/testing/suite/test_rowcount.py | 2 +- lib/sqlalchemy/util/langhelpers.py | 2 +- test/dialect/mssql/test_compiler.py | 12 ++++----- test/dialect/mssql/test_query.py | 2 +- test/dialect/mysql/test_compiler.py | 4 +-- test/dialect/mysql/test_types.py | 14 ++++------ test/dialect/oracle/test_compiler.py | 6 ++--- test/dialect/oracle/test_dialect.py | 6 ++--- test/dialect/postgresql/test_compiler.py | 26 +++++++++---------- test/dialect/postgresql/test_dialect.py | 2 +- test/dialect/postgresql/test_query.py | 2 +- test/dialect/test_sqlite.py | 20 +++++++------- test/engine/test_parseconnect.py | 4 +-- test/engine/test_transaction.py | 4 +-- test/orm/declarative/test_basic.py | 8 +++--- test/orm/declarative/test_mixin.py | 2 +- test/orm/dml/test_bulk.py | 2 +- test/orm/inheritance/test_basic.py | 6 ++--- test/orm/test_bind.py | 4 +-- test/orm/test_core_compilation.py | 2 +- test/orm/test_cycles.py | 2 +- test/orm/test_deprecations.py | 2 +- test/orm/test_events.py | 2 +- test/orm/test_mapper.py | 2 +- test/orm/test_options.py | 2 +- test/orm/test_query.py | 14 +++++----- test/orm/test_selectin_relations.py | 2 +- test/orm/test_unitofwork.py | 6 ++--- test/orm/test_unitofworkv2.py | 2 +- test/perf/orm2010.py | 6 ++--- test/requirements.py | 6 ++--- test/sql/test_compiler.py | 20 +++++++------- test/sql/test_constraints.py | 14 +++++----- test/sql/test_cte.py | 2 +- test/sql/test_deprecations.py | 2 +- test/sql/test_external_traversal.py | 24 ++++++++--------- test/sql/test_insert.py | 10 +++---- test/sql/test_lambdas.py | 2 +- test/sql/test_metadata.py | 6 ++--- test/sql/test_operators.py | 4 +-- test/sql/test_quote.py | 4 +-- test/sql/test_resultset.py | 4 +-- test/sql/test_text.py | 4 +-- test/sql/test_types.py | 2 +- 58 files changed, 159 insertions(+), 173 deletions(-) diff --git a/doc/build/changelog/migration_11.rst b/doc/build/changelog/migration_11.rst index 8a1ba3ba0e6..15ef6fcd0c7 100644 --- a/doc/build/changelog/migration_11.rst +++ b/doc/build/changelog/migration_11.rst @@ -2129,7 +2129,7 @@ table to an integer "id" column on the other:: pets = relationship( "Pets", primaryjoin=( - "foreign(Pets.person_id)" "==cast(type_coerce(Person.id, Integer), Integer)" + "foreign(Pets.person_id)==cast(type_coerce(Person.id, Integer), Integer)" ), ) diff --git a/doc/build/orm/join_conditions.rst b/doc/build/orm/join_conditions.rst index a4a905c74cc..5846b5d206f 100644 --- a/doc/build/orm/join_conditions.rst 
+++ b/doc/build/orm/join_conditions.rst @@ -142,7 +142,7 @@ load those ``Address`` objects which specify a city of "Boston":: name = mapped_column(String) boston_addresses = relationship( "Address", - primaryjoin="and_(User.id==Address.user_id, " "Address.city=='Boston')", + primaryjoin="and_(User.id==Address.user_id, Address.city=='Boston')", ) @@ -297,7 +297,7 @@ a :func:`_orm.relationship`:: network = relationship( "Network", - primaryjoin="IPA.v4address.bool_op('<<')" "(foreign(Network.v4representation))", + primaryjoin="IPA.v4address.bool_op('<<')(foreign(Network.v4representation))", viewonly=True, ) @@ -702,7 +702,7 @@ join condition (requires version 0.9.2 at least to function as is):: d = relationship( "D", - secondary="join(B, D, B.d_id == D.id)." "join(C, C.d_id == D.id)", + secondary="join(B, D, B.d_id == D.id).join(C, C.d_id == D.id)", primaryjoin="and_(A.b_id == B.id, A.id == C.a_id)", secondaryjoin="D.id == B.d_id", uselist=False, diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index a6a81384154..a548b344997 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -594,7 +594,7 @@ ) NO_ARG_FNS = set( - "UID CURRENT_DATE SYSDATE USER " "CURRENT_TIME CURRENT_TIMESTAMP".split() + "UID CURRENT_DATE SYSDATE USER CURRENT_TIME CURRENT_TIMESTAMP".split() ) diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index c9390a9f11d..4b18ddb4340 100644 --- a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ -1611,11 +1611,11 @@ def inserted_primary_key_rows(self): """ if not self.context.compiled: raise exc.InvalidRequestError( - "Statement is not a compiled " "expression construct." + "Statement is not a compiled expression construct." ) elif not self.context.isinsert: raise exc.InvalidRequestError( - "Statement is not an insert() " "expression construct." + "Statement is not an insert() expression construct." ) elif self.context._is_explicit_returning: raise exc.InvalidRequestError( @@ -1682,11 +1682,11 @@ def last_updated_params(self): """ if not self.context.compiled: raise exc.InvalidRequestError( - "Statement is not a compiled " "expression construct." + "Statement is not a compiled expression construct." ) elif not self.context.isupdate: raise exc.InvalidRequestError( - "Statement is not an update() " "expression construct." + "Statement is not an update() expression construct." ) elif self.context.executemany: return self.context.compiled_parameters @@ -1704,11 +1704,11 @@ def last_inserted_params(self): """ if not self.context.compiled: raise exc.InvalidRequestError( - "Statement is not a compiled " "expression construct." + "Statement is not a compiled expression construct." ) elif not self.context.isinsert: raise exc.InvalidRequestError( - "Statement is not an insert() " "expression construct." + "Statement is not an insert() expression construct." ) elif self.context.executemany: return self.context.compiled_parameters @@ -1921,7 +1921,7 @@ def postfetch_cols(self): if not self.context.compiled: raise exc.InvalidRequestError( - "Statement is not a compiled " "expression construct." + "Statement is not a compiled expression construct." ) elif not self.context.isinsert and not self.context.isupdate: raise exc.InvalidRequestError( @@ -1944,7 +1944,7 @@ def prefetch_cols(self): if not self.context.compiled: raise exc.InvalidRequestError( - "Statement is not a compiled " "expression construct." + "Statement is not a compiled expression construct." 
) elif not self.context.isinsert and not self.context.isupdate: raise exc.InvalidRequestError( diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index 86043ba7992..80e6fdac987 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -1074,7 +1074,7 @@ def any( and (not self._target_is_object or self._value_is_scalar) ): raise exc.InvalidRequestError( - "'any()' not implemented for scalar " "attributes. Use has()." + "'any()' not implemented for scalar attributes. Use has()." ) return self._criterion_exists( criterion=criterion, is_has=False, **kwargs @@ -1098,7 +1098,7 @@ def has( or (self._target_is_object and not self._value_is_scalar) ): raise exc.InvalidRequestError( - "'has()' not implemented for collections. " "Use any()." + "'has()' not implemented for collections. Use any()." ) return self._criterion_exists( criterion=criterion, is_has=True, **kwargs diff --git a/lib/sqlalchemy/inspection.py b/lib/sqlalchemy/inspection.py index 4ee48f38517..30d531957f8 100644 --- a/lib/sqlalchemy/inspection.py +++ b/lib/sqlalchemy/inspection.py @@ -157,9 +157,7 @@ def _inspects( def decorate(fn_or_cls: _F) -> _F: for type_ in types: if type_ in _registrars: - raise AssertionError( - "Type %s is already " "registered" % type_ - ) + raise AssertionError("Type %s is already registered" % type_) _registrars[type_] = fn_or_cls return fn_or_cls @@ -171,6 +169,6 @@ def decorate(fn_or_cls: _F) -> _F: def _self_inspects(cls: _TT) -> _TT: if cls in _registrars: - raise AssertionError("Type %s is already " "registered" % cls) + raise AssertionError("Type %s is already registered" % cls) _registrars[cls] = True return cls diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index 68a6f645317..2f090588fe6 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -115,7 +115,7 @@ class ORMStatementRole(roles.StatementRole): __slots__ = () _role_name = ( - "Executable SQL or text() construct, including ORM " "aware objects" + "Executable SQL or text() construct, including ORM aware objects" ) diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index db76bd912f7..11ea5911279 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -951,7 +951,7 @@ def has( """ if self.property.uselist: raise sa_exc.InvalidRequestError( - "'has()' not implemented for collections. " "Use any()." + "'has()' not implemented for collections. Use any()." 
) return self._criterion_exists(criterion, **kwargs) diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index e38a05f0613..20c3b9cc6b0 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -384,7 +384,7 @@ def __init__(self, parent, strategy_key): super().__init__(parent, strategy_key) if hasattr(self.parent_property, "composite_class"): raise NotImplementedError( - "Deferred loading for composite " "types not implemented yet" + "Deferred loading for composite types not implemented yet" ) self.raiseload = self.strategy_opts.get("raiseload", False) self.columns = self.parent_property.columns @@ -758,7 +758,7 @@ def __init__( self._equated_columns[c] = self._equated_columns[col] self.logger.info( - "%s will use Session.get() to " "optimize instance loads", self + "%s will use Session.get() to optimize instance loads", self ) def init_class_attribute(self, mapper): diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index c6102098a6a..90508206ee6 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -256,9 +256,7 @@ def __new__( self.delete_orphan = "delete-orphan" in values if self.delete_orphan and not self.delete: - util.warn( - "The 'delete-orphan' cascade " "option requires 'delete'." - ) + util.warn("The 'delete-orphan' cascade option requires 'delete'.") return self def __repr__(self): diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 064be4dfdf3..ba8e3ea450b 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -2532,7 +2532,7 @@ def visit_label( def _fallback_column_name(self, column): raise exc.CompileError( - "Cannot compile Column object until " "its 'name' is assigned." + "Cannot compile Column object until its 'name' is assigned." ) def visit_lambda_element(self, element, **kw): @@ -6638,7 +6638,7 @@ def visit_drop_view(self, drop, **kw): def _verify_index_table(self, index): if index.table is None: raise exc.CompileError( - "Index '%s' is not associated " "with any table." % index.name + "Index '%s' is not associated with any table." % index.name ) def visit_create_index( diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py index 5bf8d582e53..76131bcaa45 100644 --- a/lib/sqlalchemy/sql/default_comparator.py +++ b/lib/sqlalchemy/sql/default_comparator.py @@ -247,7 +247,7 @@ def _unsupported_impl( expr: ColumnElement[Any], op: OperatorType, *arg: Any, **kw: Any ) -> NoReturn: raise NotImplementedError( - "Operator '%s' is not supported on " "this expression" % op.__name__ + "Operator '%s' is not supported on this expression" % op.__name__ ) diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 96e350447a8..2932fffad47 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -2063,7 +2063,7 @@ def __init__( name = quoted_name(name, quote) elif quote is not None: raise exc.ArgumentError( - "Explicit 'name' is required when " "sending 'quote' argument" + "Explicit 'name' is required when sending 'quote' argument" ) # name = None is expected to be an interim state diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py index 11eb35cfa9b..a642668be93 100644 --- a/lib/sqlalchemy/testing/plugin/plugin_base.py +++ b/lib/sqlalchemy/testing/plugin/plugin_base.py @@ -90,7 +90,7 @@ def setup_options(make_option): action="append", type=str, dest="dburi", - help="Database uri. 
Multiple OK, " "first one is run by default.", + help="Database uri. Multiple OK, first one is run by default.", ) make_option( "--dbdriver", diff --git a/lib/sqlalchemy/testing/suite/test_rowcount.py b/lib/sqlalchemy/testing/suite/test_rowcount.py index c48ed355c91..a7dbd364f1b 100644 --- a/lib/sqlalchemy/testing/suite/test_rowcount.py +++ b/lib/sqlalchemy/testing/suite/test_rowcount.py @@ -204,7 +204,7 @@ def test_raw_sql_rowcount(self, connection): def test_text_rowcount(self, connection): # test issue #3622, make sure eager rowcount is called for text result = connection.execute( - text("update employees set department='Z' " "where department='C'") + text("update employees set department='Z' where department='C'") ) eq_(result.rowcount, 3) diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index b122a3b35b3..396a039771d 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -1951,7 +1951,7 @@ def chop_traceback( def attrsetter(attrname): - code = "def set(obj, value):" " obj.%s = value" % attrname + code = "def set(obj, value): obj.%s = value" % attrname env = locals().copy() exec(code, env) return env["set"] diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py index b5ea40b120e..59b13b91e0b 100644 --- a/test/dialect/mssql/test_compiler.py +++ b/test/dialect/mssql/test_compiler.py @@ -175,7 +175,7 @@ def test_insert(self): t = table("sometable", column("somecolumn")) self.assert_compile( t.insert(), - "INSERT INTO sometable (somecolumn) VALUES " "(:somecolumn)", + "INSERT INTO sometable (somecolumn) VALUES (:somecolumn)", ) def test_update(self): @@ -862,7 +862,7 @@ def test_delete_schema(self): ) self.assert_compile( tbl.delete().where(tbl.c.id == 1), - "DELETE FROM paj.test WHERE paj.test.id = " ":id_1", + "DELETE FROM paj.test WHERE paj.test.id = :id_1", ) s = select(tbl.c.id).where(tbl.c.id == 1) self.assert_compile( @@ -882,7 +882,7 @@ def test_delete_schema_multipart(self): ) self.assert_compile( tbl.delete().where(tbl.c.id == 1), - "DELETE FROM banana.paj.test WHERE " "banana.paj.test.id = :id_1", + "DELETE FROM banana.paj.test WHERE banana.paj.test.id = :id_1", ) s = select(tbl.c.id).where(tbl.c.id == 1) self.assert_compile( @@ -999,7 +999,7 @@ def test_function(self): ) self.assert_compile( select(func.max(t.c.col1)), - "SELECT max(sometable.col1) AS max_1 FROM " "sometable", + "SELECT max(sometable.col1) AS max_1 FROM sometable", ) def test_function_overrides(self): @@ -1072,7 +1072,7 @@ def test_delete_returning(self): ) d = delete(table1).returning(table1.c.myid, table1.c.name) self.assert_compile( - d, "DELETE FROM mytable OUTPUT deleted.myid, " "deleted.name" + d, "DELETE FROM mytable OUTPUT deleted.myid, deleted.name" ) d = ( delete(table1) @@ -1945,7 +1945,7 @@ def test_identity_object_no_primary_key_non_nullable(self): ) self.assert_compile( schema.CreateTable(tbl), - "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(3,1)" ")", + "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(3,1))", ) def test_identity_separate_from_primary_key(self): diff --git a/test/dialect/mssql/test_query.py b/test/dialect/mssql/test_query.py index b68b21339ea..33f648b82a0 100644 --- a/test/dialect/mssql/test_query.py +++ b/test/dialect/mssql/test_query.py @@ -664,7 +664,7 @@ def test_scalar_strings_control(self, scalar_strings, connection): def test_scalar_strings_named_control(self, scalar_strings, connection): result = ( connection.exec_driver_sql( - "SELECT anon_1.my_string " "FROM 
scalar_strings() AS anon_1" + "SELECT anon_1.my_string FROM scalar_strings() AS anon_1" ) .scalars() .all() diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py index 05b4b685427..6712300aa40 100644 --- a/test/dialect/mysql/test_compiler.py +++ b/test/dialect/mysql/test_compiler.py @@ -182,7 +182,7 @@ def test_create_index_with_prefix(self): self.assert_compile( schema.CreateIndex(idx), - "CREATE FULLTEXT INDEX test_idx1 " "ON testtbl (data(10))", + "CREATE FULLTEXT INDEX test_idx1 ON testtbl (data(10))", ) def test_create_index_with_text(self): @@ -876,7 +876,7 @@ def test_too_long_index(self): self.assert_compile( schema.CreateIndex(ix1), - "CREATE INDEX %s " "ON %s (%s)" % (exp, tname, cname), + "CREATE INDEX %s ON %s (%s)" % (exp, tname, cname), ) def test_innodb_autoincrement(self): diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py index 1d279e720db..c73e82a945b 100644 --- a/test/dialect/mysql/test_types.py +++ b/test/dialect/mysql/test_types.py @@ -385,7 +385,7 @@ def test_timestamp_fsp(self): mysql.MSTimeStamp(), DefaultClause( sql.text( - "'1999-09-09 09:09:09' " "ON UPDATE CURRENT_TIMESTAMP" + "'1999-09-09 09:09:09' ON UPDATE CURRENT_TIMESTAMP" ) ), ], @@ -398,7 +398,7 @@ def test_timestamp_fsp(self): mysql.MSTimeStamp, DefaultClause( sql.text( - "'1999-09-09 09:09:09' " "ON UPDATE CURRENT_TIMESTAMP" + "'1999-09-09 09:09:09' ON UPDATE CURRENT_TIMESTAMP" ) ), ], @@ -410,9 +410,7 @@ def test_timestamp_fsp(self): [ mysql.MSTimeStamp(), DefaultClause( - sql.text( - "CURRENT_TIMESTAMP " "ON UPDATE CURRENT_TIMESTAMP" - ) + sql.text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") ), ], {}, @@ -423,9 +421,7 @@ def test_timestamp_fsp(self): [ mysql.MSTimeStamp, DefaultClause( - sql.text( - "CURRENT_TIMESTAMP " "ON UPDATE CURRENT_TIMESTAMP" - ) + sql.text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") ), ], {"nullable": False}, @@ -1209,7 +1205,7 @@ def test_enum_compile(self): t1 = Table("sometable", MetaData(), Column("somecolumn", e1)) self.assert_compile( schema.CreateTable(t1), - "CREATE TABLE sometable (somecolumn " "ENUM('x','y','z'))", + "CREATE TABLE sometable (somecolumn ENUM('x','y','z'))", ) t1 = Table( "sometable", diff --git a/test/dialect/oracle/test_compiler.py b/test/dialect/oracle/test_compiler.py index c7a6858d4cb..2165aa0909d 100644 --- a/test/dialect/oracle/test_compiler.py +++ b/test/dialect/oracle/test_compiler.py @@ -92,7 +92,7 @@ def test_owner(self): ) self.assert_compile( parent.join(child), - "ed.parent JOIN ed.child ON ed.parent.id = " "ed.child.parent_id", + "ed.parent JOIN ed.child ON ed.parent.id = ed.child.parent_id", ) def test_subquery(self): @@ -1183,7 +1183,7 @@ def test_outer_join_seven(self): q = select(table1.c.name).where(table1.c.name == "foo") self.assert_compile( q, - "SELECT mytable.name FROM mytable WHERE " "mytable.name = :name_1", + "SELECT mytable.name FROM mytable WHERE mytable.name = :name_1", dialect=oracle.dialect(use_ansi=False), ) @@ -1498,7 +1498,7 @@ def test_create_table_compress(self): ) self.assert_compile( schema.CreateTable(tbl2), - "CREATE TABLE testtbl2 (data INTEGER) " "COMPRESS FOR OLTP", + "CREATE TABLE testtbl2 (data INTEGER) COMPRESS FOR OLTP", ) def test_create_index_bitmap_compress(self): diff --git a/test/dialect/oracle/test_dialect.py b/test/dialect/oracle/test_dialect.py index 68ee3f71800..0c4b894f89d 100644 --- a/test/dialect/oracle/test_dialect.py +++ b/test/dialect/oracle/test_dialect.py @@ -532,9 +532,7 @@ def setup_test_class(cls): def 
test_out_params(self, connection): result = connection.execute( - text( - "begin foo(:x_in, :x_out, :y_out, " ":z_out); end;" - ).bindparams( + text("begin foo(:x_in, :x_out, :y_out, :z_out); end;").bindparams( bindparam("x_in", Float), outparam("x_out", Integer), outparam("y_out", Float), @@ -863,7 +861,7 @@ def test_basic(self): with testing.db.connect() as conn: eq_( conn.exec_driver_sql( - "/*+ this is a comment */ SELECT 1 FROM " "DUAL" + "/*+ this is a comment */ SELECT 1 FROM DUAL" ).fetchall(), [(1,)], ) diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index 005e60eaa14..f33c251160e 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -262,7 +262,7 @@ def test_generic_enum(self): ) self.assert_compile( postgresql.CreateEnumType(e2), - "CREATE TYPE someschema.somename AS ENUM " "('x', 'y', 'z')", + "CREATE TYPE someschema.somename AS ENUM ('x', 'y', 'z')", ) self.assert_compile(postgresql.DropEnumType(e1), "DROP TYPE somename") self.assert_compile( @@ -271,7 +271,7 @@ def test_generic_enum(self): t1 = Table("sometable", MetaData(), Column("somecolumn", e1)) self.assert_compile( schema.CreateTable(t1), - "CREATE TABLE sometable (somecolumn " "somename)", + "CREATE TABLE sometable (somecolumn somename)", ) t1 = Table( "sometable", @@ -682,7 +682,7 @@ def test_create_index_with_ops(self): self.assert_compile( schema.CreateIndex(idx), - "CREATE INDEX test_idx1 ON testtbl " "(data text_pattern_ops)", + "CREATE INDEX test_idx1 ON testtbl (data text_pattern_ops)", dialect=postgresql.dialect(), ) self.assert_compile( @@ -725,7 +725,7 @@ def test_create_index_with_ops(self): unique=True, ) ), - "CREATE UNIQUE INDEX test_idx3 ON test_tbl " "(data3)", + "CREATE UNIQUE INDEX test_idx3 ON test_tbl (data3)", ), ( lambda tbl: schema.CreateIndex( @@ -892,17 +892,17 @@ def test_create_index_with_using(self): self.assert_compile( schema.CreateIndex(idx1), - "CREATE INDEX test_idx1 ON testtbl " "(data)", + "CREATE INDEX test_idx1 ON testtbl (data)", dialect=postgresql.dialect(), ) self.assert_compile( schema.CreateIndex(idx2), - "CREATE INDEX test_idx2 ON testtbl " "USING btree (data)", + "CREATE INDEX test_idx2 ON testtbl USING btree (data)", dialect=postgresql.dialect(), ) self.assert_compile( schema.CreateIndex(idx3), - "CREATE INDEX test_idx3 ON testtbl " "USING hash (data)", + "CREATE INDEX test_idx3 ON testtbl USING hash (data)", dialect=postgresql.dialect(), ) @@ -923,7 +923,7 @@ def test_create_index_with_with(self): self.assert_compile( schema.CreateIndex(idx1), - "CREATE INDEX test_idx1 ON testtbl " "(data)", + "CREATE INDEX test_idx1 ON testtbl (data)", ) self.assert_compile( schema.CreateIndex(idx2), @@ -946,7 +946,7 @@ def test_create_index_with_using_unusual_conditions(self): schema.CreateIndex( Index("test_idx1", tbl.c.data, postgresql_using="GIST") ), - "CREATE INDEX test_idx1 ON testtbl " "USING gist (data)", + "CREATE INDEX test_idx1 ON testtbl USING gist (data)", ) self.assert_compile( @@ -988,7 +988,7 @@ def test_create_index_with_tablespace(self): self.assert_compile( schema.CreateIndex(idx1), - "CREATE INDEX test_idx1 ON testtbl " "(data)", + "CREATE INDEX test_idx1 ON testtbl (data)", dialect=postgresql.dialect(), ) self.assert_compile( @@ -2083,7 +2083,7 @@ def test_update_array_slice(self): # default dialect does not, as DBAPIs may be doing this for us self.assert_compile( t.update().values({t.c.data[2:5]: [2, 3, 4]}), - "UPDATE t SET data[%s:%s]=" "%s", + "UPDATE t SET 
data[%s:%s]=%s", checkparams={"param_1": [2, 3, 4], "data_2": 5, "data_1": 2}, dialect=PGDialect(paramstyle="format"), ) @@ -2139,7 +2139,7 @@ def test_from_only(self): tbl3 = Table("testtbl3", m, Column("id", Integer), schema="testschema") stmt = tbl3.select().with_hint(tbl3, "ONLY", "postgresql") expected = ( - "SELECT testschema.testtbl3.id FROM " "ONLY testschema.testtbl3" + "SELECT testschema.testtbl3.id FROM ONLY testschema.testtbl3" ) self.assert_compile(stmt, expected) @@ -3296,7 +3296,7 @@ def test_query_plain(self): sess = Session() self.assert_compile( sess.query(self.table).distinct(), - "SELECT DISTINCT t.id AS t_id, t.a AS t_a, " "t.b AS t_b FROM t", + "SELECT DISTINCT t.id AS t_id, t.a AS t_a, t.b AS t_b FROM t", ) def test_query_on_columns(self): diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py index 919842a49c4..32a5a84ac8d 100644 --- a/test/dialect/postgresql/test_dialect.py +++ b/test/dialect/postgresql/test_dialect.py @@ -721,7 +721,7 @@ def test_non_int_port_disallowed(self, dialect, url_string): "postgresql+psycopg2://USER:PASS@/DB" "?host=hostA,hostC&port=111,222,333", ), - ("postgresql+psycopg2://USER:PASS@/DB" "?host=hostA&port=111,222",), + ("postgresql+psycopg2://USER:PASS@/DB?host=hostA&port=111,222",), ( "postgresql+asyncpg://USER:PASS@/DB" "?host=hostA,hostB,hostC&port=111,333", diff --git a/test/dialect/postgresql/test_query.py b/test/dialect/postgresql/test_query.py index 9822b3e60b9..a737381760e 100644 --- a/test/dialect/postgresql/test_query.py +++ b/test/dialect/postgresql/test_query.py @@ -977,7 +977,7 @@ def test_expression_pyformat(self, connection): if self._strs_render_bind_casts(connection): self.assert_compile( matchtable.c.title.match("somstr"), - "matchtable.title @@ " "plainto_tsquery(%(title_1)s::VARCHAR)", + "matchtable.title @@ plainto_tsquery(%(title_1)s::VARCHAR)", ) else: self.assert_compile( diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index d5ff0fc19de..245b762cf37 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -84,12 +84,12 @@ def test_boolean(self, connection, metadata): ) metadata.create_all(connection) for stmt in [ - "INSERT INTO bool_table (id, boo) " "VALUES (1, 'false');", - "INSERT INTO bool_table (id, boo) " "VALUES (2, 'true');", - "INSERT INTO bool_table (id, boo) " "VALUES (3, '1');", - "INSERT INTO bool_table (id, boo) " "VALUES (4, '0');", - "INSERT INTO bool_table (id, boo) " "VALUES (5, 1);", - "INSERT INTO bool_table (id, boo) " "VALUES (6, 0);", + "INSERT INTO bool_table (id, boo) VALUES (1, 'false');", + "INSERT INTO bool_table (id, boo) VALUES (2, 'true');", + "INSERT INTO bool_table (id, boo) VALUES (3, '1');", + "INSERT INTO bool_table (id, boo) VALUES (4, '0');", + "INSERT INTO bool_table (id, boo) VALUES (5, 1);", + "INSERT INTO bool_table (id, boo) VALUES (6, 0);", ]: connection.exec_driver_sql(stmt) @@ -653,7 +653,7 @@ def test_quoted_identifiers_functional_one(self): @testing.provide_metadata def test_quoted_identifiers_functional_two(self): - """ "test the edgiest of edge cases, quoted table/col names + """test the edgiest of edge cases, quoted table/col names that start and end with quotes. SQLite claims to have fixed this in @@ -741,7 +741,7 @@ def test_pool_class(self): ), ), ( - "sqlite:///file:path/to/database?" 
"mode=ro&uri=true", + "sqlite:///file:path/to/database?mode=ro&uri=true", ( ["file:path/to/database?mode=ro"], {"uri": True, "check_same_thread": False}, @@ -1155,7 +1155,7 @@ def test_on_conflict_clause_column_not_null(self): self.assert_compile( schema.CreateColumn(c), - "test INTEGER NOT NULL " "ON CONFLICT FAIL", + "test INTEGER NOT NULL ON CONFLICT FAIL", dialect=sqlite.dialect(), ) @@ -1194,7 +1194,7 @@ def test_on_conflict_clause_unique_constraint_from_column(self): self.assert_compile( CreateTable(t), - "CREATE TABLE n (x VARCHAR(30), " "UNIQUE (x) ON CONFLICT FAIL)", + "CREATE TABLE n (x VARCHAR(30), UNIQUE (x) ON CONFLICT FAIL)", dialect=sqlite.dialect(), ) diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py index 34dc1d7aa82..16b129fd8a3 100644 --- a/test/engine/test_parseconnect.py +++ b/test/engine/test_parseconnect.py @@ -373,7 +373,7 @@ def test_create_engine_url_invalid(self): ( "foo1=bar1&foo2=bar21&foo2=bar22&foo3=bar31", "foo2=bar23&foo3=bar32&foo3=bar33", - "foo1=bar1&foo2=bar23&" "foo3=bar32&foo3=bar33", + "foo1=bar1&foo2=bar23&foo3=bar32&foo3=bar33", False, ), ) @@ -573,7 +573,7 @@ def test_engine_from_config(self): e = engine_from_config(config, module=dbapi, _initialize=False) assert e.pool._recycle == 50 assert e.url == url.make_url( - "postgresql+psycopg2://scott:tiger@somehost/test?foo" "z=somevalue" + "postgresql+psycopg2://scott:tiger@somehost/test?fooz=somevalue" ) assert e.echo is True diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index 4ae87c4ad18..a70e8e05d0f 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -345,9 +345,7 @@ def test_ctxmanager_interface(self, local_connection): assert not trans.is_active eq_( - connection.exec_driver_sql( - "select count(*) from " "users" - ).scalar(), + connection.exec_driver_sql("select count(*) from users").scalar(), 2, ) connection.rollback() diff --git a/test/orm/declarative/test_basic.py b/test/orm/declarative/test_basic.py index 37a1b643c1d..1f31544e065 100644 --- a/test/orm/declarative/test_basic.py +++ b/test/orm/declarative/test_basic.py @@ -1387,7 +1387,7 @@ class User(Base): assert_raises_message( sa.exc.ArgumentError, - "Can't add additional column 'foo' when " "specifying __table__", + "Can't add additional column 'foo' when specifying __table__", go, ) @@ -1825,7 +1825,7 @@ class Foo(Base, ComparableEntity): assert_raises_message( exc.InvalidRequestError, - "'addresses' is not an instance of " "ColumnProperty", + "'addresses' is not an instance of ColumnProperty", configure_mappers, ) @@ -1954,7 +1954,7 @@ class Bar(Base, ComparableEntity): assert_raises_message( AttributeError, - "does not have a mapped column named " "'__table__'", + "does not have a mapped column named '__table__'", configure_mappers, ) @@ -2508,7 +2508,7 @@ class User(Base, ComparableEntity): def test_oops(self): with testing.expect_warnings( - "Ignoring declarative-like tuple value of " "attribute 'name'" + "Ignoring declarative-like tuple value of attribute 'name'" ): class User(Base, ComparableEntity): diff --git a/test/orm/declarative/test_mixin.py b/test/orm/declarative/test_mixin.py index 32f737484e2..2520eb846d7 100644 --- a/test/orm/declarative/test_mixin.py +++ b/test/orm/declarative/test_mixin.py @@ -1322,7 +1322,7 @@ class Model(Base, ColumnMixin): assert_raises_message( sa.exc.ArgumentError, - "Can't add additional column 'tada' when " "specifying __table__", + "Can't add additional column 'tada' when specifying __table__", go, ) 
diff --git a/test/orm/dml/test_bulk.py b/test/orm/dml/test_bulk.py index baa6c20f83f..62b435e9cbf 100644 --- a/test/orm/dml/test_bulk.py +++ b/test/orm/dml/test_bulk.py @@ -238,7 +238,7 @@ def test_bulk_save_updated_include_unchanged(self): asserter.assert_( CompiledSQL( - "UPDATE users SET name=:name WHERE " "users.id = :users_id", + "UPDATE users SET name=:name WHERE users.id = :users_id", [ {"users_id": 1, "name": "u1new"}, {"users_id": 2, "name": "u2"}, diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py index a76f563f818..9028fd25a43 100644 --- a/test/orm/inheritance/test_basic.py +++ b/test/orm/inheritance/test_basic.py @@ -1684,7 +1684,7 @@ def test_none(self): s.flush() asserter.assert_( RegexSQL( - "SELECT .* " "FROM c WHERE :param_1 = c.bid", [{"param_1": 3}] + "SELECT .* FROM c WHERE :param_1 = c.bid", [{"param_1": 3}] ), CompiledSQL("DELETE FROM c WHERE c.cid = :cid", [{"cid": 1}]), CompiledSQL("DELETE FROM b WHERE b.id = :id", [{"id": 3}]), @@ -3012,7 +3012,7 @@ class D(C): ) def test_optimized_passes(self): - """ "test that the 'optimized load' routine doesn't crash when + """test that the 'optimized load' routine doesn't crash when a column in the join condition is not available.""" base, sub = self.tables.base, self.tables.sub @@ -3744,7 +3744,7 @@ class B(A): __mapper_args__ = {"polymorphic_identity": "b"} with expect_warnings( - r"Mapper\[C\(a\)\] does not indicate a " "'polymorphic_identity'," + r"Mapper\[C\(a\)\] does not indicate a 'polymorphic_identity'," ): class C(A): diff --git a/test/orm/test_bind.py b/test/orm/test_bind.py index 976df514f3b..abd008cadf0 100644 --- a/test/orm/test_bind.py +++ b/test/orm/test_bind.py @@ -464,7 +464,7 @@ def get_bind(self, **kw): engine = {"e1": e1, "e2": e2, "e3": e3}[expected_engine_name] with mock.patch( - "sqlalchemy.orm.context." 
"ORMCompileState.orm_setup_cursor_result" + "sqlalchemy.orm.context.ORMCompileState.orm_setup_cursor_result" ), mock.patch( "sqlalchemy.orm.context.ORMCompileState.orm_execute_statement" ), mock.patch( @@ -529,7 +529,7 @@ def test_bound_connection(self): assert_raises_message( sa.exc.InvalidRequestError, - "Session already has a Connection " "associated", + "Session already has a Connection associated", transaction._connection_for_bind, testing.db.connect(), None, diff --git a/test/orm/test_core_compilation.py b/test/orm/test_core_compilation.py index dd0d597b225..915c9747f8f 100644 --- a/test/orm/test_core_compilation.py +++ b/test/orm/test_core_compilation.py @@ -555,7 +555,7 @@ def test_aliased_delete(self, stmt_type: testing.Variation): self.assert_compile( stmt, - "DELETE FROM users AS users_1 " "WHERE users_1.name = :name_1", + "DELETE FROM users AS users_1 WHERE users_1.name = :name_1", ) @testing.variation("stmt_type", ["core", "orm"]) diff --git a/test/orm/test_cycles.py b/test/orm/test_cycles.py index cffde9bdab9..fb37185f53e 100644 --- a/test/orm/test_cycles.py +++ b/test/orm/test_cycles.py @@ -1188,7 +1188,7 @@ def test_post_update_o2m(self): ], ), CompiledSQL( - "DELETE FROM person " "WHERE person.id = :id", + "DELETE FROM person WHERE person.id = :id", lambda ctx: [{"id": p.id}], ), CompiledSQL( diff --git a/test/orm/test_deprecations.py b/test/orm/test_deprecations.py index f943d8dfe42..bf545d6ad99 100644 --- a/test/orm/test_deprecations.py +++ b/test/orm/test_deprecations.py @@ -1995,7 +1995,7 @@ def test_values_specific_order_by(self): @testing.fails_on("mssql", "FIXME: unknown") @testing.fails_on( - "oracle", "Oracle doesn't support boolean expressions as " "columns" + "oracle", "Oracle doesn't support boolean expressions as columns" ) @testing.fails_on( "postgresql+pg8000", diff --git a/test/orm/test_events.py b/test/orm/test_events.py index 02e00fe9479..3af6aad86aa 100644 --- a/test/orm/test_events.py +++ b/test/orm/test_events.py @@ -1671,7 +1671,7 @@ class C(B): class DeferredMapperEventsTest(RemoveORMEventsGlobally, _fixtures.FixtureTest): - """ "test event listeners against unmapped classes. + """test event listeners against unmapped classes. This incurs special logic. Note if we ever do the "remove" case, it has to get all of these, too. 
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py index f93c18d2161..64d0ac9abde 100644 --- a/test/orm/test_mapper.py +++ b/test/orm/test_mapper.py @@ -3483,7 +3483,7 @@ def test_load_options(self, use_bound): self.assert_compile( stmt, - "SELECT users.id, " "users.name " "FROM users", + "SELECT users.id, users.name FROM users", ) is_true(um.configured) diff --git a/test/orm/test_options.py b/test/orm/test_options.py index 9362d52470e..db9b51607c3 100644 --- a/test/orm/test_options.py +++ b/test/orm/test_options.py @@ -981,7 +981,7 @@ def test_wrong_type_in_option_cls(self, first_element): if first_element else (Load(Item).joinedload(Keyword),) ), - "expected ORM mapped attribute for loader " "strategy argument", + "expected ORM mapped attribute for loader strategy argument", ) @testing.combinations( diff --git a/test/orm/test_query.py b/test/orm/test_query.py index a06406c1154..ea108c345b0 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -3563,7 +3563,7 @@ def test_filter_by_against_label(self): self.assert_compile( q1, - "SELECT users.id AS foo FROM users " "WHERE users.name = :name_1", + "SELECT users.id AS foo FROM users WHERE users.name = :name_1", ) def test_empty_filters(self): @@ -4348,7 +4348,7 @@ def test_exists(self): q1 = sess.query(User) self.assert_compile( sess.query(q1.exists()), - "SELECT EXISTS (" "SELECT 1 FROM users" ") AS anon_1", + "SELECT EXISTS (SELECT 1 FROM users) AS anon_1", ) q2 = sess.query(User).filter(User.name == "fred") @@ -4366,7 +4366,7 @@ def test_exists_col_expression(self): q1 = sess.query(User.id) self.assert_compile( sess.query(q1.exists()), - "SELECT EXISTS (" "SELECT 1 FROM users" ") AS anon_1", + "SELECT EXISTS (SELECT 1 FROM users) AS anon_1", ) def test_exists_labeled_col_expression(self): @@ -4376,7 +4376,7 @@ def test_exists_labeled_col_expression(self): q1 = sess.query(User.id.label("foo")) self.assert_compile( sess.query(q1.exists()), - "SELECT EXISTS (" "SELECT 1 FROM users" ") AS anon_1", + "SELECT EXISTS (SELECT 1 FROM users) AS anon_1", ) def test_exists_arbitrary_col_expression(self): @@ -4386,7 +4386,7 @@ def test_exists_arbitrary_col_expression(self): q1 = sess.query(func.foo(User.id)) self.assert_compile( sess.query(q1.exists()), - "SELECT EXISTS (" "SELECT 1 FROM users" ") AS anon_1", + "SELECT EXISTS (SELECT 1 FROM users) AS anon_1", ) def test_exists_col_warning(self): @@ -5178,7 +5178,7 @@ def test_one_prefix(self): User = self.classes.User sess = fixture_session() query = sess.query(User.name).prefix_with("PREFIX_1") - expected = "SELECT PREFIX_1 " "users.name AS users_name FROM users" + expected = "SELECT PREFIX_1 users.name AS users_name FROM users" self.assert_compile(query, expected, dialect=default.DefaultDialect()) def test_one_suffix(self): @@ -5194,7 +5194,7 @@ def test_many_prefixes(self): sess = fixture_session() query = sess.query(User.name).prefix_with("PREFIX_1", "PREFIX_2") expected = ( - "SELECT PREFIX_1 PREFIX_2 " "users.name AS users_name FROM users" + "SELECT PREFIX_1 PREFIX_2 users.name AS users_name FROM users" ) self.assert_compile(query, expected, dialect=default.DefaultDialect()) diff --git a/test/orm/test_selectin_relations.py b/test/orm/test_selectin_relations.py index c9907c76515..93b3d8710ce 100644 --- a/test/orm/test_selectin_relations.py +++ b/test/orm/test_selectin_relations.py @@ -3429,7 +3429,7 @@ def test_use_join_parent_degrade_on_defer(self): testing.db, q.all, CompiledSQL( - "SELECT a.id AS a_id, a.q AS a_q " "FROM a ORDER BY a.id", [{}] + "SELECT a.id AS a_id, 
a.q AS a_q FROM a ORDER BY a.id", [{}] ), # in the very unlikely case that the the FK col on parent is # deferred, we degrade to the JOIN version so that we don't need to diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py index 3b3175e10ec..7b29b4362a0 100644 --- a/test/orm/test_unitofwork.py +++ b/test/orm/test_unitofwork.py @@ -2299,7 +2299,7 @@ def test_m2o_one_to_one(self): testing.db, session.flush, CompiledSQL( - "INSERT INTO users (name) " "VALUES (:name)", + "INSERT INTO users (name) VALUES (:name)", {"name": "imnewlyadded"}, ), AllOf( @@ -2616,7 +2616,7 @@ def test_many_to_many(self): {"description": "item4updated", "items_id": objects[4].id}, ), CompiledSQL( - "INSERT INTO keywords (name) " "VALUES (:name)", + "INSERT INTO keywords (name) VALUES (:name)", {"name": "yellow"}, ), CompiledSQL( @@ -3416,7 +3416,7 @@ def test_row_switch_no_child_table(self): # sync operation during _save_obj().update, this is safe to remove # again. CompiledSQL( - "UPDATE child SET pid=:pid " "WHERE child.cid = :child_cid", + "UPDATE child SET pid=:pid WHERE child.cid = :child_cid", {"pid": 1, "child_cid": 1}, ), ) diff --git a/test/orm/test_unitofworkv2.py b/test/orm/test_unitofworkv2.py index e01220d1150..90ea0eaa039 100644 --- a/test/orm/test_unitofworkv2.py +++ b/test/orm/test_unitofworkv2.py @@ -3045,7 +3045,7 @@ def test_insert_dont_fetch_nondefaults(self): testing.db, s.flush, CompiledSQL( - "INSERT INTO test2 (id, foo, bar) " "VALUES (:id, :foo, :bar)", + "INSERT INTO test2 (id, foo, bar) VALUES (:id, :foo, :bar)", [{"id": 1, "foo": None, "bar": 2}], ), ) diff --git a/test/perf/orm2010.py b/test/perf/orm2010.py index c069430fb1e..520944c9f0b 100644 --- a/test/perf/orm2010.py +++ b/test/perf/orm2010.py @@ -149,14 +149,12 @@ def status(msg): print("Total cpu seconds: %.2f" % stats.total_tt) print( "Total execute calls: %d" - % counts_by_methname[ - "" - ] + % counts_by_methname[""] ) print( "Total executemany calls: %d" % counts_by_methname.get( - "", 0 + "", 0 ) ) diff --git a/test/requirements.py b/test/requirements.py index 8b137fe4675..a692cd3fee3 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -999,7 +999,7 @@ def arraysize(self): @property def emulated_lastrowid(self): - """ "target dialect retrieves cursor.lastrowid or an equivalent + """target dialect retrieves cursor.lastrowid or an equivalent after an insert() construct executes. """ return fails_on_everything_except( @@ -1027,7 +1027,7 @@ def database_discards_null_for_autoincrement(self): @property def emulated_lastrowid_even_with_sequences(self): - """ "target dialect retrieves cursor.lastrowid or an equivalent + """target dialect retrieves cursor.lastrowid or an equivalent after an insert() construct executes, even if the table has a Sequence on it. """ @@ -1040,7 +1040,7 @@ def emulated_lastrowid_even_with_sequences(self): @property def dbapi_lastrowid(self): - """ "target backend includes a 'lastrowid' accessor on the DBAPI + """target backend includes a 'lastrowid' accessor on the DBAPI cursor object. 
""" diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index 5756bb6927c..9d9f69bdb9b 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -1544,7 +1544,7 @@ def test_scalar_select(self): ) self.assert_compile( select(select(table1.c.name).label("foo")), - "SELECT (SELECT mytable.name FROM mytable) " "AS foo", + "SELECT (SELECT mytable.name FROM mytable) AS foo", ) # scalar selects should not have any attributes on their 'c' or @@ -2694,7 +2694,7 @@ def test_deduping_unique_across_selects(self): self.assert_compile( s3, - "SELECT NULL AS anon_1, NULL AS anon__1 " "UNION " + "SELECT NULL AS anon_1, NULL AS anon__1 UNION " # without the feature tested in test_deduping_hash_algo we'd get # "SELECT true AS anon_2, true AS anon__1", "SELECT true AS anon_2, true AS anon__2", @@ -3775,7 +3775,7 @@ def test_binds(self): ) assert_raises_message( exc.CompileError, - "conflicts with unique bind parameter " "of the same name", + "conflicts with unique bind parameter of the same name", str, s, ) @@ -3789,7 +3789,7 @@ def test_binds(self): ) assert_raises_message( exc.CompileError, - "conflicts with unique bind parameter " "of the same name", + "conflicts with unique bind parameter of the same name", str, s, ) @@ -4434,7 +4434,7 @@ def test_tuple_expanding_in_no_values(self): ) self.assert_compile( expr, - "(mytable.myid, mytable.name) IN " "(__[POSTCOMPILE_param_1])", + "(mytable.myid, mytable.name) IN (__[POSTCOMPILE_param_1])", checkparams={"param_1": [(1, "foo"), (5, "bar")]}, check_post_param={"param_1": [(1, "foo"), (5, "bar")]}, check_literal_execute={}, @@ -4469,7 +4469,7 @@ def test_tuple_expanding_in_values(self): dialect.tuple_in_values = True self.assert_compile( tuple_(table1.c.myid, table1.c.name).in_([(1, "foo"), (5, "bar")]), - "(mytable.myid, mytable.name) IN " "(__[POSTCOMPILE_param_1])", + "(mytable.myid, mytable.name) IN (__[POSTCOMPILE_param_1])", dialect=dialect, checkparams={"param_1": [(1, "foo"), (5, "bar")]}, check_post_param={"param_1": [(1, "foo"), (5, "bar")]}, @@ -4816,7 +4816,7 @@ def test_render_literal_execute_parameter_literal_binds(self): select(table1.c.myid).where( table1.c.myid == bindparam("foo", 5, literal_execute=True) ), - "SELECT mytable.myid FROM mytable " "WHERE mytable.myid = 5", + "SELECT mytable.myid FROM mytable WHERE mytable.myid = 5", literal_binds=True, ) @@ -4843,7 +4843,7 @@ def test_render_literal_execute_parameter_render_postcompile(self): select(table1.c.myid).where( table1.c.myid == bindparam("foo", 5, literal_execute=True) ), - "SELECT mytable.myid FROM mytable " "WHERE mytable.myid = 5", + "SELECT mytable.myid FROM mytable WHERE mytable.myid = 5", render_postcompile=True, ) @@ -6136,7 +6136,7 @@ def test_dialect_specific_ddl(self): eq_ignore_whitespace( str(schema.AddConstraint(cons)), - "ALTER TABLE testtbl ADD EXCLUDE USING gist " "(room WITH =)", + "ALTER TABLE testtbl ADD EXCLUDE USING gist (room WITH =)", ) def test_try_cast(self): @@ -7337,7 +7337,7 @@ def test_correlate_auto_where_singlefrom(self): s = select(t1.c.a) s2 = select(t1).where(t1.c.a == s.scalar_subquery()) self.assert_compile( - s2, "SELECT t1.a FROM t1 WHERE t1.a = " "(SELECT t1.a FROM t1)" + s2, "SELECT t1.a FROM t1 WHERE t1.a = (SELECT t1.a FROM t1)" ) def test_correlate_semiauto_where_singlefrom(self): diff --git a/test/sql/test_constraints.py b/test/sql/test_constraints.py index 54fcba576ca..93c385ba4d7 100644 --- a/test/sql/test_constraints.py +++ b/test/sql/test_constraints.py @@ -286,7 +286,7 @@ def 
_assert_cyclic_constraint_supports_alter(self, metadata, auto=False): if auto: fk_assertions.append( CompiledSQL( - "ALTER TABLE a ADD " "FOREIGN KEY(bid) REFERENCES b (id)" + "ALTER TABLE a ADD FOREIGN KEY(bid) REFERENCES b (id)" ) ) assertions.append(AllOf(*fk_assertions)) @@ -409,10 +409,10 @@ def test_cycle_unnamed_fks(self): ), AllOf( CompiledSQL( - "ALTER TABLE b ADD " "FOREIGN KEY(aid) REFERENCES a (id)" + "ALTER TABLE b ADD FOREIGN KEY(aid) REFERENCES a (id)" ), CompiledSQL( - "ALTER TABLE a ADD " "FOREIGN KEY(bid) REFERENCES b (id)" + "ALTER TABLE a ADD FOREIGN KEY(bid) REFERENCES b (id)" ), ), ] @@ -720,10 +720,10 @@ def test_index_create_inline(self): RegexSQL("^CREATE TABLE events"), AllOf( CompiledSQL( - "CREATE UNIQUE INDEX ix_events_name ON events " "(name)" + "CREATE UNIQUE INDEX ix_events_name ON events (name)" ), CompiledSQL( - "CREATE INDEX ix_events_location ON events " "(location)" + "CREATE INDEX ix_events_location ON events (location)" ), CompiledSQL( "CREATE UNIQUE INDEX sport_announcer ON events " @@ -817,7 +817,7 @@ def test_too_long_index_name(self): self.assert_compile( schema.CreateIndex(ix1), - "CREATE INDEX %s " "ON %s (%s)" % (exp, tname, cname), + "CREATE INDEX %s ON %s (%s)" % (exp, tname, cname), dialect=dialect, ) @@ -1237,7 +1237,7 @@ def test_external_ck_constraint_cancels_internal(self): # is disabled self.assert_compile( schema.CreateTable(t), - "CREATE TABLE tbl (" "a INTEGER, " "b INTEGER" ")", + "CREATE TABLE tbl (a INTEGER, b INTEGER)", ) def test_render_drop_constraint(self): diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py index 0b665b84da6..ef7eac51e3d 100644 --- a/test/sql/test_cte.py +++ b/test/sql/test_cte.py @@ -518,7 +518,7 @@ def test_conflicting_names(self, identical, use_clone): else: assert_raises_message( CompileError, - "Multiple, unrelated CTEs found " "with the same name: 'cte1'", + "Multiple, unrelated CTEs found with the same name: 'cte1'", s.compile, ) diff --git a/test/sql/test_deprecations.py b/test/sql/test_deprecations.py index dbb5644cd1e..96b636bd058 100644 --- a/test/sql/test_deprecations.py +++ b/test/sql/test_deprecations.py @@ -326,7 +326,7 @@ def test_append_column_after_replace_selectable(self): sel = select(basefrom.c.a) with testing.expect_deprecated( - r"The Selectable.replace_selectable\(\) " "method is deprecated" + r"The Selectable.replace_selectable\(\) method is deprecated" ): replaced = sel.replace_selectable( basefrom, basefrom.join(joinfrom, basefrom.c.a == joinfrom.c.a) diff --git a/test/sql/test_external_traversal.py b/test/sql/test_external_traversal.py index 0204d6e6fcb..d044d8b57f0 100644 --- a/test/sql/test_external_traversal.py +++ b/test/sql/test_external_traversal.py @@ -2185,7 +2185,7 @@ def test_table_to_alias_8(self): def test_table_to_alias_9(self): s = select(literal_column("*")).select_from(t1).alias("foo") self.assert_compile( - s.select(), "SELECT foo.* FROM (SELECT * FROM table1) " "AS foo" + s.select(), "SELECT foo.* FROM (SELECT * FROM table1) AS foo" ) def test_table_to_alias_10(self): @@ -2194,13 +2194,13 @@ def test_table_to_alias_10(self): vis = sql_util.ClauseAdapter(t1alias) self.assert_compile( vis.traverse(s.select()), - "SELECT foo.* FROM (SELECT * FROM table1 " "AS t1alias) AS foo", + "SELECT foo.* FROM (SELECT * FROM table1 AS t1alias) AS foo", ) def test_table_to_alias_11(self): s = select(literal_column("*")).select_from(t1).alias("foo") self.assert_compile( - s.select(), "SELECT foo.* FROM (SELECT * FROM table1) " "AS foo" + s.select(), "SELECT foo.* FROM (SELECT 
* FROM table1) AS foo" ) def test_table_to_alias_12(self): @@ -2209,7 +2209,7 @@ def test_table_to_alias_12(self): ff = vis.traverse(func.count(t1.c.col1).label("foo")) self.assert_compile( select(ff), - "SELECT count(t1alias.col1) AS foo FROM " "table1 AS t1alias", + "SELECT count(t1alias.col1) AS foo FROM table1 AS t1alias", ) assert list(_from_objects(ff)) == [t1alias] @@ -2700,7 +2700,7 @@ def test_splice_2(self): ) self.assert_compile( sql_util.splice_joins(table1, j2), - "table1 JOIN table4 AS table4_1 ON " "table1.col3 = table4_1.col3", + "table1 JOIN table4 AS table4_1 ON table1.col3 = table4_1.col3", ) self.assert_compile( sql_util.splice_joins(sql_util.splice_joins(table1, j1), j2), @@ -2726,23 +2726,23 @@ def setup_test_class(cls): def test_columns(self): s = t1.select() self.assert_compile( - s, "SELECT table1.col1, table1.col2, " "table1.col3 FROM table1" + s, "SELECT table1.col1, table1.col2, table1.col3 FROM table1" ) select_copy = s.add_columns(column("yyy")) self.assert_compile( select_copy, - "SELECT table1.col1, table1.col2, " "table1.col3, yyy FROM table1", + "SELECT table1.col1, table1.col2, table1.col3, yyy FROM table1", ) is_not(s.selected_columns, select_copy.selected_columns) is_not(s._raw_columns, select_copy._raw_columns) self.assert_compile( - s, "SELECT table1.col1, table1.col2, " "table1.col3 FROM table1" + s, "SELECT table1.col1, table1.col2, table1.col3 FROM table1" ) def test_froms(self): s = t1.select() self.assert_compile( - s, "SELECT table1.col1, table1.col2, " "table1.col3 FROM table1" + s, "SELECT table1.col1, table1.col2, table1.col3 FROM table1" ) select_copy = s.select_from(t2) self.assert_compile( @@ -2752,13 +2752,13 @@ def test_froms(self): ) self.assert_compile( - s, "SELECT table1.col1, table1.col2, " "table1.col3 FROM table1" + s, "SELECT table1.col1, table1.col2, table1.col3 FROM table1" ) def test_prefixes(self): s = t1.select() self.assert_compile( - s, "SELECT table1.col1, table1.col2, " "table1.col3 FROM table1" + s, "SELECT table1.col1, table1.col2, table1.col3 FROM table1" ) select_copy = s.prefix_with("FOOBER") self.assert_compile( @@ -2767,7 +2767,7 @@ def test_prefixes(self): "table1.col3 FROM table1", ) self.assert_compile( - s, "SELECT table1.col1, table1.col2, " "table1.col3 FROM table1" + s, "SELECT table1.col1, table1.col2, table1.col3 FROM table1" ) def test_execution_options(self): diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py index ddfb9aea200..a5cfad5b694 100644 --- a/test/sql/test_insert.py +++ b/test/sql/test_insert.py @@ -1120,7 +1120,7 @@ def test_anticipate_no_pk_non_composite_pk(self): Column("q", Integer), ) with expect_warnings( - "Column 't.x' is marked as a member.*" "may not store NULL.$" + "Column 't.x' is marked as a member.*may not store NULL.$" ): self.assert_compile( t.insert(), "INSERT INTO t (q) VALUES (:q)", params={"q": 5} @@ -1136,7 +1136,7 @@ def test_anticipate_no_pk_non_composite_pk_implicit_returning(self): d = postgresql.dialect() d.implicit_returning = True with expect_warnings( - "Column 't.x' is marked as a member.*" "may not store NULL.$" + "Column 't.x' is marked as a member.*may not store NULL.$" ): self.assert_compile( t.insert(), @@ -1156,7 +1156,7 @@ def test_anticipate_no_pk_non_composite_pk_prefetch(self): d.implicit_returning = False with expect_warnings( - "Column 't.x' is marked as a member.*" "may not store NULL.$" + "Column 't.x' is marked as a member.*may not store NULL.$" ): self.assert_compile( t.insert(), @@ -1172,7 +1172,7 @@ def 
test_anticipate_no_pk_lower_case_table(self): Column("notpk", String(10), nullable=True), ) with expect_warnings( - "Column 't.id' is marked as a member.*" "may not store NULL.$" + "Column 't.id' is marked as a member.*may not store NULL.$" ): self.assert_compile( t.insert(), @@ -1755,7 +1755,7 @@ def test_sql_expression_pk_autoinc_lastinserted(self): self.assert_compile( stmt, - "INSERT INTO sometable (id, data) VALUES " "(foobar(), ?)", + "INSERT INTO sometable (id, data) VALUES (foobar(), ?)", checkparams={"data": "foo"}, params={"data": "foo"}, dialect=dialect, diff --git a/test/sql/test_lambdas.py b/test/sql/test_lambdas.py index 627310d8f17..17991ea2e35 100644 --- a/test/sql/test_lambdas.py +++ b/test/sql/test_lambdas.py @@ -221,7 +221,7 @@ def go(val): self.assert_compile( go("u1"), - "SELECT users.id FROM users " "WHERE users.name = 'u1'", + "SELECT users.id FROM users WHERE users.name = 'u1'", literal_binds=True, ) diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index 3592bc6f006..8b43b0f98ac 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -2882,7 +2882,7 @@ def go(): assert_raises_message( exc.InvalidRequestError, - "Table 'users' is already defined for this " "MetaData instance.", + "Table 'users' is already defined for this MetaData instance.", go, ) @@ -5665,7 +5665,7 @@ def test_ix_allcols_truncation(self): dialect.max_identifier_length = 15 self.assert_compile( schema.CreateIndex(ix), - "CREATE INDEX ix_user_2de9 ON " '"user" (data, "Data2", "Data3")', + 'CREATE INDEX ix_user_2de9 ON "user" (data, "Data2", "Data3")', dialect=dialect, ) @@ -5949,7 +5949,7 @@ def test_schematype_ck_name_boolean_no_name(self): # no issue with native boolean self.assert_compile( schema.CreateTable(u1), - 'CREATE TABLE "user" (' "x BOOLEAN" ")", + """CREATE TABLE "user" (x BOOLEAN)""", dialect="postgresql", ) diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py index c0b5cb47d66..9c87b355776 100644 --- a/test/sql/test_operators.py +++ b/test/sql/test_operators.py @@ -419,7 +419,7 @@ def test_parenthesized_exprs(self, op, reverse, negate): ), ( lambda p, q: (1 - p) * (2 - q) * (3 - p) * (4 - q), - "(:p_1 - t.p) * (:q_1 - t.q) * " "(:p_2 - t.p) * (:q_2 - t.q)", + "(:p_1 - t.p) * (:q_1 - t.q) * (:p_2 - t.p) * (:q_2 - t.q)", ), ( lambda p, q: ( @@ -3227,7 +3227,7 @@ def test_regexp_precedence_1(self): self.table.c.myid.match("foo"), self.table.c.myid.regexp_match("xx"), ), - "mytable.myid MATCH :myid_1 AND " "mytable.myid :myid_2", + "mytable.myid MATCH :myid_1 AND mytable.myid :myid_2", ) self.assert_compile( and_( diff --git a/test/sql/test_quote.py b/test/sql/test_quote.py index 51382b19b4a..f3bc8e49481 100644 --- a/test/sql/test_quote.py +++ b/test/sql/test_quote.py @@ -821,7 +821,7 @@ def test_apply_labels_shouldnt_quote(self): # what if table/schema *are* quoted? 
self.assert_compile( t1.select().set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL), - "SELECT " "Foo.T1.Col1 AS Foo_T1_Col1 " "FROM " "Foo.T1", + "SELECT Foo.T1.Col1 AS Foo_T1_Col1 FROM Foo.T1", ) def test_quote_flag_propagate_check_constraint(self): @@ -830,7 +830,7 @@ def test_quote_flag_propagate_check_constraint(self): CheckConstraint(t.c.x > 5) self.assert_compile( schema.CreateTable(t), - "CREATE TABLE t (" '"x" INTEGER, ' 'CHECK ("x" > 5)' ")", + 'CREATE TABLE t ("x" INTEGER, CHECK ("x" > 5))', ) def test_quote_flag_propagate_index(self): diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py index be1f57121b5..8651207a912 100644 --- a/test/sql/test_resultset.py +++ b/test/sql/test_resultset.py @@ -490,7 +490,7 @@ def test_pickled_rows(self, connection, use_pickle, use_labels): if use_pickle: with expect_raises_message( exc.NoSuchColumnError, - "Row was unpickled; lookup by ColumnElement is " "unsupported", + "Row was unpickled; lookup by ColumnElement is unsupported", ): result[0]._mapping[users.c.user_id] else: @@ -499,7 +499,7 @@ def test_pickled_rows(self, connection, use_pickle, use_labels): if use_pickle: with expect_raises_message( exc.NoSuchColumnError, - "Row was unpickled; lookup by ColumnElement is " "unsupported", + "Row was unpickled; lookup by ColumnElement is unsupported", ): result[0]._mapping[users.c.user_name] else: diff --git a/test/sql/test_text.py b/test/sql/test_text.py index 301ad9ffdf8..941a02d9e7e 100644 --- a/test/sql/test_text.py +++ b/test/sql/test_text.py @@ -470,7 +470,7 @@ def test_escaping_double_colons(self): r"SELECT * FROM pg_attribute WHERE " r"attrelid = :tab\:\:regclass" ), - "SELECT * FROM pg_attribute WHERE " "attrelid = %(tab)s::regclass", + "SELECT * FROM pg_attribute WHERE attrelid = %(tab)s::regclass", params={"tab": None}, dialect="postgresql", ) @@ -483,7 +483,7 @@ def test_double_colons_dont_actually_need_escaping(self): r"SELECT * FROM pg_attribute WHERE " r"attrelid = foo::regclass" ), - "SELECT * FROM pg_attribute WHERE " "attrelid = foo::regclass", + "SELECT * FROM pg_attribute WHERE attrelid = foo::regclass", params={}, dialect="postgresql", ) diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 76249f56174..898d6fa0a8c 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -2303,7 +2303,7 @@ def test_variant_we_are_default(self, metadata): assert_raises( (exc.DBAPIError,), connection.exec_driver_sql, - "insert into my_table " "(data) values('four')", + "insert into my_table (data) values('four')", ) trans.rollback() From d85d7a736c155ba5647e0025904b908a8122fe88 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 6 Feb 2024 14:11:05 -0500 Subject: [PATCH 100/544] remove unneeded constructors for Unicode, UnicodeText References: https://github.com/sqlalchemy/sqlalchemy/pull/10970 Change-Id: I59461bcd6359314c0c0a99923da5e3f3d3ddbfff (cherry picked from commit 2202fa4c1318c5342625159e035793cb11fa50bb) --- lib/sqlalchemy/sql/sqltypes.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index 764f9a382ea..fa4c7827fc3 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -306,15 +306,6 @@ class Unicode(String): __visit_name__ = "unicode" - def __init__(self, length=None, **kwargs): - """ - Create a :class:`.Unicode` object. - - Parameters are the same as that of :class:`.String`. 
- - """ - super().__init__(length=length, **kwargs) - class UnicodeText(Text): """An unbounded-length Unicode string type. @@ -330,15 +321,6 @@ class UnicodeText(Text): __visit_name__ = "unicode_text" - def __init__(self, length=None, **kwargs): - """ - Create a Unicode-converting Text type. - - Parameters are the same as that of :class:`_expression.TextClause`. - - """ - super().__init__(length=length, **kwargs) - class Integer(HasExpressionLookup, TypeEngine[int]): """A type for ``int`` integers.""" From e03d4d039369f1676df8045ed278348bdbc2fb5c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 6 Feb 2024 18:11:32 -0500 Subject: [PATCH 101/544] restore uuid_data_type as closed at top level the supports_native_uuid attribute does NOT indicate the UUID datatype being present, only that Uuid(native_uuid=True) would be able to produce something. On SQL Server it produces UNIQUEIDENTIFIER. The current use for this requirement is that of testing the uppercase UUID type that has to match that exactly. Change-Id: I050e5d1889f804ee3763b84828f2bd6a47dd265e (cherry picked from commit f932fc762d40f9b3bb305eb8db8b890483282502) --- lib/sqlalchemy/testing/requirements.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index c5dc52be885..31aac741d48 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -62,10 +62,7 @@ def index_ddl_if_exists(self): def uuid_data_type(self): """Return databases that support the UUID datatype.""" - return exclusions.skip_if( - lambda config: not config.db.dialect.supports_native_uuid, - "backend does not have a UUID datatype", - ) + return exclusions.closed() @property def foreign_keys(self): From 14870221fbad2acf1e9f35132bc3e23872357a69 Mon Sep 17 00:00:00 2001 From: Jim Bosch Date: Tue, 14 Nov 2023 16:19:31 -0500 Subject: [PATCH 102/544] Fix typing generics in PostgreSQL range types. Correctly type PostgreSQL RANGE and MULTIRANGE types as ``Range[T]`` and ``Sequence[Range[T]]``. Introduced utility sequence ``MultiRange`` to allow better interoperability of MULTIRANGE types. Fixes #9736 Closes: #10625 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10625 Pull-request-sha: 2c17bc5f922a2bdb805a29e458184076ccc08055 Change-Id: I4f91d0233b29fd8101e67bdd4cd0aa2524ab788a (cherry picked from commit 4006cb38e13ac471655f5f27102678ed8933ee60) --- doc/build/changelog/unreleased_20/9736.rst | 16 ++ doc/build/dialects/postgresql.rst | 40 +++++ .../dialects/postgresql/__init__.py | 2 + lib/sqlalchemy/dialects/postgresql/asyncpg.py | 16 +- lib/sqlalchemy/dialects/postgresql/pg8000.py | 10 +- lib/sqlalchemy/dialects/postgresql/psycopg.py | 14 +- .../dialects/postgresql/psycopg2.py | 2 +- lib/sqlalchemy/dialects/postgresql/ranges.py | 168 +++++++++++++----- setup.cfg | 2 +- test/dialect/postgresql/test_compiler.py | 27 ++- test/dialect/postgresql/test_types.py | 118 ++++++++++-- .../dialects/postgresql/pg_stuff.py | 21 ++- 12 files changed, 352 insertions(+), 84 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/9736.rst diff --git a/doc/build/changelog/unreleased_20/9736.rst b/doc/build/changelog/unreleased_20/9736.rst new file mode 100644 index 00000000000..deb1703d87b --- /dev/null +++ b/doc/build/changelog/unreleased_20/9736.rst @@ -0,0 +1,16 @@ +.. change:: + :tags: postgresql, usecase + :tickets: 9736 + + Correctly type PostgreSQL RANGE and MULTIRANGE types as ``Range[T]`` + and ``Sequence[Range[T]]``. 
+ Introduced utility sequence :class:`_postgresql.MultiRange` to allow better + interoperability of MULTIRANGE types. + +.. change:: + :tags: postgresql, usecase + + Differentiate between INT4 and INT8 ranges and multi-ranges types when + inferring the database type from a :class:`_postgresql.Range` or + :class:`_postgresql.MultiRange` instance, preferring INT4 if the values + fit into it. diff --git a/doc/build/dialects/postgresql.rst b/doc/build/dialects/postgresql.rst index 0575837185c..e822d069ce6 100644 --- a/doc/build/dialects/postgresql.rst +++ b/doc/build/dialects/postgresql.rst @@ -238,6 +238,8 @@ dialect, **does not** support multirange datatypes. .. versionadded:: 2.0.17 Added multirange support for the pg8000 dialect. pg8000 1.29.8 or greater is required. +.. versionadded:: 2.0.26 :class:`_postgresql.MultiRange` sequence added. + The example below illustrates use of the :class:`_postgresql.TSMULTIRANGE` datatype:: @@ -260,6 +262,7 @@ datatype:: id: Mapped[int] = mapped_column(primary_key=True) event_name: Mapped[str] + added: Mapped[datetime] in_session_periods: Mapped[List[Range[datetime]]] = mapped_column(TSMULTIRANGE) Illustrating insertion and selecting of a record:: @@ -294,6 +297,38 @@ Illustrating insertion and selecting of a record:: a new list to the attribute, or use the :class:`.MutableList` type modifier. See the section :ref:`mutable_toplevel` for background. +.. _postgresql_multirange_list_use: + +Use of a MultiRange sequence to infer the multirange type +""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +When using a multirange as a literal without specifying the type +the utility :class:`_postgresql.MultiRange` sequence can be used:: + + from sqlalchemy import literal + from sqlalchemy.dialects.postgresql import MultiRange + + with Session(engine) as session: + stmt = select(EventCalendar).where( + EventCalendar.added.op("<@")( + MultiRange( + [ + Range(datetime(2023, 1, 1), datetime(2013, 3, 31)), + Range(datetime(2023, 7, 1), datetime(2013, 9, 30)), + ] + ) + ) + ) + in_range = session.execute(stmt).all() + + with engine.connect() as conn: + row = conn.scalar(select(literal(MultiRange([Range(2, 4)])))) + print(f"{row.lower} -> {row.upper}") + +Using a simple ``list`` instead of :class:`_postgresql.MultiRange` would require +manually setting the type of the literal value to the appropriate multirange type. + +.. versionadded:: 2.0.26 :class:`_postgresql.MultiRange` sequence added. The available multirange datatypes are as follows: @@ -416,6 +451,8 @@ construction arguments, are as follows: .. autoclass:: sqlalchemy.dialects.postgresql.AbstractRange :members: comparator_factory +.. autoclass:: sqlalchemy.dialects.postgresql.AbstractSingleRange + .. autoclass:: sqlalchemy.dialects.postgresql.AbstractMultiRange @@ -529,6 +566,9 @@ construction arguments, are as follows: .. autoclass:: TSTZMULTIRANGE +.. 
autoclass:: MultiRange + + PostgreSQL SQL Elements and Functions -------------------------------------- diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py index 8dfa54d3aca..17b14f4d05b 100644 --- a/lib/sqlalchemy/dialects/postgresql/__init__.py +++ b/lib/sqlalchemy/dialects/postgresql/__init__.py @@ -57,12 +57,14 @@ from .named_types import NamedType from .ranges import AbstractMultiRange from .ranges import AbstractRange +from .ranges import AbstractSingleRange from .ranges import DATEMULTIRANGE from .ranges import DATERANGE from .ranges import INT4MULTIRANGE from .ranges import INT4RANGE from .ranges import INT8MULTIRANGE from .ranges import INT8RANGE +from .ranges import MultiRange from .ranges import NUMMULTIRANGE from .ranges import NUMRANGE from .ranges import Range diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 2c460412c09..af097e283d3 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -176,8 +176,6 @@ import json as _py_json import re import time -from typing import cast -from typing import TYPE_CHECKING from . import json from . import ranges @@ -207,9 +205,6 @@ from ...util.concurrency import await_fallback from ...util.concurrency import await_only -if TYPE_CHECKING: - from typing import Iterable - class AsyncpgARRAY(PGARRAY): render_bind_cast = True @@ -361,7 +356,7 @@ class AsyncpgCHAR(sqltypes.CHAR): render_bind_cast = True -class _AsyncpgRange(ranges.AbstractRangeImpl): +class _AsyncpgRange(ranges.AbstractSingleRangeImpl): def bind_processor(self, dialect): asyncpg_Range = dialect.dbapi.asyncpg.Range @@ -415,10 +410,7 @@ def to_range(value): ) return value - return [ - to_range(element) - for element in cast("Iterable[ranges.Range]", value) - ] + return [to_range(element) for element in value] return to_range @@ -437,7 +429,7 @@ def to_range(rvalue): return rvalue if value is not None: - value = [to_range(elem) for elem in value] + value = ranges.MultiRange(to_range(elem) for elem in value) return value @@ -1050,7 +1042,7 @@ class PGDialect_asyncpg(PGDialect): OID: AsyncpgOID, REGCLASS: AsyncpgREGCLASS, sqltypes.CHAR: AsyncpgCHAR, - ranges.AbstractRange: _AsyncpgRange, + ranges.AbstractSingleRange: _AsyncpgRange, ranges.AbstractMultiRange: _AsyncpgMultiRange, }, ) diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py index fd7d9a37880..0151be0253d 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg8000.py +++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py @@ -253,7 +253,7 @@ class _PGOIDVECTOR(_SpaceVector, OIDVECTOR): pass -class _Pg8000Range(ranges.AbstractRangeImpl): +class _Pg8000Range(ranges.AbstractSingleRangeImpl): def bind_processor(self, dialect): pg8000_Range = dialect.dbapi.Range @@ -304,15 +304,13 @@ def result_processor(self, dialect, coltype): def to_multirange(value): if value is None: return None - - mr = [] - for v in value: - mr.append( + else: + return ranges.MultiRange( ranges.Range( v.lower, v.upper, bounds=v.bounds, empty=v.is_empty ) + for v in value ) - return mr return to_multirange diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg.py b/lib/sqlalchemy/dialects/postgresql/psycopg.py index df3d50e4867..90177a43ceb 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg.py @@ -164,7 +164,7 @@ class _PGBoolean(sqltypes.Boolean): render_bind_cast = True -class 
_PsycopgRange(ranges.AbstractRangeImpl): +class _PsycopgRange(ranges.AbstractSingleRangeImpl): def bind_processor(self, dialect): psycopg_Range = cast(PGDialect_psycopg, dialect)._psycopg_Range @@ -220,8 +220,10 @@ def to_range(value): def result_processor(self, dialect, coltype): def to_range(value): - if value is not None: - value = [ + if value is None: + return None + else: + return ranges.MultiRange( ranges.Range( elem._lower, elem._upper, @@ -229,9 +231,7 @@ def to_range(value): empty=not elem._bounds, ) for elem in value - ] - - return value + ) return to_range @@ -288,7 +288,7 @@ class PGDialect_psycopg(_PGDialect_common_psycopg): sqltypes.Integer: _PGInteger, sqltypes.SmallInteger: _PGSmallInteger, sqltypes.BigInteger: _PGBigInteger, - ranges.AbstractRange: _PsycopgRange, + ranges.AbstractSingleRange: _PsycopgRange, ranges.AbstractMultiRange: _PsycopgMultiRange, }, ) diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index 0b89149ec9d..9bf2e493361 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -513,7 +513,7 @@ def result_processor(self, dialect, coltype): return None -class _Psycopg2Range(ranges.AbstractRangeImpl): +class _Psycopg2Range(ranges.AbstractSingleRangeImpl): _psycopg2_range_cls = "none" def bind_processor(self, dialect): diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py index 980f1449359..b793ca49f18 100644 --- a/lib/sqlalchemy/dialects/postgresql/ranges.py +++ b/lib/sqlalchemy/dialects/postgresql/ranges.py @@ -15,8 +15,10 @@ from typing import Any from typing import cast from typing import Generic +from typing import List from typing import Optional from typing import overload +from typing import Sequence from typing import Tuple from typing import Type from typing import TYPE_CHECKING @@ -152,8 +154,8 @@ def upper_inf(self) -> bool: return not self.empty and self.upper is None @property - def __sa_type_engine__(self) -> AbstractRange[Range[_T]]: - return AbstractRange() + def __sa_type_engine__(self) -> AbstractSingleRange[_T]: + return AbstractSingleRange() def _contains_value(self, value: _T) -> bool: """Return True if this range contains the given value.""" @@ -708,15 +710,34 @@ def _stringify(self) -> str: return f"{b0}{l},{r}{b1}" -class AbstractRange(sqltypes.TypeEngine[Range[_T]]): - """ - Base for PostgreSQL RANGE types. +class MultiRange(List[Range[_T]]): + """Represents a multirange sequence. + + This list subclass is an utility to allow automatic type inference of + the proper multi-range SQL type depending on the single range values. + This is useful when operating on literal multi-ranges:: + + import sqlalchemy as sa + from sqlalchemy.dialects.postgresql import MultiRange, Range + + value = literal(MultiRange([Range(2, 4)])) + + select(tbl).where(tbl.c.value.op("@")(MultiRange([Range(-3, 7)]))) + + .. versionadded:: 2.0.26 .. seealso:: - `PostgreSQL range functions `_ + - :ref:`postgresql_multirange_list_use`. + """ - """ # noqa: E501 + @property + def __sa_type_engine__(self) -> AbstractMultiRange[_T]: + return AbstractMultiRange() + + +class AbstractRange(sqltypes.TypeEngine[_T]): + """Base class for single and multi Range SQL types.""" render_bind_cast = True @@ -742,7 +763,10 @@ def adapt( and also render as ``INT4RANGE`` in SQL and DDL. 
""" - if issubclass(cls, AbstractRangeImpl) and cls is not self.__class__: + if ( + issubclass(cls, (AbstractSingleRangeImpl, AbstractMultiRangeImpl)) + and cls is not self.__class__ + ): # two ways to do this are: 1. create a new type on the fly # or 2. have AbstractRangeImpl(visit_name) constructor and a # visit_abstract_range_impl() method in the PG compiler. @@ -761,21 +785,6 @@ def adapt( else: return super().adapt(cls) - def _resolve_for_literal(self, value: Any) -> Any: - spec = value.lower if value.lower is not None else value.upper - - if isinstance(spec, int): - return INT8RANGE() - elif isinstance(spec, (Decimal, float)): - return NUMRANGE() - elif isinstance(spec, datetime): - return TSRANGE() if not spec.tzinfo else TSTZRANGE() - elif isinstance(spec, date): - return DATERANGE() - else: - # empty Range, SQL datatype can't be determined here - return sqltypes.NULLTYPE - class comparator_factory(TypeEngine.Comparator[Range[Any]]): """Define comparison operations for range types.""" @@ -857,91 +866,164 @@ def intersection(self, other: Any) -> ColumnElement[Range[_T]]: return self.expr.operate(operators.mul, other) -class AbstractRangeImpl(AbstractRange[Range[_T]]): - """Marker for AbstractRange that will apply a subclass-specific +class AbstractSingleRange(AbstractRange[Range[_T]]): + """Base for PostgreSQL RANGE types. + + These are types that return a single :class:`_postgresql.Range` object. + + .. seealso:: + + `PostgreSQL range functions `_ + + """ # noqa: E501 + + __abstract__ = True + + def _resolve_for_literal(self, value: Range[Any]) -> Any: + spec = value.lower if value.lower is not None else value.upper + + if isinstance(spec, int): + # pg is unreasonably picky here: the query + # "select 1::INTEGER <@ '[1, 4)'::INT8RANGE" raises + # "operator does not exist: integer <@ int8range" as of pg 16 + if _is_int32(value): + return INT4RANGE() + else: + return INT8RANGE() + elif isinstance(spec, (Decimal, float)): + return NUMRANGE() + elif isinstance(spec, datetime): + return TSRANGE() if not spec.tzinfo else TSTZRANGE() + elif isinstance(spec, date): + return DATERANGE() + else: + # empty Range, SQL datatype can't be determined here + return sqltypes.NULLTYPE + + +class AbstractSingleRangeImpl(AbstractSingleRange[_T]): + """Marker for AbstractSingleRange that will apply a subclass-specific adaptation""" -class AbstractMultiRange(AbstractRange[Range[_T]]): - """base for PostgreSQL MULTIRANGE types""" +class AbstractMultiRange(AbstractRange[Sequence[Range[_T]]]): + """Base for PostgreSQL MULTIRANGE types. + + these are types that return a sequence of :class:`_postgresql.Range` + objects. 
+ + """ __abstract__ = True + def _resolve_for_literal(self, value: Sequence[Range[Any]]) -> Any: + if not value: + # empty MultiRange, SQL datatype can't be determined here + return sqltypes.NULLTYPE + first = value[0] + spec = first.lower if first.lower is not None else first.upper -class AbstractMultiRangeImpl( - AbstractRangeImpl[Range[_T]], AbstractMultiRange[Range[_T]] -): - """Marker for AbstractRange that will apply a subclass-specific + if isinstance(spec, int): + # pg is unreasonably picky here: the query + # "select 1::INTEGER <@ '{[1, 4),[6,19)}'::INT8MULTIRANGE" raises + # "operator does not exist: integer <@ int8multirange" as of pg 16 + if all(_is_int32(r) for r in value): + return INT4MULTIRANGE() + else: + return INT8MULTIRANGE() + elif isinstance(spec, (Decimal, float)): + return NUMMULTIRANGE() + elif isinstance(spec, datetime): + return TSMULTIRANGE() if not spec.tzinfo else TSTZMULTIRANGE() + elif isinstance(spec, date): + return DATEMULTIRANGE() + else: + # empty Range, SQL datatype can't be determined here + return sqltypes.NULLTYPE + + +class AbstractMultiRangeImpl(AbstractMultiRange[_T]): + """Marker for AbstractMultiRange that will apply a subclass-specific adaptation""" -class INT4RANGE(AbstractRange[Range[int]]): +class INT4RANGE(AbstractSingleRange[int]): """Represent the PostgreSQL INT4RANGE type.""" __visit_name__ = "INT4RANGE" -class INT8RANGE(AbstractRange[Range[int]]): +class INT8RANGE(AbstractSingleRange[int]): """Represent the PostgreSQL INT8RANGE type.""" __visit_name__ = "INT8RANGE" -class NUMRANGE(AbstractRange[Range[Decimal]]): +class NUMRANGE(AbstractSingleRange[Decimal]): """Represent the PostgreSQL NUMRANGE type.""" __visit_name__ = "NUMRANGE" -class DATERANGE(AbstractRange[Range[date]]): +class DATERANGE(AbstractSingleRange[date]): """Represent the PostgreSQL DATERANGE type.""" __visit_name__ = "DATERANGE" -class TSRANGE(AbstractRange[Range[datetime]]): +class TSRANGE(AbstractSingleRange[datetime]): """Represent the PostgreSQL TSRANGE type.""" __visit_name__ = "TSRANGE" -class TSTZRANGE(AbstractRange[Range[datetime]]): +class TSTZRANGE(AbstractSingleRange[datetime]): """Represent the PostgreSQL TSTZRANGE type.""" __visit_name__ = "TSTZRANGE" -class INT4MULTIRANGE(AbstractMultiRange[Range[int]]): +class INT4MULTIRANGE(AbstractMultiRange[int]): """Represent the PostgreSQL INT4MULTIRANGE type.""" __visit_name__ = "INT4MULTIRANGE" -class INT8MULTIRANGE(AbstractMultiRange[Range[int]]): +class INT8MULTIRANGE(AbstractMultiRange[int]): """Represent the PostgreSQL INT8MULTIRANGE type.""" __visit_name__ = "INT8MULTIRANGE" -class NUMMULTIRANGE(AbstractMultiRange[Range[Decimal]]): +class NUMMULTIRANGE(AbstractMultiRange[Decimal]): """Represent the PostgreSQL NUMMULTIRANGE type.""" __visit_name__ = "NUMMULTIRANGE" -class DATEMULTIRANGE(AbstractMultiRange[Range[date]]): +class DATEMULTIRANGE(AbstractMultiRange[date]): """Represent the PostgreSQL DATEMULTIRANGE type.""" __visit_name__ = "DATEMULTIRANGE" -class TSMULTIRANGE(AbstractMultiRange[Range[datetime]]): +class TSMULTIRANGE(AbstractMultiRange[datetime]): """Represent the PostgreSQL TSRANGE type.""" __visit_name__ = "TSMULTIRANGE" -class TSTZMULTIRANGE(AbstractMultiRange[Range[datetime]]): +class TSTZMULTIRANGE(AbstractMultiRange[datetime]): """Represent the PostgreSQL TSTZRANGE type.""" __visit_name__ = "TSTZMULTIRANGE" + + +_max_int_32 = 2**31 - 1 +_min_int_32 = -(2**31) + + +def _is_int32(r: Range[int]) -> bool: + return (r.lower is None or _min_int_32 <= r.lower <= _max_int_32) and ( + r.upper is None 
or _min_int_32 <= r.upper <= _max_int_32 + ) diff --git a/setup.cfg b/setup.cfg index d51e4d854cc..2a8a68132ad 100644 --- a/setup.cfg +++ b/setup.cfg @@ -181,7 +181,7 @@ mariadb_connector = mariadb+mariadbconnector://scott:tiger@127.0.0.1:3306/test mssql = mssql+pyodbc://scott:tiger^5HHH@mssql2017:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes mssql_async = mssql+aioodbc://scott:tiger^5HHH@mssql2017:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes pymssql = mssql+pymssql://scott:tiger^5HHH@mssql2017:1433/test -docker_mssql = mssql+pyodbc://scott:tiger^5HHH@127.0.0.1:1433/test?driver=ODBC+Driver+18+for+SQL+Server +docker_mssql = mssql+pyodbc://scott:tiger^5HHH@127.0.0.1:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes oracle = oracle+cx_oracle://scott:tiger@oracle18c/xe cxoracle = oracle+cx_oracle://scott:tiger@oracle18c/xe oracledb = oracle+oracledb://scott:tiger@oracle18c/xe diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index 005e60eaa14..10144d63a69 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -52,6 +52,7 @@ from sqlalchemy.dialects.postgresql import TSRANGE from sqlalchemy.dialects.postgresql.base import PGDialect from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2 +from sqlalchemy.dialects.postgresql.ranges import MultiRange from sqlalchemy.orm import aliased from sqlalchemy.orm import clear_mappers from sqlalchemy.orm import Session @@ -2588,7 +2589,7 @@ def test_eager_grouping_flag(self, expr, expected, type_): self.assert_compile(expr, expected) - def test_custom_object_hook(self): + def test_range_custom_object_hook(self): # See issue #8884 from datetime import date @@ -2608,6 +2609,30 @@ def test_custom_object_hook(self): "WHERE usages.date <@ %(date_1)s::DATERANGE", ) + def test_multirange_custom_object_hook(self): + from datetime import date + + usages = table( + "usages", + column("id", Integer), + column("date", Date), + column("amount", Integer), + ) + period = MultiRange( + [ + Range(date(2022, 1, 1), (2023, 1, 1)), + Range(date(2024, 1, 1), (2025, 1, 1)), + ] + ) + stmt = select(func.sum(usages.c.amount)).where( + usages.c.date.op("<@")(period) + ) + self.assert_compile( + stmt, + "SELECT sum(usages.amount) AS sum_1 FROM usages " + "WHERE usages.date <@ %(date_1)s::DATEMULTIRANGE", + ) + def test_bitwise_xor(self): c1 = column("c1", Integer) c2 = column("c2", Integer) diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index 2088436eebf..a5093c0bc90 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -73,6 +73,7 @@ from sqlalchemy.dialects.postgresql import TSRANGE from sqlalchemy.dialects.postgresql import TSTZMULTIRANGE from sqlalchemy.dialects.postgresql import TSTZRANGE +from sqlalchemy.dialects.postgresql.ranges import MultiRange from sqlalchemy.exc import CompileError from sqlalchemy.exc import DBAPIError from sqlalchemy.orm import declarative_base @@ -92,6 +93,7 @@ from sqlalchemy.testing.assertions import ComparesTables from sqlalchemy.testing.assertions import eq_ from sqlalchemy.testing.assertions import is_ +from sqlalchemy.testing.assertions import ne_ from sqlalchemy.testing.assertsql import RegexSQL from sqlalchemy.testing.schema import pep435_enum from sqlalchemy.testing.suite import test_types as suite @@ -3887,6 +3889,53 @@ def __init__(self, name, data): 
eq_(s.query(Data.data, Data).all(), [(d.data, d)]) +class RangeMiscTests(fixtures.TestBase): + @testing.combinations( + (Range(2, 7), INT4RANGE), + (Range(-10, 7), INT4RANGE), + (Range(None, -7), INT4RANGE), + (Range(33, None), INT4RANGE), + (Range(-2147483648, 2147483647), INT4RANGE), + (Range(-2147483648 - 1, 2147483647), INT8RANGE), + (Range(-2147483648, 2147483647 + 1), INT8RANGE), + (Range(-2147483648 - 1, None), INT8RANGE), + (Range(None, 2147483647 + 1), INT8RANGE), + ) + def test_resolve_for_literal(self, obj, type_): + """This tests that the int4 / int8 version is selected correctly by + _resolve_for_literal.""" + lit = literal(obj) + eq_(type(lit.type), type_) + + @testing.combinations( + (Range(2, 7), INT4MULTIRANGE), + (Range(-10, 7), INT4MULTIRANGE), + (Range(None, -7), INT4MULTIRANGE), + (Range(33, None), INT4MULTIRANGE), + (Range(-2147483648, 2147483647), INT4MULTIRANGE), + (Range(-2147483648 - 1, 2147483647), INT8MULTIRANGE), + (Range(-2147483648, 2147483647 + 1), INT8MULTIRANGE), + (Range(-2147483648 - 1, None), INT8MULTIRANGE), + (Range(None, 2147483647 + 1), INT8MULTIRANGE), + ) + def test_resolve_for_literal_multi(self, obj, type_): + """This tests that the int4 / int8 version is selected correctly by + _resolve_for_literal.""" + list_ = MultiRange([Range(-1, 1), obj, Range(7, 100)]) + lit = literal(list_) + eq_(type(lit.type), type_) + + def test_multirange_sequence(self): + plain = [Range(-1, 1), Range(42, 43), Range(7, 100)] + mr = MultiRange(plain) + is_true(issubclass(MultiRange, list)) + is_true(isinstance(mr, list)) + eq_(mr, plain) + eq_(str(mr), str(plain)) + eq_(repr(mr), repr(plain)) + ne_(mr, plain[1:]) + + class _RangeTests: _col_type = None "The concrete range class these tests are for." @@ -4641,11 +4690,21 @@ def test_auto_cast_back_to_type(self, connection): Brought up in #8540. """ + # see also CompileTest::test_range_custom_object_hook data_obj = self._data_obj() stmt = select(literal(data_obj, type_=self._col_type)) round_trip = connection.scalar(stmt) eq_(round_trip, data_obj) + def test_auto_cast_back_to_type_without_type(self, connection): + """use _resolve_for_literal to cast""" + # see also CompileTest::test_range_custom_object_hook + data_obj = self._data_obj() + lit = literal(data_obj) + round_trip = connection.scalar(select(lit)) + eq_(round_trip, data_obj) + eq_(type(lit.type), self._col_type) + def test_actual_type(self): eq_(str(self._col_type()), self._col_str) @@ -5140,10 +5199,17 @@ def test_difference(self): ) -class _MultiRangeTypeRoundTrip(fixtures.TablesTest): +class _MultiRangeTypeRoundTrip(fixtures.TablesTest, _RangeTests): __requires__ = ("multirange_types",) __backend__ = True + @testing.fixture(params=(True, False), ids=["multirange", "plain_list"]) + def data_obj(self, request): + if request.param: + return MultiRange(self._data_obj()) + else: + return list(self._data_obj()) + @classmethod def define_tables(cls, metadata): # no reason ranges shouldn't be primary keys, @@ -5155,7 +5221,7 @@ def define_tables(cls, metadata): ) cls.col = table.c.range - def test_auto_cast_back_to_type(self, connection): + def test_auto_cast_back_to_type(self, connection, data_obj): """test that a straight pass of the range type without any context will send appropriate casting info so that the driver can round trip it. @@ -5170,11 +5236,29 @@ def test_auto_cast_back_to_type(self, connection): Brought up in #8540. 
""" - data_obj = self._data_obj() + # see also CompileTest::test_multirange_custom_object_hook stmt = select(literal(data_obj, type_=self._col_type)) round_trip = connection.scalar(stmt) eq_(round_trip, data_obj) + def test_auto_cast_back_to_type_without_type(self, connection): + """use _resolve_for_literal to cast""" + # see also CompileTest::test_multirange_custom_object_hook + data_obj = MultiRange(self._data_obj()) + lit = literal(data_obj) + round_trip = connection.scalar(select(lit)) + eq_(round_trip, data_obj) + eq_(type(lit.type), self._col_type) + + @testing.fails("no automatic adaptation of plain list") + def test_auto_cast_back_to_type_without_type_plain_list(self, connection): + """use _resolve_for_literal to cast""" + # see also CompileTest::test_multirange_custom_object_hook + data_obj = list(self._data_obj()) + lit = literal(data_obj) + r = connection.scalar(select(lit)) + eq_(type(r), list) + def test_actual_type(self): eq_(str(self._col_type()), self._col_str) @@ -5188,12 +5272,12 @@ def test_reflect(self, connection): def _assert_data(self, conn): data = conn.execute(select(self.tables.data_table.c.range)).fetchall() eq_(data, [(self._data_obj(),)]) + eq_(type(data[0][0]), MultiRange) - def test_textual_round_trip_w_dialect_type(self, connection): + def test_textual_round_trip_w_dialect_type(self, connection, data_obj): """test #8690""" data_table = self.tables.data_table - data_obj = self._data_obj() connection.execute( self.tables.data_table.insert(), {"range": data_obj} ) @@ -5206,9 +5290,9 @@ def test_textual_round_trip_w_dialect_type(self, connection): eq_(data_obj, v2) - def test_insert_obj(self, connection): + def test_insert_obj(self, connection, data_obj): connection.execute( - self.tables.data_table.insert(), {"range": self._data_obj()} + self.tables.data_table.insert(), {"range": data_obj} ) self._assert_data(connection) @@ -5229,6 +5313,7 @@ def test_union_result_text(self, connection): range_ = self.tables.data_table.c.range data = connection.execute(select(range_ + range_)).fetchall() eq_(data, [(self._data_obj(),)]) + eq_(type(data[0][0]), MultiRange) @testing.requires.psycopg_or_pg8000_compatibility def test_intersection_result_text(self, connection): @@ -5240,6 +5325,7 @@ def test_intersection_result_text(self, connection): range_ = self.tables.data_table.c.range data = connection.execute(select(range_ * range_)).fetchall() eq_(data, [(self._data_obj(),)]) + eq_(type(data[0][0]), MultiRange) @testing.requires.psycopg_or_pg8000_compatibility def test_difference_result_text(self, connection): @@ -5251,6 +5337,7 @@ def test_difference_result_text(self, connection): range_ = self.tables.data_table.c.range data = connection.execute(select(range_ - range_)).fetchall() eq_(data, [([],)]) + eq_(type(data[0][0]), MultiRange) class _Int4MultiRangeTests: @@ -5261,11 +5348,7 @@ def _data_str(self): return "{[1,2), [3, 5), [9, 12)}" def _data_obj(self): - return [ - Range(1, 2), - Range(3, 5), - Range(9, 12), - ] + return [Range(1, 2), Range(3, 5), Range(9, 12)] class _Int8MultiRangeTests: @@ -5465,6 +5548,17 @@ class DateTimeTZRMultiangeRoundTripTest( pass +class MultiRangeSequenceTest(fixtures.TestBase): + def test_methods(self): + plain = [Range(1, 3), Range(5, 9)] + multi = MultiRange(plain) + is_true(isinstance(multi, list)) + eq_(multi, plain) + ne_(multi, plain[:1]) + eq_(str(multi), str(plain)) + eq_(repr(multi), repr(plain)) + + class JSONTest(AssertsCompiledSQL, fixtures.TestBase): __dialect__ = "postgresql" diff --git 
a/test/typing/plain_files/dialects/postgresql/pg_stuff.py b/test/typing/plain_files/dialects/postgresql/pg_stuff.py index 4567daa3866..678d22b71f9 100644 --- a/test/typing/plain_files/dialects/postgresql/pg_stuff.py +++ b/test/typing/plain_files/dialects/postgresql/pg_stuff.py @@ -12,14 +12,17 @@ from sqlalchemy import UniqueConstraint from sqlalchemy.dialects.postgresql import ARRAY from sqlalchemy.dialects.postgresql import array +from sqlalchemy.dialects.postgresql import DATERANGE from sqlalchemy.dialects.postgresql import insert +from sqlalchemy.dialects.postgresql import INT4RANGE +from sqlalchemy.dialects.postgresql import INT8MULTIRANGE from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.dialects.postgresql import TSTZMULTIRANGE from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column - # test #6402 c1 = Column(UUID()) @@ -77,3 +80,19 @@ class Test(Base): ).on_conflict_do_update( unique, ["foo"], Test.id > 0, {"id": 42, Test.ident: 99}, Test.id == 22 ).excluded.foo.desc() + + +# EXPECTED_TYPE: Column[Range[int]] +reveal_type(Column(INT4RANGE())) +# EXPECTED_TYPE: Column[Range[datetime.date]] +reveal_type(Column("foo", DATERANGE())) +# EXPECTED_TYPE: Column[Sequence[Range[int]]] +reveal_type(Column(INT8MULTIRANGE())) +# EXPECTED_TYPE: Column[Sequence[Range[datetime.datetime]]] +reveal_type(Column("foo", TSTZMULTIRANGE())) + + +range_col_stmt = select(Column(INT4RANGE()), Column(INT8MULTIRANGE())) + +# EXPECTED_TYPE: Select[Tuple[Range[int], Sequence[Range[int]]]] +reveal_type(range_col_stmt) From 696acea44007004273a7bf862cae5e631049303c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Silo=C3=A9=20Garcez?= <51986786+Roast-Lord@users.noreply.github.com> Date: Wed, 7 Feb 2024 15:14:24 -0300 Subject: [PATCH 103/544] Fixed typo on ordered_values example. (#10984) (cherry picked from commit 009aa8cb63dd082e1ba0c4a96a39980d36e26e71) --- lib/sqlalchemy/sql/dml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index 1151d61ad80..779be1dac12 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -1536,7 +1536,7 @@ def ordered_values(self, *args: Tuple[_DMLColumnArgument, Any]) -> Self: E.g.:: stmt = table.update().ordered_values( - ("name", "ed"), ("ident": "foo") + ("name", "ed"), ("ident", "foo") ) .. seealso:: From 07258583942ae76cd2291bfee37ad63f249ac86a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 8 Feb 2024 08:45:22 -0500 Subject: [PATCH 104/544] handle case where neither side has a cache key Fixed issue where an assertion within the implementation for :func:`_orm.with_expression` would raise if a SQL expression that was not cacheable were used; this was a 2.0 regression since 1.4. Fixes: #10990 Change-Id: I6541189d29d2e860df7fbab187bfcc6f4dcbfc76 (cherry picked from commit d97679e0926b829592bf5962d9dae5f2fe99503f) --- doc/build/changelog/unreleased_20/10990.rst | 7 ++++ lib/sqlalchemy/orm/strategy_options.py | 12 +++---- test/orm/test_deferred.py | 38 +++++++++++++++++++++ 3 files changed, 51 insertions(+), 6 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10990.rst diff --git a/doc/build/changelog/unreleased_20/10990.rst b/doc/build/changelog/unreleased_20/10990.rst new file mode 100644 index 00000000000..ac887c83640 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10990.rst @@ -0,0 +1,7 @@ +.. 
change:: + :tags: bug, orm + :tickets: 10990 + + Fixed issue where an assertion within the implementation for + :func:`_orm.with_expression` would raise if a SQL expression that was not + cacheable were used; this was a 2.0 regression since 1.4. diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index 5f3ac9bf455..c20215ac336 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -1064,15 +1064,15 @@ def process(opt: _LoadElement) -> _LoadElement: orig_cache_key = orig_query._generate_cache_key() replacement_cache_key = context.query._generate_cache_key() + if replacement_cache_key is not None: assert orig_cache_key is not None - assert replacement_cache_key is not None - opt._extra_criteria = tuple( - replacement_cache_key._apply_params_to_element( - orig_cache_key, crit + opt._extra_criteria = tuple( + replacement_cache_key._apply_params_to_element( + orig_cache_key, crit + ) + for crit in opt._extra_criteria ) - for crit in opt._extra_criteria - ) return opt diff --git a/test/orm/test_deferred.py b/test/orm/test_deferred.py index 66e3104a95d..dbfe3ef7974 100644 --- a/test/orm/test_deferred.py +++ b/test/orm/test_deferred.py @@ -10,6 +10,7 @@ from sqlalchemy import select from sqlalchemy import String from sqlalchemy import testing +from sqlalchemy import TypeDecorator from sqlalchemy import union_all from sqlalchemy import util from sqlalchemy.orm import aliased @@ -2215,9 +2216,21 @@ class C(ComparableEntity, Base): c_expr = query_expression(literal(1)) + class CustomTimeStamp(TypeDecorator): + cache_ok = False + impl = Integer + + class HasNonCacheable(ComparableEntity, Base): + __tablename__ = "non_cacheable" + + id = Column(Integer, primary_key=True) + created = Column(CustomTimeStamp) + msg_translated = query_expression() + @classmethod def insert_data(cls, connection): A, A_default, B, C = cls.classes("A", "A_default", "B", "C") + (HasNonCacheable,) = cls.classes("HasNonCacheable") s = Session(connection) s.add_all( @@ -2230,6 +2243,7 @@ def insert_data(cls, connection): C(id=2, x=2), A_default(id=1, x=1, y=2), A_default(id=2, x=2, y=3), + HasNonCacheable(id=1, created=12345), ] ) @@ -2269,6 +2283,30 @@ def test_expr_default_value(self): ) eq_(c2.all(), [C(c_expr=4)]) + def test_non_cacheable_expr(self): + """test #10990""" + + HasNonCacheable = self.classes.HasNonCacheable + + for i in range(3): + s = fixture_session() + + stmt = ( + select(HasNonCacheable) + .where(HasNonCacheable.created > 10) + .options( + with_expression( + HasNonCacheable.msg_translated, + HasNonCacheable.created + 10, + ) + ) + ) + + eq_( + s.scalars(stmt).all(), + [HasNonCacheable(id=1, created=12345, msg_translated=12355)], + ) + def test_reuse_expr(self): A = self.classes.A From 825aa01640b71f1cd70724656e8e887ea03c3601 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 11 Feb 2024 10:08:22 -0500 Subject: [PATCH 105/544] fix changelog messages Change-Id: I5af8f59ec15820f83210f49aab3006b726484301 (cherry picked from commit a15df7a107912e393352dc2bf378e7cf8f537b71) --- doc/build/changelog/unreleased_20/10877.rst | 7 +++++-- doc/build/changelog/unreleased_20/uuid_imv_fixes.rst | 4 ++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/doc/build/changelog/unreleased_20/10877.rst b/doc/build/changelog/unreleased_20/10877.rst index 8aaac983b45..90bf75f2493 100644 --- a/doc/build/changelog/unreleased_20/10877.rst +++ b/doc/build/changelog/unreleased_20/10877.rst @@ -1,7 +1,10 @@ .. 
change:: - :tags: oracle + :tags: oracle, bug, performance :tickets: 10877 Changed the default arraysize of the Oracle dialects so that the value set by the driver is used, that is 100 at the time of writing for both - cx_oracle and oracledb. Previously the value was set to 50 by default. + cx_oracle and oracledb. Previously the value was set to 50 by default. The + setting of 50 could cause significant performance regressions compared to + when using cx_oracle/oracledb alone to fetch many hundreds of rows over + slower networks. diff --git a/doc/build/changelog/unreleased_20/uuid_imv_fixes.rst b/doc/build/changelog/unreleased_20/uuid_imv_fixes.rst index 79aa132b21e..0744c61e35b 100644 --- a/doc/build/changelog/unreleased_20/uuid_imv_fixes.rst +++ b/doc/build/changelog/unreleased_20/uuid_imv_fixes.rst @@ -13,8 +13,8 @@ :tags: bug, postgresql Fixed an issue regarding the use of the :class:`.Uuid` datatype with the - :paramref:`.Uuid.as_uuid` parameter set to False, when using the pymssql - dialect. ORM-optimized INSERT statements (e.g. the "insertmanyvalues" + :paramref:`.Uuid.as_uuid` parameter set to False, when using PostgreSQL + dialects. ORM-optimized INSERT statements (e.g. the "insertmanyvalues" feature) would not correctly align primary key UUID values for bulk INSERT statements, resulting in errors. Similar issues were fixed for the pymssql driver as well. From 77e606ee5d102869658bd454d3320d5b406ee01d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 11 Feb 2024 10:11:07 -0500 Subject: [PATCH 106/544] - 2.0.26 --- doc/build/changelog/changelog_20.rst | 182 +++++++++++++++++- doc/build/changelog/unreleased_20/10777.rst | 7 - doc/build/changelog/unreleased_20/10843.rst | 10 - doc/build/changelog/unreleased_20/10850.rst | 7 - doc/build/changelog/unreleased_20/10863.rst | 11 -- doc/build/changelog/unreleased_20/10877.rst | 10 - doc/build/changelog/unreleased_20/10893.rst | 8 - doc/build/changelog/unreleased_20/10896.rst | 11 -- doc/build/changelog/unreleased_20/10899.rst | 10 - doc/build/changelog/unreleased_20/10904.rst | 11 -- doc/build/changelog/unreleased_20/10920.rst | 11 -- doc/build/changelog/unreleased_20/10967.rst | 11 -- doc/build/changelog/unreleased_20/10990.rst | 7 - doc/build/changelog/unreleased_20/9736.rst | 16 -- .../unreleased_20/checkin_conn_none.rst | 6 - .../changelog/unreleased_20/examples.rst | 8 - .../unreleased_20/uuid_imv_fixes.rst | 20 -- doc/build/conf.py | 4 +- 18 files changed, 183 insertions(+), 167 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/10777.rst delete mode 100644 doc/build/changelog/unreleased_20/10843.rst delete mode 100644 doc/build/changelog/unreleased_20/10850.rst delete mode 100644 doc/build/changelog/unreleased_20/10863.rst delete mode 100644 doc/build/changelog/unreleased_20/10877.rst delete mode 100644 doc/build/changelog/unreleased_20/10893.rst delete mode 100644 doc/build/changelog/unreleased_20/10896.rst delete mode 100644 doc/build/changelog/unreleased_20/10899.rst delete mode 100644 doc/build/changelog/unreleased_20/10904.rst delete mode 100644 doc/build/changelog/unreleased_20/10920.rst delete mode 100644 doc/build/changelog/unreleased_20/10967.rst delete mode 100644 doc/build/changelog/unreleased_20/10990.rst delete mode 100644 doc/build/changelog/unreleased_20/9736.rst delete mode 100644 doc/build/changelog/unreleased_20/checkin_conn_none.rst delete mode 100644 doc/build/changelog/unreleased_20/examples.rst delete mode 100644 doc/build/changelog/unreleased_20/uuid_imv_fixes.rst diff --git 
a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 5bd0385fc5d..79631ed7621 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,187 @@ .. changelog:: :version: 2.0.26 - :include_notes_from: unreleased_20 + :released: February 11, 2024 + + .. change:: + :tags: usecase, postgresql, reflection + :tickets: 10777 + + Added support for reflection of PostgreSQL CHECK constraints marked with + "NO INHERIT", setting the key ``no_inherit=True`` in the reflected data. + Pull request courtesy Ellis Valentiner. + + .. change:: + :tags: bug, sql + :tickets: 10843 + + Fixed issues in :func:`_sql.case` where the logic for determining the + type of the expression could result in :class:`.NullType` if the last + element in the "whens" had no type, or in other cases where the type + could resolve to ``None``. The logic has been updated to scan all + given expressions so that the first non-null type is used, as well as + to always ensure a type is present. Pull request courtesy David Evans. + + .. change:: + :tags: bug, mysql + :tickets: 10850 + + Fixed issue where NULL/NOT NULL would not be properly reflected from a + MySQL column that also specified the VIRTUAL or STORED directives. Pull + request courtesy Georg Wicke-Arndt. + + .. change:: + :tags: bug, regression, postgresql + :tickets: 10863 + + Fixed regression in the asyncpg dialect caused by :ticket:`10717` in + release 2.0.24 where the change that now attempts to gracefully close the + asyncpg connection before terminating would not fall back to + ``terminate()`` for other potential connection-related exceptions other + than a timeout error, not taking into account cases where the graceful + ``.close()`` attempt fails for other reasons such as connection errors. + + + .. change:: + :tags: oracle, bug, performance + :tickets: 10877 + + Changed the default arraysize of the Oracle dialects so that the value set + by the driver is used, that is 100 at the time of writing for both + cx_oracle and oracledb. Previously the value was set to 50 by default. The + setting of 50 could cause significant performance regressions compared to + when using cx_oracle/oracledb alone to fetch many hundreds of rows over + slower networks. + + .. change:: + :tags: bug, mysql + :tickets: 10893 + + Fixed issue in asyncio dialects asyncmy and aiomysql, where their + ``.close()`` method is apparently not a graceful close. replace with + non-standard ``.ensure_closed()`` method that's awaitable and move + ``.close()`` to the so-called "terminate" case. + + .. change:: + :tags: bug, orm + :tickets: 10896 + + Replaced the "loader depth is excessively deep" warning with a shorter + message added to the caching badge within SQL logging, for those statements + where the ORM disabled the cache due to a too-deep chain of loader options. + The condition which this warning highlights is difficult to resolve and is + generally just a limitation in the ORM's application of SQL caching. A + future feature may include the ability to tune the threshold where caching + is disabled, but for now the warning will no longer be a nuisance. + + .. change:: + :tags: bug, orm + :tickets: 10899 + + Fixed issue where it was not possible to use a type (such as an enum) + within a :class:`_orm.Mapped` container type if that type were declared + locally within the class body. The scope of locals used for the eval now + includes that of the class body itself. 
In addition, the expression within + :class:`_orm.Mapped` may also refer to the class name itself, if used as a + string or with future annotations mode. + + .. change:: + :tags: usecase, postgresql + :tickets: 10904 + + Support the ``USING `` option for PostgreSQL ``CREATE TABLE`` to + specify the access method to use to store the contents for the new table. + Pull request courtesy Edgar Ramírez-Mondragón. + + .. seealso:: + + :ref:`postgresql_table_options` + + .. change:: + :tags: bug, examples + :tickets: 10920 + + Fixed regression in history_meta example where the use of + :meth:`_schema.MetaData.to_metadata` to make a copy of the history table + would also copy indexes (which is a good thing), but causing naming + conflicts indexes regardless of naming scheme used for those indexes. A + "_history" suffix is now added to these indexes in the same way as is + achieved for the table name. + + + .. change:: + :tags: bug, orm + :tickets: 10967 + + Fixed issue where using :meth:`_orm.Session.delete` along with the + :paramref:`_orm.Mapper.version_id_col` feature would fail to use the + correct version identifier in the case that an additional UPDATE were + emitted against the target object as a result of the use of + :paramref:`_orm.relationship.post_update` on the object. The issue is + similar to :ticket:`10800` just fixed in version 2.0.25 for the case of + updates alone. + + .. change:: + :tags: bug, orm + :tickets: 10990 + + Fixed issue where an assertion within the implementation for + :func:`_orm.with_expression` would raise if a SQL expression that was not + cacheable were used; this was a 2.0 regression since 1.4. + + .. change:: + :tags: postgresql, usecase + :tickets: 9736 + + Correctly type PostgreSQL RANGE and MULTIRANGE types as ``Range[T]`` + and ``Sequence[Range[T]]``. + Introduced utility sequence :class:`_postgresql.MultiRange` to allow better + interoperability of MULTIRANGE types. + + .. change:: + :tags: postgresql, usecase + + Differentiate between INT4 and INT8 ranges and multi-ranges types when + inferring the database type from a :class:`_postgresql.Range` or + :class:`_postgresql.MultiRange` instance, preferring INT4 if the values + fit into it. + + .. change:: + :tags: bug, typing + + Fixed the type signature for the :meth:`.PoolEvents.checkin` event to + indicate that the given :class:`.DBAPIConnection` argument may be ``None`` + in the case where the connection has been invalidated. + + .. change:: + :tags: bug, examples + + Fixed the performance example scripts in examples/performance to mostly + work with the Oracle database, by adding the :class:`.Identity` construct + to all the tables and allowing primary generation to occur on this backend. + A few of the "raw DBAPI" cases still are not compatible with Oracle. + + + .. change:: + :tags: bug, mssql + + Fixed an issue regarding the use of the :class:`.Uuid` datatype with the + :paramref:`.Uuid.as_uuid` parameter set to False, when using the pymssql + dialect. ORM-optimized INSERT statements (e.g. the "insertmanyvalues" + feature) would not correctly align primary key UUID values for bulk INSERT + statements, resulting in errors. Similar issues were fixed for the + PostgreSQL drivers as well. + + + .. change:: + :tags: bug, postgresql + + Fixed an issue regarding the use of the :class:`.Uuid` datatype with the + :paramref:`.Uuid.as_uuid` parameter set to False, when using PostgreSQL + dialects. ORM-optimized INSERT statements (e.g. 
the "insertmanyvalues" + feature) would not correctly align primary key UUID values for bulk INSERT + statements, resulting in errors. Similar issues were fixed for the + pymssql driver as well. .. changelog:: :version: 2.0.25 diff --git a/doc/build/changelog/unreleased_20/10777.rst b/doc/build/changelog/unreleased_20/10777.rst deleted file mode 100644 index cee5092e8d4..00000000000 --- a/doc/build/changelog/unreleased_20/10777.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: usecase, postgresql, reflection - :tickets: 10777 - - Added support for reflection of PostgreSQL CHECK constraints marked with - "NO INHERIT", setting the key ``no_inherit=True`` in the reflected data. - Pull request courtesy Ellis Valentiner. diff --git a/doc/build/changelog/unreleased_20/10843.rst b/doc/build/changelog/unreleased_20/10843.rst deleted file mode 100644 index 838f6a8beb1..00000000000 --- a/doc/build/changelog/unreleased_20/10843.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 10843 - - Fixed issues in :func:`_sql.case` where the logic for determining the - type of the expression could result in :class:`.NullType` if the last - element in the "whens" had no type, or in other cases where the type - could resolve to ``None``. The logic has been updated to scan all - given expressions so that the first non-null type is used, as well as - to always ensure a type is present. Pull request courtesy David Evans. diff --git a/doc/build/changelog/unreleased_20/10850.rst b/doc/build/changelog/unreleased_20/10850.rst deleted file mode 100644 index 6b6b323ce88..00000000000 --- a/doc/build/changelog/unreleased_20/10850.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, mysql - :tickets: 10850 - - Fixed issue where NULL/NOT NULL would not be properly reflected from a - MySQL column that also specified the VIRTUAL or STORED directives. Pull - request courtesy Georg Wicke-Arndt. diff --git a/doc/build/changelog/unreleased_20/10863.rst b/doc/build/changelog/unreleased_20/10863.rst deleted file mode 100644 index df722f8fe44..00000000000 --- a/doc/build/changelog/unreleased_20/10863.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: bug, regression, postgresql - :tickets: 10863 - - Fixed regression in the asyncpg dialect caused by :ticket:`10717` in - release 2.0.24 where the change that now attempts to gracefully close the - asyncpg connection before terminating would not fall back to - ``terminate()`` for other potential connection-related exceptions other - than a timeout error, not taking into account cases where the graceful - ``.close()`` attempt fails for other reasons such as connection errors. - diff --git a/doc/build/changelog/unreleased_20/10877.rst b/doc/build/changelog/unreleased_20/10877.rst deleted file mode 100644 index 90bf75f2493..00000000000 --- a/doc/build/changelog/unreleased_20/10877.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: oracle, bug, performance - :tickets: 10877 - - Changed the default arraysize of the Oracle dialects so that the value set - by the driver is used, that is 100 at the time of writing for both - cx_oracle and oracledb. Previously the value was set to 50 by default. The - setting of 50 could cause significant performance regressions compared to - when using cx_oracle/oracledb alone to fetch many hundreds of rows over - slower networks. 
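A minimal sketch of setting the fetch buffer explicitly rather than relying on the
driver default, assuming the ``arraysize`` dialect argument accepted by the
cx_Oracle / python-oracledb dialects (connection URL and size are illustrative)::

    from sqlalchemy import create_engine

    # request a larger fetch buffer when result sets of many hundreds of
    # rows are fetched over slower networks
    engine = create_engine(
        "oracle+oracledb://scott:tiger@dsn",
        arraysize=500,
    )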
diff --git a/doc/build/changelog/unreleased_20/10893.rst b/doc/build/changelog/unreleased_20/10893.rst deleted file mode 100644 index 63507f38d56..00000000000 --- a/doc/build/changelog/unreleased_20/10893.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, mysql - :tickets: 10893 - - Fixed issue in asyncio dialects asyncmy and aiomysql, where their - ``.close()`` method is apparently not a graceful close. replace with - non-standard ``.ensure_closed()`` method that's awaitable and move - ``.close()`` to the so-called "terminate" case. diff --git a/doc/build/changelog/unreleased_20/10896.rst b/doc/build/changelog/unreleased_20/10896.rst deleted file mode 100644 index 77224d974ca..00000000000 --- a/doc/build/changelog/unreleased_20/10896.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10896 - - Replaced the "loader depth is excessively deep" warning with a shorter - message added to the caching badge within SQL logging, for those statements - where the ORM disabled the cache due to a too-deep chain of loader options. - The condition which this warning highlights is difficult to resolve and is - generally just a limitation in the ORM's application of SQL caching. A - future feature may include the ability to tune the threshold where caching - is disabled, but for now the warning will no longer be a nuisance. diff --git a/doc/build/changelog/unreleased_20/10899.rst b/doc/build/changelog/unreleased_20/10899.rst deleted file mode 100644 index 692381323ee..00000000000 --- a/doc/build/changelog/unreleased_20/10899.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10899 - - Fixed issue where it was not possible to use a type (such as an enum) - within a :class:`_orm.Mapped` container type if that type were declared - locally within the class body. The scope of locals used for the eval now - includes that of the class body itself. In addition, the expression within - :class:`_orm.Mapped` may also refer to the class name itself, if used as a - string or with future annotations mode. diff --git a/doc/build/changelog/unreleased_20/10904.rst b/doc/build/changelog/unreleased_20/10904.rst deleted file mode 100644 index 3dc744dc185..00000000000 --- a/doc/build/changelog/unreleased_20/10904.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: usecase, postgresql - :tickets: 10904 - - Support the ``USING `` option for PostgreSQL ``CREATE TABLE`` to - specify the access method to use to store the contents for the new table. - Pull request courtesy Edgar Ramírez-Mondragón. - - .. seealso:: - - :ref:`postgresql_table_options` diff --git a/doc/build/changelog/unreleased_20/10920.rst b/doc/build/changelog/unreleased_20/10920.rst deleted file mode 100644 index e7bc7b8acdb..00000000000 --- a/doc/build/changelog/unreleased_20/10920.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: bug, examples - :tickets: 10920 - - Fixed regression in history_meta example where the use of - :meth:`_schema.MetaData.to_metadata` to make a copy of the history table - would also copy indexes (which is a good thing), but causing naming - conflicts indexes regardless of naming scheme used for those indexes. A - "_history" suffix is now added to these indexes in the same way as is - achieved for the table name. - diff --git a/doc/build/changelog/unreleased_20/10967.rst b/doc/build/changelog/unreleased_20/10967.rst deleted file mode 100644 index b0ed4d1bc06..00000000000 --- a/doc/build/changelog/unreleased_20/10967.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. 
change:: - :tags: bug, orm - :tickets: 10967 - - Fixed issue where using :meth:`_orm.Session.delete` along with the - :paramref:`_orm.Mapper.version_id_col` feature would fail to use the - correct version identifier in the case that an additional UPDATE were - emitted against the target object as a result of the use of - :paramref:`_orm.relationship.post_update` on the object. The issue is - similar to :ticket:`10800` just fixed in version 2.0.25 for the case of - updates alone. diff --git a/doc/build/changelog/unreleased_20/10990.rst b/doc/build/changelog/unreleased_20/10990.rst deleted file mode 100644 index ac887c83640..00000000000 --- a/doc/build/changelog/unreleased_20/10990.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10990 - - Fixed issue where an assertion within the implementation for - :func:`_orm.with_expression` would raise if a SQL expression that was not - cacheable were used; this was a 2.0 regression since 1.4. diff --git a/doc/build/changelog/unreleased_20/9736.rst b/doc/build/changelog/unreleased_20/9736.rst deleted file mode 100644 index deb1703d87b..00000000000 --- a/doc/build/changelog/unreleased_20/9736.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. change:: - :tags: postgresql, usecase - :tickets: 9736 - - Correctly type PostgreSQL RANGE and MULTIRANGE types as ``Range[T]`` - and ``Sequence[Range[T]]``. - Introduced utility sequence :class:`_postgresql.MultiRange` to allow better - interoperability of MULTIRANGE types. - -.. change:: - :tags: postgresql, usecase - - Differentiate between INT4 and INT8 ranges and multi-ranges types when - inferring the database type from a :class:`_postgresql.Range` or - :class:`_postgresql.MultiRange` instance, preferring INT4 if the values - fit into it. diff --git a/doc/build/changelog/unreleased_20/checkin_conn_none.rst b/doc/build/changelog/unreleased_20/checkin_conn_none.rst deleted file mode 100644 index 9aeed4784fd..00000000000 --- a/doc/build/changelog/unreleased_20/checkin_conn_none.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: bug, typing - - Fixed the type signature for the :meth:`.PoolEvents.checkin` event to - indicate that the given :class:`.DBAPIConnection` argument may be ``None`` - in the case where the connection has been invalidated. diff --git a/doc/build/changelog/unreleased_20/examples.rst b/doc/build/changelog/unreleased_20/examples.rst deleted file mode 100644 index 8ac2c567ed5..00000000000 --- a/doc/build/changelog/unreleased_20/examples.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, examples - - Fixed the performance example scripts in examples/performance to mostly - work with the Oracle database, by adding the :class:`.Identity` construct - to all the tables and allowing primary generation to occur on this backend. - A few of the "raw DBAPI" cases still are not compatible with Oracle. - diff --git a/doc/build/changelog/unreleased_20/uuid_imv_fixes.rst b/doc/build/changelog/unreleased_20/uuid_imv_fixes.rst deleted file mode 100644 index 0744c61e35b..00000000000 --- a/doc/build/changelog/unreleased_20/uuid_imv_fixes.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. change:: - :tags: bug, mssql - - Fixed an issue regarding the use of the :class:`.Uuid` datatype with the - :paramref:`.Uuid.as_uuid` parameter set to False, when using the pymssql - dialect. ORM-optimized INSERT statements (e.g. the "insertmanyvalues" - feature) would not correctly align primary key UUID values for bulk INSERT - statements, resulting in errors. 
Similar issues were fixed for the - PostgreSQL drivers as well. - - -.. change:: - :tags: bug, postgresql - - Fixed an issue regarding the use of the :class:`.Uuid` datatype with the - :paramref:`.Uuid.as_uuid` parameter set to False, when using PostgreSQL - dialects. ORM-optimized INSERT statements (e.g. the "insertmanyvalues" - feature) would not correctly align primary key UUID values for bulk INSERT - statements, resulting in errors. Similar issues were fixed for the - pymssql driver as well. diff --git a/doc/build/conf.py b/doc/build/conf.py index b469037158d..126c0a8374f 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.25" +release = "2.0.26" -release_date = "January 2, 2024" +release_date = "February 11, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From fc415193e03b92946f396afb49ebe4e63e5c7079 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 11 Feb 2024 10:15:17 -0500 Subject: [PATCH 107/544] Version 2.0.27 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 79631ed7621..bc8095b6bdd 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.27 + :include_notes_from: unreleased_20 + .. changelog:: :version: 2.0.26 :released: February 11, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 544410f69c5..7090a744c96 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.26" +__version__ = "2.0.27" def __go(lcls: Any) -> None: From c1e192b28513e2b736f8806069d1eb4bcf4a4420 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 13 Feb 2024 08:45:53 -0500 Subject: [PATCH 108/544] use correct exception for terminate catch + test Fixed regression caused by just-released fix for :ticket:`10863` where an invalid exception class were added to the "except" block, which does not get exercised unless such a catch actually happens. A mock-style test has been added to ensure this catch is exercised in unit tests. Fixes: #11005 Change-Id: I5a65403fb7bb35296ff44ae3cf6a336f8e0bda97 (cherry picked from commit 80b52dc522f9f03a86ca6c3a5766cd9c594804ec) --- doc/build/changelog/unreleased_20/11005.rst | 9 ++++++++ lib/sqlalchemy/dialects/postgresql/asyncpg.py | 6 ++++- test/dialect/postgresql/test_dialect.py | 22 +++++++++++++++++++ 3 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/11005.rst diff --git a/doc/build/changelog/unreleased_20/11005.rst b/doc/build/changelog/unreleased_20/11005.rst new file mode 100644 index 00000000000..7c9292e5c12 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11005.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, postgresql, regression + :tickets: 11005 + + Fixed regression caused by just-released fix for :ticket:`10863` where an + invalid exception class were added to the "except" block, which does not + get exercised unless such a catch actually happens. A mock-style test has + been added to ensure this catch is exercised in unit tests. 
+ diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index af097e283d3..4c60c3d832e 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -882,7 +882,11 @@ def terminate(self): # try to gracefully close; see #10717 # timeout added in asyncpg 0.14.0 December 2017 self.await_(self._connection.close(timeout=2)) - except (asyncio.TimeoutError, OSError, self.dbapi.PostgresError): + except ( + asyncio.TimeoutError, + OSError, + self.dbapi.asyncpg.PostgresError, + ): # in the case where we are recycling an old connection # that may have already been disconnected, close() will # fail with the above timeout. in this case, terminate diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py index 32a5a84ac8d..40718ee2dff 100644 --- a/test/dialect/postgresql/test_dialect.py +++ b/test/dialect/postgresql/test_dialect.py @@ -178,6 +178,28 @@ def test_range_frozen(self): with expect_raises(dataclasses.FrozenInstanceError): r1.lower = 8 # type: ignore + @testing.only_on("postgresql+asyncpg") + def test_asyncpg_terminate_catch(self): + """test for #11005""" + + with testing.db.connect() as connection: + emulated_dbapi_connection = connection.connection.dbapi_connection + + async def boom(): + raise OSError("boom") + + with mock.patch.object( + emulated_dbapi_connection, + "_connection", + mock.Mock(close=mock.Mock(return_value=boom())), + ) as mock_asyncpg_connection: + emulated_dbapi_connection.terminate() + + eq_( + mock_asyncpg_connection.mock_calls, + [mock.call.close(timeout=2), mock.call.terminate()], + ) + def test_version_parsing(self): def mock_conn(res): return mock.Mock( From 43f0d8408240d6c893131a6fd6a81be29149ad05 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 13 Feb 2024 10:02:14 -0500 Subject: [PATCH 109/544] - 2.0.27 --- doc/build/changelog/changelog_20.rst | 12 +++++++++++- doc/build/changelog/unreleased_20/11005.rst | 9 --------- doc/build/conf.py | 4 ++-- 3 files changed, 13 insertions(+), 12 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/11005.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index bc8095b6bdd..9c6d42fdd37 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,17 @@ .. changelog:: :version: 2.0.27 - :include_notes_from: unreleased_20 + :released: February 13, 2024 + + .. change:: + :tags: bug, postgresql, regression + :tickets: 11005 + + Fixed regression caused by just-released fix for :ticket:`10863` where an + invalid exception class were added to the "except" block, which does not + get exercised unless such a catch actually happens. A mock-style test has + been added to ensure this catch is exercised in unit tests. + .. changelog:: :version: 2.0.26 diff --git a/doc/build/changelog/unreleased_20/11005.rst b/doc/build/changelog/unreleased_20/11005.rst deleted file mode 100644 index 7c9292e5c12..00000000000 --- a/doc/build/changelog/unreleased_20/11005.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, postgresql, regression - :tickets: 11005 - - Fixed regression caused by just-released fix for :ticket:`10863` where an - invalid exception class were added to the "except" block, which does not - get exercised unless such a catch actually happens. A mock-style test has - been added to ensure this catch is exercised in unit tests. 
- diff --git a/doc/build/conf.py b/doc/build/conf.py index 126c0a8374f..599a76c69fa 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.26" +release = "2.0.27" -release_date = "February 11, 2024" +release_date = "February 13, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From dab2509c9ab67779316e44d96500cdbe5694fcb4 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 13 Feb 2024 10:05:54 -0500 Subject: [PATCH 110/544] Version 2.0.28 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 9c6d42fdd37..6d0dfaf8d4d 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.28 + :include_notes_from: unreleased_20 + .. changelog:: :version: 2.0.27 :released: February 13, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 7090a744c96..2cdd96e234c 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.27" +__version__ = "2.0.28" def __go(lcls: Any) -> None: From 3900210ddc0beeee9b491b42dc6ec1f6dc6c555c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 14 Feb 2024 09:29:19 -0500 Subject: [PATCH 111/544] ensure secondary cols not excluded from adaption Fixed regression caused by :ticket:`9779` where using the "secondary" table in a relationship ``and_()`` expression would fail to be aliased to match how the "secondary" table normally renders within a :meth:`_sql.Select.join` expression, leading to an invalid query. Fixes: #11010 Change-Id: I535ce8b14f6a779c26b6b50b796ce64e57d7ee3d (cherry picked from commit 8844cb0b4148ff52c0377edf01d6e88f3bbe1ab0) --- doc/build/changelog/unreleased_20/11010.rst | 8 +++ lib/sqlalchemy/orm/relationships.py | 59 ++++++++++++++------- test/orm/test_relationship_criteria.py | 22 ++++++++ 3 files changed, 70 insertions(+), 19 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11010.rst diff --git a/doc/build/changelog/unreleased_20/11010.rst b/doc/build/changelog/unreleased_20/11010.rst new file mode 100644 index 00000000000..bd24772dd6c --- /dev/null +++ b/doc/build/changelog/unreleased_20/11010.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, orm, regression + :tickets: 11010 + + Fixed regression caused by :ticket:`9779` where using the "secondary" table + in a relationship ``and_()`` expression would fail to be aliased to match + how the "secondary" table normally renders within a + :meth:`_sql.Select.join` expression, leading to an invalid query. 
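The query shape addressed by the fix above is sketched here; the mapping is assumed
for illustration and mirrors the names used in the test added to
``test_relationship_criteria.py`` further below::

    from typing import List

    from sqlalchemy import Column, ForeignKey, Table, select
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        mapped_column,
        relationship,
    )


    class Base(DeclarativeBase):
        pass


    # illustrative many-to-many mapping with a "secondary" table
    order_items = Table(
        "order_items",
        Base.metadata,
        Column("order_id", ForeignKey("orders.id"), primary_key=True),
        Column("item_id", ForeignKey("items.id"), primary_key=True),
    )


    class Item(Base):
        __tablename__ = "items"
        id: Mapped[int] = mapped_column(primary_key=True)
        description: Mapped[str]


    class Order(Base):
        __tablename__ = "orders"
        id: Mapped[int] = mapped_column(primary_key=True)
        items: Mapped[List[Item]] = relationship(secondary=order_items)


    # columns of the "secondary" table used inside the relationship's and_()
    # criteria; the fix ensures they are aliased consistently with how the
    # secondary table itself renders in the JOIN
    stmt = select(Order).join(
        Order.items.and_(
            order_items.c.item_id > 1,
            Item.description != "description",
        )
    )
    print(stmt)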
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index 11ea5911279..afff24c8ccc 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -19,6 +19,7 @@ from collections import abc import dataclasses import inspect as _py_inspect +import itertools import re import typing from typing import Any @@ -26,6 +27,7 @@ from typing import cast from typing import Collection from typing import Dict +from typing import FrozenSet from typing import Generic from typing import Iterable from typing import Iterator @@ -3235,6 +3237,15 @@ def _gather_columns_with_annotation( if annotation_set.issubset(col._annotations) } + @util.memoized_property + def _secondary_lineage_set(self) -> FrozenSet[ColumnElement[Any]]: + if self.secondary is not None: + return frozenset( + itertools.chain(*[c.proxy_set for c in self.secondary.c]) + ) + else: + return util.EMPTY_SET + def join_targets( self, source_selectable: Optional[FromClause], @@ -3285,23 +3296,25 @@ def join_targets( if extra_criteria: - def mark_unrelated_columns_as_ok_to_adapt( + def mark_exclude_cols( elem: SupportsAnnotations, annotations: _AnnotationDict ) -> SupportsAnnotations: - """note unrelated columns in the "extra criteria" as OK - to adapt, even though they are not part of our "local" - or "remote" side. + """note unrelated columns in the "extra criteria" as either + should be adapted or not adapted, even though they are not + part of our "local" or "remote" side. - see #9779 for this case + see #9779 for this case, as well as #11010 for a follow up """ parentmapper_for_element = elem._annotations.get( "parentmapper", None ) + if ( parentmapper_for_element is not self.prop.parent and parentmapper_for_element is not self.prop.mapper + and elem not in self._secondary_lineage_set ): return _safe_annotate(elem, annotations) else: @@ -3310,8 +3323,8 @@ def mark_unrelated_columns_as_ok_to_adapt( extra_criteria = tuple( _deep_annotate( elem, - {"ok_to_adapt_in_join_condition": True}, - annotate_callable=mark_unrelated_columns_as_ok_to_adapt, + {"should_not_adapt": True}, + annotate_callable=mark_exclude_cols, ) for elem in extra_criteria ) @@ -3325,14 +3338,16 @@ def mark_unrelated_columns_as_ok_to_adapt( if secondary is not None: secondary = secondary._anonymous_fromclause(flat=True) primary_aliasizer = ClauseAdapter( - secondary, exclude_fn=_ColInAnnotations("local") + secondary, + exclude_fn=_local_col_exclude, ) secondary_aliasizer = ClauseAdapter( dest_selectable, equivalents=self.child_equivalents ).chain(primary_aliasizer) if source_selectable is not None: primary_aliasizer = ClauseAdapter( - secondary, exclude_fn=_ColInAnnotations("local") + secondary, + exclude_fn=_local_col_exclude, ).chain( ClauseAdapter( source_selectable, @@ -3344,14 +3359,14 @@ def mark_unrelated_columns_as_ok_to_adapt( else: primary_aliasizer = ClauseAdapter( dest_selectable, - exclude_fn=_ColInAnnotations("local"), + exclude_fn=_local_col_exclude, equivalents=self.child_equivalents, ) if source_selectable is not None: primary_aliasizer.chain( ClauseAdapter( source_selectable, - exclude_fn=_ColInAnnotations("remote"), + exclude_fn=_remote_col_exclude, equivalents=self.parent_equivalents, ) ) @@ -3430,18 +3445,24 @@ def col_to_bind( class _ColInAnnotations: - """Serializable object that tests for a name in c._annotations.""" + """Serializable object that tests for names in c._annotations. - __slots__ = ("name",) + TODO: does this need to be serializable anymore? 
can we find what the + use case was for that? - def __init__(self, name: str): - self.name = name + """ + + __slots__ = ("names",) + + def __init__(self, *names: str): + self.names = frozenset(names) def __call__(self, c: ClauseElement) -> bool: - return ( - self.name in c._annotations - or "ok_to_adapt_in_join_condition" in c._annotations - ) + return bool(self.names.intersection(c._annotations)) + + +_local_col_exclude = _ColInAnnotations("local", "should_not_adapt") +_remote_col_exclude = _ColInAnnotations("remote", "should_not_adapt") class Relationship( # type: ignore diff --git a/test/orm/test_relationship_criteria.py b/test/orm/test_relationship_criteria.py index 69279f60044..4add92c1e72 100644 --- a/test/orm/test_relationship_criteria.py +++ b/test/orm/test_relationship_criteria.py @@ -2409,6 +2409,28 @@ def test_select_joinm2m_aliased_local_criteria(self, order_item_fixture): "AND items_1.description != :description_1", ) + def test_use_secondary_table_in_criteria(self, order_item_fixture): + """test #11010 , regression caused by #9779""" + + Order, Item = order_item_fixture + order_items = self.tables.order_items + + stmt = select(Order).join( + Order.items.and_( + order_items.c.item_id > 1, Item.description != "description" + ) + ) + + self.assert_compile( + stmt, + "SELECT orders.id, orders.user_id, orders.address_id, " + "orders.description, orders.isopen FROM orders JOIN order_items " + "AS order_items_1 ON orders.id = order_items_1.order_id " + "JOIN items ON items.id = order_items_1.item_id " + "AND order_items_1.item_id > :item_id_1 " + "AND items.description != :description_1", + ) + class SubqueryCriteriaTest(fixtures.DeclarativeMappedTest): """test #10223""" From df582d5221931ade607b96e74ad746a528ecb994 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 14 Feb 2024 20:55:57 +0100 Subject: [PATCH 112/544] clarify example in orm tutorial Change-Id: Ib6f9a7ce0beacda43ccd6d3c7750778ed3333b38 (cherry picked from commit 1e099d1a855e492389c02559d2059d93e5a5a091) --- doc/build/tutorial/orm_data_manipulation.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/build/tutorial/orm_data_manipulation.rst b/doc/build/tutorial/orm_data_manipulation.rst index 73fef50aba3..b4beae0e070 100644 --- a/doc/build/tutorial/orm_data_manipulation.rst +++ b/doc/build/tutorial/orm_data_manipulation.rst @@ -533,6 +533,7 @@ a context manager as well, accomplishes the following things: are no longer associated with any database transaction in which to be refreshed:: + # note that 'squidward.name' was just expired previously, so its value is unloaded >>> squidward.name Traceback (most recent call last): ... From d08322c0189a07a1abcbc119b797cdee0ca94d07 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 14 Feb 2024 21:10:20 -0500 Subject: [PATCH 113/544] raise for asyncio-incompatible pool classes An error is raised if a :class:`.QueuePool` or other non-asyncio pool class is passed to :func:`_asyncio.create_async_engine`. This engine only accepts asyncio-compatible pool classes including :class:`.AsyncAdaptedQueuePool`. Other pool classes such as :class:`.NullPool` are compatible with both synchronous and asynchronous engines as they do not perform any locking. 
Fixes: #8771 Change-Id: I5843ccea7d824488492d1a9d46207b9f05330ae3 (cherry picked from commit c449505f651ebf4b73aaa7d7aec99b038ea34cb6) --- doc/build/changelog/unreleased_20/8771.rst | 15 +++++ doc/build/core/pooling.rst | 12 ++++ doc/build/errors.rst | 22 +++++++ lib/sqlalchemy/engine/create.py | 11 ++++ lib/sqlalchemy/pool/impl.py | 38 ++++++++++- lib/sqlalchemy/testing/engines.py | 7 +- test/engine/test_execute.py | 11 +++- test/engine/test_transaction.py | 13 +++- test/ext/asyncio/test_engine_py3k.py | 76 ++++++++++++++++++++++ 9 files changed, 199 insertions(+), 6 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/8771.rst diff --git a/doc/build/changelog/unreleased_20/8771.rst b/doc/build/changelog/unreleased_20/8771.rst new file mode 100644 index 00000000000..9f501fcb8d9 --- /dev/null +++ b/doc/build/changelog/unreleased_20/8771.rst @@ -0,0 +1,15 @@ +.. change:: + :tags: bug, asyncio + :tickets: 8771 + + An error is raised if a :class:`.QueuePool` or other non-asyncio pool class + is passed to :func:`_asyncio.create_async_engine`. This engine only + accepts asyncio-compatible pool classes including + :class:`.AsyncAdaptedQueuePool`. Other pool classes such as + :class:`.NullPool` are compatible with both synchronous and asynchronous + engines as they do not perform any locking. + + .. seealso:: + + :ref:`pool_api` + diff --git a/doc/build/core/pooling.rst b/doc/build/core/pooling.rst index 78bbdcb1af8..f3ea6e86238 100644 --- a/doc/build/core/pooling.rst +++ b/doc/build/core/pooling.rst @@ -50,6 +50,13 @@ queued up - the pool would only grow to that size if the application actually used five connections concurrently, in which case the usage of a small pool is an entirely appropriate default behavior. +.. note:: The :class:`.QueuePool` class is **not compatible with asyncio**. + When using :class:`_asyncio.create_async_engine` to create an instance of + :class:`.AsyncEngine`, the :class:`_pool.AsyncAdaptedQueuePool` class, + which makes use of an asyncio-compatible queue implementation, is used + instead. + + .. _pool_switching: Switching Pool Implementations @@ -713,6 +720,8 @@ like in the following example:: my_pool = create_pool_from_url("mysql+mysqldb://", poolclass=NullPool) +.. _pool_api: + API Documentation - Available Pool Implementations -------------------------------------------------- @@ -722,6 +731,9 @@ API Documentation - Available Pool Implementations .. autoclass:: sqlalchemy.pool.QueuePool :members: +.. autoclass:: sqlalchemy.pool.AsyncAdaptedQueuePool + :members: + .. autoclass:: SingletonThreadPool :members: diff --git a/doc/build/errors.rst b/doc/build/errors.rst index 55ac40ae5f6..d6645123154 100644 --- a/doc/build/errors.rst +++ b/doc/build/errors.rst @@ -188,6 +188,28 @@ sooner. :ref:`connections_toplevel` +.. _error_pcls: + +Pool class cannot be used with asyncio engine (or vice versa) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The :class:`_pool.QueuePool` pool class uses a ``thread.Lock`` object internally +and is not compatible with asyncio. If using the :func:`_asyncio.create_async_engine` +function to create an :class:`.AsyncEngine`, the appropriate queue pool class +is :class:`_pool.AsyncAdaptedQueuePool`, which is used automatically and does +not need to be specified. + +In addition to :class:`_pool.AsyncAdaptedQueuePool`, the :class:`_pool.NullPool` +and :class:`_pool.StaticPool` pool classes do not use locks and are also +suitable for use with async engines. 
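A short sketch of selecting a pool class for an async engine, assuming an
illustrative asyncpg URL; ``AsyncAdaptedQueuePool`` remains the default when no
pool class is given::

    from sqlalchemy.ext.asyncio import create_async_engine
    from sqlalchemy.pool import NullPool

    # lock-free pools such as NullPool remain usable with async engines;
    # passing QueuePool here now raises ArgumentError (error code "pcls")
    engine = create_async_engine(
        "postgresql+asyncpg://scott:tiger@localhost/test",
        poolclass=NullPool,
    )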
+ +This error is also raised in reverse in the unlikely case that the +:class:`_pool.AsyncAdaptedQueuePool` pool class is indicated explicitly with +the :func:`_sa.create_engine` function. + +.. seealso:: + + :ref:`pooling_toplevel` .. _error_8s2b: diff --git a/lib/sqlalchemy/engine/create.py b/lib/sqlalchemy/engine/create.py index fb3c3b79c72..74a3cf801e3 100644 --- a/lib/sqlalchemy/engine/create.py +++ b/lib/sqlalchemy/engine/create.py @@ -663,6 +663,17 @@ def connect( else: pool._dialect = dialect + if ( + hasattr(pool, "_is_asyncio") + and pool._is_asyncio is not dialect.is_async + ): + raise exc.ArgumentError( + f"Pool class {pool.__class__.__name__} cannot be " + f"used with {'non-' if not dialect.is_async else ''}" + "asyncio engine", + code="pcls", + ) + # create engine. if not pop_kwarg("future", True): raise exc.ArgumentError( diff --git a/lib/sqlalchemy/pool/impl.py b/lib/sqlalchemy/pool/impl.py index 18f9414ca64..157455cbe25 100644 --- a/lib/sqlalchemy/pool/impl.py +++ b/lib/sqlalchemy/pool/impl.py @@ -47,8 +47,18 @@ class QueuePool(Pool): that imposes a limit on the number of open connections. :class:`.QueuePool` is the default pooling implementation used for - all :class:`_engine.Engine` objects, unless the SQLite dialect is - in use with a ``:memory:`` database. + all :class:`_engine.Engine` objects other than SQLite with a ``:memory:`` + database. + + The :class:`.QueuePool` class **is not compatible** with asyncio and + :func:`_asyncio.create_async_engine`. The + :class:`.AsyncAdaptedQueuePool` class is used automatically when + using :func:`_asyncio.create_async_engine`, if no other kind of pool + is specified. + + .. seealso:: + + :class:`.AsyncAdaptedQueuePool` """ @@ -123,6 +133,7 @@ def __init__( :class:`_pool.Pool` constructor. """ + Pool.__init__(self, creator, **kw) self._pool = self._queue_class(pool_size, use_lifo=use_lifo) self._overflow = 0 - pool_size @@ -248,6 +259,18 @@ def checkedout(self) -> int: class AsyncAdaptedQueuePool(QueuePool): + """An asyncio-compatible version of :class:`.QueuePool`. + + This pool is used by default when using :class:`.AsyncEngine` engines that + were generated from :func:`_asyncio.create_async_engine`. It uses an + asyncio-compatible queue implementation that does not use + ``threading.Lock``. + + The arguments and operation of :class:`.AsyncAdaptedQueuePool` are + otherwise identical to that of :class:`.QueuePool`. + + """ + _is_asyncio = True # type: ignore[assignment] _queue_class: Type[sqla_queue.QueueCommon[ConnectionPoolEntry]] = ( sqla_queue.AsyncAdaptedQueue @@ -270,6 +293,9 @@ class NullPool(Pool): invalidation are not supported by this Pool implementation, since no connections are held persistently. + The :class:`.NullPool` class **is compatible** with asyncio and + :func:`_asyncio.create_async_engine`. + """ def status(self) -> str: @@ -317,6 +343,9 @@ class SingletonThreadPool(Pool): scenarios using a SQLite ``:memory:`` database and is not recommended for production use. + The :class:`.SingletonThreadPool` class **is not compatible** with asyncio + and :func:`_asyncio.create_async_engine`. + Options are the same as those of :class:`_pool.Pool`, as well as: @@ -425,6 +454,8 @@ class StaticPool(Pool): invalidation (which is also used to support auto-reconnect) are only partially supported right now and may not yield good results. + The :class:`.StaticPool` class **is compatible** with asyncio and + :func:`_asyncio.create_async_engine`. """ @@ -489,6 +520,9 @@ class AssertionPool(Pool): at a time. 
Useful for debugging code that is using more connections than desired. + The :class:`.AssertionPool` class **is compatible** with asyncio and + :func:`_asyncio.create_async_engine`. + """ _conn: Optional[ConnectionPoolEntry] diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py index e055d99d26f..7cae807eb43 100644 --- a/lib/sqlalchemy/testing/engines.py +++ b/lib/sqlalchemy/testing/engines.py @@ -368,7 +368,12 @@ def do_begin(conn): True # enable event blocks, helps with profiling ) - if isinstance(engine.pool, pool.QueuePool): + if ( + isinstance(engine.pool, pool.QueuePool) + and "pool" not in options + and "pool_timeout" not in options + and "max_overflow" not in options + ): engine.pool._timeout = 0 engine.pool._max_overflow = 0 if use_reaper: diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index 4618dfff8d5..122c08461d1 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -34,6 +34,7 @@ from sqlalchemy.engine import default from sqlalchemy.engine.base import Connection from sqlalchemy.engine.base import Engine +from sqlalchemy.pool import AsyncAdaptedQueuePool from sqlalchemy.pool import NullPool from sqlalchemy.pool import QueuePool from sqlalchemy.sql import column @@ -2411,7 +2412,15 @@ def test_dispose_event(self, testing_engine): @testing.combinations(True, False, argnames="close") def test_close_parameter(self, testing_engine, close): eng = testing_engine( - options=dict(pool_size=1, max_overflow=0, poolclass=QueuePool) + options=dict( + pool_size=1, + max_overflow=0, + poolclass=( + QueuePool + if not testing.db.dialect.is_async + else AsyncAdaptedQueuePool + ), + ) ) conn = eng.connect() diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index a70e8e05d0f..68650d6d2bc 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -12,6 +12,8 @@ from sqlalchemy.engine import characteristics from sqlalchemy.engine import default from sqlalchemy.engine import url +from sqlalchemy.pool import AsyncAdaptedQueuePool +from sqlalchemy.pool import QueuePool from sqlalchemy.testing import assert_raises_message from sqlalchemy.testing import eq_ from sqlalchemy.testing import expect_warnings @@ -1345,10 +1347,17 @@ def test_connection_invalidated(self): eq_(c2.get_isolation_level(), self._default_isolation_level()) def test_per_connection(self): - from sqlalchemy.pool import QueuePool eng = testing_engine( - options=dict(poolclass=QueuePool, pool_size=2, max_overflow=0) + options=dict( + poolclass=( + QueuePool + if not testing.db.dialect.is_async + else AsyncAdaptedQueuePool + ), + pool_size=2, + max_overflow=0, + ) ) c1 = eng.connect() diff --git a/test/ext/asyncio/test_engine_py3k.py b/test/ext/asyncio/test_engine_py3k.py index 59c623127bc..c12363f4d0b 100644 --- a/test/ext/asyncio/test_engine_py3k.py +++ b/test/ext/asyncio/test_engine_py3k.py @@ -3,6 +3,7 @@ import inspect as stdlib_inspect from unittest.mock import patch +from sqlalchemy import AssertionPool from sqlalchemy import Column from sqlalchemy import create_engine from sqlalchemy import delete @@ -11,7 +12,11 @@ from sqlalchemy import func from sqlalchemy import inspect from sqlalchemy import Integer +from sqlalchemy import NullPool +from sqlalchemy import QueuePool from sqlalchemy import select +from sqlalchemy import SingletonThreadPool +from sqlalchemy import StaticPool from sqlalchemy import String from sqlalchemy import Table from sqlalchemy import testing @@ -520,6 +525,77 @@ async def 
test_isolation_level(self, async_connection): eq_(isolation_level, "SERIALIZABLE") + @testing.combinations( + ( + AsyncAdaptedQueuePool, + True, + ), + ( + QueuePool, + False, + ), + (NullPool, True), + (SingletonThreadPool, False), + (StaticPool, True), + (AssertionPool, True), + argnames="pool_cls,should_work", + ) + @testing.variation("instantiate", [True, False]) + @async_test + async def test_pool_classes( + self, async_testing_engine, pool_cls, instantiate, should_work + ): + """test #8771""" + if instantiate: + if pool_cls in (QueuePool, AsyncAdaptedQueuePool): + pool = pool_cls(creator=testing.db.pool._creator, timeout=10) + else: + pool = pool_cls( + creator=testing.db.pool._creator, + ) + + options = {"pool": pool} + else: + if pool_cls in (QueuePool, AsyncAdaptedQueuePool): + options = {"poolclass": pool_cls, "pool_timeout": 10} + else: + options = {"poolclass": pool_cls} + + if not should_work: + with expect_raises_message( + exc.ArgumentError, + f"Pool class {pool_cls.__name__} " + "cannot be used with asyncio engine", + ): + async_testing_engine(options=options) + return + + e = async_testing_engine(options=options) + + if pool_cls is AssertionPool: + async with e.connect() as conn: + result = await conn.scalar(select(1)) + eq_(result, 1) + return + + async def go(): + async with e.connect() as conn: + result = await conn.scalar(select(1)) + eq_(result, 1) + return result + + eq_(await asyncio.gather(*[go() for i in range(10)]), [1] * 10) + + def test_cant_use_async_pool_w_create_engine(self): + """supplemental test for #8771""" + + with expect_raises_message( + exc.ArgumentError, + "Pool class AsyncAdaptedQueuePool " + "cannot be used with non-asyncio engine", + ): + create_engine("sqlite://", poolclass=AsyncAdaptedQueuePool) + @testing.requires.queue_pool @async_test async def test_dispose(self, async_engine): From 9f9f0c0f84918aa106655e289fb14c808039f338 Mon Sep 17 00:00:00 2001 From: Michael Habiger <115743596+hab6@users.noreply.github.com> Date: Fri, 16 Feb 2024 11:16:44 -0600 Subject: [PATCH 114/544] Update Actian entry in External Dialects table (#11014) (cherry picked from commit edc00d9e96661328621aea3f3849b493a365bbbe) --- doc/build/dialects/index.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 70ac258e401..f3d8f0ade2d 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -63,7 +63,7 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Database | Dialect | +================================================+=======================================+ -| Actian Avalanche, Vector, Actian X, and Ingres | sqlalchemy-ingres_ | +| Actian Data Platform, Vector, Actian X, Ingres | sqlalchemy-ingres_ | +------------------------------------------------+---------------------------------------+ | Amazon Athena | pyathena_ | +------------------------------------------------+---------------------------------------+ @@ -124,7 +124,7 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _openGauss-sqlalchemy: https://gitee.com/opengauss/openGauss-sqlalchemy .. _rockset-sqlalchemy: https://pypi.org/project/rockset-sqlalchemy -.. _sqlalchemy-ingres: https://github.com/clach04/ingres_sa_dialect +.. _sqlalchemy-ingres: https://github.com/ActianCorp/sqlalchemy-ingres .. _nzalchemy: https://pypi.org/project/nzalchemy/ .. 
_ibm-db-sa: https://pypi.org/project/ibm-db-sa/ .. _PyHive: https://github.com/dropbox/PyHive#sqlalchemy From 51011db22b0d51b5560d55b97671631cadc10265 Mon Sep 17 00:00:00 2001 From: Zhong Zheng Date: Fri, 16 Feb 2024 12:20:59 -0500 Subject: [PATCH 115/544] Fix mysql dialect text docstring, length is interpreted as byte size ### Description The `Text` and its variant types in MySQL are bytes size limited, not character length, so fixing the doctoring where the upper limit uses the `characters` as the unit instead of `bytes` https://dev.mysql.com/doc/refman/5.7/en/storage-requirements.html https://dev.mysql.com/doc/refman/8.0/en/storage-requirements.html Screenshot 2024-02-15 at 17 27 59 ### Checklist This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #11018 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11018 Pull-request-sha: 13fa52917efea9a229c7abf19a3be40e24a79cb9 Change-Id: Iea903a6dc4b52ee4b7b5d2d64256c69abbd1f8aa (cherry picked from commit 1c58fe53b6fd069cbb82955ddaf9eb5405076146) --- lib/sqlalchemy/dialects/mysql/types.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/lib/sqlalchemy/dialects/mysql/types.py b/lib/sqlalchemy/dialects/mysql/types.py index f563ead357f..734f6ae3723 100644 --- a/lib/sqlalchemy/dialects/mysql/types.py +++ b/lib/sqlalchemy/dialects/mysql/types.py @@ -499,7 +499,7 @@ def __init__(self, display_width=None): class TEXT(_StringType, sqltypes.TEXT): - """MySQL TEXT type, for text up to 2^16 characters.""" + """MySQL TEXT type, for character storage encoded up to 2^16 bytes.""" __visit_name__ = "TEXT" @@ -508,7 +508,7 @@ def __init__(self, length=None, **kw): :param length: Optional, if provided the server may optimize storage by substituting the smallest TEXT type sufficient to store - ``length`` characters. + ``length`` bytes of characters. :param charset: Optional, a column-level character set for this string value. Takes precedence to 'ascii' or 'unicode' short-hand. 
@@ -535,7 +535,7 @@ def __init__(self, length=None, **kw): class TINYTEXT(_StringType): - """MySQL TINYTEXT type, for text up to 2^8 characters.""" + """MySQL TINYTEXT type, for character storage encoded up to 2^8 bytes.""" __visit_name__ = "TINYTEXT" @@ -567,7 +567,8 @@ def __init__(self, **kwargs): class MEDIUMTEXT(_StringType): - """MySQL MEDIUMTEXT type, for text up to 2^24 characters.""" + """MySQL MEDIUMTEXT type, for character storage encoded up + to 2^24 bytes.""" __visit_name__ = "MEDIUMTEXT" @@ -599,7 +600,7 @@ def __init__(self, **kwargs): class LONGTEXT(_StringType): - """MySQL LONGTEXT type, for text up to 2^32 characters.""" + """MySQL LONGTEXT type, for character storage encoded up to 2^32 bytes.""" __visit_name__ = "LONGTEXT" @@ -683,7 +684,7 @@ def __init__(self, length=None, **kwargs): super().__init__(length=length, **kwargs) @classmethod - def _adapt_string_for_cast(self, type_): + def _adapt_string_for_cast(cls, type_): # copy the given string type into a CHAR # for the purposes of rendering a CAST expression type_ = sqltypes.to_instance(type_) From 9a4da3c93084bea57395539e4aa7d04d4e662b4a Mon Sep 17 00:00:00 2001 From: James Braza Date: Wed, 21 Feb 2024 14:52:14 -0500 Subject: [PATCH 116/544] Documenting multiprocessing and events (#10831) * Added documentation on multiprocessing support for event system * Incorporating zzzeek's PR comments into docs as tip section * Removed tip and changed section title to 'Events and Multiprocessing' * Adopting zzzeek's PR comment suggestions * Tweaked wording to be more concise (cherry picked from commit e1e95a6a34ce201840a22c73b7f7dce358fe71d1) --- doc/build/core/event.rst | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/doc/build/core/event.rst b/doc/build/core/event.rst index 427da8fb15b..e07329f4e75 100644 --- a/doc/build/core/event.rst +++ b/doc/build/core/event.rst @@ -140,6 +140,33 @@ this value can be supported:: # it to use the return value listen(UserContact.phone, "set", validate_phone, retval=True) +Events and Multiprocessing +-------------------------- + +SQLAlchemy's event hooks are implemented with Python functions and objects, +so events propagate via Python function calls. +Python multiprocessing follows the +same way we think about OS multiprocessing, +such as a parent process forking a child process, +thus we can describe the SQLAlchemy event system's behavior using the same model. + +Event hooks registered in a parent process +will be present in new child processes +that are forked from that parent after the hooks have been registered, +since the child process starts with +a copy of all existing Python structures from the parent when spawned. +Child processes that already exist before the hooks are registered +will not receive those new event hooks, +as changes made to Python structures in a parent process +do not propagate to child processes. + +For the events themselves, these are Python function calls, +which do not have any ability to propagate between processes. +SQLAlchemy's event system does not implement any inter-process communication. +It is possible to implement event hooks +that use Python inter-process messaging within them, +however this would need to be implemented by the user. + Event Reference --------------- From cc6e958d39400f098df079e7320ccdeb5362a0cb Mon Sep 17 00:00:00 2001 From: layday Date: Wed, 21 Feb 2024 22:13:13 +0200 Subject: [PATCH 117/544] Replace non-standard Python plat env marker (#11035) Fixes #11034. 
(cherry picked from commit 78d0a24f98e3a7f3ea76acf5e47ace848adc2e2b) Change-Id: I43e5229612e6a881e547db08780f3223dfe1c5c3 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 3cdf49301f7..780c9bf689a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,7 @@ build-backend = "setuptools.build_meta" requires = [ "setuptools>=47", - "cython>=0.29.24; python_implementation == 'CPython'", # Skip cython when using pypy + "cython>=0.29.24; platform_python_implementation == 'CPython'", # Skip cython when using pypy ] [tool.black] From 7f377fbc8670f3a8988785a2522ef153d760f0a1 Mon Sep 17 00:00:00 2001 From: Sfurti-yb <78196231+Sfurti-yb@users.noreply.github.com> Date: Fri, 23 Feb 2024 01:07:24 +0530 Subject: [PATCH 118/544] Added YugabyteDB dialect to the documentation (#11047) * Added YugabyteDB dialect to the documentation * Update doc/build/dialects/index.rst Co-authored-by: Federico Caselli --------- Co-authored-by: Federico Caselli (cherry picked from commit 5c88498ba8ba5f7c524d5aca130e5a59a8940766) --- doc/build/dialects/index.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index f3d8f0ade2d..52690f640a9 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -119,6 +119,8 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Teradata Vantage | teradatasqlalchemy_ | +------------------------------------------------+---------------------------------------+ +| YugabyteDB | sqlalchemy-yugabytedb_ | ++------------------------------------------------+---------------------------------------+ .. [1] Supports version 1.3.x only at the moment. @@ -150,3 +152,4 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _sqlalchemy-sybase: https://pypi.org/project/sqlalchemy-sybase/ .. _firebolt-sqlalchemy: https://pypi.org/project/firebolt-sqlalchemy/ .. _pyathena: https://github.com/laughingman7743/PyAthena/ +.. 
_sqlalchemy-yugabytedb: https://pypi.org/project/sqlalchemy-yugabytedb/ From ac7fb1d20019362991bccd500296872c86ff4d09 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 26 Feb 2024 21:11:20 +0100 Subject: [PATCH 119/544] fix issue in asincio doc example Change-Id: Idd55c07a57381450ab5c9db99854b6a1668f0382 (cherry picked from commit 191b67b948291a6d1b6e6c2a4a17b181d43ecb56) --- doc/build/orm/extensions/asyncio.rst | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/build/orm/extensions/asyncio.rst b/doc/build/orm/extensions/asyncio.rst index 0815da29aff..6649a981037 100644 --- a/doc/build/orm/extensions/asyncio.rst +++ b/doc/build/orm/extensions/asyncio.rst @@ -201,9 +201,9 @@ configuration:: async with session.begin(): session.add_all( [ - A(bs=[B(), B()], data="a1"), + A(bs=[B(data="b1"), B(data="b2")], data="a1"), A(bs=[], data="a2"), - A(bs=[B(), B()], data="a3"), + A(bs=[B(data="b3"), B(data="b4")], data="a3"), ] ) @@ -216,11 +216,11 @@ configuration:: result = await session.execute(stmt) - for a1 in result.scalars(): - print(a1) - print(f"created at: {a1.create_date}") - for b1 in a1.bs: - print(b1) + for a in result.scalars(): + print(a) + print(f"created at: {a.create_date}") + for b in a.bs: + print(b, b.data) result = await session.execute(select(A).order_by(A.id).limit(1)) @@ -237,7 +237,7 @@ configuration:: # alternatively, AsyncAttrs may be used to access any attribute # as an awaitable (new in 2.0.13) for b1 in await a1.awaitable_attrs.bs: - print(b1) + print(b1, b1.data) async def async_main() -> None: From f17a3cdb63f24ecee5b8af52554f33446568ac36 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 26 Feb 2024 22:16:38 +0100 Subject: [PATCH 120/544] fix typos in docs Change-Id: Iaba3c5979af626055acb0068cc8aac0c7334b0e0 (cherry picked from commit 8a171bb3391e916d19ddf853dc2f9f5e5756c16e) --- doc/build/orm/mapping_styles.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/orm/mapping_styles.rst b/doc/build/orm/mapping_styles.rst index 4e3e3183797..8a4b8aece84 100644 --- a/doc/build/orm/mapping_styles.rst +++ b/doc/build/orm/mapping_styles.rst @@ -459,7 +459,7 @@ below. attributes (``x`` and ``y`` in this case) might change. Other forms of the above pattern include Python standard library - :ref:`cached_property ` + `cached_property `_ decorator (which is cached, and not re-computed each time), as well as SQLAlchemy's :class:`.hybrid_property` decorator which allows for attributes that can work for SQL querying as well. From 388afa73d94b63fa2b0a933171f060a94347d5cb Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 29 Feb 2024 11:07:22 -0500 Subject: [PATCH 121/544] add 1.4 changelog for #10365 Change-Id: I3359274337f214132f35d9c4b722c97685b63d72 (cherry picked from commit 9bcc4da735891d09a4c850c5f29b3abeef13ce27) --- doc/build/changelog/unreleased_14/10365.rst | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 doc/build/changelog/unreleased_14/10365.rst diff --git a/doc/build/changelog/unreleased_14/10365.rst b/doc/build/changelog/unreleased_14/10365.rst new file mode 100644 index 00000000000..5eb4f440657 --- /dev/null +++ b/doc/build/changelog/unreleased_14/10365.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, orm + :tickets: 10365 + + Fixed bug where ORM :func:`_orm.with_loader_criteria` would not apply + itself to a :meth:`_sql.Select.join` where the ON clause were given as a + plain SQL comparison, rather than as a relationship target or similar. 
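The join style covered by this fix is sketched below; the ``A``/``B`` mapping and
the ``flag`` column are assumed purely for illustration::

    from sqlalchemy import ForeignKey, select
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        mapped_column,
        with_loader_criteria,
    )


    class Base(DeclarativeBase):
        pass


    class A(Base):
        __tablename__ = "a"
        id: Mapped[int] = mapped_column(primary_key=True)


    class B(Base):
        __tablename__ = "b"
        id: Mapped[int] = mapped_column(primary_key=True)
        a_id: Mapped[int] = mapped_column(ForeignKey("a.id"))
        flag: Mapped[bool]


    # ON clause given as a plain SQL comparison rather than a relationship
    # target; the with_loader_criteria() option is still applied to B
    stmt = (
        select(A)
        .join(B, A.id == B.a_id)
        .options(with_loader_criteria(B, B.flag.is_(True)))
    )
    print(stmt)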
+ + This is a backport of the same issue fixed in version 2.0 for 2.0.22. From a0e7cab4d1f3fc6b586b63bef59fd84c1dc0d183 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 7 Feb 2024 22:11:25 +0100 Subject: [PATCH 122/544] Add support for preserve_rowcount execution_option Added new core execution option paramref:`_engine.Connection.execution_options.preserve_rowcount` to unconditionally save the ``rowcount`` attribute from the cursor in the class:`_engine.Result` returned from an execution, regardless of the statement being executed. When this option is provided the correct value is also set when an INSERT makes use of the "insertmanyvalues" mode, that may use more than one actualy cursor execution. Fixes: #10974 Change-Id: Icecef6b7539be9f0a1a02b9539864f5f163dcfbc (cherry picked from commit f0537442eb7d3a3b2e702c8843c3c277fbfda0ac) --- doc/build/changelog/unreleased_20/10974.rst | 15 ++++ doc/build/tutorial/data_update.rst | 21 +++-- lib/sqlalchemy/dialects/mssql/base.py | 8 -- .../dialects/mysql/mariadbconnector.py | 7 -- lib/sqlalchemy/dialects/mysql/mysqldb.py | 7 +- .../dialects/postgresql/__init__.py | 2 +- lib/sqlalchemy/engine/base.py | 29 ++++++- lib/sqlalchemy/engine/cursor.py | 78 ++++++++++++------ lib/sqlalchemy/engine/default.py | 25 ++++-- lib/sqlalchemy/engine/interfaces.py | 4 + lib/sqlalchemy/ext/asyncio/engine.py | 1 + lib/sqlalchemy/orm/query.py | 1 + lib/sqlalchemy/sql/base.py | 1 + lib/sqlalchemy/sql/compiler.py | 23 ++++-- lib/sqlalchemy/testing/fixtures/sql.py | 5 +- test/requirements.py | 14 ++++ test/sql/test_insert_exec.py | 26 +++++- test/sql/test_resultset.py | 82 ++++++++++++++++--- test/typing/test_overloads.py | 1 + 19 files changed, 258 insertions(+), 92 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10974.rst diff --git a/doc/build/changelog/unreleased_20/10974.rst b/doc/build/changelog/unreleased_20/10974.rst new file mode 100644 index 00000000000..a5da62475ea --- /dev/null +++ b/doc/build/changelog/unreleased_20/10974.rst @@ -0,0 +1,15 @@ +.. change:: + :tags: engine, usecase + :tickets: 10974 + + Added new core execution option + :paramref:`_engine.Connection.execution_options.preserve_rowcount`. When + set, the ``cursor.rowcount`` attribute from the DBAPI cursor will be + unconditionally memoized at statement execution time, so that whatever + value the DBAPI offers for any kind of statement will be available using + the :attr:`_engine.CursorResult.rowcount` attribute from the + :class:`_engine.CursorResult`. This allows the rowcount to be accessed for + statments such as INSERT and SELECT, to the degree supported by the DBAPI + in use. The :ref:`engine_insertmanyvalues` also supports this option and + will ensure :attr:`_engine.CursorResult.rowcount` is correctly set for a + bulk INSERT of rows when set. diff --git a/doc/build/tutorial/data_update.rst b/doc/build/tutorial/data_update.rst index a82f070a3f6..48cf5c058aa 100644 --- a/doc/build/tutorial/data_update.rst +++ b/doc/build/tutorial/data_update.rst @@ -279,17 +279,24 @@ Facts about :attr:`_engine.CursorResult.rowcount`: the statement. It does not matter if the row were actually modified or not. * :attr:`_engine.CursorResult.rowcount` is not necessarily available for an UPDATE - or DELETE statement that uses RETURNING. + or DELETE statement that uses RETURNING, or for one that uses an + :ref:`executemany ` execution. The availablility + depends on the DBAPI module in use. 
-* For an :ref:`executemany ` execution, - :attr:`_engine.CursorResult.rowcount` may not be available either, which depends - highly on the DBAPI module in use as well as configured options. The - attribute :attr:`_engine.CursorResult.supports_sane_multi_rowcount` indicates - if this value will be available for the current backend in use. +* In any case where the DBAPI does not determine the rowcount for some type + of statement, the returned value will be ``-1``. + +* SQLAlchemy pre-memoizes the DBAPIs ``cursor.rowcount`` value before the cursor + is closed, as some DBAPIs don't support accessing this attribute after the + fact. In order to pre-memoize ``cursor.rowcount`` for a statement that is + not UPDATE or DELETE, such as INSERT or SELECT, the + :paramref:`_engine.Connection.execution_options.preserve_rowcount` execution + option may be used. * Some drivers, particularly third party dialects for non-relational databases, may not support :attr:`_engine.CursorResult.rowcount` at all. The - :attr:`_engine.CursorResult.supports_sane_rowcount` will indicate this. + :attr:`_engine.CursorResult.supports_sane_rowcount` cursor attribute will + indicate this. * "rowcount" is used by the ORM :term:`unit of work` process to validate that an UPDATE or DELETE statement matched the expected number of rows, and is diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 98f7f6dce6e..ff69d6aa147 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -1841,7 +1841,6 @@ class MSExecutionContext(default.DefaultExecutionContext): _enable_identity_insert = False _select_lastrowid = False _lastrowid = None - _rowcount = None dialect: MSDialect @@ -1961,13 +1960,6 @@ def post_exec(self): def get_lastrowid(self): return self._lastrowid - @property - def rowcount(self): - if self._rowcount is not None: - return self._rowcount - else: - return self.cursor.rowcount - def handle_dbapi_exception(self, e): if self._enable_identity_insert: try: diff --git a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py index 2fe3a192aa9..9bb3fa4d75c 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py @@ -87,13 +87,6 @@ def post_exec(self): if self.isinsert and self.compiled.postfetch_lastrowid: self._lastrowid = self.cursor.lastrowid - @property - def rowcount(self): - if self._rowcount is not None: - return self._rowcount - else: - return self.cursor.rowcount - def get_lastrowid(self): return self._lastrowid diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py index d46d159d4cd..0c632b66f3e 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py @@ -97,12 +97,7 @@ class MySQLExecutionContext_mysqldb(MySQLExecutionContext): - @property - def rowcount(self): - if hasattr(self, "_rowcount"): - return self._rowcount - else: - return self.cursor.rowcount + pass class MySQLCompiler_mysqldb(MySQLCompiler): diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py index 17b14f4d05b..325ea886990 100644 --- a/lib/sqlalchemy/dialects/postgresql/__init__.py +++ b/lib/sqlalchemy/dialects/postgresql/__init__.py @@ -8,7 +8,7 @@ from types import ModuleType -from . import array as arraylib # noqa # must be above base and other dialects +from . 
import array as arraylib # noqa # keep above base and other dialects from . import asyncpg # noqa from . import base from . import pg8000 # noqa diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index e577839c17d..3c11d14d5b7 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -250,6 +250,7 @@ def execution_options( yield_per: int = ..., insertmanyvalues_page_size: int = ..., schema_translate_map: Optional[SchemaTranslateMapType] = ..., + preserve_rowcount: bool = False, **opt: Any, ) -> Connection: ... @@ -490,6 +491,18 @@ def execution_options(self, **opt: Any) -> Connection: :ref:`schema_translating` + :param preserve_rowcount: Boolean; when True, the ``cursor.rowcount`` + attribute will be unconditionally memoized within the result and + made available via the :attr:`.CursorResult.rowcount` attribute. + Normally, this attribute is only preserved for UPDATE and DELETE + statements. Using this option, the DBAPIs rowcount value can + be accessed for other kinds of statements such as INSERT and SELECT, + to the degree that the DBAPI supports these statements. See + :attr:`.CursorResult.rowcount` for notes regarding the behavior + of this attribute. + + .. versionadded:: 2.0.28 + .. seealso:: :meth:`_engine.Engine.execution_options` @@ -1831,10 +1844,7 @@ def _execute_context( context.pre_exec() if context.execute_style is ExecuteStyle.INSERTMANYVALUES: - return self._exec_insertmany_context( - dialect, - context, - ) + return self._exec_insertmany_context(dialect, context) else: return self._exec_single_context( dialect, context, statement, parameters @@ -2018,6 +2028,11 @@ def _exec_insertmany_context( if self._echo: stats = context._get_cache_stats() + " (insertmanyvalues)" + preserve_rowcount = context.execution_options.get( + "preserve_rowcount", False + ) + rowcount = 0 + for imv_batch in dialect._deliver_insertmanyvalues_batches( cursor, str_statement, @@ -2128,9 +2143,15 @@ def _exec_insertmany_context( context.executemany, ) + if preserve_rowcount: + rowcount += imv_batch.current_batch_size + try: context.post_exec() + if preserve_rowcount: + context._rowcount = rowcount # type: ignore[attr-defined] + result = context._setup_result_proxy() except BaseException as e: diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index 4b18ddb4340..71767db74ed 100644 --- a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ -1975,8 +1975,28 @@ def supports_sane_multi_rowcount(self): def rowcount(self) -> int: """Return the 'rowcount' for this result. - The 'rowcount' reports the number of rows *matched* - by the WHERE criterion of an UPDATE or DELETE statement. + The primary purpose of 'rowcount' is to report the number of rows + matched by the WHERE criterion of an UPDATE or DELETE statement + executed once (i.e. for a single parameter set), which may then be + compared to the number of rows expected to be updated or deleted as a + means of asserting data integrity. + + This attribute is transferred from the ``cursor.rowcount`` attribute + of the DBAPI before the cursor is closed, to support DBAPIs that + don't make this value available after cursor close. Some DBAPIs may + offer meaningful values for other kinds of statements, such as INSERT + and SELECT statements as well. 
In order to retrieve ``cursor.rowcount`` + for these statements, set the + :paramref:`.Connection.execution_options.preserve_rowcount` + execution option to True, which will cause the ``cursor.rowcount`` + value to be unconditionally memoized before any results are returned + or the cursor is closed, regardless of statement type. + + For cases where the DBAPI does not support rowcount for a particular + kind of statement and/or execution, the returned value will be ``-1``, + which is delivered directly from the DBAPI and is part of :pep:`249`. + All DBAPIs should support rowcount for single-parameter-set + UPDATE and DELETE statements, however. .. note:: @@ -1985,38 +2005,47 @@ def rowcount(self) -> int: * This attribute returns the number of rows *matched*, which is not necessarily the same as the number of rows - that were actually *modified* - an UPDATE statement, for example, + that were actually *modified*. For example, an UPDATE statement may have no net change on a given row if the SET values given are the same as those present in the row already. Such a row would be matched but not modified. On backends that feature both styles, such as MySQL, - rowcount is configured by default to return the match + rowcount is configured to return the match count in all cases. - * :attr:`_engine.CursorResult.rowcount` - is *only* useful in conjunction - with an UPDATE or DELETE statement. Contrary to what the Python - DBAPI says, it does *not* reliably return the - number of rows available from the results of a SELECT statement - as DBAPIs cannot support this functionality when rows are - unbuffered. - - * :attr:`_engine.CursorResult.rowcount` - may not be fully implemented by - all dialects. In particular, most DBAPIs do not support an - aggregate rowcount result from an executemany call. - The :meth:`_engine.CursorResult.supports_sane_rowcount` and - :meth:`_engine.CursorResult.supports_sane_multi_rowcount` methods - will report from the dialect if each usage is known to be - supported. - - * Statements that use RETURNING may not return a correct - rowcount. + * :attr:`_engine.CursorResult.rowcount` in the default case is + *only* useful in conjunction with an UPDATE or DELETE statement, + and only with a single set of parameters. For other kinds of + statements, SQLAlchemy will not attempt to pre-memoize the value + unless the + :paramref:`.Connection.execution_options.preserve_rowcount` + execution option is used. Note that contrary to :pep:`249`, many + DBAPIs do not support rowcount values for statements that are not + UPDATE or DELETE, particularly when rows are being returned which + are not fully pre-buffered. DBAPIs that dont support rowcount + for a particular kind of statement should return the value ``-1`` + for such statements. + + * :attr:`_engine.CursorResult.rowcount` may not be meaningful + when executing a single statement with multiple parameter sets + (i.e. an :term:`executemany`). Most DBAPIs do not sum "rowcount" + values across multiple parameter sets and will return ``-1`` + when accessed. + + * SQLAlchemy's :ref:`engine_insertmanyvalues` feature does support + a correct population of :attr:`_engine.CursorResult.rowcount` + when the :paramref:`.Connection.execution_options.preserve_rowcount` + execution option is set to True. + + * Statements that use RETURNING may not support rowcount, returning + a ``-1`` value instead. .. 
seealso:: :ref:`tutorial_update_delete_rowcount` - in the :ref:`unified_tutorial` + :paramref:`.Connection.execution_options.preserve_rowcount` + """ # noqa: E501 try: return self.context.rowcount @@ -2110,8 +2139,7 @@ def _raw_row_iterator(self): def merge(self, *others: Result[Any]) -> MergedResult[Any]: merged_result = super().merge(*others) - setup_rowcounts = self.context._has_rowcount - if setup_rowcounts: + if self.context._has_rowcount: merged_result.rowcount = sum( cast("CursorResult[Any]", result).rowcount for result in (self,) + others diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index d64f05cdf52..82c396414b5 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -1198,7 +1198,7 @@ class DefaultExecutionContext(ExecutionContext): _soft_closed = False - _has_rowcount = False + _rowcount: Optional[int] = None # a hook for SQLite's translation of # result column names @@ -1788,7 +1788,14 @@ def handle_dbapi_exception(self, e): @util.non_memoized_property def rowcount(self) -> int: - return self.cursor.rowcount + if self._rowcount is not None: + return self._rowcount + else: + return self.cursor.rowcount + + @property + def _has_rowcount(self): + return self._rowcount is not None def supports_sane_rowcount(self): return self.dialect.supports_sane_rowcount @@ -1799,6 +1806,9 @@ def supports_sane_multi_rowcount(self): def _setup_result_proxy(self): exec_opt = self.execution_options + if self._rowcount is None and exec_opt.get("preserve_rowcount", False): + self._rowcount = self.cursor.rowcount + if self.is_crud or self.is_text: result = self._setup_dml_or_text_result() yp = sr = False @@ -1955,8 +1965,7 @@ def _setup_dml_or_text_result(self): if rows: self.returned_default_rows = rows - result.rowcount = len(rows) - self._has_rowcount = True + self._rowcount = len(rows) if self._is_supplemental_returning: result._rewind(rows) @@ -1970,12 +1979,12 @@ def _setup_dml_or_text_result(self): elif not result._metadata.returns_rows: # no results, get rowcount # (which requires open cursor on some drivers) - result.rowcount - self._has_rowcount = True + if self._rowcount is None: + self._rowcount = self.cursor.rowcount result._soft_close() elif self.isupdate or self.isdelete: - result.rowcount - self._has_rowcount = True + if self._rowcount is None: + self._rowcount = self.cursor.rowcount return result @util.memoized_property diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 67eb68ae949..d1657b8b010 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -270,6 +270,7 @@ class _CoreKnownExecutionOptions(TypedDict, total=False): yield_per: int insertmanyvalues_page_size: int schema_translate_map: Optional[SchemaTranslateMapType] + preserve_rowcount: bool _ExecuteOptions = immutabledict[str, Any] @@ -2977,6 +2978,9 @@ class ExecutionContext: inline SQL expression value was fired off. 
Applies to inserts and updates.""" + execution_options: _ExecuteOptions + """Execution options associated with the current statement execution""" + @classmethod def _init_ddl( cls, diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index 3c718fad3d5..dc6f89d6b59 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -414,6 +414,7 @@ async def execution_options( yield_per: int = ..., insertmanyvalues_page_size: int = ..., schema_translate_map: Optional[SchemaTranslateMapType] = ..., + preserve_rowcount: bool = False, **opt: Any, ) -> AsyncConnection: ... diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 77bce788483..1dfc9cb3459 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -1710,6 +1710,7 @@ def execution_options( schema_translate_map: Optional[SchemaTranslateMapType] = ..., populate_existing: bool = False, autoflush: bool = False, + preserve_rowcount: bool = False, **opt: Any, ) -> Self: ... diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index cea0f177df6..5eb32e30dd4 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -1167,6 +1167,7 @@ def execution_options( render_nulls: bool = ..., is_delete_using: bool = ..., is_update_from: bool = ..., + preserve_rowcount: bool = False, **opt: Any, ) -> Self: ... diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index ba8e3ea450b..d9ef9837346 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -600,7 +600,7 @@ class _InsertManyValuesBatch(NamedTuple): replaced_parameters: _DBAPIAnyExecuteParams processed_setinputsizes: Optional[_GenericSetInputSizesType] batch: Sequence[_DBAPISingleExecuteParams] - batch_size: int + current_batch_size: int batchnum: int total_batches: int rows_sorted: bool @@ -5404,7 +5404,7 @@ def _deliver_insertmanyvalues_batches( param, generic_setinputsizes, [param], - batch_size, + 1, batchnum, lenparams, sort_by_parameter_order, @@ -5435,7 +5435,7 @@ def _deliver_insertmanyvalues_batches( ), ) - batches = list(parameters) + batches = cast("List[Sequence[Any]]", list(parameters)) processed_setinputsizes: Optional[_GenericSetInputSizesType] = None batchnum = 1 @@ -5529,8 +5529,12 @@ def apply_placeholders(keys, formatted): ) while batches: - batch = cast("Sequence[Any]", batches[0:batch_size]) + batch = batches[0:batch_size] batches[0:batch_size] = [] + if batches: + current_batch_size = batch_size + else: + current_batch_size = len(batch) if generic_setinputsizes: # if setinputsizes is present, expand this collection to @@ -5540,7 +5544,7 @@ def apply_placeholders(keys, formatted): (new_key, len_, typ) for new_key, len_, typ in ( (f"{key}_{index}", len_, typ) - for index in range(len(batch)) + for index in range(current_batch_size) for key, len_, typ in generic_setinputsizes ) ] @@ -5550,6 +5554,9 @@ def apply_placeholders(keys, formatted): num_ins_params = imv.num_positional_params_counted batch_iterator: Iterable[Sequence[Any]] + extra_params_left: Sequence[Any] + extra_params_right: Sequence[Any] + if num_ins_params == len(batch[0]): extra_params_left = extra_params_right = () batch_iterator = batch @@ -5572,7 +5579,7 @@ def apply_placeholders(keys, formatted): )[:-2] else: expanded_values_string = ( - (executemany_values_w_comma * len(batch)) + (executemany_values_w_comma * current_batch_size) )[:-2] if self._numeric_binds and num_ins_params > 0: @@ -5588,7 +5595,7 @@ def 
apply_placeholders(keys, formatted): assert not extra_params_right start = expand_pos_lower_index + 1 - end = num_ins_params * (len(batch)) + start + end = num_ins_params * (current_batch_size) + start # need to format here, since statement may contain # unescaped %, while values_string contains just (%s, %s) @@ -5638,7 +5645,7 @@ def apply_placeholders(keys, formatted): replaced_parameters, processed_setinputsizes, batch, - batch_size, + current_batch_size, batchnum, total_batches, sort_by_parameter_order, diff --git a/lib/sqlalchemy/testing/fixtures/sql.py b/lib/sqlalchemy/testing/fixtures/sql.py index 1448510625d..ab532ab0e6d 100644 --- a/lib/sqlalchemy/testing/fixtures/sql.py +++ b/lib/sqlalchemy/testing/fixtures/sql.py @@ -478,10 +478,7 @@ def _deliver_insertmanyvalues_batches( yield batch - def _exec_insertmany_context( - dialect, - context, - ): + def _exec_insertmany_context(dialect, context): with mock.patch.object( dialect, "_deliver_insertmanyvalues_batches", diff --git a/test/requirements.py b/test/requirements.py index a692cd3fee3..2e80884bc17 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -2061,3 +2061,17 @@ def go(config): return False return only_if(go, "json_each is required") + + @property + def rowcount_always_cached(self): + """Indicates that ``cursor.rowcount`` is always accessed, + usually in an ``ExecutionContext.post_exec``. + """ + return only_on(["+mariadbconnector"]) + + @property + def rowcount_always_cached_on_insert(self): + """Indicates that ``cursor.rowcount`` is always accessed in an insert + statement. + """ + return only_on(["mssql"]) diff --git a/test/sql/test_insert_exec.py b/test/sql/test_insert_exec.py index ce4caf30e93..16300aad0ff 100644 --- a/test/sql/test_insert_exec.py +++ b/test/sql/test_insert_exec.py @@ -787,7 +787,8 @@ def test_insert_unicode_keys(self, connection): eq_(connection.execute(table.select()).all(), [(1, 1), (2, 2), (3, 3)]) - def test_insert_returning_values(self, connection): + @testing.variation("preserve_rowcount", [True, False]) + def test_insert_returning_values(self, connection, preserve_rowcount): t = self.tables.data conn = connection @@ -796,7 +797,14 @@ def test_insert_returning_values(self, connection): {"x": "x%d" % i, "y": "y%d" % i} for i in range(1, page_size * 2 + 27) ] - result = conn.execute(t.insert().returning(t.c.x, t.c.y), data) + if preserve_rowcount: + eo = {"preserve_rowcount": True} + else: + eo = {} + + result = conn.execute( + t.insert().returning(t.c.x, t.c.y), data, execution_options=eo + ) eq_([tup[0] for tup in result.cursor.description], ["x", "y"]) eq_(result.keys(), ["x", "y"]) @@ -814,6 +822,9 @@ def test_insert_returning_values(self, connection): # assert result.closed assert result.cursor is None + if preserve_rowcount: + eq_(result.rowcount, len(data)) + def test_insert_returning_preexecute_pk(self, metadata, connection): counter = itertools.count(1) @@ -1036,10 +1047,14 @@ def test_insert_w_bindparam_in_subq( eq_(result.all(), [("p1_p1", "y1"), ("p2_p2", "y2")]) - def test_insert_returning_defaults(self, connection): + @testing.variation("preserve_rowcount", [True, False]) + def test_insert_returning_defaults(self, connection, preserve_rowcount): t = self.tables.data - conn = connection + if preserve_rowcount: + conn = connection.execution_options(preserve_rowcount=True) + else: + conn = connection result = conn.execute(t.insert(), {"x": "x0", "y": "y0"}) first_pk = result.inserted_primary_key[0] @@ -1054,6 +1069,9 @@ def test_insert_returning_defaults(self, connection): 
[(pk, 5) for pk in range(1 + first_pk, total_rows + first_pk)], ) + if preserve_rowcount: + eq_(result.rowcount, total_rows - 1) # range starts from 1 + def test_insert_return_pks_default_values(self, connection): """test sending multiple, empty rows into an INSERT and getting primary key values back. diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py index 8651207a912..350e9542214 100644 --- a/test/sql/test_resultset.py +++ b/test/sql/test_resultset.py @@ -1,4 +1,5 @@ import collections +from collections import defaultdict import collections.abc as collections_abc from contextlib import contextmanager import csv @@ -1734,6 +1735,29 @@ def __getitem__(self, i): eq_(proxy.key, "value") eq_(proxy._mapping["key"], "value") + @contextmanager + def cursor_wrapper(self, engine): + calls = defaultdict(int) + + class CursorWrapper: + def __init__(self, real_cursor): + self.real_cursor = real_cursor + + def __getattr__(self, name): + calls[name] += 1 + return getattr(self.real_cursor, name) + + create_cursor = engine.dialect.execution_ctx_cls.create_cursor + + def new_create(context): + cursor = create_cursor(context) + return CursorWrapper(cursor) + + with patch.object( + engine.dialect.execution_ctx_cls, "create_cursor", new_create + ): + yield calls + def test_no_rowcount_on_selects_inserts(self, metadata, testing_engine): """assert that rowcount is only called on deletes and updates. @@ -1745,33 +1769,71 @@ def test_no_rowcount_on_selects_inserts(self, metadata, testing_engine): engine = testing_engine() + req = testing.requires + t = Table("t1", metadata, Column("data", String(10))) metadata.create_all(engine) - - with patch.object( - engine.dialect.execution_ctx_cls, "rowcount" - ) as mock_rowcount: + count = 0 + with self.cursor_wrapper(engine) as call_counts: with engine.begin() as conn: - mock_rowcount.__get__ = Mock() conn.execute( t.insert(), [{"data": "d1"}, {"data": "d2"}, {"data": "d3"}], ) - - eq_(len(mock_rowcount.__get__.mock_calls), 0) + if ( + req.rowcount_always_cached.enabled + or req.rowcount_always_cached_on_insert.enabled + ): + count += 1 + eq_(call_counts["rowcount"], count) eq_( conn.execute(t.select()).fetchall(), [("d1",), ("d2",), ("d3",)], ) - eq_(len(mock_rowcount.__get__.mock_calls), 0) + if req.rowcount_always_cached.enabled: + count += 1 + eq_(call_counts["rowcount"], count) + + conn.execute(t.update(), {"data": "d4"}) + + count += 1 + eq_(call_counts["rowcount"], count) + + conn.execute(t.delete()) + count += 1 + eq_(call_counts["rowcount"], count) + + def test_rowcount_always_called_when_preserve_rowcount( + self, metadata, testing_engine + ): + """assert that rowcount is called on any statement when + ``preserve_rowcount=True``. 
+ + """ + + engine = testing_engine() + + t = Table("t1", metadata, Column("data", String(10))) + metadata.create_all(engine) + + with self.cursor_wrapper(engine) as call_counts: + with engine.begin() as conn: + conn = conn.execution_options(preserve_rowcount=True) + # Do not use insertmanyvalues on any driver + conn.execute(t.insert(), {"data": "d1"}) + + eq_(call_counts["rowcount"], 1) + + eq_(conn.execute(t.select()).fetchall(), [("d1",)]) + eq_(call_counts["rowcount"], 2) conn.execute(t.update(), {"data": "d4"}) - eq_(len(mock_rowcount.__get__.mock_calls), 1) + eq_(call_counts["rowcount"], 3) conn.execute(t.delete()) - eq_(len(mock_rowcount.__get__.mock_calls), 2) + eq_(call_counts["rowcount"], 4) def test_row_is_sequence(self): row = Row(object(), [None], {}, ["value"]) diff --git a/test/typing/test_overloads.py b/test/typing/test_overloads.py index 968b60d9264..66209f50365 100644 --- a/test/typing/test_overloads.py +++ b/test/typing/test_overloads.py @@ -24,6 +24,7 @@ "stream_results": "bool", "max_row_buffer": "int", "yield_per": "int", + "preserve_rowcount": "bool", } orm_dql_execution_options = { From 1d1bf73c9909e32d5f54d35ee8295e898e985ffd Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 2 Mar 2024 00:28:26 -0500 Subject: [PATCH 123/544] adjust bound parameters within cache key only, dont deep copy Adjusted the fix made in :ticket:`10570`, released in 2.0.23, where new logic was added to reconcile possibly changing bound parameter values across cache key generations used within the :func:`_orm.with_expression` construct. The new logic changes the approach by which the new bound parameter values are associated with the statement, avoiding the need to deep-copy the statement which can result in a significant performance penalty for very deep / complex SQL constructs. The new approach no longer requires this deep-copy step. Fixes: #11085 Change-Id: Ia51eb4e949c8f37af135399925a9916b9ed4ad2f (cherry picked from commit 06be748b474246c1061c309f16f5648ae9bb3954) --- doc/build/changelog/unreleased_20/11085.rst | 12 +++ lib/sqlalchemy/orm/strategy_options.py | 70 ++++++------ lib/sqlalchemy/sql/cache_key.py | 18 ++-- lib/sqlalchemy/sql/compiler.py | 42 ++++++++ lib/sqlalchemy/sql/elements.py | 52 +++++++++ test/aaa_profiling/test_orm.py | 113 ++++++++++++++++++++ test/profiles.txt | 99 ++++++++++++----- test/sql/test_compare.py | 3 +- 8 files changed, 338 insertions(+), 71 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11085.rst diff --git a/doc/build/changelog/unreleased_20/11085.rst b/doc/build/changelog/unreleased_20/11085.rst new file mode 100644 index 00000000000..74f877dac7a --- /dev/null +++ b/doc/build/changelog/unreleased_20/11085.rst @@ -0,0 +1,12 @@ +.. change:: + :tags: bug, orm, performance, regression + :tickets: 11085 + + Adjusted the fix made in :ticket:`10570`, released in 2.0.23, where new + logic was added to reconcile possibly changing bound parameter values + across cache key generations used within the :func:`_orm.with_expression` + construct. The new logic changes the approach by which the new bound + parameter values are associated with the statement, avoiding the need to + deep-copy the statement which can result in a significant performance + penalty for very deep / complex SQL constructs. The new approach no longer + requires this deep-copy step. 
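To make the scenario concrete, the following is a minimal sketch (not taken
from the patch itself; the ``A``/``B`` mapping and the ``flag`` expression are
hypothetical) of the kind of query the #11085 fix addresses: a
:func:`_orm.with_expression` inside a :func:`_orm.selectinload` whose bound
parameter values differ between otherwise identical, and therefore cached,
statements::

    from sqlalchemy import ForeignKey, create_engine, select
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        Session,
        mapped_column,
        query_expression,
        relationship,
        selectinload,
        with_expression,
    )


    class Base(DeclarativeBase):
        pass


    class A(Base):
        __tablename__ = "a"
        id: Mapped[int] = mapped_column(primary_key=True)
        bs: Mapped[list["B"]] = relationship()


    class B(Base):
        __tablename__ = "b"
        id: Mapped[int] = mapped_column(primary_key=True)
        a_id: Mapped[int] = mapped_column(ForeignKey("a.id"))
        data: Mapped[str]
        flag: Mapped[bool] = query_expression()


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(A(bs=[B(data="x"), B(data="y")]))
        session.commit()

        # both statements produce the same cache key; only the bound value
        # ("x" vs. "y") differs, which is the case the parameter-swapping
        # logic reconciles at execution time
        for value in ("x", "y"):
            stmt = select(A).options(
                selectinload(A.bs).options(
                    with_expression(B.flag, B.data == value)
                )
            )
            a = session.scalars(stmt).one()
            print([(b.data, b.flag) for b in a.bs])

Previously, reconciling the new bound values against the cached option
required a deep copy of the criteria via ``.params()``; the ``_OverrideBinds``
wrapper introduced in the diff below swaps only the parameter values in when
the cached statement is compiled or executed.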
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index c20215ac336..3e70ada7e63 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -1034,6 +1034,8 @@ def _construct_for_existing_path( def _adapt_cached_option_to_uncached_option( self, context: QueryContext, uncached_opt: ORMOption ) -> ORMOption: + if uncached_opt is self: + return self return self._adjust_for_extra_criteria(context) def _prepend_path(self, path: PathRegistry) -> Load: @@ -1049,47 +1051,51 @@ def _adjust_for_extra_criteria(self, context: QueryContext) -> Load: returning a new instance of this ``Load`` object. """ - orig_query = context.compile_state.select_statement - - orig_cache_key: Optional[CacheKey] = None - replacement_cache_key: Optional[CacheKey] = None - found_crit = False - def process(opt: _LoadElement) -> _LoadElement: - nonlocal orig_cache_key, replacement_cache_key, found_crit - - found_crit = True + # avoid generating cache keys for the queries if we don't + # actually have any extra_criteria options, which is the + # common case + for value in self.context: + if value._extra_criteria: + break + else: + return self - if orig_cache_key is None or replacement_cache_key is None: - orig_cache_key = orig_query._generate_cache_key() - replacement_cache_key = context.query._generate_cache_key() + replacement_cache_key = context.query._generate_cache_key() - if replacement_cache_key is not None: - assert orig_cache_key is not None + if replacement_cache_key is None: + return self - opt._extra_criteria = tuple( - replacement_cache_key._apply_params_to_element( - orig_cache_key, crit - ) - for crit in opt._extra_criteria + orig_query = context.compile_state.select_statement + orig_cache_key = orig_query._generate_cache_key() + assert orig_cache_key is not None + + def process( + opt: _LoadElement, + replacement_cache_key: CacheKey, + orig_cache_key: CacheKey, + ) -> _LoadElement: + cloned_opt = opt._clone() + + cloned_opt._extra_criteria = tuple( + replacement_cache_key._apply_params_to_element( + orig_cache_key, crit ) + for crit in cloned_opt._extra_criteria + ) - return opt + return cloned_opt - # avoid generating cache keys for the queries if we don't - # actually have any extra_criteria options, which is the - # common case - new_context = tuple( - process(value._clone()) if value._extra_criteria else value + cloned = self._clone() + cloned.context = tuple( + ( + process(value, replacement_cache_key, orig_cache_key) + if value._extra_criteria + else value + ) for value in self.context ) - - if found_crit: - cloned = self._clone() - cloned.context = new_context - return cloned - else: - return self + return cloned def _reconcile_query_entities_with_us(self, mapper_entities, raiseerr): """called at process time to allow adjustment of the root diff --git a/lib/sqlalchemy/sql/cache_key.py b/lib/sqlalchemy/sql/cache_key.py index b4b6b23fa80..1172d3c98f4 100644 --- a/lib/sqlalchemy/sql/cache_key.py +++ b/lib/sqlalchemy/sql/cache_key.py @@ -37,6 +37,7 @@ if typing.TYPE_CHECKING: from .elements import BindParameter from .elements import ClauseElement + from .elements import ColumnElement from .visitors import _TraverseInternalsType from ..engine.interfaces import _CoreSingleExecuteParams @@ -557,18 +558,17 @@ def _generate_param_dict(self) -> Dict[str, Any]: _anon_map = prefix_anon_map() return {b.key % _anon_map: b.effective_value for b in self.bindparams} + @util.preload_module("sqlalchemy.sql.elements") def 
_apply_params_to_element( - self, original_cache_key: CacheKey, target_element: ClauseElement - ) -> ClauseElement: - if target_element._is_immutable: + self, original_cache_key: CacheKey, target_element: ColumnElement[Any] + ) -> ColumnElement[Any]: + if target_element._is_immutable or original_cache_key is self: return target_element - translate = { - k.key: v.value - for k, v in zip(original_cache_key.bindparams, self.bindparams) - } - - return target_element.params(translate) + elements = util.preloaded.sql_elements + return elements._OverrideBinds( + target_element, self.bindparams, original_cache_key.bindparams + ) def _ad_hoc_cache_key_from_args( diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index ba8e3ea450b..e8fb3704df1 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -2383,6 +2383,47 @@ def default_from(self): """ return "" + def visit_override_binds(self, override_binds, **kw): + """SQL compile the nested element of an _OverrideBinds with + bindparams swapped out. + + The _OverrideBinds is not normally expected to be compiled; it + is meant to be used when an already cached statement is to be used, + the compilation was already performed, and only the bound params should + be swapped in at execution time. + + However, the test suite has some tests that exercise compilation + on individual elements without using the cache key version, so here we + modify the bound parameter collection for the given compiler based on + the translation. + + """ + + # get SQL text first + sqltext = override_binds.element._compiler_dispatch(self, **kw) + + # then change binds after the fact. note that we don't try to + # swap the bindparam as we compile, because our element may be + # elsewhere in the statement already (e.g. a subquery or perhaps a + # CTE) and was already visited / compiled. See + # test_relationship_criteria.py -> + # test_selectinload_local_criteria_subquery + for k in override_binds.translate: + if k not in self.binds: + continue + bp = self.binds[k] + + new_bp = bp._with_value( + override_binds.translate[bp.key], + maintain_key=True, + required=False, + ) + name = self.bind_names[bp] + self.binds[k] = self.binds[name] = new_bp + self.bind_names[new_bp] = name + + return sqltext + def visit_grouping(self, grouping, asfrom=False, **kwargs): return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")" @@ -3614,6 +3655,7 @@ def visit_bindparam( render_postcompile=False, **kwargs, ): + if not skip_bind_expression: impl = bindparam.type.dialect_impl(self.dialect) if impl._has_bind_expression: diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index e8131994b6b..e5a9fb0624c 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -104,6 +104,7 @@ from .type_api import TypeEngine from .visitors import _CloneCallableType from .visitors import _TraverseInternalsType + from .visitors import anon_map from ..engine import Connection from ..engine import Dialect from ..engine import Engine @@ -4066,6 +4067,57 @@ def __setstate__(self, state): self.type = state["type"] +class _OverrideBinds(Grouping[_T]): + """used by cache_key->_apply_params_to_element to allow compilation / + execution of a SQL element that's been cached, using an alternate set of + bound parameter values. + + This is used by the ORM to swap new parameter values into expressions + that are embedded into loader options like with_expression(), + selectinload(). 
Previously, this task was accomplished using the + .params() method which would perform a deep-copy instead. This deep + copy proved to be too expensive for more complex expressions. + + See #11085 + + """ + + __visit_name__ = "override_binds" + + def __init__( + self, + element: ColumnElement[_T], + bindparams: Sequence[BindParameter[Any]], + replaces_params: Sequence[BindParameter[Any]], + ): + self.element = element + self.translate = { + k.key: v.value for k, v in zip(replaces_params, bindparams) + } + + def _gen_cache_key( + self, anon_map: anon_map, bindparams: List[BindParameter[Any]] + ) -> Optional[typing_Tuple[Any, ...]]: + """generate a cache key for the given element, substituting its bind + values for the translation values present.""" + + existing_bps: List[BindParameter[Any]] = [] + ck = self.element._gen_cache_key(anon_map, existing_bps) + + bindparams.extend( + ( + bp._with_value( + self.translate[bp.key], maintain_key=True, required=False + ) + if bp.key in self.translate + else bp + ) + for bp in existing_bps + ) + + return ck + + class _OverRange(IntEnum): RANGE_UNBOUNDED = 0 RANGE_CURRENT = 1 diff --git a/test/aaa_profiling/test_orm.py b/test/aaa_profiling/test_orm.py index 8bf2bfa1803..e02c7cae857 100644 --- a/test/aaa_profiling/test_orm.py +++ b/test/aaa_profiling/test_orm.py @@ -1,7 +1,9 @@ from sqlalchemy import and_ from sqlalchemy import ForeignKey +from sqlalchemy import Identity from sqlalchemy import Integer from sqlalchemy import join +from sqlalchemy import literal_column from sqlalchemy import select from sqlalchemy import String from sqlalchemy import testing @@ -13,10 +15,12 @@ from sqlalchemy.orm import join as orm_join from sqlalchemy.orm import joinedload from sqlalchemy.orm import Load +from sqlalchemy.orm import query_expression from sqlalchemy.orm import relationship from sqlalchemy.orm import selectinload from sqlalchemy.orm import Session from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm import with_expression from sqlalchemy.testing import fixtures from sqlalchemy.testing import profiling from sqlalchemy.testing.fixtures import fixture_session @@ -1314,3 +1318,112 @@ def go(): r = q.all() # noqa: F841 go() + + +class WithExpresionLoaderOptTest(fixtures.DeclarativeMappedTest): + # keep caching on with this test. 
+ __requires__ = ("python_profiling_backend",) + + """test #11085""" + + @classmethod + def setup_classes(cls): + Base = cls.DeclarativeBasic + + class A(Base): + __tablename__ = "a" + + id = Column(Integer, Identity(), primary_key=True) + data = Column(String(30)) + bs = relationship("B") + + class B(Base): + __tablename__ = "b" + id = Column(Integer, Identity(), primary_key=True) + a_id = Column(ForeignKey("a.id")) + boolean = query_expression() + d1 = Column(String(30)) + d2 = Column(String(30)) + d3 = Column(String(30)) + d4 = Column(String(30)) + d5 = Column(String(30)) + d6 = Column(String(30)) + d7 = Column(String(30)) + + @classmethod + def insert_data(cls, connection): + A, B = cls.classes("A", "B") + + with Session(connection) as s: + s.add( + A( + bs=[ + B( + d1="x", + d2="x", + d3="x", + d4="x", + d5="x", + d6="x", + d7="x", + ) + ] + ) + ) + s.commit() + + def test_from_opt_no_cache(self): + A, B = self.classes("A", "B") + + @profiling.function_call_count(warmup=2) + def go(): + with Session( + testing.db.execution_options(compiled_cache=None) + ) as sess: + _ = sess.execute( + select(A).options( + selectinload(A.bs).options( + with_expression( + B.boolean, + and_( + B.d1 == "x", + B.d2 == "x", + B.d3 == "x", + B.d4 == "x", + B.d5 == "x", + B.d6 == "x", + B.d7 == "x", + ), + ) + ) + ) + ).scalars() + + go() + + def test_from_opt_after_cache(self): + A, B = self.classes("A", "B") + + @profiling.function_call_count(warmup=2) + def go(): + with Session(testing.db) as sess: + _ = sess.execute( + select(A).options( + selectinload(A.bs).options( + with_expression( + B.boolean, + and_( + B.d1 == literal_column("'x'"), + B.d2 == "x", + B.d3 == literal_column("'x'"), + B.d4 == "x", + B.d5 == literal_column("'x'"), + B.d6 == "x", + B.d7 == literal_column("'x'"), + ), + ) + ) + ) + ).scalars() + + go() diff --git a/test/profiles.txt b/test/profiles.txt index d943f418ff6..d8226f4a894 100644 --- a/test/profiles.txt +++ b/test/profiles.txt @@ -144,147 +144,188 @@ test.aaa_profiling.test_misc.EnumTest.test_create_enum_from_pep_435_w_expensive_ # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 55930 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 65740 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 65640 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 51230 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 54230 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 64040 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 63940 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 49530 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations 
test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 58530 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 66440 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 66240 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 54730 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 57530 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 65440 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 65240 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 53730 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 49130 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 51940 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 51840 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 46030 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 52830 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 60140 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 60040 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 49130 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 51830 -test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 59140 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 59040 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 48130 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations 
x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 37705 test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 40805 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 34505 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 36705 test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 39805 +test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 33505 # TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 3599 test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 3599 +test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 3598 # TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 5527 test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 5527 +test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 5526 # TEST: test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 128 test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 128 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 124 # TEST: test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 128 test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 128 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 124 # TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 15359 -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 24383 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 15360 
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 24378 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 15325 # TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols -test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 21437 -test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 24461 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 21420 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 24444 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 21384 # TEST: test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 10654 -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 11054 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 10804 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 11204 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 10754 # TEST: test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased_select_join test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased_select_join x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1154 test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased_select_join x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1154 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased_select_join x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 1154 # TEST: test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_plain test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_plain x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 4304 test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_plain x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 4604 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_plain x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 4304 # TEST: test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 96282 -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 109782 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 98632 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 112132 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d 
x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 95532 # TEST: test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 93732 -test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 107432 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 96082 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 109782 +test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 92982 # TEST: test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results_integrated -test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results_integrated x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 26339,1019,96653 -test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results_integrated x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 27438,1228,117553 +test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results_integrated x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 27016,1006,95353 +test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results_integrated x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 28168,1215,116253 +test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results_integrated x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 26604,974,92153 # TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 23981 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 23981 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 22982 # TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 113158 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 123916 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 113225 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 123983 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 108201 # TEST: test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks -test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 21189 
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 22709 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 21197 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 22705 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 20478 # TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_load -test.aaa_profiling.test_orm.MergeTest.test_merge_load x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1480 -test.aaa_profiling.test_orm.MergeTest.test_merge_load x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1583 +test.aaa_profiling.test_orm.MergeTest.test_merge_load x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1481 +test.aaa_profiling.test_orm.MergeTest.test_merge_load x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1581 +test.aaa_profiling.test_orm.MergeTest.test_merge_load x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 1412 # TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_no_load test.aaa_profiling.test_orm.MergeTest.test_merge_no_load x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 108,20 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 108,20 +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 108,20 # TEST: test.aaa_profiling.test_orm.QueryTest.test_query_cols -test.aaa_profiling.test_orm.QueryTest.test_query_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 6696 -test.aaa_profiling.test_orm.QueryTest.test_query_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 7456 +test.aaa_profiling.test_orm.QueryTest.test_query_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 6706 +test.aaa_profiling.test_orm.QueryTest.test_query_cols x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 7436 +test.aaa_profiling.test_orm.QueryTest.test_query_cols x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 6316 # TEST: test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results -test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 277405 -test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 298505 +test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 277005 +test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 297305 +test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 263005 # TEST: test.aaa_profiling.test_orm.SessionTest.test_expire_lots test.aaa_profiling.test_orm.SessionTest.test_expire_lots x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1212 
test.aaa_profiling.test_orm.SessionTest.test_expire_lots x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1212 +test.aaa_profiling.test_orm.SessionTest.test_expire_lots x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 1098 + +# TEST: test.aaa_profiling.test_orm.WithExpresionLoaderOptTest.test_from_opt_after_cache + +test.aaa_profiling.test_orm.WithExpresionLoaderOptTest.test_from_opt_after_cache x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1418 +test.aaa_profiling.test_orm.WithExpresionLoaderOptTest.test_from_opt_after_cache x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1504 +test.aaa_profiling.test_orm.WithExpresionLoaderOptTest.test_from_opt_after_cache x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 1399 + +# TEST: test.aaa_profiling.test_orm.WithExpresionLoaderOptTest.test_from_opt_no_cache + +test.aaa_profiling.test_orm.WithExpresionLoaderOptTest.test_from_opt_no_cache x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 1859 +test.aaa_profiling.test_orm.WithExpresionLoaderOptTest.test_from_opt_no_cache x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 1880 +test.aaa_profiling.test_orm.WithExpresionLoaderOptTest.test_from_opt_no_cache x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 1830 # TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect diff --git a/test/sql/test_compare.py b/test/sql/test_compare.py index b2be90f60cd..746058c679e 100644 --- a/test/sql/test_compare.py +++ b/test/sql/test_compare.py @@ -35,6 +35,7 @@ from sqlalchemy.sql import bindparam from sqlalchemy.sql import ColumnElement from sqlalchemy.sql import dml +from sqlalchemy.sql import elements from sqlalchemy.sql import False_ from sqlalchemy.sql import func from sqlalchemy.sql import operators @@ -1368,7 +1369,7 @@ def test_all_present(self): "__init__" in cls.__dict__ or issubclass(cls, AliasedReturnsRows) ) - and not issubclass(cls, (Annotated)) + and not issubclass(cls, (Annotated, elements._OverrideBinds)) and cls.__module__.startswith("sqlalchemy.") and "orm" not in cls.__module__ and "compiler" not in cls.__module__ From 01982fa679689be0a80d6e091de1ca01c42d7afd Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 3 Mar 2024 23:03:14 -0500 Subject: [PATCH 124/544] support pytest 8.1 This is a bump in the tox.ini file. it's possible we don't need to change anything else as we had help from the pytest people a few years back to make sure our API use was fairly modern. Alembic is having problems that appear to be separate. Change-Id: If0348dc4915522d31d3ab970df31244bbc8b8ca4 (cherry picked from commit bc4d9ceec1c73c6a4e36d570052ff5e588462443) --- doc/build/changelog/unreleased_20/pytest81.rst | 4 ++++ tox.ini | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/pytest81.rst diff --git a/doc/build/changelog/unreleased_20/pytest81.rst b/doc/build/changelog/unreleased_20/pytest81.rst new file mode 100644 index 00000000000..01b840ee8c6 --- /dev/null +++ b/doc/build/changelog/unreleased_20/pytest81.rst @@ -0,0 +1,4 @@ +.. change:: + :tags: change, tests + + pytest support in the tox.ini file has been updated to support pytest 8.1. 
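As a usage illustration for the
:paramref:`_engine.Connection.execution_options.preserve_rowcount` option
added earlier in this series, the following is a minimal sketch assuming an
in-memory SQLite engine; the table and data are hypothetical, and the values
actually reported depend on the DBAPI in use::

    from sqlalchemy import (
        Column,
        Integer,
        MetaData,
        String,
        Table,
        create_engine,
        insert,
        select,
    )

    engine = create_engine("sqlite://")
    metadata = MetaData()
    t = Table(
        "t",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", String(30)),
    )
    metadata.create_all(engine)

    with engine.begin() as conn:
        conn = conn.execution_options(preserve_rowcount=True)

        # multi-row INSERT: rowcount is memoized even though this is not
        # an UPDATE or DELETE; sqlite3 sums the count across the
        # executemany parameter sets
        result = conn.execute(
            insert(t), [{"data": "a"}, {"data": "b"}, {"data": "c"}]
        )
        print(result.rowcount)  # 3, to the degree the DBAPI reports it

        # SELECT: the DBAPI value is passed through unconditionally;
        # sqlite3 typically reports -1 for SELECT statements
        rows = conn.execute(select(t))
        print(rows.rowcount)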
diff --git a/tox.ini b/tox.ini index cd91a51a7ea..72e17cfb686 100644 --- a/tox.ini +++ b/tox.ini @@ -41,7 +41,7 @@ install_command= python -I -m pip install --only-binary=pymssql {opts} {packages} deps= - pytest>=7.0.0rc1,<8 + pytest>=7.0.0,<8.2 # tracked by https://github.com/pytest-dev/pytest-xdist/issues/907 pytest-xdist!=3.3.0 From 6625ab42d51b76150ce8e7158388b5a54cd89831 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 4 Mar 2024 08:30:04 -0500 Subject: [PATCH 125/544] cherry-pick changelog from 1.4.52 (cherry picked from commit 662006d4f84f54b2bcddea5025696e8134a187f6) --- doc/build/changelog/changelog_14.rst | 12 +++++++++++- doc/build/changelog/unreleased_14/10365.rst | 9 --------- 2 files changed, 11 insertions(+), 10 deletions(-) delete mode 100644 doc/build/changelog/unreleased_14/10365.rst diff --git a/doc/build/changelog/changelog_14.rst b/doc/build/changelog/changelog_14.rst index 164a10a469d..250288b8ef9 100644 --- a/doc/build/changelog/changelog_14.rst +++ b/doc/build/changelog/changelog_14.rst @@ -15,7 +15,17 @@ This document details individual issue-level changes made throughout .. changelog:: :version: 1.4.52 - :include_notes_from: unreleased_14 + :released: March 4, 2024 + + .. change:: + :tags: bug, orm + :tickets: 10365 + + Fixed bug where ORM :func:`_orm.with_loader_criteria` would not apply + itself to a :meth:`_sql.Select.join` where the ON clause were given as a + plain SQL comparison, rather than as a relationship target or similar. + + This is a backport of the same issue fixed in version 2.0 for 2.0.22. .. changelog:: :version: 1.4.51 diff --git a/doc/build/changelog/unreleased_14/10365.rst b/doc/build/changelog/unreleased_14/10365.rst deleted file mode 100644 index 5eb4f440657..00000000000 --- a/doc/build/changelog/unreleased_14/10365.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10365 - - Fixed bug where ORM :func:`_orm.with_loader_criteria` would not apply - itself to a :meth:`_sql.Select.join` where the ON clause were given as a - plain SQL comparison, rather than as a relationship target or similar. - - This is a backport of the same issue fixed in version 2.0 for 2.0.22. From ca51afcfee9c3c9a19bd37863da57f53334ea1dc Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 4 Mar 2024 08:30:05 -0500 Subject: [PATCH 126/544] cherry-pick changelog update for 1.4.53 (cherry picked from commit cb00a5252e240c2b0308a2891989836473633538) --- doc/build/changelog/changelog_14.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/build/changelog/changelog_14.rst b/doc/build/changelog/changelog_14.rst index 250288b8ef9..55e671e18b8 100644 --- a/doc/build/changelog/changelog_14.rst +++ b/doc/build/changelog/changelog_14.rst @@ -13,6 +13,10 @@ This document details individual issue-level changes made throughout :start-line: 5 +.. changelog:: + :version: 1.4.53 + :include_notes_from: unreleased_14 + .. 
changelog:: :version: 1.4.52 :released: March 4, 2024 From 2a71f3b1f955e5e78b19b91e41ecd60aa257883e Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 4 Mar 2024 08:32:33 -0500 Subject: [PATCH 127/544] - 2.0.28 --- doc/build/changelog/changelog_20.rst | 61 ++++++++++++++++++- doc/build/changelog/unreleased_20/10974.rst | 15 ----- doc/build/changelog/unreleased_20/11010.rst | 8 --- doc/build/changelog/unreleased_20/11085.rst | 12 ---- doc/build/changelog/unreleased_20/8771.rst | 15 ----- .../changelog/unreleased_20/pytest81.rst | 4 -- doc/build/conf.py | 4 +- 7 files changed, 62 insertions(+), 57 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/10974.rst delete mode 100644 doc/build/changelog/unreleased_20/11010.rst delete mode 100644 doc/build/changelog/unreleased_20/11085.rst delete mode 100644 doc/build/changelog/unreleased_20/8771.rst delete mode 100644 doc/build/changelog/unreleased_20/pytest81.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 6d0dfaf8d4d..8a5b51d8282 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,66 @@ .. changelog:: :version: 2.0.28 - :include_notes_from: unreleased_20 + :released: March 4, 2024 + + .. change:: + :tags: engine, usecase + :tickets: 10974 + + Added new core execution option + :paramref:`_engine.Connection.execution_options.preserve_rowcount`. When + set, the ``cursor.rowcount`` attribute from the DBAPI cursor will be + unconditionally memoized at statement execution time, so that whatever + value the DBAPI offers for any kind of statement will be available using + the :attr:`_engine.CursorResult.rowcount` attribute from the + :class:`_engine.CursorResult`. This allows the rowcount to be accessed for + statments such as INSERT and SELECT, to the degree supported by the DBAPI + in use. The :ref:`engine_insertmanyvalues` also supports this option and + will ensure :attr:`_engine.CursorResult.rowcount` is correctly set for a + bulk INSERT of rows when set. + + .. change:: + :tags: bug, orm, regression + :tickets: 11010 + + Fixed regression caused by :ticket:`9779` where using the "secondary" table + in a relationship ``and_()`` expression would fail to be aliased to match + how the "secondary" table normally renders within a + :meth:`_sql.Select.join` expression, leading to an invalid query. + + .. change:: + :tags: bug, orm, performance, regression + :tickets: 11085 + + Adjusted the fix made in :ticket:`10570`, released in 2.0.23, where new + logic was added to reconcile possibly changing bound parameter values + across cache key generations used within the :func:`_orm.with_expression` + construct. The new logic changes the approach by which the new bound + parameter values are associated with the statement, avoiding the need to + deep-copy the statement which can result in a significant performance + penalty for very deep / complex SQL constructs. The new approach no longer + requires this deep-copy step. + + .. change:: + :tags: bug, asyncio + :tickets: 8771 + + An error is raised if a :class:`.QueuePool` or other non-asyncio pool class + is passed to :func:`_asyncio.create_async_engine`. This engine only + accepts asyncio-compatible pool classes including + :class:`.AsyncAdaptedQueuePool`. Other pool classes such as + :class:`.NullPool` are compatible with both synchronous and asynchronous + engines as they do not perform any locking. + + .. seealso:: + + :ref:`pool_api` + + + .. 
change:: + :tags: change, tests + + pytest support in the tox.ini file has been updated to support pytest 8.1. .. changelog:: :version: 2.0.27 diff --git a/doc/build/changelog/unreleased_20/10974.rst b/doc/build/changelog/unreleased_20/10974.rst deleted file mode 100644 index a5da62475ea..00000000000 --- a/doc/build/changelog/unreleased_20/10974.rst +++ /dev/null @@ -1,15 +0,0 @@ -.. change:: - :tags: engine, usecase - :tickets: 10974 - - Added new core execution option - :paramref:`_engine.Connection.execution_options.preserve_rowcount`. When - set, the ``cursor.rowcount`` attribute from the DBAPI cursor will be - unconditionally memoized at statement execution time, so that whatever - value the DBAPI offers for any kind of statement will be available using - the :attr:`_engine.CursorResult.rowcount` attribute from the - :class:`_engine.CursorResult`. This allows the rowcount to be accessed for - statments such as INSERT and SELECT, to the degree supported by the DBAPI - in use. The :ref:`engine_insertmanyvalues` also supports this option and - will ensure :attr:`_engine.CursorResult.rowcount` is correctly set for a - bulk INSERT of rows when set. diff --git a/doc/build/changelog/unreleased_20/11010.rst b/doc/build/changelog/unreleased_20/11010.rst deleted file mode 100644 index bd24772dd6c..00000000000 --- a/doc/build/changelog/unreleased_20/11010.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, orm, regression - :tickets: 11010 - - Fixed regression caused by :ticket:`9779` where using the "secondary" table - in a relationship ``and_()`` expression would fail to be aliased to match - how the "secondary" table normally renders within a - :meth:`_sql.Select.join` expression, leading to an invalid query. diff --git a/doc/build/changelog/unreleased_20/11085.rst b/doc/build/changelog/unreleased_20/11085.rst deleted file mode 100644 index 74f877dac7a..00000000000 --- a/doc/build/changelog/unreleased_20/11085.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, orm, performance, regression - :tickets: 11085 - - Adjusted the fix made in :ticket:`10570`, released in 2.0.23, where new - logic was added to reconcile possibly changing bound parameter values - across cache key generations used within the :func:`_orm.with_expression` - construct. The new logic changes the approach by which the new bound - parameter values are associated with the statement, avoiding the need to - deep-copy the statement which can result in a significant performance - penalty for very deep / complex SQL constructs. The new approach no longer - requires this deep-copy step. diff --git a/doc/build/changelog/unreleased_20/8771.rst b/doc/build/changelog/unreleased_20/8771.rst deleted file mode 100644 index 9f501fcb8d9..00000000000 --- a/doc/build/changelog/unreleased_20/8771.rst +++ /dev/null @@ -1,15 +0,0 @@ -.. change:: - :tags: bug, asyncio - :tickets: 8771 - - An error is raised if a :class:`.QueuePool` or other non-asyncio pool class - is passed to :func:`_asyncio.create_async_engine`. This engine only - accepts asyncio-compatible pool classes including - :class:`.AsyncAdaptedQueuePool`. Other pool classes such as - :class:`.NullPool` are compatible with both synchronous and asynchronous - engines as they do not perform any locking. - - .. 
seealso:: - - :ref:`pool_api` - diff --git a/doc/build/changelog/unreleased_20/pytest81.rst b/doc/build/changelog/unreleased_20/pytest81.rst deleted file mode 100644 index 01b840ee8c6..00000000000 --- a/doc/build/changelog/unreleased_20/pytest81.rst +++ /dev/null @@ -1,4 +0,0 @@ -.. change:: - :tags: change, tests - - pytest support in the tox.ini file has been updated to support pytest 8.1. diff --git a/doc/build/conf.py b/doc/build/conf.py index 599a76c69fa..d91e3a31e38 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.27" +release = "2.0.28" -release_date = "February 13, 2024" +release_date = "March 4, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From a4e85e728ed92d14157d1a47eea0242b1bf08e01 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 4 Mar 2024 08:41:51 -0500 Subject: [PATCH 128/544] Version 2.0.29 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 8a5b51d8282..30b1f9579fe 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.29 + :include_notes_from: unreleased_20 + .. changelog:: :version: 2.0.28 :released: March 4, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 2cdd96e234c..9e983d07fca 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.28" +__version__ = "2.0.29" def __go(lcls: Any) -> None: From ef9520e432183fd25ea0ba9c9c2a8d8b9d36f8f6 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 4 Mar 2024 09:12:34 -0500 Subject: [PATCH 129/544] accommodate False conditions for unique / index merge Fixed issue in ORM annotated declarative where using :func:`_orm.mapped_column()` with an :paramref:`_orm.mapped_column.index` or :paramref:`_orm.mapped_column.unique` setting of False would be overridden by an incoming ``Annotated`` element that featured that parameter set to ``True``, even though the immediate :func:`_orm.mapped_column()` element is more specific and should take precedence. The logic to reconcile the booleans has been enhanced to accommodate a local value of ``False`` as still taking precedence over an incoming ``True`` value from the annotated element. Fixes: #11091 Change-Id: I15cda4a0a07a289015c0a09bbe3ca2849956604e (cherry picked from commit e4c4bd03abae2d3948f894d38992d51c9be2a8c0) --- doc/build/changelog/unreleased_20/11091.rst | 13 +++++++++ lib/sqlalchemy/sql/schema.py | 10 +++---- .../test_tm_future_annotations_sync.py | 28 +++++++++++++++++++ test/orm/declarative/test_typed_mapping.py | 28 +++++++++++++++++++ test/sql/test_metadata.py | 22 +++++++++++++++ 5 files changed, 96 insertions(+), 5 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11091.rst diff --git a/doc/build/changelog/unreleased_20/11091.rst b/doc/build/changelog/unreleased_20/11091.rst new file mode 100644 index 00000000000..30f2fbcd355 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11091.rst @@ -0,0 +1,13 @@ +.. 
change:: + :tags: bug, orm + :tickets: 11091 + + Fixed issue in ORM annotated declarative where using + :func:`_orm.mapped_column()` with an :paramref:`_orm.mapped_column.index` + or :paramref:`_orm.mapped_column.unique` setting of False would be + overridden by an incoming ``Annotated`` element that featured that + parameter set to ``True``, even though the immediate + :func:`_orm.mapped_column()` element is more specific and should take + precedence. The logic to reconcile the booleans has been enhanced to + accommodate a local value of ``False`` as still taking precedence over an + incoming ``True`` value from the annotated element. diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 2932fffad47..6d5f941786a 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -2569,8 +2569,11 @@ def _merge(self, other: Column[Any]) -> None: new_onupdate = self.onupdate._copy() new_onupdate._set_parent(other) - if self.index and not other.index: - other.index = True + if self.index in (True, False) and other.index is None: + other.index = self.index + + if self.unique in (True, False) and other.unique is None: + other.unique = self.unique if self.doc and other.doc is None: other.doc = self.doc @@ -2578,9 +2581,6 @@ def _merge(self, other: Column[Any]) -> None: if self.comment and other.comment is None: other.comment = self.comment - if self.unique and not other.unique: - other.unique = True - for const in self.constraints: if not const._type_bound: new_const = const._copy() diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index 9b55827d499..6b118139178 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -904,7 +904,9 @@ def test_we_got_all_attrs_test_annotated(self): ), ("index", True, lambda column: column.index is True), ("index", _NoArg.NO_ARG, lambda column: column.index is None), + ("index", False, lambda column: column.index is False), ("unique", True, lambda column: column.unique is True), + ("unique", False, lambda column: column.unique is False), ("autoincrement", True, lambda column: column.autoincrement is True), ("system", True, lambda column: column.system is True), ("primary_key", True, lambda column: column.primary_key is True), @@ -1062,6 +1064,32 @@ class User(Base): argument, ) + @testing.combinations(("index",), ("unique",), argnames="paramname") + @testing.combinations((True,), (False,), (None,), argnames="orig") + @testing.combinations((True,), (False,), (None,), argnames="merging") + def test_index_unique_combinations( + self, paramname, orig, merging, decl_base + ): + """test #11091""" + + global myint + + amc = mapped_column(**{paramname: merging}) + myint = Annotated[int, amc] + + mc = mapped_column(**{paramname: orig}) + + class User(decl_base): + __tablename__ = "user" + id: Mapped[int] = mapped_column(primary_key=True) + myname: Mapped[myint] = mc + + result = getattr(User.__table__.c.myname, paramname) + if orig is None: + is_(result, merging) + else: + is_(result, orig) + def test_pep484_newtypes_as_typemap_keys( self, decl_base: Type[DeclarativeBase] ): diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index ba8ab455ca1..0b9f4c1acbd 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -895,7 +895,9 @@ def test_we_got_all_attrs_test_annotated(self): ), 
("index", True, lambda column: column.index is True), ("index", _NoArg.NO_ARG, lambda column: column.index is None), + ("index", False, lambda column: column.index is False), ("unique", True, lambda column: column.unique is True), + ("unique", False, lambda column: column.unique is False), ("autoincrement", True, lambda column: column.autoincrement is True), ("system", True, lambda column: column.system is True), ("primary_key", True, lambda column: column.primary_key is True), @@ -1053,6 +1055,32 @@ class User(Base): argument, ) + @testing.combinations(("index",), ("unique",), argnames="paramname") + @testing.combinations((True,), (False,), (None,), argnames="orig") + @testing.combinations((True,), (False,), (None,), argnames="merging") + def test_index_unique_combinations( + self, paramname, orig, merging, decl_base + ): + """test #11091""" + + # anno only: global myint + + amc = mapped_column(**{paramname: merging}) + myint = Annotated[int, amc] + + mc = mapped_column(**{paramname: orig}) + + class User(decl_base): + __tablename__ = "user" + id: Mapped[int] = mapped_column(primary_key=True) + myname: Mapped[myint] = mc + + result = getattr(User.__table__.c.myname, paramname) + if orig is None: + is_(result, merging) + else: + is_(result, orig) + def test_pep484_newtypes_as_typemap_keys( self, decl_base: Type[DeclarativeBase] ): diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index 8b43b0f98ac..a54a5fcc8d5 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -4376,6 +4376,28 @@ def compile_(element, compiler, **kw): deregister(schema.CreateColumn) + @testing.combinations(("index",), ("unique",), argnames="paramname") + @testing.combinations((True,), (False,), (None,), argnames="orig") + @testing.combinations((True,), (False,), (None,), argnames="merging") + def test_merge_index_unique(self, paramname, orig, merging): + """test #11091""" + source = Column(**{paramname: merging}) + + target = Column(**{paramname: orig}) + + source._merge(target) + + target_copy = target._copy() + for col in ( + target, + target_copy, + ): + result = getattr(col, paramname) + if orig is None: + is_(result, merging) + else: + is_(result, orig) + @testing.combinations( ("default", lambda ctx: 10), ("default", func.foo()), From 80ae19cb680ea6afdeebc4b8685e7a9de80d91a6 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 9 Mar 2024 10:05:03 -0500 Subject: [PATCH 130/544] use a fixed date that's not near DST switchover CI has been failing here due to the DST switchover regarding live dates. 
Change-Id: I98b2dbe646180f41f948bec20193fdf3f63501b8 (cherry picked from commit 5b94dfad5ebc0ef9f929a7d9c7200ca577fea9b2) --- lib/sqlalchemy/orm/util.py | 2 +- test/dialect/postgresql/test_types.py | 30 +++++++++++++-------------- 2 files changed, 15 insertions(+), 17 deletions(-) diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 90508206ee6..d2e3f5302ae 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -247,7 +247,7 @@ def __new__( values.clear() values.discard("all") - self = super().__new__(cls, values) # type: ignore + self = super().__new__(cls, values) self.save_update = "save-update" in values self.delete = "delete" in values self.refresh_expire = "refresh-expire" in values diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index a5093c0bc90..08479b445f5 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -5432,31 +5432,29 @@ class _DateTimeTZMultiRangeTests: _tstzs_delta = None def tstzs(self): - utc_now = cast( - func.current_timestamp().op("AT TIME ZONE")("utc"), - DateTime(timezone=True), + # note this was hitting DST issues when these tests were using a + # live date and running on or near 2024-03-09 :). hardcoded to a + # date a few days earlier + utc_now = datetime.datetime( + 2024, 3, 2, 14, 57, 50, 473566, tzinfo=datetime.timezone.utc ) if self._tstzs is None: - with testing.db.connect() as connection: - lower = connection.scalar(select(utc_now)) - upper = lower + datetime.timedelta(1) - self._tstzs = (lower, upper) + lower = utc_now + upper = lower + datetime.timedelta(1) + self._tstzs = (lower, upper) return self._tstzs def tstzs_delta(self): - utc_now = cast( - func.current_timestamp().op("AT TIME ZONE")("utc"), - DateTime(timezone=True), + utc_now = datetime.datetime( + 2024, 3, 2, 14, 57, 50, 473566, tzinfo=datetime.timezone.utc ) if self._tstzs_delta is None: - with testing.db.connect() as connection: - lower = connection.scalar( - select(utc_now) - ) + datetime.timedelta(3) - upper = lower + datetime.timedelta(2) - self._tstzs_delta = (lower, upper) + lower = utc_now + datetime.timedelta(3) + upper = lower + datetime.timedelta(2) + self._tstzs_delta = (lower, upper) + return self._tstzs_delta def _data_str(self): From 37782d47f02a6d5fd98e27930a0a992c1043a804 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 9 Mar 2024 13:41:05 -0500 Subject: [PATCH 131/544] document caveat for #11054 Fixes: #11054 Change-Id: I1a5a9586d024d84dacf37742d710baf7b8f7570f (cherry picked from commit 10fb1328ba53f0dc64355b45abd9e4e321589fae) --- doc/build/core/engines.rst | 37 +++++++++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/doc/build/core/engines.rst b/doc/build/core/engines.rst index 3397a65e83e..64c558a910a 100644 --- a/doc/build/core/engines.rst +++ b/doc/build/core/engines.rst @@ -583,16 +583,49 @@ The logger name of instance such as an :class:`~sqlalchemy.engine.Engine` or string. 
To set this to a specific name, use the :paramref:`_sa.create_engine.logging_name` and :paramref:`_sa.create_engine.pool_logging_name` with -:func:`sqlalchemy.create_engine`:: +:func:`sqlalchemy.create_engine`; the name will be appended to the logging name +``sqlalchemy.engine.Engine``:: + >>> import logging >>> from sqlalchemy import create_engine >>> from sqlalchemy import text - >>> e = create_engine("sqlite://", echo=True, logging_name="myengine") + >>> logging.basicConfig() + >>> logging.getLogger("sqlalchemy.engine.Engine.myengine").setLevel(logging.INFO) + >>> e = create_engine("sqlite://", logging_name="myengine") >>> with e.connect() as conn: ... conn.execute(text("select 'hi'")) 2020-10-24 12:47:04,291 INFO sqlalchemy.engine.Engine.myengine select 'hi' 2020-10-24 12:47:04,292 INFO sqlalchemy.engine.Engine.myengine () +.. tip:: + + The :paramref:`_sa.create_engine.logging_name` and + :paramref:`_sa.create_engine.pool_logging_name` parameters may also be used in + conjunction with :paramref:`_sa.create_engine.echo` and + :paramref:`_sa.create_engine.echo_pool`. However, an unavoidable double logging + condition will occur if other engines are created with echo flags set to True + and **no** logging name. This is because a handler will be added automatically + for ``sqlalchemy.engine.Engine`` which will log messages both for the name-less + engine as well as engines with logging names. For example:: + + from sqlalchemy import create_engine, text + + e1 = create_engine("sqlite://", echo=True, logging_name="myname") + with e1.begin() as conn: + conn.execute(text("SELECT 1")) + + e2 = create_engine("sqlite://", echo=True) + with e2.begin() as conn: + conn.execute(text("SELECT 2")) + + with e1.begin() as conn: + conn.execute(text("SELECT 3")) + + The above scenario will double log ``SELECT 3``. To resolve, ensure + all engines have a ``logging_name`` set, or use explicit logger / handler + setup without using :paramref:`_sa.create_engine.echo` and + :paramref:`_sa.create_engine.echo_pool`. + .. _dbengine_logging_tokens: Setting Per-Connection / Sub-Engine Tokens From 46587c16e132278a3c76ff23ca020a33f569a9d2 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 9 Mar 2024 12:47:01 -0500 Subject: [PATCH 132/544] add extra pep695 conversion step Added support for the :pep:`695` ``TypeAliasType`` construct as well as the python 3.12 native ``type`` keyword to work with ORM Annotated Declarative form when using these constructs to link to a :pep:`593` ``Annotated`` container, allowing the resolution of the ``Annotated`` to proceed when these constructs are used in a :class:`_orm.Mapped` typing container. Fixes: #11130 Change-Id: I9a386943966de2107f15f08dfe6ed2aa84f7e86c (cherry picked from commit 985193c407ffb891c8eed042fac6f9547a34d694) --- doc/build/changelog/unreleased_20/11130.rst | 9 +++++ lib/sqlalchemy/orm/properties.py | 10 ++++-- .../test_tm_future_annotations_sync.py | 33 +++++++++++++++++++ test/orm/declarative/test_typed_mapping.py | 33 +++++++++++++++++++ 4 files changed, 83 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11130.rst diff --git a/doc/build/changelog/unreleased_20/11130.rst b/doc/build/changelog/unreleased_20/11130.rst new file mode 100644 index 00000000000..80fbe08dd2b --- /dev/null +++ b/doc/build/changelog/unreleased_20/11130.rst @@ -0,0 +1,9 @@ +.. 
change:: + :tags: usecase, orm + :tickets: 11130 + + Added support for the :pep:`695` ``TypeAliasType`` construct as well as the + python 3.12 native ``type`` keyword to work with ORM Annotated Declarative + form when using these constructs to link to a :pep:`593` ``Annotated`` + container, allowing the resolution of the ``Annotated`` to proceed when + these constructs are used in a :class:`_orm.Mapped` typing container. diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index 7a5eb8625b2..adee44a77e1 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -58,6 +58,7 @@ from ..util.typing import is_fwd_ref from ..util.typing import is_optional_union from ..util.typing import is_pep593 +from ..util.typing import is_pep695 from ..util.typing import is_union from ..util.typing import Self from ..util.typing import typing_get_args @@ -760,6 +761,11 @@ def _init_column_for_annotation( use_args_from = None + our_original_type = our_type + + if is_pep695(our_type): + our_type = our_type.__value__ + if is_pep593(our_type): our_type_is_pep593 = True @@ -852,9 +858,9 @@ def _init_column_for_annotation( new_sqltype = None if our_type_is_pep593: - checks = [our_type, raw_pep_593_type] + checks = [our_original_type, raw_pep_593_type] else: - checks = [our_type] + checks = [our_original_type] for check_type in checks: new_sqltype = registry._resolve_type(check_type) diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index 6b118139178..25e77811339 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -115,6 +115,13 @@ class _SomeDict2(TypedDict): """ type _UnionPep695 = _SomeDict1 | _SomeDict2 type _StrPep695 = str + +type strtypalias_keyword = Annotated[str, mapped_column(info={"hi": "there"})] + +strtypalias_tat: typing.TypeAliasType = Annotated[ + str, mapped_column(info={"hi": "there"})] + +strtypalias_plain = Annotated[str, mapped_column(info={"hi": "there"})] """, globals(), ) @@ -833,6 +840,32 @@ class Test(decl_base): eq_(Test.__table__.c.data.type.length, 30) is_(Test.__table__.c.structure.type._type_affinity, JSON) + @testing.variation("alias_type", ["none", "typekeyword", "typealiastype"]) + @testing.requires.python312 + def test_extract_pep593_from_pep695( + self, decl_base: Type[DeclarativeBase], alias_type + ): + """test #11130""" + + class MyClass(decl_base): + __tablename__ = "my_table" + + id: Mapped[int] = mapped_column(primary_key=True) + + if alias_type.typekeyword: + data_one: Mapped[strtypalias_keyword] # noqa: F821 + elif alias_type.typealiastype: + data_one: Mapped[strtypalias_tat] # noqa: F821 + elif alias_type.none: + data_one: Mapped[strtypalias_plain] # noqa: F821 + else: + alias_type.fail() + + table = MyClass.__table__ + assert table is not None + + eq_(MyClass.data_one.expression.info, {"hi": "there"}) + @testing.requires.python310 def test_we_got_all_attrs_test_annotated(self): argnames = _py_inspect.getfullargspec(mapped_column) diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 0b9f4c1acbd..4afa33c7316 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -106,6 +106,13 @@ class _SomeDict2(TypedDict): """ type _UnionPep695 = _SomeDict1 | _SomeDict2 type _StrPep695 = str + +type strtypalias_keyword = Annotated[str, mapped_column(info={"hi": 
"there"})] + +strtypalias_tat: typing.TypeAliasType = Annotated[ + str, mapped_column(info={"hi": "there"})] + +strtypalias_plain = Annotated[str, mapped_column(info={"hi": "there"})] """, globals(), ) @@ -824,6 +831,32 @@ class Test(decl_base): eq_(Test.__table__.c.data.type.length, 30) is_(Test.__table__.c.structure.type._type_affinity, JSON) + @testing.variation("alias_type", ["none", "typekeyword", "typealiastype"]) + @testing.requires.python312 + def test_extract_pep593_from_pep695( + self, decl_base: Type[DeclarativeBase], alias_type + ): + """test #11130""" + + class MyClass(decl_base): + __tablename__ = "my_table" + + id: Mapped[int] = mapped_column(primary_key=True) + + if alias_type.typekeyword: + data_one: Mapped[strtypalias_keyword] # noqa: F821 + elif alias_type.typealiastype: + data_one: Mapped[strtypalias_tat] # noqa: F821 + elif alias_type.none: + data_one: Mapped[strtypalias_plain] # noqa: F821 + else: + alias_type.fail() + + table = MyClass.__table__ + assert table is not None + + eq_(MyClass.data_one.expression.info, {"hi": "there"}) + @testing.requires.python310 def test_we_got_all_attrs_test_annotated(self): argnames = _py_inspect.getfullargspec(mapped_column) From 07b8c4ba5655c5b95bd839732a291707654bf113 Mon Sep 17 00:00:00 2001 From: "Francisco R. Del Roio" Date: Sun, 25 Feb 2024 14:37:27 -0500 Subject: [PATCH 133/544] Fixed typing issues with sync code runners Fixed typing issue allowing asyncio ``run_sync()`` methods to correctly type the parameters according to the callable that was passed, making use of :pep:`612` ``ParamSpec`` variables. Pull request courtesy Francisco R. Del Roio. Closes: #11055 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11055 Pull-request-sha: 712b4382b16e4c07c09ac40a570c4bfb76c28161 Change-Id: I94ec8bbb0688d6c6e1610f8f769abab550179c14 (cherry picked from commit b687624f63b8613f3c487866292fa88f763c79ee) --- doc/build/changelog/unreleased_20/11055.rst | 8 ++++++++ lib/sqlalchemy/ext/asyncio/engine.py | 12 ++++++++++-- lib/sqlalchemy/ext/asyncio/session.py | 13 +++++++++++-- .../plain_files/ext/asyncio/async_sessionmaker.py | 7 +++++++ test/typing/plain_files/ext/asyncio/engines.py | 12 ++++++++++++ 5 files changed, 48 insertions(+), 4 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11055.rst diff --git a/doc/build/changelog/unreleased_20/11055.rst b/doc/build/changelog/unreleased_20/11055.rst new file mode 100644 index 00000000000..8784d7aec11 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11055.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, typing + :tickets: 11055 + + Fixed typing issue allowing asyncio ``run_sync()`` methods to correctly + type the parameters according to the callable that was passed, making use + of :pep:`612` ``ParamSpec`` variables. Pull request courtesy Francisco R. + Del Roio. 
diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index dc6f89d6b59..5d7d7e6b425 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -41,6 +41,8 @@ from ...engine.base import Transaction from ...exc import ArgumentError from ...util.concurrency import greenlet_spawn +from ...util.typing import Concatenate +from ...util.typing import ParamSpec if TYPE_CHECKING: from ...engine.cursor import CursorResult @@ -61,6 +63,7 @@ from ...sql.base import Executable from ...sql.selectable import TypedReturnsRows +_P = ParamSpec("_P") _T = TypeVar("_T", bound=Any) @@ -813,7 +816,10 @@ async def stream_scalars( yield result.scalars() async def run_sync( - self, fn: Callable[..., _T], *arg: Any, **kw: Any + self, + fn: Callable[Concatenate[Connection, _P], _T], + *arg: _P.args, + **kw: _P.kwargs, ) -> _T: """Invoke the given synchronous (i.e. not async) callable, passing a synchronous-style :class:`_engine.Connection` as the first @@ -877,7 +883,9 @@ async def do_something_async(async_engine: AsyncEngine) -> None: """ # noqa: E501 - return await greenlet_spawn(fn, self._proxied, *arg, **kw) + return await greenlet_spawn( + fn, self._proxied, *arg, _require_await=False, **kw + ) def __await__(self) -> Generator[Any, None, AsyncConnection]: return self.start().__await__() diff --git a/lib/sqlalchemy/ext/asyncio/session.py b/lib/sqlalchemy/ext/asyncio/session.py index a9ea55e4966..c5fe469a0d4 100644 --- a/lib/sqlalchemy/ext/asyncio/session.py +++ b/lib/sqlalchemy/ext/asyncio/session.py @@ -38,6 +38,9 @@ from ...orm import SessionTransaction from ...orm import state as _instance_state from ...util.concurrency import greenlet_spawn +from ...util.typing import Concatenate +from ...util.typing import ParamSpec + if TYPE_CHECKING: from .engine import AsyncConnection @@ -71,6 +74,7 @@ _AsyncSessionBind = Union["AsyncEngine", "AsyncConnection"] +_P = ParamSpec("_P") _T = TypeVar("_T", bound=Any) @@ -332,7 +336,10 @@ async def refresh( ) async def run_sync( - self, fn: Callable[..., _T], *arg: Any, **kw: Any + self, + fn: Callable[Concatenate[Session, _P], _T], + *arg: _P.args, + **kw: _P.kwargs, ) -> _T: """Invoke the given synchronous (i.e. 
not async) callable, passing a synchronous-style :class:`_orm.Session` as the first @@ -386,7 +393,9 @@ async def do_something_async(async_engine: AsyncEngine) -> None: :ref:`session_run_sync` """ # noqa: E501 - return await greenlet_spawn(fn, self.sync_session, *arg, **kw) + return await greenlet_spawn( + fn, self.sync_session, *arg, _require_await=False, **kw + ) @overload async def execute( diff --git a/test/typing/plain_files/ext/asyncio/async_sessionmaker.py b/test/typing/plain_files/ext/asyncio/async_sessionmaker.py index d9997141a10..b081aa1b130 100644 --- a/test/typing/plain_files/ext/asyncio/async_sessionmaker.py +++ b/test/typing/plain_files/ext/asyncio/async_sessionmaker.py @@ -52,6 +52,10 @@ def work_with_a_session_two(sess: Session, param: Optional[str] = None) -> Any: pass +def work_with_wrong_parameter(session: Session, foo: int) -> Any: + pass + + async def async_main() -> None: """Main program function.""" @@ -71,6 +75,9 @@ async def async_main() -> None: await session.run_sync(work_with_a_session_one) await session.run_sync(work_with_a_session_two, param="foo") + # EXPECTED_MYPY: Missing positional argument "foo" in call to "run_sync" of "AsyncSession" + await session.run_sync(work_with_wrong_parameter) + session.add_all( [ A(bs=[B(), B()], data="a1"), diff --git a/test/typing/plain_files/ext/asyncio/engines.py b/test/typing/plain_files/ext/asyncio/engines.py index 598d319a776..01475dc71e5 100644 --- a/test/typing/plain_files/ext/asyncio/engines.py +++ b/test/typing/plain_files/ext/asyncio/engines.py @@ -1,7 +1,14 @@ +from typing import Any + +from sqlalchemy import Connection from sqlalchemy import text from sqlalchemy.ext.asyncio import create_async_engine +def work_sync(conn: Connection, foo: int) -> Any: + pass + + async def asyncio() -> None: e = create_async_engine("sqlite://") @@ -53,3 +60,8 @@ async def asyncio() -> None: # EXPECTED_TYPE: CursorResult[Any] reveal_type(result) + + await conn.run_sync(work_sync, 1) + + # EXPECTED_MYPY: Missing positional argument "foo" in call to "run_sync" of "AsyncConnection" + await conn.run_sync(work_sync) From ed3fa95e1671f3d920068f6bd36d23b98f9533bd Mon Sep 17 00:00:00 2001 From: Eugene Toder Date: Mon, 11 Mar 2024 07:42:47 -0400 Subject: [PATCH 134/544] Allow using AsyncEngine in compile This works, so only need to update the type annotation. 
This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed Closes: #11103 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11103 Pull-request-sha: ba9e61a3902d5666a5176aedd50afe8ae7762bff Change-Id: I3d08b930a8cae0539bf9b436d5e806d8912cdee0 (cherry picked from commit d2a743d0bcd88129f571f2256cd18f1b02036fd2) --- lib/sqlalchemy/engine/base.py | 1 + lib/sqlalchemy/ext/asyncio/engine.py | 4 ++-- lib/sqlalchemy/sql/_typing.py | 10 ++++++++ lib/sqlalchemy/sql/elements.py | 6 ++--- test/ext/asyncio/test_engine_py3k.py | 24 ++++++++++++------- test/typing/plain_files/engine/engines.py | 5 ++++ .../typing/plain_files/ext/asyncio/engines.py | 6 +++++ 7 files changed, 43 insertions(+), 13 deletions(-) diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 3c11d14d5b7..403ec452b9a 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -109,6 +109,7 @@ class Connection(ConnectionEventsTarget, inspection.Inspectable["Inspector"]): """ + dialect: Dialect dispatch: dispatcher[ConnectionEventsTarget] _sqla_logger_namespace = "sqlalchemy.engine.Connection" diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index 5d7d7e6b425..8fc8e96db06 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -930,7 +930,7 @@ def invalidated(self) -> Any: return self._proxied.invalidated @property - def dialect(self) -> Any: + def dialect(self) -> Dialect: r"""Proxy for the :attr:`_engine.Connection.dialect` attribute on behalf of the :class:`_asyncio.AsyncConnection` class. @@ -939,7 +939,7 @@ def dialect(self) -> Any: return self._proxied.dialect @dialect.setter - def dialect(self, attr: Any) -> None: + def dialect(self, attr: Dialect) -> None: self._proxied.dialect = attr @property diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index ea9cbe1f482..ba5faffd4d6 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -69,6 +69,7 @@ from .sqltypes import TableValueType from .sqltypes import TupleType from .type_api import TypeEngine + from ..engine import Dialect from ..util.typing import TypeGuard _T = TypeVar("_T", bound=Any) @@ -92,6 +93,15 @@ class _CoreAdapterProto(Protocol): def __call__(self, obj: _CE) -> _CE: ... +class _HasDialect(Protocol): + """protocol for Engine/Connection-like objects that have dialect + attribute. + """ + + @property + def dialect(self) -> Dialect: ... 
+ + # match column types that are not ORM entities _NOT_ENTITY = TypeVar( "_NOT_ENTITY", diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index e5a9fb0624c..9f0ed10a4c9 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -83,6 +83,7 @@ from ._typing import _ByArgument from ._typing import _ColumnExpressionArgument from ._typing import _ColumnExpressionOrStrLabelArgument + from ._typing import _HasDialect from ._typing import _InfoType from ._typing import _PropagateAttrsType from ._typing import _TypeEngineArgument @@ -107,7 +108,6 @@ from .visitors import anon_map from ..engine import Connection from ..engine import Dialect - from ..engine import Engine from ..engine.interfaces import _CoreMultiExecuteParams from ..engine.interfaces import CacheStats from ..engine.interfaces import CompiledCacheType @@ -244,7 +244,7 @@ class CompilerElement(Visitable): @util.preload_module("sqlalchemy.engine.url") def compile( self, - bind: Optional[Union[Engine, Connection]] = None, + bind: Optional[_HasDialect] = None, dialect: Optional[Dialect] = None, **kw: Any, ) -> Compiled: @@ -776,7 +776,7 @@ def _compiler(self, dialect: Dialect, **kw: Any) -> SQLCompiler: def compile( # noqa: A001 self, - bind: Optional[Union[Engine, Connection]] = None, + bind: Optional[_HasDialect] = None, dialect: Optional[Dialect] = None, **kw: Any, ) -> SQLCompiler: ... diff --git a/test/ext/asyncio/test_engine_py3k.py b/test/ext/asyncio/test_engine_py3k.py index c12363f4d0b..9fb12e6936f 100644 --- a/test/ext/asyncio/test_engine_py3k.py +++ b/test/ext/asyncio/test_engine_py3k.py @@ -403,6 +403,13 @@ async def go(): eq_(m.mock_calls, []) + @async_test + async def test_statement_compile(self, async_engine): + stmt = _select1(async_engine) + eq_(str(select(1).compile(async_engine)), stmt) + async with async_engine.connect() as conn: + eq_(str(select(1).compile(conn)), stmt) + def test_clear_compiled_cache(self, async_engine): async_engine.sync_engine._compiled_cache["foo"] = "bar" eq_(async_engine.sync_engine._compiled_cache["foo"], "bar") @@ -954,19 +961,13 @@ async def test_no_async_listeners_pool_event(self, async_engine): ): event.listen(async_engine, "checkout", mock.Mock()) - def select1(self, engine): - if engine.dialect.name == "oracle": - return "select 1 from dual" - else: - return "select 1" - @async_test async def test_sync_before_cursor_execute_engine(self, async_engine): canary = mock.Mock() event.listen(async_engine.sync_engine, "before_cursor_execute", canary) - s1 = self.select1(async_engine) + s1 = _select1(async_engine) async with async_engine.connect() as conn: sync_conn = conn.sync_connection await conn.execute(text(s1)) @@ -980,7 +981,7 @@ async def test_sync_before_cursor_execute_engine(self, async_engine): async def test_sync_before_cursor_execute_connection(self, async_engine): canary = mock.Mock() - s1 = self.select1(async_engine) + s1 = _select1(async_engine) async with async_engine.connect() as conn: sync_conn = conn.sync_connection @@ -1522,3 +1523,10 @@ async def thing(engine): tasks = [thing(engine) for _ in range(10)] await asyncio.gather(*tasks) + + +def _select1(engine): + if engine.dialect.name == "oracle": + return "SELECT 1 FROM DUAL" + else: + return "SELECT 1" diff --git a/test/typing/plain_files/engine/engines.py b/test/typing/plain_files/engine/engines.py index 5777b914841..a204fb9182f 100644 --- a/test/typing/plain_files/engine/engines.py +++ b/test/typing/plain_files/engine/engines.py @@ -1,5 +1,6 @@ from sqlalchemy import 
create_engine from sqlalchemy import Pool +from sqlalchemy import select from sqlalchemy import text @@ -30,5 +31,9 @@ def regular() -> None: engine = create_engine("postgresql://scott:tiger@localhost/test") status: str = engine.pool.status() other_pool: Pool = engine.pool.recreate() + ce = select(1).compile(e) + ce.statement + cc = select(1).compile(conn) + cc.statement print(status, other_pool) diff --git a/test/typing/plain_files/ext/asyncio/engines.py b/test/typing/plain_files/ext/asyncio/engines.py index 01475dc71e5..1f7843082a9 100644 --- a/test/typing/plain_files/ext/asyncio/engines.py +++ b/test/typing/plain_files/ext/asyncio/engines.py @@ -1,6 +1,7 @@ from typing import Any from sqlalchemy import Connection +from sqlalchemy import select from sqlalchemy import text from sqlalchemy.ext.asyncio import create_async_engine @@ -65,3 +66,8 @@ async def asyncio() -> None: # EXPECTED_MYPY: Missing positional argument "foo" in call to "run_sync" of "AsyncConnection" await conn.run_sync(work_sync) + + ce = select(1).compile(e) + ce.statement + cc = select(1).compile(conn) + cc.statement From 69e59ab06df24ad2c8bec26da4a2fc2c6af33d74 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 11 Mar 2024 21:58:46 +0100 Subject: [PATCH 135/544] fix usage of kwargs to execute in docs Change-Id: I033cba49ba6c12113643b88e48c5917f2b70a307 (cherry picked from commit af4df5e2a2405cfef3aa26bbb8f48e24d954a370) --- doc/build/core/custom_types.rst | 5 +++- doc/build/errors.rst | 2 +- lib/sqlalchemy/dialects/postgresql/json.py | 2 +- lib/sqlalchemy/sql/_elements_constructors.py | 30 +++++++++++--------- lib/sqlalchemy/sql/elements.py | 5 ++-- 5 files changed, 26 insertions(+), 18 deletions(-) diff --git a/doc/build/core/custom_types.rst b/doc/build/core/custom_types.rst index b9d8953b4e8..f9c02052499 100644 --- a/doc/build/core/custom_types.rst +++ b/doc/build/core/custom_types.rst @@ -527,7 +527,10 @@ transparently:: with engine.begin() as conn: metadata_obj.create_all(conn) - conn.execute(message.insert(), username="some user", message="this is my message") + conn.execute( + message.insert(), + {"username": "some user", "message": "this is my message"}, + ) print( conn.scalar(select(message.c.message).where(message.c.username == "some user")) diff --git a/doc/build/errors.rst b/doc/build/errors.rst index d6645123154..4c12e0fb179 100644 --- a/doc/build/errors.rst +++ b/doc/build/errors.rst @@ -572,7 +572,7 @@ is executed:: Above, no value has been provided for the parameter "my_param". The correct approach is to provide a value:: - result = conn.execute(stmt, my_param=12) + result = conn.execute(stmt, {"my_param": 12}) When the message takes the form "a value is required for bind parameter in parameter group ", the message is referring to the "executemany" style diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index dff12e7f498..3790fa359b1 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -155,7 +155,7 @@ def __init__(self, none_as_null=False, astext_type=None): be used to persist a NULL value:: from sqlalchemy import null - conn.execute(table.insert(), data=null()) + conn.execute(table.insert(), {"data": null()}) .. 
seealso:: diff --git a/lib/sqlalchemy/sql/_elements_constructors.py b/lib/sqlalchemy/sql/_elements_constructors.py index 27bac59e126..77cc2a8021d 100644 --- a/lib/sqlalchemy/sql/_elements_constructors.py +++ b/lib/sqlalchemy/sql/_elements_constructors.py @@ -493,8 +493,9 @@ def bindparam( from sqlalchemy import bindparam - stmt = select(users_table).\ - where(users_table.c.name == bindparam('username')) + stmt = select(users_table).where( + users_table.c.name == bindparam("username") + ) The above statement, when rendered, will produce SQL similar to:: @@ -504,22 +505,25 @@ def bindparam( would typically be applied at execution time to a method like :meth:`_engine.Connection.execute`:: - result = connection.execute(stmt, username='wendy') + result = connection.execute(stmt, {"username": "wendy"}) Explicit use of :func:`.bindparam` is also common when producing UPDATE or DELETE statements that are to be invoked multiple times, where the WHERE criterion of the statement is to change on each invocation, such as:: - stmt = (users_table.update(). - where(user_table.c.name == bindparam('username')). - values(fullname=bindparam('fullname')) - ) + stmt = ( + users_table.update() + .where(user_table.c.name == bindparam("username")) + .values(fullname=bindparam("fullname")) + ) connection.execute( - stmt, [{"username": "wendy", "fullname": "Wendy Smith"}, - {"username": "jack", "fullname": "Jack Jones"}, - ] + stmt, + [ + {"username": "wendy", "fullname": "Wendy Smith"}, + {"username": "jack", "fullname": "Jack Jones"}, + ], ) SQLAlchemy's Core expression system makes wide use of @@ -568,7 +572,7 @@ def bindparam( bound placeholders based on the arguments passed, as in:: stmt = users_table.insert() - result = connection.execute(stmt, name='Wendy') + result = connection.execute(stmt, {"name": "Wendy"}) The above will produce SQL output as:: @@ -1589,7 +1593,7 @@ def text(text: str) -> TextClause: E.g.:: t = text("SELECT * FROM users WHERE id=:user_id") - result = connection.execute(t, user_id=12) + result = connection.execute(t, {"user_id": 12}) For SQL statements where a colon is required verbatim, as within an inline string, use a backslash to escape:: @@ -1619,7 +1623,7 @@ def text(text: str) -> TextClause: such as for the WHERE clause of a SELECT statement:: s = select(users.c.id, users.c.name).where(text("id=:user_id")) - result = connection.execute(s, user_id=12) + result = connection.execute(s, {"user_id": 12}) :func:`_expression.text` is also used for the construction of a full, standalone statement using plain text. diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index e5a9fb0624c..fe9f09e6b7a 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -1911,8 +1911,9 @@ class BindParameter(roles.InElementRole, KeyedColumnElement[_T]): from sqlalchemy import bindparam - stmt = select(users_table).\ - where(users_table.c.name == bindparam('username')) + stmt = select(users_table).where( + users_table.c.name == bindparam("username") + ) Detailed discussion of how :class:`.BindParameter` is used is at :func:`.bindparam`. From 746c3a34fb9995b78a651e0b288903f18423ff96 Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Mon, 11 Mar 2024 21:59:51 +0100 Subject: [PATCH 136/544] fix imports in "Self-Referential Many-to-Many Relationship" code example (#11120) Some were unused, some were missing. 
(cherry picked from commit 461ababb789dd024f4e9d0c5f06e405eb911026e) --- doc/build/orm/join_conditions.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/build/orm/join_conditions.rst b/doc/build/orm/join_conditions.rst index 5846b5d206f..1a26d94a8b7 100644 --- a/doc/build/orm/join_conditions.rst +++ b/doc/build/orm/join_conditions.rst @@ -543,9 +543,9 @@ is when establishing a many-to-many relationship from a class to itself, as show from typing import List - from sqlalchemy import Integer, ForeignKey, String, Column, Table - from sqlalchemy.orm import DeclarativeBase - from sqlalchemy.orm import relationship + from sqlalchemy import Integer, ForeignKey, Column, Table + from sqlalchemy.orm import DeclarativeBase, Mapped + from sqlalchemy.orm import mapped_column, relationship class Base(DeclarativeBase): From 9d4152911e1e0d95f0753df3d501bbffc8b41040 Mon Sep 17 00:00:00 2001 From: acceptacross <150119116+acceptacross@users.noreply.github.com> Date: Tue, 12 Mar 2024 05:01:42 +0800 Subject: [PATCH 137/544] chore: remove repetitive words (#11134) Signed-off-by: acceptacross (cherry picked from commit 64b661d7058818ad6852b208a877804eba294d91) --- doc/build/changelog/changelog_14.rst | 6 +++--- doc/build/changelog/changelog_20.rst | 2 +- doc/build/orm/declarative_mixins.rst | 2 +- doc/build/tutorial/data_select.rst | 2 +- test/dialect/test_sqlite.py | 2 +- test/orm/test_selectin_relations.py | 4 ++-- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/doc/build/changelog/changelog_14.rst b/doc/build/changelog/changelog_14.rst index 55e671e18b8..1d6a3f775ae 100644 --- a/doc/build/changelog/changelog_14.rst +++ b/doc/build/changelog/changelog_14.rst @@ -976,7 +976,7 @@ This document details individual issue-level changes made throughout Fixed regression where using ORM update() with synchronize_session='fetch' would fail due to the use of evaluators that are now used to determine the - in-Python value for expressions in the the SET clause when refreshing + in-Python value for expressions in the SET clause when refreshing objects; if the evaluators make use of math operators against non-numeric values such as PostgreSQL JSONB, the non-evaluable condition would fail to be detected correctly. The evaluator now limits the use of math mutation @@ -2810,7 +2810,7 @@ This document details individual issue-level changes made throughout :class:`_result.Result` class and implemented it for the filtered result implementations that are used by the ORM, so that it is possible to call the :meth:`_engine.CursorResult.close` method on the underlying - :class:`_engine.CursorResult` when the the ``yield_per`` execution option + :class:`_engine.CursorResult` when the ``yield_per`` execution option is in use to close a server side cursor before remaining ORM results have been fetched. This was again already available for Core result sets but the change makes it available for 2.0 style ORM results as well. @@ -9141,7 +9141,7 @@ This document details individual issue-level changes made throughout cascade operation actually takes place. The new behavior can be established as always by setting the flag to ``False`` on a specific :func:`_orm.relationship`, or more generally can be set up across the board - by setting the the :paramref:`_orm.Session.future` flag to True. + by setting the :paramref:`_orm.Session.future` flag to True. .. 
seealso:: diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 30b1f9579fe..8e3ee935b98 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -3612,7 +3612,7 @@ Added an error message when a :func:`_orm.relationship` is mapped against an abstract container type, such as ``Mapped[Sequence[B]]``, without providing the :paramref:`_orm.relationship.container_class` parameter which - is necessary when the type is abstract. Previously the the abstract + is necessary when the type is abstract. Previously the abstract container would attempt to be instantiated at a later step and fail. diff --git a/doc/build/orm/declarative_mixins.rst b/doc/build/orm/declarative_mixins.rst index 0ee8a952bb8..9f26207c07a 100644 --- a/doc/build/orm/declarative_mixins.rst +++ b/doc/build/orm/declarative_mixins.rst @@ -152,7 +152,7 @@ Augmenting the Base In addition to using a pure mixin, most of the techniques in this section can also be applied to the base class directly, for patterns that should apply to all classes derived from a particular base. The example -below illustrates some of the the previous section's example in terms of the +below illustrates some of the previous section's example in terms of the ``Base`` class:: from sqlalchemy import ForeignKey diff --git a/doc/build/tutorial/data_select.rst b/doc/build/tutorial/data_select.rst index 42b484de8e4..aa77539b97b 100644 --- a/doc/build/tutorial/data_select.rst +++ b/doc/build/tutorial/data_select.rst @@ -447,7 +447,7 @@ explicitly:: FROM user_account JOIN address ON user_account.id = address.user_id -The other is the the :meth:`_sql.Select.join` method, which indicates only the +The other is the :meth:`_sql.Select.join` method, which indicates only the right side of the JOIN, the left hand-side is inferred:: >>> print(select(user_table.c.name, address_table.c.email_address).join(address_table)) diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index 245b762cf37..07612480f2a 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -3462,7 +3462,7 @@ def test_on_conflict_do_update_no_row_actually_affected(self, connection): ) # The last inserted primary key should be 2 here - # it is taking the result from the the exotic fixture + # it is taking the result from the exotic fixture eq_(result.inserted_primary_key, (2,)) eq_( diff --git a/test/orm/test_selectin_relations.py b/test/orm/test_selectin_relations.py index 93b3d8710ce..d46362abdc8 100644 --- a/test/orm/test_selectin_relations.py +++ b/test/orm/test_selectin_relations.py @@ -3340,7 +3340,7 @@ def test_use_join_parent_criteria_degrade_on_defer(self): "FROM a WHERE a.id IN (__[POSTCOMPILE_id_1]) ORDER BY a.id", [{"id_1": [1, 3]}], ), - # in the very unlikely case that the the FK col on parent is + # in the very unlikely case that the FK col on parent is # deferred, we degrade to the JOIN version so that we don't need to # emit either for each parent object individually, or as a second # query for them. @@ -3431,7 +3431,7 @@ def test_use_join_parent_degrade_on_defer(self): CompiledSQL( "SELECT a.id AS a_id, a.q AS a_q FROM a ORDER BY a.id", [{}] ), - # in the very unlikely case that the the FK col on parent is + # in the very unlikely case that the FK col on parent is # deferred, we degrade to the JOIN version so that we don't need to # emit either for each parent object individually, or as a second # query for them. 
From f745e5cfebd65c2fcdb2d8b021371618c7a6bbc2 Mon Sep 17 00:00:00 2001 From: Jens Troeger Date: Mon, 11 Mar 2024 17:11:45 -0400 Subject: [PATCH 138/544] add a docs cross-reference between adding columns & relationships to existing table mappings For context see discussion https://github.com/sqlalchemy/sqlalchemy/discussions/11124. This change adds the requested cross-reference to the documentation. This pull request is: - [X] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #11133 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11133 Pull-request-sha: f8cc7a9c510f058f75bcb4308f3f398b8ae70de8 Change-Id: Ic683354fa05560d869d47ceda820d88e758e2973 (cherry picked from commit ba9c86f2075bd4eb1d71caca58b6da6fe19e35ac) --- doc/build/orm/basic_relationships.rst | 4 +++- doc/build/orm/declarative_tables.rst | 4 ++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/doc/build/orm/basic_relationships.rst b/doc/build/orm/basic_relationships.rst index 0860f69fcf5..a1bdb0525c3 100644 --- a/doc/build/orm/basic_relationships.rst +++ b/doc/build/orm/basic_relationships.rst @@ -1102,8 +1102,10 @@ that will be passed to ``eval()`` are: are **evaluated as Python code expressions using eval(). DO NOT PASS UNTRUSTED INPUT TO THESE ARGUMENTS.** +.. _orm_declarative_table_adding_relationship: + Adding Relationships to Mapped Classes After Declaration -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It should also be noted that in a similar way as described at :ref:`orm_declarative_table_adding_columns`, any :class:`_orm.MapperProperty` diff --git a/doc/build/orm/declarative_tables.rst b/doc/build/orm/declarative_tables.rst index 4a1cbd0da3d..d2ed13841f2 100644 --- a/doc/build/orm/declarative_tables.rst +++ b/doc/build/orm/declarative_tables.rst @@ -1158,6 +1158,10 @@ additional columns are present on mapped subclasses that have no :class:`.Table` of their own. This is illustrated in the section :ref:`single_inheritance`. +.. seealso:: + + :ref:`orm_declarative_table_adding_relationship` - similar examples for :func:`_orm.relationship` + .. 
note:: Assignment of mapped properties to an already mapped class will only function correctly if the "declarative base" class is used, meaning From 0dda05a5e6b266858a5a56ce8d87b371ff5d47c3 Mon Sep 17 00:00:00 2001 From: Daniel Robert Date: Mon, 11 Mar 2024 14:34:20 -0700 Subject: [PATCH 139/544] Fixes: #11083 (#11095) (cherry picked from commit 3551c7b66ab0318deef419fbe61fe038b6e2825c) Change-Id: I959dff7f4ee6218267627e878283e3c48b88b314 --- lib/sqlalchemy/engine/util.py | 3 ++- test/typing/plain_files/orm/session.py | 6 ++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py index 3d95ac58625..186ca4c3201 100644 --- a/lib/sqlalchemy/engine/util.py +++ b/lib/sqlalchemy/engine/util.py @@ -17,6 +17,7 @@ from .. import util from ..util._has_cy import HAS_CYEXTENSION from ..util.typing import Protocol +from ..util.typing import Self if typing.TYPE_CHECKING or not HAS_CYEXTENSION: from ._py_util import _distill_params_20 as _distill_params_20 @@ -113,7 +114,7 @@ def _trans_ctx_check(cls, subject: _TConsSubject) -> None: "before emitting further commands." ) - def __enter__(self) -> TransactionalContext: + def __enter__(self) -> Self: subject = self._get_subject() # none for outer transaction, may be non-None for nested diff --git a/test/typing/plain_files/orm/session.py b/test/typing/plain_files/orm/session.py index 0f1c35eafa1..43fb17a7542 100644 --- a/test/typing/plain_files/orm/session.py +++ b/test/typing/plain_files/orm/session.py @@ -97,6 +97,12 @@ class Address(Base): User.id ).offset(User.id) + # test #11083 + + with sess.begin() as tx: + # EXPECTED_TYPE: SessionTransaction + reveal_type(tx) + # more result tests in typed_results.py From 01c537dc172dd15055dcf72c7e9077cfb6a976c1 Mon Sep 17 00:00:00 2001 From: Andreas Deininger Date: Mon, 11 Mar 2024 22:40:14 +0100 Subject: [PATCH 140/544] Fixing typos (#11105) (cherry picked from commit 1f6a129d80453e274d1e14631f065b1afaa37b96) Change-Id: I5d6197cae2470e3df8d09b3ca8ea186a91e4811a --- doc/build/changelog/changelog_20.rst | 2 +- doc/build/changelog/whatsnew_20.rst | 2 +- doc/build/core/custom_types.rst | 2 +- doc/build/core/operators.rst | 2 +- doc/build/index.rst | 2 +- doc/build/orm/collection_api.rst | 2 +- doc/build/orm/composites.rst | 2 +- doc/build/orm/dataclasses.rst | 2 +- doc/build/orm/declarative_tables.rst | 4 ++-- doc/build/orm/persistence_techniques.rst | 4 ++-- doc/build/orm/queryguide/columns.rst | 2 +- doc/build/orm/queryguide/dml.rst | 6 +++--- doc/build/tutorial/data_update.rst | 2 +- doc/build/tutorial/orm_data_manipulation.rst | 2 +- 14 files changed, 18 insertions(+), 18 deletions(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 8e3ee935b98..8dc3bb9c762 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -27,7 +27,7 @@ value the DBAPI offers for any kind of statement will be available using the :attr:`_engine.CursorResult.rowcount` attribute from the :class:`_engine.CursorResult`. This allows the rowcount to be accessed for - statments such as INSERT and SELECT, to the degree supported by the DBAPI + statements such as INSERT and SELECT, to the degree supported by the DBAPI in use. The :ref:`engine_insertmanyvalues` also supports this option and will ensure :attr:`_engine.CursorResult.rowcount` is correctly set for a bulk INSERT of rows when set. 
diff --git a/doc/build/changelog/whatsnew_20.rst b/doc/build/changelog/whatsnew_20.rst index 179ed55f2da..230d5893ea3 100644 --- a/doc/build/changelog/whatsnew_20.rst +++ b/doc/build/changelog/whatsnew_20.rst @@ -1050,7 +1050,7 @@ implemented by :meth:`_orm.Session.bulk_insert_mappings`, with additional enhancements. This will optimize the batching of rows making use of the new :ref:`fast insertmany ` feature, while also adding support for -heterogenous parameter sets and multiple-table mappings like joined table +heterogeneous parameter sets and multiple-table mappings like joined table inheritance:: >>> users = session.scalars( diff --git a/doc/build/core/custom_types.rst b/doc/build/core/custom_types.rst index f9c02052499..90fc5031861 100644 --- a/doc/build/core/custom_types.rst +++ b/doc/build/core/custom_types.rst @@ -173,7 +173,7 @@ Backend-agnostic GUID Type .. note:: Since version 2.0 the built-in :class:`_types.Uuid` type that behaves similarly should be preferred. This example is presented - just as an example of a type decorator that recieves and returns + just as an example of a type decorator that receives and returns python objects. Receives and returns Python uuid() objects. diff --git a/doc/build/core/operators.rst b/doc/build/core/operators.rst index 0450aab03ee..35c25fe75c3 100644 --- a/doc/build/core/operators.rst +++ b/doc/build/core/operators.rst @@ -303,7 +303,7 @@ databases support: using the :meth:`_sql.ColumnOperators.__eq__` overloaded operator, i.e. ``==``, in conjunction with the ``None`` or :func:`_sql.null` value. In this way, there's typically not a need to use :meth:`_sql.ColumnOperators.is_` - explicitly, paricularly when used with a dynamic value:: + explicitly, particularly when used with a dynamic value:: >>> a = None >>> print(column("x") == a) diff --git a/doc/build/index.rst b/doc/build/index.rst index 37b807723f3..4ca88fa59c5 100644 --- a/doc/build/index.rst +++ b/doc/build/index.rst @@ -168,7 +168,7 @@ SQLAlchemy Documentation * :doc:`Frequently Asked Questions ` - A collection of common problems and solutions * :doc:`Glossary ` - Terms used in SQLAlchemy's documentation - * :doc:`Error Message Guide ` - Explainations of many SQLAlchemy Errors + * :doc:`Error Message Guide ` - Explanations of many SQLAlchemy Errors * :doc:`Complete table of of contents ` * :ref:`Index ` diff --git a/doc/build/orm/collection_api.rst b/doc/build/orm/collection_api.rst index 3d05981a819..be8e4ea9516 100644 --- a/doc/build/orm/collection_api.rst +++ b/doc/build/orm/collection_api.rst @@ -129,7 +129,7 @@ Python code, as well as in a few special cases, the collection class for a In the absence of :paramref:`_orm.relationship.collection_class` or :class:`_orm.Mapped`, the default collection type is ``list``. -Beyond ``list`` and ``set`` builtins, there is also support for two varities of +Beyond ``list`` and ``set`` builtins, there is also support for two varieties of dictionary, described below at :ref:`orm_dictionary_collection`. 
There is also support for any arbitrary mutable sequence type can be set up as the target collection, with some additional configuration steps; this is described in the diff --git a/doc/build/orm/composites.rst b/doc/build/orm/composites.rst index 2e625509e02..b0ddb9ea488 100644 --- a/doc/build/orm/composites.rst +++ b/doc/build/orm/composites.rst @@ -182,7 +182,7 @@ Other mapping forms for composites The :func:`_orm.composite` construct may be passed the relevant columns using a :func:`_orm.mapped_column` construct, a :class:`_schema.Column`, or the string name of an existing mapped column. The following examples -illustrate an equvalent mapping as that of the main section above. +illustrate an equivalent mapping as that of the main section above. * Map columns directly, then pass to composite diff --git a/doc/build/orm/dataclasses.rst b/doc/build/orm/dataclasses.rst index 1fa37938ec6..2c45a4d0196 100644 --- a/doc/build/orm/dataclasses.rst +++ b/doc/build/orm/dataclasses.rst @@ -424,7 +424,7 @@ scalar object references may make use of The above mapping will generate an empty list for ``Parent.children`` when a new ``Parent()`` object is constructed without passing ``children``, and similarly a ``None`` value for ``Child.parent`` when a new ``Child()`` object -is constructed without passsing ``parent``. +is constructed without passing ``parent``. While the :paramref:`_orm.relationship.default_factory` can be automatically derived from the given collection class of the :func:`_orm.relationship` diff --git a/doc/build/orm/declarative_tables.rst b/doc/build/orm/declarative_tables.rst index d2ed13841f2..b2c91981b3e 100644 --- a/doc/build/orm/declarative_tables.rst +++ b/doc/build/orm/declarative_tables.rst @@ -237,7 +237,7 @@ The two qualities that :func:`_orm.mapped_column` derives from the In the absence of **both** of these parameters, the presence of ``typing.Optional[]`` within the :class:`_orm.Mapped` type annotation will be used to determine nullability, where ``typing.Optional[]`` means ``NULL``, - and the absense of ``typing.Optional[]`` means ``NOT NULL``. If there is no + and the absence of ``typing.Optional[]`` means ``NOT NULL``. If there is no ``Mapped[]`` annotation present at all, and there is no :paramref:`_orm.mapped_column.nullable` or :paramref:`_orm.mapped_column.primary_key` parameter, then SQLAlchemy's usual @@ -539,7 +539,7 @@ specific to each attribute:: When using ``Annotated`` types in this way, the configuration of the type may also be affected on a per-attribute basis. For the types in the above -example that feature explcit use of :paramref:`_orm.mapped_column.nullable`, +example that feature explicit use of :paramref:`_orm.mapped_column.nullable`, we can apply the ``Optional[]`` generic modifier to any of our types so that the field is optional or not at the Python level, which will be independent of the ``NULL`` / ``NOT NULL`` setting that takes place in the database:: diff --git a/doc/build/orm/persistence_techniques.rst b/doc/build/orm/persistence_techniques.rst index 69fad33b22a..da914e5c939 100644 --- a/doc/build/orm/persistence_techniques.rst +++ b/doc/build/orm/persistence_techniques.rst @@ -332,7 +332,7 @@ Case 2: Table includes trigger-generated values which are not compatible with RE The ``"auto"`` setting of :paramref:`_orm.Mapper.eager_defaults` means that a backend that supports RETURNING will usually make use of RETURNING with -INSERT statements in order to retreive newly generated default values. 
+INSERT statements in order to retrieve newly generated default values. However there are limitations of server-generated values that are generated using triggers, such that RETURNING can't be used: @@ -367,7 +367,7 @@ this looks like:: On SQL Server with the pyodbc driver, an INSERT for the above table will not use RETURNING and will use the SQL Server ``scope_identity()`` function -to retreive the newly generated primary key value: +to retrieve the newly generated primary key value: .. sourcecode:: sql diff --git a/doc/build/orm/queryguide/columns.rst b/doc/build/orm/queryguide/columns.rst index 93d0919ba56..ace6a63f4ce 100644 --- a/doc/build/orm/queryguide/columns.rst +++ b/doc/build/orm/queryguide/columns.rst @@ -595,7 +595,7 @@ by default not loadable:: ... sqlalchemy.exc.InvalidRequestError: 'Book.summary' is not available due to raiseload=True -Only by overridding their behavior at query time, typically using +Only by overriding their behavior at query time, typically using :func:`_orm.undefer` or :func:`_orm.undefer_group`, or less commonly :func:`_orm.defer`, may the attributes be loaded. The example below applies ``undefer('*')`` to undefer all attributes, also making use of diff --git a/doc/build/orm/queryguide/dml.rst b/doc/build/orm/queryguide/dml.rst index a2c10c1bb34..a4b00da7257 100644 --- a/doc/build/orm/queryguide/dml.rst +++ b/doc/build/orm/queryguide/dml.rst @@ -204,7 +204,7 @@ the operation will INSERT one row at a time:: .. _orm_queryguide_insert_heterogeneous_params: -Using Heterogenous Parameter Dictionaries +Using Heterogeneous Parameter Dictionaries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. Setup code, not for display @@ -215,7 +215,7 @@ Using Heterogenous Parameter Dictionaries BEGIN (implicit)... The ORM bulk insert feature supports lists of parameter dictionaries that are -"heterogenous", which basically means "individual dictionaries can have different +"heterogeneous", which basically means "individual dictionaries can have different keys". When this condition is detected, the ORM will break up the parameter dictionaries into groups corresponding to each set of keys and batch accordingly into separate INSERT statements:: @@ -552,7 +552,7 @@ are not present: or other multi-table mappings are not supported, since that would require multiple INSERT statements. -* :ref:`Heterogenous parameter sets ` +* :ref:`Heterogeneous parameter sets ` are not supported - each element in the VALUES set must have the same columns. diff --git a/doc/build/tutorial/data_update.rst b/doc/build/tutorial/data_update.rst index 48cf5c058aa..e32b6676c76 100644 --- a/doc/build/tutorial/data_update.rst +++ b/doc/build/tutorial/data_update.rst @@ -280,7 +280,7 @@ Facts about :attr:`_engine.CursorResult.rowcount`: * :attr:`_engine.CursorResult.rowcount` is not necessarily available for an UPDATE or DELETE statement that uses RETURNING, or for one that uses an - :ref:`executemany ` execution. The availablility + :ref:`executemany ` execution. The availability depends on the DBAPI module in use. 
* In any case where the DBAPI does not determine the rowcount for some type diff --git a/doc/build/tutorial/orm_data_manipulation.rst b/doc/build/tutorial/orm_data_manipulation.rst index b4beae0e070..9329d205245 100644 --- a/doc/build/tutorial/orm_data_manipulation.rst +++ b/doc/build/tutorial/orm_data_manipulation.rst @@ -157,7 +157,7 @@ Another effect of the INSERT that occurred was that the ORM has retrieved the new primary key identifiers for each new object; internally it normally uses the same :attr:`_engine.CursorResult.inserted_primary_key` accessor we introduced previously. The ``squidward`` and ``krabs`` objects now have these new -primary key identifiers associated with them and we can view them by acesssing +primary key identifiers associated with them and we can view them by accessing the ``id`` attribute:: >>> squidward.id From 442832637ac64378abdad8f209b4f6be6e942944 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 11 Mar 2024 22:10:55 +0000 Subject: [PATCH 141/544] Merge "Fixes: #10933 typing in ColumnExpressionArgument" into main (cherry picked from commit 716189460f69a9f44dce3af1d47eab4560def86b) --- lib/sqlalchemy/orm/_orm_constructors.py | 5 ++++- lib/sqlalchemy/orm/util.py | 5 ++++- lib/sqlalchemy/sql/lambdas.py | 2 +- test/typing/plain_files/orm/orm_querying.py | 10 ++++++++++ 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index f74de91c1d9..b9f618af0d7 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -716,7 +716,10 @@ def composite( def with_loader_criteria( entity_or_base: _EntityType[Any], - where_criteria: _ColumnExpressionArgument[bool], + where_criteria: Union[ + _ColumnExpressionArgument[bool], + Callable[[Any], _ColumnExpressionArgument[bool]], + ], loader_only: bool = False, include_aliases: bool = False, propagate_to_loaders: bool = True, diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index d2e3f5302ae..f8431386e4e 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -1378,7 +1378,10 @@ class LoaderCriteriaOption(CriteriaOption): def __init__( self, entity_or_base: _EntityType[Any], - where_criteria: _ColumnExpressionArgument[bool], + where_criteria: Union[ + _ColumnExpressionArgument[bool], + Callable[[Any], _ColumnExpressionArgument[bool]], + ], loader_only: bool = False, include_aliases: bool = False, propagate_to_loaders: bool = True, diff --git a/lib/sqlalchemy/sql/lambdas.py b/lib/sqlalchemy/sql/lambdas.py index 726fa2411f8..7a6b7b8f776 100644 --- a/lib/sqlalchemy/sql/lambdas.py +++ b/lib/sqlalchemy/sql/lambdas.py @@ -437,7 +437,7 @@ class DeferredLambdaElement(LambdaElement): def __init__( self, - fn: _LambdaType, + fn: _AnyLambdaType, role: Type[roles.SQLRole], opts: Union[Type[LambdaOptions], LambdaOptions] = LambdaOptions, lambda_args: Tuple[Any, ...] 
= (), diff --git a/test/typing/plain_files/orm/orm_querying.py b/test/typing/plain_files/orm/orm_querying.py index fa59baad43a..3251147dd68 100644 --- a/test/typing/plain_files/orm/orm_querying.py +++ b/test/typing/plain_files/orm/orm_querying.py @@ -1,5 +1,6 @@ from __future__ import annotations +from sqlalchemy import ColumnElement from sqlalchemy import ForeignKey from sqlalchemy import orm from sqlalchemy import select @@ -124,3 +125,12 @@ def load_options_error() -> None: # EXPECTED_MYPY_RE: Argument 1 to .* has incompatible type .* orm.undefer(B.a).undefer("bar"), ) + + +# test 10959 +def test_10959_with_loader_criteria() -> None: + def where_criteria(cls_: type[A]) -> ColumnElement[bool]: + return cls_.data == "some data" + + orm.with_loader_criteria(A, lambda cls: cls.data == "some data") + orm.with_loader_criteria(A, where_criteria) From 264d9202b8e5fd9d487b6cd5a62d699268e93e46 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 11 Mar 2024 23:11:28 +0100 Subject: [PATCH 142/544] Mention getitem on automap classes property Fixes #11097 Change-Id: I05198c8288e11fb0c645e9a7d46652fa979b56f7 (cherry picked from commit f0822f0d930c33ec66e440db99e90641f612338c) --- lib/sqlalchemy/ext/automap.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py index 7d1f92534a3..76986077c83 100644 --- a/lib/sqlalchemy/ext/automap.py +++ b/lib/sqlalchemy/ext/automap.py @@ -1002,6 +1002,12 @@ class that is produced by the :func:`.declarative.declarative_base` User, Address = Base.classes.User, Base.classes.Address + For class names that overlap with a method name of + :class:`.util.Properties`, such as ``items()``, the getitem form + is also supported:: + + Item = Base.classes["item"] + """ by_module: ClassVar[ByModuleProperties] From 0e532a69a159988069402d40d16f3253be8da2b1 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 11 Mar 2024 22:35:49 +0000 Subject: [PATCH 143/544] Merge "Update docs references to .execute() with string literal" into main (cherry picked from commit ca759fdc6c105922141898b29dfaafbd8b76b873) --- doc/build/core/connections.rst | 28 ++++++++++++------------ doc/build/core/engines.rst | 6 ++--- doc/build/core/reflection.rst | 5 +++-- doc/build/orm/persistence_techniques.rst | 2 +- doc/build/orm/session_transaction.rst | 8 +++---- lib/sqlalchemy/orm/events.py | 2 +- 6 files changed, 26 insertions(+), 25 deletions(-) diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst index 1de53fdc85a..597d317f072 100644 --- a/doc/build/core/connections.rst +++ b/doc/build/core/connections.rst @@ -140,15 +140,15 @@ each time the transaction is ended, and a new statement is emitted, a new transaction begins implicitly:: with engine.connect() as connection: - connection.execute("") + connection.execute(text("")) connection.commit() # commits "some statement" # new transaction starts - connection.execute("") + connection.execute(text("")) connection.rollback() # rolls back "some other statement" # new transaction starts - connection.execute("") + connection.execute(text("")) connection.commit() # commits "a third statement" .. versionadded:: 2.0 "commit as you go" style is a new feature of @@ -321,7 +321,7 @@ begin a transaction:: isolation_level="REPEATABLE READ" ) as connection: with connection.begin(): - connection.execute("") + connection.execute(text("")) .. 
tip:: The return value of the :meth:`_engine.Connection.execution_options` method is the same @@ -443,8 +443,8 @@ If we wanted to check out a :class:`_engine.Connection` object and use it with engine.connect() as connection: connection.execution_options(isolation_level="AUTOCOMMIT") - connection.execute("") - connection.execute("") + connection.execute(text("")) + connection.execute(text("")) Above illustrates normal usage of "DBAPI autocommit" mode. There is no need to make use of methods such as :meth:`_engine.Connection.begin` @@ -472,8 +472,8 @@ In the example below, statements remain # this begin() does not affect the DBAPI connection, isolation stays at AUTOCOMMIT with connection.begin() as trans: - connection.execute("") - connection.execute("") + connection.execute(text("")) + connection.execute(text("")) When we run a block like the above with logging turned on, the logging will attempt to indicate that while a DBAPI level ``.commit()`` is called, @@ -496,11 +496,11 @@ called after autobegin has already occurred:: connection = connection.execution_options(isolation_level="AUTOCOMMIT") # "transaction" is autobegin (but has no effect due to autocommit) - connection.execute("") + connection.execute(text("")) # this will raise; "transaction" is already begun with connection.begin() as trans: - connection.execute("") + connection.execute(text("")) The above example also demonstrates the same theme that the "autocommit" isolation level is a configurational detail of the underlying database @@ -545,7 +545,7 @@ before we call upon :meth:`_engine.Connection.begin`:: connection.execution_options(isolation_level="AUTOCOMMIT") # run statement(s) in autocommit mode - connection.execute("") + connection.execute(text("")) # "commit" the autobegun "transaction" connection.commit() @@ -555,7 +555,7 @@ before we call upon :meth:`_engine.Connection.begin`:: # use a begin block with connection.begin() as trans: - connection.execute("") + connection.execute(text("")) Above, to manually revert the isolation level we made use of :attr:`_engine.Connection.default_isolation_level` to restore the default @@ -568,11 +568,11 @@ use two blocks :: # use an autocommit block with engine.connect().execution_options(isolation_level="AUTOCOMMIT") as connection: # run statement in autocommit mode - connection.execute("") + connection.execute(text("")) # use a regular block with engine.begin() as connection: - connection.execute("") + connection.execute(text("")) To sum up: diff --git a/doc/build/core/engines.rst b/doc/build/core/engines.rst index 64c558a910a..ed9c2b1e4dd 100644 --- a/doc/build/core/engines.rst +++ b/doc/build/core/engines.rst @@ -649,7 +649,7 @@ tokens:: >>> from sqlalchemy import create_engine >>> e = create_engine("sqlite://", echo="debug") >>> with e.connect().execution_options(logging_token="track1") as conn: - ... conn.execute("select 1").all() + ... conn.execute(text("select 1")).all() 2021-02-03 11:48:45,754 INFO sqlalchemy.engine.Engine [track1] select 1 2021-02-03 11:48:45,754 INFO sqlalchemy.engine.Engine [track1] [raw sql] () 2021-02-03 11:48:45,754 DEBUG sqlalchemy.engine.Engine [track1] Col ('1',) @@ -666,14 +666,14 @@ of an application without creating new engines:: >>> e1 = e.execution_options(logging_token="track1") >>> e2 = e.execution_options(logging_token="track2") >>> with e1.connect() as conn: - ... conn.execute("select 1").all() + ... 
conn.execute(text("select 1")).all() 2021-02-03 11:51:08,960 INFO sqlalchemy.engine.Engine [track1] select 1 2021-02-03 11:51:08,960 INFO sqlalchemy.engine.Engine [track1] [raw sql] () 2021-02-03 11:51:08,960 DEBUG sqlalchemy.engine.Engine [track1] Col ('1',) 2021-02-03 11:51:08,961 DEBUG sqlalchemy.engine.Engine [track1] Row (1,) >>> with e2.connect() as conn: - ... conn.execute("select 2").all() + ... conn.execute(text("select 2")).all() 2021-02-03 11:52:05,518 INFO sqlalchemy.engine.Engine [track2] Select 1 2021-02-03 11:52:05,519 INFO sqlalchemy.engine.Engine [track2] [raw sql] () 2021-02-03 11:52:05,520 DEBUG sqlalchemy.engine.Engine [track2] Col ('1',) diff --git a/doc/build/core/reflection.rst b/doc/build/core/reflection.rst index 4f3805b7ed2..043f6f8ee7e 100644 --- a/doc/build/core/reflection.rst +++ b/doc/build/core/reflection.rst @@ -123,8 +123,9 @@ object's dictionary of tables:: metadata_obj = MetaData() metadata_obj.reflect(bind=someengine) - for table in reversed(metadata_obj.sorted_tables): - someengine.execute(table.delete()) + with someengine.begin() as conn: + for table in reversed(metadata_obj.sorted_tables): + conn.execute(table.delete()) .. _metadata_reflection_schemas: diff --git a/doc/build/orm/persistence_techniques.rst b/doc/build/orm/persistence_techniques.rst index da914e5c939..c7741ef9c2f 100644 --- a/doc/build/orm/persistence_techniques.rst +++ b/doc/build/orm/persistence_techniques.rst @@ -90,7 +90,7 @@ This is most easily accomplished using the session = Session() # execute a string statement - result = session.execute("select * from table where id=:id", {"id": 7}) + result = session.execute(text("select * from table where id=:id"), {"id": 7}) # execute a SQL expression construct result = session.execute(select(mytable).where(mytable.c.id == 7)) diff --git a/doc/build/orm/session_transaction.rst b/doc/build/orm/session_transaction.rst index 10da76eda80..55ade3e5326 100644 --- a/doc/build/orm/session_transaction.rst +++ b/doc/build/orm/session_transaction.rst @@ -60,7 +60,7 @@ or rolled back:: session.commit() # commits # will automatically begin again - result = session.execute("< some select statement >") + result = session.execute(text("< some select statement >")) session.add_all([more_objects, ...]) session.commit() # commits @@ -100,7 +100,7 @@ first:: session.commit() # commits - result = session.execute("") + result = session.execute(text("")) # remaining transactional state from the .execute() call is # discarded @@ -529,8 +529,8 @@ used in a read-only fashion**, that is:: with autocommit_session() as session: - some_objects = session.execute("") - some_other_objects = session.execute("") + some_objects = session.execute(text("")) + some_other_objects = session.execute(text("")) # closes connection diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index 2233d7ee156..1cd51bfd017 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -1939,7 +1939,7 @@ def after_soft_rollback( @event.listens_for(Session, "after_soft_rollback") def do_something(session, previous_transaction): if session.is_active: - session.execute("select * from some_table") + session.execute(text("select * from some_table")) :param session: The target :class:`.Session`. 
:param previous_transaction: The :class:`.SessionTransaction` From 9c217ea2e95d720928e40fb3a16c4f2706738868 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 26 Feb 2024 22:16:18 +0100 Subject: [PATCH 144/544] add doctests to asyncio tutorial Change-Id: I28c94a7bc1e7ae572af0d206b8e63a110dc6fd7a (cherry picked from commit e32954b91eef968be33ac4b46c16055daffa90dd) --- doc/build/orm/extensions/asyncio.rst | 366 ++++++++++++++++----------- test/base/test_tutorials.py | 103 ++------ 2 files changed, 243 insertions(+), 226 deletions(-) diff --git a/doc/build/orm/extensions/asyncio.rst b/doc/build/orm/extensions/asyncio.rst index 6649a981037..fbd965d15d9 100644 --- a/doc/build/orm/extensions/asyncio.rst +++ b/doc/build/orm/extensions/asyncio.rst @@ -64,47 +64,64 @@ methods which both deliver asynchronous context managers. The :class:`_asyncio.AsyncConnection` can then invoke statements using either the :meth:`_asyncio.AsyncConnection.execute` method to deliver a buffered :class:`_engine.Result`, or the :meth:`_asyncio.AsyncConnection.stream` method -to deliver a streaming server-side :class:`_asyncio.AsyncResult`:: - - import asyncio - - from sqlalchemy import Column - from sqlalchemy import MetaData - from sqlalchemy import select - from sqlalchemy import String - from sqlalchemy import Table - from sqlalchemy.ext.asyncio import create_async_engine - - meta = MetaData() - t1 = Table("t1", meta, Column("name", String(50), primary_key=True)) - - - async def async_main() -> None: - engine = create_async_engine( - "postgresql+asyncpg://scott:tiger@localhost/test", - echo=True, - ) - - async with engine.begin() as conn: - await conn.run_sync(meta.create_all) - - await conn.execute( - t1.insert(), [{"name": "some name 1"}, {"name": "some name 2"}] - ) - - async with engine.connect() as conn: - # select a Result, which will be delivered with buffered - # results - result = await conn.execute(select(t1).where(t1.c.name == "some name 1")) - - print(result.fetchall()) - - # for AsyncEngine created in function scope, close and - # clean-up pooled connections - await engine.dispose() - - - asyncio.run(async_main()) +to deliver a streaming server-side :class:`_asyncio.AsyncResult`: + +.. sourcecode:: pycon+sql + + >>> import asyncio + + >>> from sqlalchemy import Column + >>> from sqlalchemy import MetaData + >>> from sqlalchemy import select + >>> from sqlalchemy import String + >>> from sqlalchemy import Table + >>> from sqlalchemy.ext.asyncio import create_async_engine + + >>> meta = MetaData() + >>> t1 = Table("t1", meta, Column("name", String(50), primary_key=True)) + + + >>> async def async_main() -> None: + ... engine = create_async_engine("sqlite+aiosqlite://", echo=True) + ... + ... async with engine.begin() as conn: + ... await conn.run_sync(meta.drop_all) + ... await conn.run_sync(meta.create_all) + ... + ... await conn.execute( + ... t1.insert(), [{"name": "some name 1"}, {"name": "some name 2"}] + ... ) + ... + ... async with engine.connect() as conn: + ... # select a Result, which will be delivered with buffered + ... # results + ... result = await conn.execute(select(t1).where(t1.c.name == "some name 1")) + ... + ... print(result.fetchall()) + ... + ... # for AsyncEngine created in function scope, close and + ... # clean-up pooled connections + ... await engine.dispose() + + + >>> asyncio.run(async_main()) + {execsql}BEGIN (implicit) + ... + CREATE TABLE t1 ( + name VARCHAR(50) NOT NULL, + PRIMARY KEY (name) + ) + ... + INSERT INTO t1 (name) VALUES (?) + [...] 
[('some name 1',), ('some name 2',)] + COMMIT + BEGIN (implicit) + SELECT t1.name + FROM t1 + WHERE t1.name = ? + [...] ('some name 1',) + [('some name 1',)] + ROLLBACK Above, the :meth:`_asyncio.AsyncConnection.run_sync` method may be used to invoke special DDL functions such as :meth:`_schema.MetaData.create_all` that @@ -154,114 +171,165 @@ this. :ref:`asyncio_concurrency` and :ref:`session_faq_threadsafe` for background. The example below illustrates a complete example including mapper and session -configuration:: - - from __future__ import annotations - - import asyncio - import datetime - from typing import List - - from sqlalchemy import ForeignKey - from sqlalchemy import func - from sqlalchemy import select - from sqlalchemy.ext.asyncio import AsyncAttrs - from sqlalchemy.ext.asyncio import async_sessionmaker - from sqlalchemy.ext.asyncio import AsyncSession - from sqlalchemy.ext.asyncio import create_async_engine - from sqlalchemy.orm import DeclarativeBase - from sqlalchemy.orm import Mapped - from sqlalchemy.orm import mapped_column - from sqlalchemy.orm import relationship - from sqlalchemy.orm import selectinload - - - class Base(AsyncAttrs, DeclarativeBase): - pass - - - class A(Base): - __tablename__ = "a" - - id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[str] - create_date: Mapped[datetime.datetime] = mapped_column(server_default=func.now()) - bs: Mapped[List[B]] = relationship() - - - class B(Base): - __tablename__ = "b" - id: Mapped[int] = mapped_column(primary_key=True) - a_id: Mapped[int] = mapped_column(ForeignKey("a.id")) - data: Mapped[str] - - - async def insert_objects(async_session: async_sessionmaker[AsyncSession]) -> None: - async with async_session() as session: - async with session.begin(): - session.add_all( - [ - A(bs=[B(data="b1"), B(data="b2")], data="a1"), - A(bs=[], data="a2"), - A(bs=[B(data="b3"), B(data="b4")], data="a3"), - ] - ) - - - async def select_and_update_objects( - async_session: async_sessionmaker[AsyncSession], - ) -> None: - async with async_session() as session: - stmt = select(A).options(selectinload(A.bs)) - - result = await session.execute(stmt) - - for a in result.scalars(): - print(a) - print(f"created at: {a.create_date}") - for b in a.bs: - print(b, b.data) - - result = await session.execute(select(A).order_by(A.id).limit(1)) - - a1 = result.scalars().one() - - a1.data = "new data" - - await session.commit() - - # access attribute subsequent to commit; this is what - # expire_on_commit=False allows - print(a1.data) - - # alternatively, AsyncAttrs may be used to access any attribute - # as an awaitable (new in 2.0.13) - for b1 in await a1.awaitable_attrs.bs: - print(b1, b1.data) - - - async def async_main() -> None: - engine = create_async_engine( - "postgresql+asyncpg://scott:tiger@localhost/test", - echo=True, - ) - - # async_sessionmaker: a factory for new AsyncSession objects. - # expire_on_commit - don't expire objects after transaction commit - async_session = async_sessionmaker(engine, expire_on_commit=False) - - async with engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - await insert_objects(async_session) - await select_and_update_objects(async_session) - - # for AsyncEngine created in function scope, close and - # clean-up pooled connections - await engine.dispose() - - - asyncio.run(async_main()) +configuration: + +.. 
sourcecode:: pycon+sql + + >>> from __future__ import annotations + + >>> import asyncio + >>> import datetime + >>> from typing import List + + >>> from sqlalchemy import ForeignKey + >>> from sqlalchemy import func + >>> from sqlalchemy import select + >>> from sqlalchemy.ext.asyncio import AsyncAttrs + >>> from sqlalchemy.ext.asyncio import async_sessionmaker + >>> from sqlalchemy.ext.asyncio import AsyncSession + >>> from sqlalchemy.ext.asyncio import create_async_engine + >>> from sqlalchemy.orm import DeclarativeBase + >>> from sqlalchemy.orm import Mapped + >>> from sqlalchemy.orm import mapped_column + >>> from sqlalchemy.orm import relationship + >>> from sqlalchemy.orm import selectinload + + + >>> class Base(AsyncAttrs, DeclarativeBase): + ... pass + + >>> class B(Base): + ... __tablename__ = "b" + ... + ... id: Mapped[int] = mapped_column(primary_key=True) + ... a_id: Mapped[int] = mapped_column(ForeignKey("a.id")) + ... data: Mapped[str] + + >>> class A(Base): + ... __tablename__ = "a" + ... + ... id: Mapped[int] = mapped_column(primary_key=True) + ... data: Mapped[str] + ... create_date: Mapped[datetime.datetime] = mapped_column(server_default=func.now()) + ... bs: Mapped[List[B]] = relationship() + + >>> async def insert_objects(async_session: async_sessionmaker[AsyncSession]) -> None: + ... async with async_session() as session: + ... async with session.begin(): + ... session.add_all( + ... [ + ... A(bs=[B(data="b1"), B(data="b2")], data="a1"), + ... A(bs=[], data="a2"), + ... A(bs=[B(data="b3"), B(data="b4")], data="a3"), + ... ] + ... ) + + + >>> async def select_and_update_objects( + ... async_session: async_sessionmaker[AsyncSession], + ... ) -> None: + ... async with async_session() as session: + ... stmt = select(A).order_by(A.id).options(selectinload(A.bs)) + ... + ... result = await session.execute(stmt) + ... + ... for a in result.scalars(): + ... print(a, a.data) + ... print(f"created at: {a.create_date}") + ... for b in a.bs: + ... print(b, b.data) + ... + ... result = await session.execute(select(A).order_by(A.id).limit(1)) + ... + ... a1 = result.scalars().one() + ... + ... a1.data = "new data" + ... + ... await session.commit() + ... + ... # access attribute subsequent to commit; this is what + ... # expire_on_commit=False allows + ... print(a1.data) + ... + ... # alternatively, AsyncAttrs may be used to access any attribute + ... # as an awaitable (new in 2.0.13) + ... for b1 in await a1.awaitable_attrs.bs: + ... print(b1, b1.data) + + + >>> async def async_main() -> None: + ... engine = create_async_engine("sqlite+aiosqlite://", echo=True) + ... + ... # async_sessionmaker: a factory for new AsyncSession objects. + ... # expire_on_commit - don't expire objects after transaction commit + ... async_session = async_sessionmaker(engine, expire_on_commit=False) + ... + ... async with engine.begin() as conn: + ... await conn.run_sync(Base.metadata.create_all) + ... + ... await insert_objects(async_session) + ... await select_and_update_objects(async_session) + ... + ... # for AsyncEngine created in function scope, close and + ... # clean-up pooled connections + ... await engine.dispose() + + + >>> asyncio.run(async_main()) + {execsql}BEGIN (implicit) + ... + CREATE TABLE a ( + id INTEGER NOT NULL, + data VARCHAR NOT NULL, + create_date DATETIME DEFAULT (CURRENT_TIMESTAMP) NOT NULL, + PRIMARY KEY (id) + ) + ... 
+ CREATE TABLE b ( + id INTEGER NOT NULL, + a_id INTEGER NOT NULL, + data VARCHAR NOT NULL, + PRIMARY KEY (id), + FOREIGN KEY(a_id) REFERENCES a (id) + ) + ... + COMMIT + BEGIN (implicit) + INSERT INTO a (data) VALUES (?) RETURNING id, create_date + [...] ('a1',) + ... + INSERT INTO b (a_id, data) VALUES (?, ?) RETURNING id + [...] (1, 'b2') + ... + COMMIT + BEGIN (implicit) + SELECT a.id, a.data, a.create_date + FROM a ORDER BY a.id + [...] () + SELECT b.a_id AS b_a_id, b.id AS b_id, b.data AS b_data + FROM b + WHERE b.a_id IN (?, ?, ?) + [...] (1, 2, 3) + a1 + created at: ... + b1 + b2 + a2 + created at: ... + a3 + created at: ... + b3 + b4 + SELECT a.id, a.data, a.create_date + FROM a ORDER BY a.id + LIMIT ? OFFSET ? + [...] (1, 0) + UPDATE a SET data=? WHERE a.id = ? + [...] ('new data', 1) + COMMIT + new data + b1 + b2 In the example above, the :class:`_asyncio.AsyncSession` is instantiated using the optional :class:`_asyncio.async_sessionmaker` helper, which provides diff --git a/test/base/test_tutorials.py b/test/base/test_tutorials.py index b920f25f0a5..7543b1c100c 100644 --- a/test/base/test_tutorials.py +++ b/test/base/test_tutorials.py @@ -1,14 +1,17 @@ from __future__ import annotations +import asyncio import doctest import logging import os import re import sys +from sqlalchemy.engine.url import make_url from sqlalchemy.testing import config from sqlalchemy.testing import fixtures from sqlalchemy.testing import requires +from sqlalchemy.testing import skip_test class DocTest(fixtures.TestBase): @@ -65,12 +68,9 @@ def _run_doctest(self, *fnames): doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE | doctest.IGNORE_EXCEPTION_DETAIL - | _get_allow_unicode_flag() ) runner = doctest.DocTestRunner( - verbose=None, - optionflags=optionflags, - checker=_get_unicode_checker(), + verbose=config.options.verbose >= 2, optionflags=optionflags ) parser = doctest.DocTestParser() globs = {"print_function": print} @@ -163,90 +163,39 @@ def test_orm_queryguide_select(self): ) def test_orm_queryguide_inheritance(self): - self._run_doctest( - "orm/queryguide/inheritance.rst", - ) + self._run_doctest("orm/queryguide/inheritance.rst") @requires.update_from def test_orm_queryguide_dml(self): - self._run_doctest( - "orm/queryguide/dml.rst", - ) + self._run_doctest("orm/queryguide/dml.rst") def test_orm_large_collections(self): - self._run_doctest( - "orm/large_collections.rst", - ) + self._run_doctest("orm/large_collections.rst") def test_orm_queryguide_columns(self): - self._run_doctest( - "orm/queryguide/columns.rst", - ) + self._run_doctest("orm/queryguide/columns.rst") def test_orm_quickstart(self): self._run_doctest("orm/quickstart.rst") - -# unicode checker courtesy pytest - - -def _get_unicode_checker(): - """ - Returns a doctest.OutputChecker subclass that takes in account the - ALLOW_UNICODE option to ignore u'' prefixes in strings. Useful - when the same doctest should run in Python 2 and Python 3. - - An inner class is used to avoid importing "doctest" at the module - level. 
- """ - if hasattr(_get_unicode_checker, "UnicodeOutputChecker"): - return _get_unicode_checker.UnicodeOutputChecker() - - import doctest - import re - - class UnicodeOutputChecker(doctest.OutputChecker): - """ - Copied from doctest_nose_plugin.py from the nltk project: - https://github.com/nltk/nltk - """ - - _literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE) - - def check_output(self, want, got, optionflags): - res = doctest.OutputChecker.check_output( - self, want, got, optionflags - ) - if res: - return True - - if not (optionflags & _get_allow_unicode_flag()): - return False - - else: # pragma: no cover - # the code below will end up executed only in Python 2 in - # our tests, and our coverage check runs in Python 3 only - def remove_u_prefixes(txt): - return re.sub(self._literal_re, r"\1\2", txt) - - want = remove_u_prefixes(want) - got = remove_u_prefixes(got) - res = doctest.OutputChecker.check_output( - self, want, got, optionflags - ) - return res - - _get_unicode_checker.UnicodeOutputChecker = UnicodeOutputChecker - return _get_unicode_checker.UnicodeOutputChecker() - - -def _get_allow_unicode_flag(): - """ - Registers and returns the ALLOW_UNICODE flag. - """ - import doctest - - return doctest.register_optionflag("ALLOW_UNICODE") + @config.fixture(scope="class") + def restore_asyncio(self): + # NOTE: this is required since test_asyncio will remove the global + # loop. 2.1 uses runners that don't require this hack + yield + ep = asyncio.get_event_loop_policy() + try: + ep.get_event_loop() + except RuntimeError: + ep.set_event_loop(ep.new_event_loop()) + + @requires.greenlet + def test_asyncio(self, restore_asyncio): + try: + make_url("sqlite+aiosqlite://").get_dialect().import_dbapi() + except ImportError: + skip_test("missing aiosqile") + self._run_doctest("orm/extensions/asyncio.rst") # increase number to force pipeline run. 1 From a77c42eae271c08848738dd958d547b5308bf3f1 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 11 Mar 2024 19:59:06 -0400 Subject: [PATCH 145/544] update section be qualified for PGBouncer the NullPool advice is for PGBouncer only Change-Id: Ib79cae8965435b78fbde6e2d4de5e35fcd2a2f21 (cherry picked from commit f99209583272b65a71c0b4884fd14edcf6577939) --- lib/sqlalchemy/dialects/postgresql/asyncpg.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 4c60c3d832e..df2656de2a8 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -112,8 +112,8 @@ .. _asyncpg_prepared_statement_name: -Prepared Statement Name ------------------------ +Prepared Statement Name with PGBouncer +-------------------------------------- By default, asyncpg enumerates prepared statements in numeric order, which can lead to errors if a name has already been taken for another prepared @@ -128,7 +128,7 @@ from uuid import uuid4 engine = create_async_engine( - "postgresql+asyncpg://user:pass@hostname/dbname", + "postgresql+asyncpg://user:pass@somepgbouncer/dbname", poolclass=NullPool, connect_args={ 'prepared_statement_name_func': lambda: f'__asyncpg_{uuid4()}__', @@ -141,7 +141,7 @@ https://github.com/sqlalchemy/sqlalchemy/issues/6467 -.. warning:: To prevent a buildup of useless prepared statements in +.. 
warning:: When using PGBouncer, to prevent a buildup of useless prepared statements in your application, it's important to use the :class:`.NullPool` pool class, and to configure PgBouncer to use `DISCARD `_ when returning connections. The DISCARD command is used to release resources held by the db connection, From 507f4aa1fc5ff10caa08bce4cb71e741ce40036e Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 12 Mar 2024 23:05:50 +0100 Subject: [PATCH 146/544] improve docs for sqlite memory db Change-Id: Icdbc13bdad3fb5ae69e79605bb7e6cb82d538c80 References: #10968 (cherry picked from commit e560794883c5a3259aa4208ee7c5aa0740cb3087) --- lib/sqlalchemy/dialects/sqlite/pysqlite.py | 9 ++++++--- lib/sqlalchemy/ext/automap.py | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index e2487790071..f39baf32171 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -51,11 +51,14 @@ # absolute path on Windows e = create_engine('sqlite:///C:\\path\\to\\database.db') -The sqlite ``:memory:`` identifier is the default if no filepath is -present. Specify ``sqlite://`` and nothing else:: +To use sqlite ``:memory:`` database specify it as the filename using +``sqlite://:memory:``. It's also the default if no filepath is +present, specifying only ``sqlite://`` and nothing else:: # in-memory database - e = create_engine('sqlite://') + e = create_engine('sqlite://:memory:') + # also in-memory database + e2 = create_engine('sqlite://') .. _pysqlite_uri_connections: diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py index 76986077c83..bf6a5f26909 100644 --- a/lib/sqlalchemy/ext/automap.py +++ b/lib/sqlalchemy/ext/automap.py @@ -1006,7 +1006,7 @@ class that is produced by the :func:`.declarative.declarative_base` :class:`.util.Properties`, such as ``items()``, the getitem form is also supported:: - Item = Base.classes["item"] + Item = Base.classes["items"] """ From 609073dd989cb5775055f25fb39e1ad64a98059c Mon Sep 17 00:00:00 2001 From: Ethan Langevin Date: Mon, 11 Mar 2024 07:41:58 -0400 Subject: [PATCH 147/544] Make instrumented attribute covariant as well Allows mapped relationships to use covariant types which makes it possible to define methods that operate on relationships in a typesafe way ### Description See: https://github.com/sqlalchemy/sqlalchemy/issues/11112 for a more in depth explanation. Just changed the type parameter in `InstrumentedAttribute` from `_T` to `_T_co`. ### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [x] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. 
**Have a nice day!** Closes: #11113 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11113 Pull-request-sha: 3c100f28661f3440769175a17c2763ed25f4b83a Change-Id: Iff715c24f1556d5604dcd33661a0ee7232b9404b (cherry picked from commit 058e10f2b7e5686198dc744107b32952e55dc93c) --- lib/sqlalchemy/orm/attributes.py | 8 +++---- .../plain_files/orm/mapped_covariant.py | 22 +++++++++++++++++++ 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index d9b2d8213d1..5b16ce3d6b3 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -503,7 +503,7 @@ def _queryable_attribute_unreduce( return getattr(entity, key) -class InstrumentedAttribute(QueryableAttribute[_T]): +class InstrumentedAttribute(QueryableAttribute[_T_co]): """Class bound instrumented attribute which adds basic :term:`descriptor` methods. @@ -544,14 +544,14 @@ def __delete__(self, instance: object) -> None: @overload def __get__( self, instance: None, owner: Any - ) -> InstrumentedAttribute[_T]: ... + ) -> InstrumentedAttribute[_T_co]: ... @overload - def __get__(self, instance: object, owner: Any) -> _T: ... + def __get__(self, instance: object, owner: Any) -> _T_co: ... def __get__( self, instance: Optional[object], owner: Any - ) -> Union[InstrumentedAttribute[_T], _T]: + ) -> Union[InstrumentedAttribute[_T_co], _T_co]: if instance is None: return self diff --git a/test/typing/plain_files/orm/mapped_covariant.py b/test/typing/plain_files/orm/mapped_covariant.py index 9f964021b31..680e925de36 100644 --- a/test/typing/plain_files/orm/mapped_covariant.py +++ b/test/typing/plain_files/orm/mapped_covariant.py @@ -2,12 +2,15 @@ from datetime import datetime from typing import Protocol +from typing import Sequence +from typing import TypeVar from typing import Union from sqlalchemy import ForeignKey from sqlalchemy import func from sqlalchemy import Nullable from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import InstrumentedAttribute from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.orm import relationship @@ -43,6 +46,8 @@ class Parent(Base): name: Mapped[str] = mapped_column(primary_key=True) + children: Mapped[Sequence["Child"]] = relationship("Child") + class Child(Base): __tablename__ = "child" @@ -55,6 +60,23 @@ class Child(Base): assert get_parent_name(Child(parent=Parent(name="foo"))) == "foo" +# Make sure that relationships are covariant as well +_BaseT = TypeVar("_BaseT", bound=Base, covariant=True) +RelationshipType = ( + InstrumentedAttribute[_BaseT] + | InstrumentedAttribute[Sequence[_BaseT]] + | InstrumentedAttribute[_BaseT | None] +) + + +def operate_on_relationships( + relationships: list[RelationshipType[_BaseT]], +) -> int: + return len(relationships) + + +assert operate_on_relationships([Parent.children, Child.parent]) == 2 + # other test From c4aac18d0fca55b1be5071ae74d4eeebc05f6d7d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 13 Mar 2024 18:23:07 -0400 Subject: [PATCH 148/544] accommodate schema_translate_map in _deliver_insertmanyvalues_batches Fixed issue in :ref:`engine_insertmanyvalues` feature where using a primary key column with an "inline execute" default generator such as an explicit :class:`.Sequence` with an explicit schema name, while at the same time using the :paramref:`_engine.Connection.execution_options.schema_translate_map` feature would fail to render the sequence or the parameters properly, leading to errors.
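For illustration, a rough sketch of the combination described above; the engine URL, schema, table, and sequence names below are placeholders rather than anything taken from this patch, and the tables are assumed to already exist in the translated schema::

    from sqlalchemy import (
        Column,
        Integer,
        MetaData,
        Sequence,
        String,
        Table,
        create_engine,
        insert,
    )

    # hypothetical PostgreSQL URL; any backend using insertmanyvalues applies
    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    metadata = MetaData()
    t = Table(
        "t",
        metadata,
        # primary key driven by a Sequence with an explicit schema name
        Column("id", Integer, Sequence("t_seq", schema="myschema"), primary_key=True),
        Column("data", String(50)),
        schema="myschema",
    )

    with engine.connect() as conn:
        # schema_translate_map rewrites "myschema" at execution time; combined
        # with a multi-row INSERT..RETURNING (the insertmanyvalues path), this
        # is the combination addressed here
        conn = conn.execution_options(
            schema_translate_map={"myschema": "some_other_schema"}
        )
        result = conn.execute(
            insert(t).returning(t.c.id),
            [{"data": "d1"}, {"data": "d2"}],
        )
        conn.commit()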
Fixes: #11157 Change-Id: I35666af46d40996aff35d3d39f48c150d838e6e4 (cherry picked from commit e3f7bc683ac3ea6d7c517b9c7ffeaa911860d732) --- doc/build/changelog/unreleased_20/11157.rst | 11 ++ lib/sqlalchemy/engine/default.py | 8 ++ lib/sqlalchemy/sql/compiler.py | 26 +++- test/sql/test_insert_exec.py | 126 ++++++++++++++++++++ 4 files changed, 168 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11157.rst diff --git a/doc/build/changelog/unreleased_20/11157.rst b/doc/build/changelog/unreleased_20/11157.rst new file mode 100644 index 00000000000..8f1e85c348d --- /dev/null +++ b/doc/build/changelog/unreleased_20/11157.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, engine + :tickets: 11157 + + Fixed issue in :ref:`engine_insertmanyvalues` feature where using a primary + key column with an "inline execute" default generator such as an explicit + :class:`.Sequence` with an explcit schema name, while at the same time + using the + :paramref:`_engine.Connection.execution_options.schema_translate_map` + feature would fail to render the sequence or the parameters properly, + leading to errors. diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 82c396414b5..e3a158b032a 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -771,6 +771,13 @@ def _deliver_insertmanyvalues_batches( sentinel_value_resolvers = None + if compiled.schema_translate_map: + schema_translate_map = context.execution_options.get( + "schema_translate_map", {} + ) + else: + schema_translate_map = None + if is_returning: result: Optional[List[Any]] = [] context._insertmanyvalues_rows = result @@ -791,6 +798,7 @@ def _deliver_insertmanyvalues_batches( generic_setinputsizes, batch_size, sort_by_parameter_order, + schema_translate_map, ): yield imv_batch diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 2edf9f49aa6..7ebccbf4dad 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -29,6 +29,7 @@ import collections.abc as collections_abc import contextlib from enum import IntEnum +import functools import itertools import operator import re @@ -5403,6 +5404,7 @@ def _deliver_insertmanyvalues_batches( generic_setinputsizes: Optional[_GenericSetInputSizesType], batch_size: int, sort_by_parameter_order: bool, + schema_translate_map: Optional[SchemaTranslateMapType], ) -> Iterator[_InsertManyValuesBatch]: imv = self._insertmanyvalues assert imv is not None @@ -5454,7 +5456,19 @@ def _deliver_insertmanyvalues_batches( ) return - executemany_values = f"({imv.single_values_expr})" + if schema_translate_map: + rst = functools.partial( + self.preparer._render_schema_translates, + schema_translate_map=schema_translate_map, + ) + else: + rst = None + + imv_single_values_expr = imv.single_values_expr + if rst: + imv_single_values_expr = rst(imv_single_values_expr) + + executemany_values = f"({imv_single_values_expr})" statement = statement.replace(executemany_values, "__EXECMANY_TOKEN__") # Use optional insertmanyvalues_max_parameters @@ -5488,6 +5502,12 @@ def _deliver_insertmanyvalues_batches( insert_crud_params = imv.insert_crud_params assert insert_crud_params is not None + if rst: + insert_crud_params = [ + (col, key, rst(expr), st) + for col, key, expr, st in insert_crud_params + ] + escaped_bind_names: Mapping[str, str] expand_pos_lower_index = expand_pos_upper_index = 0 @@ -5535,10 +5555,10 @@ def apply_placeholders(keys, formatted): if imv.embed_values_counter: 
executemany_values_w_comma = ( - f"({imv.single_values_expr}, _IMV_VALUES_COUNTER), " + f"({imv_single_values_expr}, _IMV_VALUES_COUNTER), " ) else: - executemany_values_w_comma = f"({imv.single_values_expr}), " + executemany_values_w_comma = f"({imv_single_values_expr}), " all_names_we_will_expand: Set[str] = set() for elem in imv.insert_crud_params: diff --git a/test/sql/test_insert_exec.py b/test/sql/test_insert_exec.py index 16300aad0ff..1c31e822689 100644 --- a/test/sql/test_insert_exec.py +++ b/test/sql/test_insert_exec.py @@ -17,6 +17,7 @@ from sqlalchemy import INT from sqlalchemy import Integer from sqlalchemy import literal +from sqlalchemy import MetaData from sqlalchemy import select from sqlalchemy import Sequence from sqlalchemy import sql @@ -1456,6 +1457,131 @@ def test_invalid_identities( coll(expected_data), ) + @testing.requires.sequences + @testing.variation("explicit_sentinel", [True, False]) + @testing.variation("sequence_actually_translates", [True, False]) + @testing.variation("the_table_translates", [True, False]) + def test_sequence_schema_translate( + self, + metadata, + connection, + explicit_sentinel, + warn_for_downgrades, + randomize_returning, + sort_by_parameter_order, + sequence_actually_translates, + the_table_translates, + ): + """test #11157""" + + # so there's a bit of a bug which is that functions has_table() + # and has_sequence() do not take schema translate map into account, + # at all. So on MySQL, where we dont have transactional DDL, the + # DROP for Table / Sequence does not really work for all test runs + # when the schema is set to a "to be translated" kind of name. + # so, make a Table/Sequence with fixed schema name for the CREATE, + # then use a different object for the test that has a translate + # schema name + Table( + "t1", + metadata, + Column( + "id", + Integer, + Sequence("some_seq", start=1, schema=config.test_schema), + primary_key=True, + insert_sentinel=bool(explicit_sentinel), + ), + Column("data", String(50)), + schema=config.test_schema if the_table_translates else None, + ) + metadata.create_all(connection) + + if sequence_actually_translates: + connection = connection.execution_options( + schema_translate_map={ + "should_be_translated": config.test_schema + } + ) + sequence = Sequence( + "some_seq", start=1, schema="should_be_translated" + ) + else: + connection = connection.execution_options( + schema_translate_map={"foo": "bar"} + ) + sequence = Sequence("some_seq", start=1, schema=config.test_schema) + + m2 = MetaData() + t1 = Table( + "t1", + m2, + Column( + "id", + Integer, + sequence, + primary_key=True, + insert_sentinel=bool(explicit_sentinel), + ), + Column("data", String(50)), + schema=( + "should_be_translated" + if sequence_actually_translates and the_table_translates + else config.test_schema if the_table_translates else None + ), + ) + + fixtures.insertmanyvalues_fixture( + connection, + randomize_rows=bool(randomize_returning), + warn_on_downgraded=bool(warn_for_downgrades), + ) + + stmt = insert(t1).returning( + t1.c.id, + t1.c.data, + sort_by_parameter_order=bool(sort_by_parameter_order), + ) + data = [{"data": f"d{i}"} for i in range(10)] + + use_imv = testing.db.dialect.use_insertmanyvalues + if ( + use_imv + and explicit_sentinel + and sort_by_parameter_order + and not ( + testing.db.dialect.insertmanyvalues_implicit_sentinel + & InsertmanyvaluesSentinelOpts.SEQUENCE + ) + ): + with expect_raises_message( + exc.InvalidRequestError, + r"Column t1.id can't be explicitly marked as a sentinel " + r"column .* 
as the particular type of default generation", + ): + connection.execute(stmt, data) + return + + with self._expect_downgrade_warnings( + warn_for_downgrades=warn_for_downgrades, + sort_by_parameter_order=sort_by_parameter_order, + server_autoincrement=True, + autoincrement_is_sequence=True, + ): + result = connection.execute(stmt, data) + + if sort_by_parameter_order: + coll = list + else: + coll = set + + expected_data = [(i + 1, f"d{i}") for i in range(10)] + + eq_( + coll(result), + coll(expected_data), + ) + @testing.combinations( Integer(), String(50), From 17250bff39f7cdd244a7580a77171e288d0a4269 Mon Sep 17 00:00:00 2001 From: Sean Bright Date: Fri, 15 Mar 2024 13:57:28 -0400 Subject: [PATCH 149/544] mysql: Add new reserved words from MySQL 8.3. Adds the following new keywords from MySQL 8.3: * `intersect` * `parallel` * `qualify` Sourced from https://dev.mysql.com/doc/refman/8.3/en/keywords.html Fixes: #11166 ### Description ### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [x] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #11167 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11167 Pull-request-sha: adc789cdc6ea66d73925e2a819ea70e60ec282e4 Change-Id: I4441389a4ebec02cdb8372051b6fab1280bcf198 (cherry picked from commit 03cedd7e2ff994636b915039b700858ae835c786) --- lib/sqlalchemy/dialects/mysql/reserved_words.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/sqlalchemy/dialects/mysql/reserved_words.py b/lib/sqlalchemy/dialects/mysql/reserved_words.py index 009988a6085..04764c17e77 100644 --- a/lib/sqlalchemy/dialects/mysql/reserved_words.py +++ b/lib/sqlalchemy/dialects/mysql/reserved_words.py @@ -282,6 +282,7 @@ } ) +# https://dev.mysql.com/doc/refman/8.3/en/keywords.html # https://dev.mysql.com/doc/refman/8.0/en/keywords.html # https://dev.mysql.com/doc/refman/5.7/en/keywords.html # https://dev.mysql.com/doc/refman/5.6/en/keywords.html @@ -403,6 +404,7 @@ "int4", "int8", "integer", + "intersect", "interval", "into", "io_after_gtids", @@ -468,6 +470,7 @@ "outfile", "over", "parse_gcol_expr", + "parallel", "partition", "percent_rank", "persist", @@ -476,6 +479,7 @@ "primary", "procedure", "purge", + "qualify", "range", "rank", "read", From 892b8f4c3aeec29cd31d3d9e2b88232d4f4ec3be Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Fri, 15 Mar 2024 20:42:16 +0100 Subject: [PATCH 150/544] fix mypy on python<3.10 Change-Id: Ice16ff3685f89c64607ef37a906e17c53a5324fd (cherry picked from commit 34a974e509190497cd41831342dda0bdadf88891) --- test/typing/plain_files/orm/mapped_covariant.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/test/typing/plain_files/orm/mapped_covariant.py b/test/typing/plain_files/orm/mapped_covariant.py index 680e925de36..0b65073fde6 100644 --- a/test/typing/plain_files/orm/mapped_covariant.py +++ 
b/test/typing/plain_files/orm/mapped_covariant.py @@ -1,6 +1,7 @@ """Tests Mapped covariance.""" from datetime import datetime +from typing import List from typing import Protocol from typing import Sequence from typing import TypeVar @@ -62,15 +63,15 @@ class Child(Base): # Make sure that relationships are covariant as well _BaseT = TypeVar("_BaseT", bound=Base, covariant=True) -RelationshipType = ( - InstrumentedAttribute[_BaseT] - | InstrumentedAttribute[Sequence[_BaseT]] - | InstrumentedAttribute[_BaseT | None] -) +RelationshipType = Union[ + InstrumentedAttribute[_BaseT], + InstrumentedAttribute[Sequence[_BaseT]], + InstrumentedAttribute[Union[_BaseT, None]], +] def operate_on_relationships( - relationships: list[RelationshipType[_BaseT]], + relationships: List[RelationshipType[_BaseT]], ) -> int: return len(relationships) From a77c42eae271c08848738dd958d547b5308bf3f1 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 15 Mar 2024 10:51:02 -0400 Subject: [PATCH 151/544] remove sentinel_value_resolvers and use pre-bind values Made a change to the adjustment made in version 2.0.10 for :ticket:`9618`, which added the behavior of reconciling RETURNING rows from a bulk INSERT to the parameters that were passed to it. This behavior included a comparison of already-DB-converted bound parameter values against returned row values that was not always "symmetrical" for SQL column types such as UUIDs, depending on specifics of how different DBAPIs receive such values versus how they return them, necessitating the need for additional "sentinel value resolver" methods on these column types. Unfortunately this broke third party column types such as UUID/GUID types in libraries like SQLModel which did not implement this special method, raising an error "Can't match sentinel values in result set to parameter sets". Rather than attempt to further explain and document this implementation detail of the "insertmanyvalues" feature including a public version of the new method, the approach is instead revised to no longer need this extra conversion step, and the logic that does the comparison now works on the pre-converted bound parameter value compared to the post-result-processed value, which should always be of a matching datatype. In the unusual case that a custom SQL column type that also happens to be used in a "sentinel" column for bulk INSERT is not receiving and returning the same value type, the "Can't match" error will be raised, however the mitigation is straightforward in that the same Python datatype should be passed as that returned. Fixes: #11160 Change-Id: Ica62571e923ad9545eb90502e6732b11875b164a (cherry picked from commit 4c0af9e93dab62a04aa00f7c9a07c984e0e316df) --- doc/build/changelog/unreleased_20/11160.rst | 26 +++++ lib/sqlalchemy/dialects/mssql/base.py | 23 ---- lib/sqlalchemy/engine/default.py | 117 +++++++++----------- lib/sqlalchemy/sql/compiler.py | 80 +++++++------ lib/sqlalchemy/sql/sqltypes.py | 25 ----- lib/sqlalchemy/sql/type_api.py | 24 ---- lib/sqlalchemy/testing/fixtures/sql.py | 4 + setup.cfg | 8 +- test/sql/test_insert_exec.py | 27 ++--- 9 files changed, 138 insertions(+), 196 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11160.rst diff --git a/doc/build/changelog/unreleased_20/11160.rst b/doc/build/changelog/unreleased_20/11160.rst new file mode 100644 index 00000000000..1c8ae3a2a74 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11160.rst @@ -0,0 +1,26 @@ +.. 
change::
+    :tags: bug, engine
+    :tickets: 11160
+
+    Made a change to the adjustment made in version 2.0.10 for :ticket:`9618`,
+    which added the behavior of reconciling RETURNING rows from a bulk INSERT
+    to the parameters that were passed to it. This behavior included a
+    comparison of already-DB-converted bound parameter values against returned
+    row values that was not always "symmetrical" for SQL column types such as
+    UUIDs, depending on specifics of how different DBAPIs receive such values
+    versus how they return them, necessitating the need for additional
+    "sentinel value resolver" methods on these column types. Unfortunately
+    this broke third party column types such as UUID/GUID types in libraries
+    like SQLModel which did not implement this special method, raising an error
+    "Can't match sentinel values in result set to parameter sets". Rather than
+    attempt to further explain and document this implementation detail of the
+    "insertmanyvalues" feature including a public version of the new
+    method, the approach is instead revised to no longer need this extra
+    conversion step, and the logic that does the comparison now works on the
+    pre-converted bound parameter value compared to the post-result-processed
+    value, which should always be of a matching datatype. In the unusual case
+    that a custom SQL column type that also happens to be used in a "sentinel"
+    column for bulk INSERT is not receiving and returning the same value type,
+    the "Can't match" error will be raised, however the mitigation is
+    straightforward in that the same Python datatype should be passed as that
+    returned.
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index ff69d6aa147..872f8584da4 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -1555,29 +1555,6 @@ def process(value):
 
                 return process
 
-    def _sentinel_value_resolver(self, dialect):
-        if not self.native_uuid:
-            # dealing entirely with strings going in and out of
-            # CHAR(32)
-            return None
-
-        # true if we expect the returned UUID values to be strings
-        # pymssql sends UUID objects back, pyodbc sends strings,
-        # however pyodbc converts them to uppercase coming back, so
-        # need special logic here
-        character_based_uuid = not dialect.supports_native_uuid
-
-        if character_based_uuid:
-            # we sent UUID objects in all cases, see bind_processor()
-            def process(uuid_value):
-                return str(uuid_value).upper()
-
-            return process
-        elif not self.as_uuid:
-            return _python_UUID
-        else:
-            return None
-
 
 class UNIQUEIDENTIFIER(sqltypes.Uuid[sqltypes._UUID_RETURN]):
     __visit_name__ = "UNIQUEIDENTIFIER"
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index e3a158b032a..90cafe4f4ba 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -95,6 +95,7 @@
     from ..sql.elements import BindParameter
     from ..sql.schema import Column
     from ..sql.type_api import _BindProcessorType
+    from ..sql.type_api import _ResultProcessorType
     from ..sql.type_api import TypeEngine
 
 # When we're handed literal SQL, ensure it's a SELECT query
@@ -761,6 +762,14 @@ def _deliver_insertmanyvalues_batches(
         context = cast(DefaultExecutionContext, context)
         compiled = cast(SQLCompiler, context.compiled)
 
+        _composite_sentinel_proc: Sequence[
+            Optional[_ResultProcessorType[Any]]
+        ] = ()
+        _scalar_sentinel_proc: Optional[_ResultProcessorType[Any]] = None
+        _sentinel_proc_initialized: bool = False
+
+        compiled_parameters = context.compiled_parameters
+
         imv 
= compiled._insertmanyvalues assert imv is not None @@ -769,8 +778,6 @@ def _deliver_insertmanyvalues_batches( "insertmanyvalues_page_size", self.insertmanyvalues_page_size ) - sentinel_value_resolvers = None - if compiled.schema_translate_map: schema_translate_map = context.execution_options.get( "schema_translate_map", {} @@ -784,10 +791,6 @@ def _deliver_insertmanyvalues_batches( sort_by_parameter_order = imv.sort_by_parameter_order - if imv.num_sentinel_columns: - sentinel_value_resolvers = ( - compiled._imv_sentinel_value_resolvers - ) else: sort_by_parameter_order = False result = None @@ -795,6 +798,7 @@ def _deliver_insertmanyvalues_batches( for imv_batch in compiled._deliver_insertmanyvalues_batches( statement, parameters, + compiled_parameters, generic_setinputsizes, batch_size, sort_by_parameter_order, @@ -803,6 +807,7 @@ def _deliver_insertmanyvalues_batches( yield imv_batch if is_returning: + rows = context.fetchall_for_returning(cursor) # I would have thought "is_returning: Final[bool]" @@ -823,11 +828,46 @@ def _deliver_insertmanyvalues_batches( # otherwise, create dictionaries to match up batches # with parameters assert imv.sentinel_param_keys + assert imv.sentinel_columns + + _nsc = imv.num_sentinel_columns + if not _sentinel_proc_initialized: + if composite_sentinel: + _composite_sentinel_proc = [ + col.type._cached_result_processor( + self, cursor_desc[1] + ) + for col, cursor_desc in zip( + imv.sentinel_columns, + cursor.description[-_nsc:], + ) + ] + else: + _scalar_sentinel_proc = ( + imv.sentinel_columns[0] + ).type._cached_result_processor( + self, cursor.description[-1][1] + ) + _sentinel_proc_initialized = True + + rows_by_sentinel: Union[ + Dict[Tuple[Any, ...], Any], + Dict[Any, Any], + ] if composite_sentinel: - _nsc = imv.num_sentinel_columns rows_by_sentinel = { - tuple(row[-_nsc:]): row for row in rows + tuple( + (proc(val) if proc else val) + for val, proc in zip( + row[-_nsc:], _composite_sentinel_proc + ) + ): row + for row in rows + } + elif _scalar_sentinel_proc: + rows_by_sentinel = { + _scalar_sentinel_proc(row[-1]): row for row in rows } else: rows_by_sentinel = {row[-1]: row for row in rows} @@ -846,63 +886,10 @@ def _deliver_insertmanyvalues_batches( ) try: - if composite_sentinel: - if sentinel_value_resolvers: - # composite sentinel (PK) with value resolvers - ordered_rows = [ - rows_by_sentinel[ - tuple( - ( - _resolver(parameters[_spk]) # type: ignore # noqa: E501 - if _resolver - else parameters[_spk] # type: ignore # noqa: E501 - ) - for _resolver, _spk in zip( - sentinel_value_resolvers, - imv.sentinel_param_keys, - ) - ) - ] - for parameters in imv_batch.batch - ] - else: - # composite sentinel (PK) with no value - # resolvers - ordered_rows = [ - rows_by_sentinel[ - tuple( - parameters[_spk] # type: ignore - for _spk in imv.sentinel_param_keys - ) - ] - for parameters in imv_batch.batch - ] - else: - _sentinel_param_key = imv.sentinel_param_keys[0] - if ( - sentinel_value_resolvers - and sentinel_value_resolvers[0] - ): - # single-column sentinel with value resolver - _sentinel_value_resolver = ( - sentinel_value_resolvers[0] - ) - ordered_rows = [ - rows_by_sentinel[ - _sentinel_value_resolver( - parameters[_sentinel_param_key] # type: ignore # noqa: E501 - ) - ] - for parameters in imv_batch.batch - ] - else: - # single-column sentinel with no value resolver - ordered_rows = [ - rows_by_sentinel[ - parameters[_sentinel_param_key] # type: ignore # noqa: E501 - ] - for parameters in imv_batch.batch - ] + ordered_rows = [ + 
rows_by_sentinel[sentinel_keys] + for sentinel_keys in imv_batch.sentinel_values + ] except KeyError as ke: # see test_insert_exec.py:: # IMVSentinelTest::test_sentinel_cant_match_keys diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 7ebccbf4dad..813d3fa0a05 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -115,7 +115,6 @@ from .selectable import Select from .selectable import SelectState from .type_api import _BindProcessorType - from .type_api import _SentinelProcessorType from ..engine.cursor import CursorResultMetaData from ..engine.interfaces import _CoreSingleExecuteParams from ..engine.interfaces import _DBAPIAnyExecuteParams @@ -546,8 +545,8 @@ class _InsertManyValues(NamedTuple): """ - sentinel_param_keys: Optional[Sequence[Union[str, int]]] = None - """parameter str keys / int indexes in each param dictionary / tuple + sentinel_param_keys: Optional[Sequence[str]] = None + """parameter str keys in each param dictionary / tuple that would link to the client side "sentinel" values for that row, which we can use to match up parameter sets to result rows. @@ -557,6 +556,10 @@ class _InsertManyValues(NamedTuple): .. versionadded:: 2.0.10 + .. versionchanged:: 2.0.29 - the sequence is now string dictionary keys + only, used against the "compiled parameteters" collection before + the parameters were converted by bound parameter processors + """ implicit_sentinel: bool = False @@ -601,6 +604,7 @@ class _InsertManyValuesBatch(NamedTuple): replaced_parameters: _DBAPIAnyExecuteParams processed_setinputsizes: Optional[_GenericSetInputSizesType] batch: Sequence[_DBAPISingleExecuteParams] + sentinel_values: Sequence[Tuple[Any, ...]] current_batch_size: int batchnum: int total_batches: int @@ -1676,19 +1680,9 @@ def find_position(m: re.Match[str]) -> str: for v in self._insertmanyvalues.insert_crud_params ] - sentinel_param_int_idxs = ( - [ - self.positiontup.index(cast(str, _param_key)) - for _param_key in self._insertmanyvalues.sentinel_param_keys # noqa: E501 - ] - if self._insertmanyvalues.sentinel_param_keys is not None - else None - ) - self._insertmanyvalues = self._insertmanyvalues._replace( single_values_expr=single_values_expr, insert_crud_params=insert_crud_params, - sentinel_param_keys=sentinel_param_int_idxs, ) def _process_numeric(self): @@ -1757,21 +1751,11 @@ def _process_numeric(self): for v in self._insertmanyvalues.insert_crud_params ] - sentinel_param_int_idxs = ( - [ - self.positiontup.index(cast(str, _param_key)) - for _param_key in self._insertmanyvalues.sentinel_param_keys # noqa: E501 - ] - if self._insertmanyvalues.sentinel_param_keys is not None - else None - ) - self._insertmanyvalues = self._insertmanyvalues._replace( # This has the numbers (:1, :2) single_values_expr=single_values_expr, # The single binds are instead %s so they can be formatted insert_crud_params=insert_crud_params, - sentinel_param_keys=sentinel_param_int_idxs, ) @util.memoized_property @@ -1803,23 +1787,6 @@ def _bind_processors( if value is not None } - @util.memoized_property - def _imv_sentinel_value_resolvers( - self, - ) -> Optional[Sequence[Optional[_SentinelProcessorType[Any]]]]: - imv = self._insertmanyvalues - if imv is None or imv.sentinel_columns is None: - return None - - sentinel_value_resolvers = [ - _scol.type._cached_sentinel_value_processor(self.dialect) - for _scol in imv.sentinel_columns - ] - if util.NONE_SET.issuperset(sentinel_value_resolvers): - return None - else: - return sentinel_value_resolvers - def 
is_subquery(self): return len(self.stack) > 1 @@ -5401,6 +5368,7 @@ def _deliver_insertmanyvalues_batches( self, statement: str, parameters: _DBAPIMultiExecuteParams, + compiled_parameters: List[_MutableCoreSingleExecuteParams], generic_setinputsizes: Optional[_GenericSetInputSizesType], batch_size: int, sort_by_parameter_order: bool, @@ -5409,6 +5377,13 @@ def _deliver_insertmanyvalues_batches( imv = self._insertmanyvalues assert imv is not None + if not imv.sentinel_param_keys: + _sentinel_from_params = None + else: + _sentinel_from_params = operator.itemgetter( + *imv.sentinel_param_keys + ) + lenparams = len(parameters) if imv.is_default_expr and not self.dialect.supports_default_metavalue: # backend doesn't support @@ -5440,14 +5415,23 @@ def _deliver_insertmanyvalues_batches( downgraded = False if use_row_at_a_time: - for batchnum, param in enumerate( - cast("Sequence[_DBAPISingleExecuteParams]", parameters), 1 + for batchnum, (param, compiled_param) in enumerate( + cast( + "Sequence[Tuple[_DBAPISingleExecuteParams, _MutableCoreSingleExecuteParams]]", # noqa: E501 + zip(parameters, compiled_parameters), + ), + 1, ): yield _InsertManyValuesBatch( statement, param, generic_setinputsizes, [param], + ( + [_sentinel_from_params(compiled_param)] + if _sentinel_from_params + else [] + ), 1, batchnum, lenparams, @@ -5492,6 +5476,9 @@ def _deliver_insertmanyvalues_batches( ) batches = cast("List[Sequence[Any]]", list(parameters)) + compiled_batches = cast( + "List[Sequence[Any]]", list(compiled_parameters) + ) processed_setinputsizes: Optional[_GenericSetInputSizesType] = None batchnum = 1 @@ -5592,7 +5579,11 @@ def apply_placeholders(keys, formatted): while batches: batch = batches[0:batch_size] + compiled_batch = compiled_batches[0:batch_size] + batches[0:batch_size] = [] + compiled_batches[0:batch_size] = [] + if batches: current_batch_size = batch_size else: @@ -5707,6 +5698,11 @@ def apply_placeholders(keys, formatted): replaced_parameters, processed_setinputsizes, batch, + ( + [_sentinel_from_params(cb) for cb in compiled_batch] + if _sentinel_from_params + else [] + ), current_batch_size, batchnum, total_batches, diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index fa4c7827fc3..cdfd0a7c8ad 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -3661,31 +3661,6 @@ def process(value): return process - def _sentinel_value_resolver(self, dialect): - """For the "insertmanyvalues" feature only, return a callable that - will receive the uuid object or string - as it is normally passed to the DB in the parameter set, after - bind_processor() is called. Convert this value to match - what it would be as coming back from a RETURNING or similar - statement for the given backend. - - Individual dialects and drivers may need their own implementations - based on how their UUID types send data and how the drivers behave - (e.g. pyodbc) - - """ - if not self.native_uuid or not dialect.supports_native_uuid: - # dealing entirely with strings going in and out of - # CHAR(32) - return None - - elif self.as_uuid: - # we sent UUID objects and we are getting UUID objects back - return None - else: - # we sent strings and we are getting UUID objects back - return _python_UUID - class UUID(Uuid[_UUID_RETURN], type_api.NativeForEmulated): """Represent the SQL UUID type. 
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 414b91ab4da..b1207337b13 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -574,18 +574,6 @@ class explicitly. """ return None - def _sentinel_value_resolver( - self, dialect: Dialect - ) -> Optional[_SentinelProcessorType[_T]]: - """Return an optional callable that will match parameter values - (post-bind processing) to result values - (pre-result-processing), for use in the "sentinel" feature. - - .. versionadded:: 2.0.10 - - """ - return None - @util.memoized_property def _has_bind_expression(self) -> bool: """memoized boolean, check if bind_expression is implemented. @@ -933,18 +921,6 @@ def _cached_result_processor( d["result"][coltype] = rp return rp - def _cached_sentinel_value_processor( - self, dialect: Dialect - ) -> Optional[_SentinelProcessorType[_T]]: - try: - return dialect._type_memos[self]["sentinel"] - except KeyError: - pass - - d = self._dialect_info(dialect) - d["sentinel"] = bp = d["impl"]._sentinel_value_resolver(dialect) - return bp - def _cached_custom_processor( self, dialect: Dialect, key: str, fn: Callable[[TypeEngine[_T]], _O] ) -> _O: diff --git a/lib/sqlalchemy/testing/fixtures/sql.py b/lib/sqlalchemy/testing/fixtures/sql.py index ab532ab0e6d..830fa276593 100644 --- a/lib/sqlalchemy/testing/fixtures/sql.py +++ b/lib/sqlalchemy/testing/fixtures/sql.py @@ -459,6 +459,10 @@ def __init__(self, cursor): # by not having the other methods we assert that those aren't being # used + @property + def description(self): + return self.cursor.description + def fetchall(self): rows = self.cursor.fetchall() rows = list(rows) diff --git a/setup.cfg b/setup.cfg index 2a8a68132ad..45b6c47914c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -178,10 +178,10 @@ asyncmy = mysql+asyncmy://scott:tiger@127.0.0.1:3306/test?charset=utf8mb4 asyncmy_fallback = mysql+asyncmy://scott:tiger@127.0.0.1:3306/test?charset=utf8mb4&async_fallback=true mariadb = mariadb+mysqldb://scott:tiger@127.0.0.1:3306/test mariadb_connector = mariadb+mariadbconnector://scott:tiger@127.0.0.1:3306/test -mssql = mssql+pyodbc://scott:tiger^5HHH@mssql2017:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes -mssql_async = mssql+aioodbc://scott:tiger^5HHH@mssql2017:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes -pymssql = mssql+pymssql://scott:tiger^5HHH@mssql2017:1433/test -docker_mssql = mssql+pyodbc://scott:tiger^5HHH@127.0.0.1:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes +mssql = mssql+pyodbc://scott:tiger^5HHH@mssql2022:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes&Encrypt=Optional +mssql_async = mssql+aioodbc://scott:tiger^5HHH@mssql2022:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes&Encrypt=Optional +pymssql = mssql+pymssql://scott:tiger^5HHH@mssql2022:1433/test +docker_mssql = mssql+pyodbc://scott:tiger^5HHH@127.0.0.1:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes&Encrypt=Optional oracle = oracle+cx_oracle://scott:tiger@oracle18c/xe cxoracle = oracle+cx_oracle://scott:tiger@oracle18c/xe oracledb = oracle+oracledb://scott:tiger@oracle18c/xe diff --git a/test/sql/test_insert_exec.py b/test/sql/test_insert_exec.py index 1c31e822689..ebb0b23a5f6 100644 --- a/test/sql/test_insert_exec.py +++ b/test/sql/test_insert_exec.py @@ -1764,10 +1764,8 @@ def test_sentinel_cant_match_keys( """test assertions to ensure sentinel values passed in parameter 
structures can be identified when they come back in cursor.fetchall(). - Values that are further modified by the database driver or by - SQL expressions (as in the case below) before being INSERTed - won't match coming back out, so datatypes need to implement - _sentinel_value_resolver() if this is the case. + Sentinels are now matched based on the data on the outside of the + type, that is, before the bind, and after the result. """ @@ -1780,11 +1778,8 @@ def bind_expression(self, bindparam): if resolve_sentinel_values: - def _sentinel_value_resolver(self, dialect): - def fix_sentinels(value): - return value.lower() - - return fix_sentinels + def process_result_value(self, value, dialect): + return value.replace("upper", "UPPER") t1 = Table( "data", @@ -1816,10 +1811,16 @@ def fix_sentinels(value): connection.execute(stmt, data) else: result = connection.execute(stmt, data) - eq_( - set(result.all()), - {(f"d{i}", f"upper_d{i}") for i in range(10)}, - ) + if resolve_sentinel_values: + eq_( + set(result.all()), + {(f"d{i}", f"UPPER_d{i}") for i in range(10)}, + ) + else: + eq_( + set(result.all()), + {(f"d{i}", f"upper_d{i}") for i in range(10)}, + ) @testing.variation("add_insert_sentinel", [True, False]) def test_sentinel_insert_default_pk_only( From 9df1bc691aef6bce5a9b4fb4e8b313a12f75a306 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 18 Mar 2024 10:22:06 -0400 Subject: [PATCH 152/544] add missing cache_ok directive to MyEpochType Change-Id: Ic4da52b02a4ba36d87d73974fe428b91d9d7915c (cherry picked from commit 4ef36de359449abd49b90726a1d06aef9a4084e7) --- lib/sqlalchemy/sql/type_api.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 414b91ab4da..1198284a8af 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -1575,6 +1575,8 @@ class produces the same behavior each time, it may be set to ``True``. class MyEpochType(types.TypeDecorator): impl = types.Integer + cache_ok = True + epoch = datetime.date(1970, 1, 1) def process_bind_param(self, value, dialect): From 8869373d0e5c0fd8f5dfbb6972fefacd6aa45eed Mon Sep 17 00:00:00 2001 From: Gord Thompson Date: Mon, 18 Mar 2024 07:26:17 -0600 Subject: [PATCH 153/544] Add impyla to external dialect list Change-Id: I5a85db43a11c2c993597d0fa737377ee460b7629 (cherry picked from commit d8174392dce20004d9158a90949b4ff11b830247) --- doc/build/dialects/index.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 52690f640a9..120af79efec 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -97,6 +97,8 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | IBM Netezza Performance Server [1]_ | nzalchemy_ | +------------------------------------------------+---------------------------------------+ +| Impala | impyla_ | ++------------------------------------------------+---------------------------------------+ | Microsoft Access (via pyodbc) | sqlalchemy-access_ | +------------------------------------------------+---------------------------------------+ | Microsoft SQL Server (via python-tds) | sqlalchemy-tds_ | @@ -153,3 +155,4 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _firebolt-sqlalchemy: https://pypi.org/project/firebolt-sqlalchemy/ .. _pyathena: https://github.com/laughingman7743/PyAthena/ .. 
_sqlalchemy-yugabytedb: https://pypi.org/project/sqlalchemy-yugabytedb/ +.. _impyla: https://pypi.org/project/impyla/ From c0a589f247435141a78c7ec18c0e3a55d2683064 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 18 Mar 2024 21:50:35 +0100 Subject: [PATCH 154/544] document improvement for load_only mention that load_only can be used to control what populate existing refreses. Change-Id: I9bd6fbe8674005d9f32f9d1bc263bf860b53c3ec (cherry picked from commit 93da4ba1446162f1476598b4f13c307ae7bfb1f1) --- doc/build/orm/queryguide/api.rst | 2 + lib/sqlalchemy/orm/strategy_options.py | 111 +++++++++++++++---------- 2 files changed, 67 insertions(+), 46 deletions(-) diff --git a/doc/build/orm/queryguide/api.rst b/doc/build/orm/queryguide/api.rst index 15301cbd003..fe4d6b02a49 100644 --- a/doc/build/orm/queryguide/api.rst +++ b/doc/build/orm/queryguide/api.rst @@ -111,6 +111,8 @@ a per-query basis. Options for which this apply include: * The :func:`_orm.with_loader_criteria` option +* The :func:`_orm.load_only` option to select what attributes to refresh + The ``populate_existing`` execution option is equvialent to the :meth:`_orm.Query.populate_existing` method in :term:`1.x style` ORM queries. diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index 3e70ada7e63..25c6332112f 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -108,9 +108,9 @@ def contains_eager( The option is used in conjunction with an explicit join that loads the desired rows, i.e.:: - sess.query(Order).\ - join(Order.user).\ - options(contains_eager(Order.user)) + sess.query(Order).join(Order.user).options( + contains_eager(Order.user) + ) The above query would join from the ``Order`` entity to its related ``User`` entity, and the returned ``Order`` objects would have the @@ -121,11 +121,9 @@ def contains_eager( :ref:`orm_queryguide_populate_existing` execution option assuming the primary collection of parent objects may already have been loaded:: - sess.query(User).\ - join(User.addresses).\ - filter(Address.email_address.like('%@aol.com')).\ - options(contains_eager(User.addresses)).\ - populate_existing() + sess.query(User).join(User.addresses).filter( + Address.email_address.like("%@aol.com") + ).options(contains_eager(User.addresses)).populate_existing() See the section :ref:`contains_eager` for complete usage details. @@ -191,10 +189,18 @@ def load_only(self, *attrs: _AttrType, raiseload: bool = False) -> Self: the lead entity can be specifically referred to using the :class:`_orm.Load` constructor:: - stmt = select(User, Address).join(User.addresses).options( - Load(User).load_only(User.name, User.fullname), - Load(Address).load_only(Address.email_address) - ) + stmt = ( + select(User, Address) + .join(User.addresses) + .options( + Load(User).load_only(User.name, User.fullname), + Load(Address).load_only(Address.email_address), + ) + ) + + When used together with the + :ref:`populate_existing ` + execution option only the attributes listed will be refreshed. :param \*attrs: Attributes to be loaded, all others will be deferred. 
@@ -247,28 +253,31 @@ def joinedload( examples:: # joined-load the "orders" collection on "User" - query(User).options(joinedload(User.orders)) + select(User).options(joinedload(User.orders)) # joined-load Order.items and then Item.keywords - query(Order).options( - joinedload(Order.items).joinedload(Item.keywords)) + select(Order).options( + joinedload(Order.items).joinedload(Item.keywords) + ) # lazily load Order.items, but when Items are loaded, # joined-load the keywords collection - query(Order).options( - lazyload(Order.items).joinedload(Item.keywords)) + select(Order).options( + lazyload(Order.items).joinedload(Item.keywords) + ) :param innerjoin: if ``True``, indicates that the joined eager load should use an inner join instead of the default of left outer join:: - query(Order).options(joinedload(Order.user, innerjoin=True)) + select(Order).options(joinedload(Order.user, innerjoin=True)) In order to chain multiple eager joins together where some may be OUTER and others INNER, right-nested joins are used to link them:: - query(A).options( - joinedload(A.bs, innerjoin=False). - joinedload(B.cs, innerjoin=True) + select(A).options( + joinedload(A.bs, innerjoin=False).joinedload( + B.cs, innerjoin=True + ) ) The above query, linking A.bs via "outer" join and B.cs via "inner" @@ -283,11 +292,11 @@ def joinedload( will render as LEFT OUTER JOIN. For example, supposing ``A.bs`` is an outerjoin:: - query(A).options( - joinedload(A.bs). - joinedload(B.cs, innerjoin="unnested") + select(A).options( + joinedload(A.bs).joinedload(B.cs, innerjoin="unnested") ) + The above join will render as "a LEFT OUTER JOIN b LEFT OUTER JOIN c", rather than as "a LEFT OUTER JOIN (b JOIN c)". @@ -338,16 +347,18 @@ def subqueryload(self, attr: _AttrType) -> Self: examples:: # subquery-load the "orders" collection on "User" - query(User).options(subqueryload(User.orders)) + select(User).options(subqueryload(User.orders)) # subquery-load Order.items and then Item.keywords - query(Order).options( - subqueryload(Order.items).subqueryload(Item.keywords)) + select(Order).options( + subqueryload(Order.items).subqueryload(Item.keywords) + ) # lazily load Order.items, but when Items are loaded, # subquery-load the keywords collection - query(Order).options( - lazyload(Order.items).subqueryload(Item.keywords)) + select(Order).options( + lazyload(Order.items).subqueryload(Item.keywords) + ) .. seealso:: @@ -373,16 +384,18 @@ def selectinload( examples:: # selectin-load the "orders" collection on "User" - query(User).options(selectinload(User.orders)) + select(User).options(selectinload(User.orders)) # selectin-load Order.items and then Item.keywords - query(Order).options( - selectinload(Order.items).selectinload(Item.keywords)) + select(Order).options( + selectinload(Order.items).selectinload(Item.keywords) + ) # lazily load Order.items, but when Items are loaded, # selectin-load the keywords collection - query(Order).options( - lazyload(Order.items).selectinload(Item.keywords)) + select(Order).options( + lazyload(Order.items).selectinload(Item.keywords) + ) :param recursion_depth: optional int; when set to a positive integer in conjunction with a self-referential relationship, @@ -558,17 +571,20 @@ def defaultload(self, attr: _AttrType) -> Self: element of an element:: session.query(MyClass).options( - defaultload(MyClass.someattribute). 
- joinedload(MyOtherClass.someotherattribute) + defaultload(MyClass.someattribute).joinedload( + MyOtherClass.someotherattribute + ) ) :func:`.defaultload` is also useful for setting column-level options on a related class, namely that of :func:`.defer` and :func:`.undefer`:: - session.query(MyClass).options( - defaultload(MyClass.someattribute). - defer("some_column"). - undefer("some_other_column") + session.scalars( + select(MyClass).options( + defaultload(MyClass.someattribute) + .defer("some_column") + .undefer("some_other_column") + ) ) .. seealso:: @@ -609,7 +625,7 @@ def defer(self, key: _AttrType, raiseload: bool = False) -> Self: at once using :meth:`_orm.Load.options`:: - session.query(MyClass).options( + select(MyClass).options( defaultload(MyClass.someattr).options( defer(RelatedClass.some_column), defer(RelatedClass.some_other_column), @@ -660,11 +676,13 @@ def undefer(self, key: _AttrType) -> Self: # undefer all columns specific to a single class using Load + * session.query(MyClass, MyOtherClass).options( - Load(MyClass).undefer("*")) + Load(MyClass).undefer("*") + ) # undefer a column on a related object - session.query(MyClass).options( - defaultload(MyClass.items).undefer(MyClass.text)) + select(MyClass).options( + defaultload(MyClass.items).undefer(MyClass.text) + ) :param key: Attribute to be undeferred. @@ -697,8 +715,9 @@ def undefer_group(self, name: str) -> Self: spelled out using relationship loader options, such as :func:`_orm.defaultload`:: - session.query(MyClass).options( - defaultload("someattr").undefer_group("large_attrs")) + select(MyClass).options( + defaultload("someattr").undefer_group("large_attrs") + ) .. seealso:: From af17a0070b65b16b34e52357ee5e6498cb2b38de Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 18 Mar 2024 21:52:41 +0100 Subject: [PATCH 155/544] fix typo from d8174392dce20004d9158a90949b4ff11b830247 Change-Id: If76715abf6de8fc85580080a73aa5faa138aa968 (cherry picked from commit 82ae47ba9959c63cfaa9169869ef08db61f71fd7) --- doc/build/dialects/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 120af79efec..b6c9c8e88d5 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -97,7 +97,7 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | IBM Netezza Performance Server [1]_ | nzalchemy_ | +------------------------------------------------+---------------------------------------+ -| Impala | impyla_ | +| Impala | impyla_ | +------------------------------------------------+---------------------------------------+ | Microsoft Access (via pyodbc) | sqlalchemy-access_ | +------------------------------------------------+---------------------------------------+ From b09effd44105550c4c3ea8f5e94941ee1024a972 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 18 Mar 2024 22:01:00 +0100 Subject: [PATCH 156/544] do not convert uuid to string in postgresql and mssql Change-Id: Ic3c87d8c654926f7ef28ba9ec6dd21c50a1171cf (cherry picked from commit 5032cf6bc14f76e47063696a58ea3cce44e0f13f) --- doc/build/core/custom_types.rst | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/doc/build/core/custom_types.rst b/doc/build/core/custom_types.rst index 90fc5031861..5390824dda8 100644 --- a/doc/build/core/custom_types.rst +++ b/doc/build/core/custom_types.rst @@ -212,10 +212,8 @@ string, using a CHAR(36) 
type::
                 return dialect.type_descriptor(self._default_type)
 
         def process_bind_param(self, value, dialect):
-            if value is None:
+            if value is None or dialect.name in ("postgresql", "mssql"):
                 return value
-            elif dialect.name in ("postgresql", "mssql"):
-                return str(value)
             else:
                 if not isinstance(value, uuid.UUID):
                     value = uuid.UUID(value)

From 5e3578d52492a5017843474b937253bdffe0263e Mon Sep 17 00:00:00 2001
From: Mike Bayer 
Date: Tue, 19 Mar 2024 08:35:00 -0400
Subject: [PATCH 157/544] add notes clarifying the role of "$user" in pg
 search_path

references: https://github.com/sqlalchemy/alembic/discussions/1447

Change-Id: I2ef55813699f84ac7fbca6de7522f0d3d78e6029
(cherry picked from commit 58a50c06836792da201bb610ee2f0463ac1bb073)
---
 doc/build/orm/queryguide/dml.rst           |  2 +-
 lib/sqlalchemy/dialects/postgresql/base.py | 68 ++++++++++++++++++----
 2 files changed, 58 insertions(+), 12 deletions(-)

diff --git a/doc/build/orm/queryguide/dml.rst b/doc/build/orm/queryguide/dml.rst
index a4b00da7257..91fe9e7741d 100644
--- a/doc/build/orm/queryguide/dml.rst
+++ b/doc/build/orm/queryguide/dml.rst
@@ -205,7 +205,7 @@ the operation will INSERT one row at a time::
 .. _orm_queryguide_insert_heterogeneous_params:
 
 Using Heterogeneous Parameter Dictionaries
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 .. Setup code, not for display
 
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 3fe8900ac44..af457a2761c 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -346,7 +346,9 @@ def set_search_path(dbapi_connection, connection_record):
 .. admonition:: Section Best Practices Summarized
 
     keep the ``search_path`` variable set to its default of ``public``, without
-    any other schema names. For other schema names, name these explicitly
+    any other schema names. Ensure the username used to connect **does not**
+    match remote schemas, or ensure the ``"$user"`` token is **removed** from
+    ``search_path``. For other schema names, name these explicitly
     within :class:`_schema.Table` definitions. Alternatively, the
     ``postgresql_ignore_search_path`` option will cause all reflected
     :class:`_schema.Table` objects to have a :attr:`_schema.Table.schema`
@@ -355,12 +357,63 @@ def set_search_path(dbapi_connection, connection_record):
 The PostgreSQL dialect can reflect tables from any schema, as outlined in
 :ref:`metadata_reflection_schemas`.
 
+In all cases, the first thing SQLAlchemy does when reflecting tables is
+to **determine the default schema for the current database connection**.
+It does this using the PostgreSQL ``current_schema()``
+function, illustrated below using a PostgreSQL client session (i.e. using
+the ``psql`` tool)::
+
+    test=> select current_schema();
+     current_schema
+    ----------------
+     public
+    (1 row)
+
+Above we see that on a plain install of PostgreSQL, the default schema name
+is the name ``public``.
+
+However, if your database username **matches the name of a schema**, PostgreSQL's
+default is to then **use that name as the default schema**. Below, we log in
+using the username ``scott``. 
When we create a schema named ``scott``, **it +implicitly changes the default schema**:: + + test=> select current_schema(); + current_schema + ---------------- + public + (1 row) + + test=> create schema scott; + CREATE SCHEMA + test=> select current_schema(); + current_schema + ---------------- + scott + (1 row) + +The behavior of ``current_schema()`` is derived from the +`PostgreSQL search path +`_ +variable ``search_path``, which in modern PostgreSQL versions defaults to this:: + + test=> show search_path; + search_path + ----------------- + "$user", public + (1 row) + +Where above, the ``"$user"`` variable will inject the current username as the +default schema, if one exists. Otherwise, ``public`` is used. + +When a :class:`_schema.Table` object is reflected, if it is present in the +schema indicated by the ``current_schema()`` function, **the schema name assigned +to the table is the Python value ``None``**. Otherwise, the schema name +will be assigned as the name of that schema. + With regards to tables which these :class:`_schema.Table` objects refer to via foreign key constraint, a decision must be made as to how the ``.schema`` is represented in those remote tables, in the case where that -remote schema name is also a member of the current -`PostgreSQL search path -`_. +remote schema name is also a member of the current ``search_path``. By default, the PostgreSQL dialect mimics the behavior encouraged by PostgreSQL's own ``pg_get_constraintdef()`` builtin procedure. This function @@ -466,13 +519,6 @@ def set_search_path(dbapi_connection, connection_record): described here are only for those users who can't, or prefer not to, stay within these guidelines. -Note that **in all cases**, the "default" schema is always reflected as -``None``. The "default" schema on PostgreSQL is that which is returned by the -PostgreSQL ``current_schema()`` function. On a typical PostgreSQL -installation, this is the name ``public``. So a table that refers to another -which is in the ``public`` (i.e. default) schema will always have the -``.schema`` attribute set to ``None``. - .. seealso:: :ref:`reflection_schema_qualified_interaction` - discussion of the issue From 788e2a13d693435257c5e262acd7b77f319ef8f5 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 19 Mar 2024 10:51:01 -0400 Subject: [PATCH 158/544] work around boldface concerns Change-Id: I99ed117bb0f1bdc1a8750bd13db5a69d5c398ae0 (cherry picked from commit 697dcc94e412e013aba298e17613ee097f423e04) --- lib/sqlalchemy/dialects/postgresql/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index af457a2761c..ebb7d546db0 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -407,8 +407,8 @@ def set_search_path(dbapi_connection, connection_record): When a :class:`_schema.Table` object is reflected, if it is present in the schema indicated by the ``current_schema()`` function, **the schema name assigned -to the table is the Python value ``None``**. Otherwise, the schema name -will be assigned as the name of that schema. +to the ".schema" attribute of the Table is the Python "None" value**. Otherwise, the +".schema" attribute will be assigned the string name of that schema. 
With regards to tables which these :class:`_schema.Table`
 objects refer to via foreign key constraint, a decision must be made as to how

From 79879c629354d6a2948e2f296788e5cd47b6d790 Mon Sep 17 00:00:00 2001
From: Mike Bayer 
Date: Wed, 20 Mar 2024 10:23:41 -0400
Subject: [PATCH 159/544] assign variant mapping on adapt()

Fixed regression from the 1.4 series where the refactor of the
:meth:`_types.TypeEngine.with_variant` method introduced at
:ref:`change_6980` failed to accommodate for the ``.copy()`` method, which
will lose the variant mappings that are set up. This becomes an issue for
the very specific case of a "schema" type, which includes types such as
:class:`.Enum` and :class:`.ARRAY`, when they are then used in the context
of an ORM Declarative mapping with mixins where copying of types comes into
play. The variant mapping is now copied as well.

Fixes: #11176
Change-Id: Icf1a2752f60fce863c87ead8b0fe298b0f3d3766
(cherry picked from commit 29a428955a904e235e1b85e928cbe89155aeca82)
---
 doc/build/changelog/unreleased_20/11176.rst | 12 ++++++++++++
 lib/sqlalchemy/sql/type_api.py              |  4 +++-
 test/sql/test_types.py                      | 13 +++++++++++++
 3 files changed, 28 insertions(+), 1 deletion(-)
 create mode 100644 doc/build/changelog/unreleased_20/11176.rst

diff --git a/doc/build/changelog/unreleased_20/11176.rst b/doc/build/changelog/unreleased_20/11176.rst
new file mode 100644
index 00000000000..cc35ab1d543
--- /dev/null
+++ b/doc/build/changelog/unreleased_20/11176.rst
@@ -0,0 +1,12 @@
+.. change::
+    :tags: bug, sql, regression
+    :tickets: 11176
+
+    Fixed regression from the 1.4 series where the refactor of the
+    :meth:`_types.TypeEngine.with_variant` method introduced at
+    :ref:`change_6980` failed to accommodate for the ``.copy()`` method, which
+    will lose the variant mappings that are set up. This becomes an issue for
+    the very specific case of a "schema" type, which includes types such as
+    :class:`.Enum` and :class:`.ARRAY`, when they are then used in the context
+    of an ORM Declarative mapping with mixins where copying of types comes into
+    play. The variant mapping is now copied as well.
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py
index 43d47064d4e..4233e7f16e8 100644
--- a/lib/sqlalchemy/sql/type_api.py
+++ b/lib/sqlalchemy/sql/type_api.py
@@ -1005,9 +1005,11 @@ def adapt(
         types with "implementation" types that are specific to a particular
         dialect. 
""" - return util.constructor_copy( + typ = util.constructor_copy( self, cast(Type[TypeEngine[Any]], cls), **kw ) + typ._variant_mapping = self._variant_mapping + return typ def coerce_compared_value( self, op: Optional[OperatorType], value: Any diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 898d6fa0a8c..0127004438c 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -1695,6 +1695,19 @@ def get_col_spec(self): ) self.composite = self.variant.with_variant(self.UTypeThree(), "mysql") + def test_copy_doesnt_lose_variants(self): + """test #11176""" + + v = self.UTypeOne().with_variant(self.UTypeTwo(), "postgresql") + + v_c = v.copy() + + self.assert_compile(v_c, "UTYPEONE", dialect="default") + + self.assert_compile( + v_c, "UTYPETWO", dialect=dialects.postgresql.dialect() + ) + def test_one_dialect_is_req(self): with expect_raises_message( exc.ArgumentError, "At least one dialect name is required" From 33344b0aa423c71a2d5c91257033c8bab2c547f9 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 9 Nov 2023 10:27:19 -0500 Subject: [PATCH 160/544] use a private return class for the "catch all" relationship Fixed Declarative issue where typing a relationship using :class:`_orm.Relationship` rather than :class:`_orm.Mapped` would inadvertently pull in the "dynamic" relationship loader strategy for that attribute. Fixes: #10611 Change-Id: Ie4421050b583827fdf96c27ae9d7fe7ca596e77e (cherry picked from commit 7cfb940b1a9392f6e3784aff8f487c37ebcd588b) --- doc/build/changelog/unreleased_20/10611.rst | 8 ++++ lib/sqlalchemy/ext/mypy/names.py | 29 +++++-------- lib/sqlalchemy/orm/_orm_constructors.py | 6 +-- lib/sqlalchemy/orm/base.py | 3 +- lib/sqlalchemy/orm/exc.py | 5 ++- lib/sqlalchemy/orm/interfaces.py | 11 +++-- lib/sqlalchemy/orm/relationships.py | 43 ++++++++++++------- lib/sqlalchemy/orm/util.py | 8 ++++ lib/sqlalchemy/util/langhelpers.py | 9 ++-- .../test_tm_future_annotations_sync.py | 43 ++++++++++++++++++- test/orm/declarative/test_typed_mapping.py | 43 ++++++++++++++++++- test/typing/plain_files/orm/relationship.py | 37 ++++++++++++++++ 12 files changed, 194 insertions(+), 51 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10611.rst diff --git a/doc/build/changelog/unreleased_20/10611.rst b/doc/build/changelog/unreleased_20/10611.rst new file mode 100644 index 00000000000..2627e4d37c8 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10611.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, orm + :tickets: 10611 + + Fixed Declarative issue where typing a relationship using + :class:`_orm.Relationship` rather than :class:`_orm.Mapped` would + inadvertently pull in the "dynamic" relationship loader strategy for that + attribute. 
diff --git a/lib/sqlalchemy/ext/mypy/names.py b/lib/sqlalchemy/ext/mypy/names.py index 35b4e2ba819..fc3d708e7dd 100644 --- a/lib/sqlalchemy/ext/mypy/names.py +++ b/lib/sqlalchemy/ext/mypy/names.py @@ -58,6 +58,14 @@ NAMED_TYPE_BUILTINS_LIST = "builtins.list" NAMED_TYPE_SQLA_MAPPED = "sqlalchemy.orm.base.Mapped" +_RelFullNames = { + "sqlalchemy.orm.relationships.Relationship", + "sqlalchemy.orm.relationships.RelationshipProperty", + "sqlalchemy.orm.relationships._RelationshipDeclared", + "sqlalchemy.orm.Relationship", + "sqlalchemy.orm.RelationshipProperty", +} + _lookup: Dict[str, Tuple[int, Set[str]]] = { "Column": ( COLUMN, @@ -66,24 +74,9 @@ "sqlalchemy.sql.Column", }, ), - "Relationship": ( - RELATIONSHIP, - { - "sqlalchemy.orm.relationships.Relationship", - "sqlalchemy.orm.relationships.RelationshipProperty", - "sqlalchemy.orm.Relationship", - "sqlalchemy.orm.RelationshipProperty", - }, - ), - "RelationshipProperty": ( - RELATIONSHIP, - { - "sqlalchemy.orm.relationships.Relationship", - "sqlalchemy.orm.relationships.RelationshipProperty", - "sqlalchemy.orm.Relationship", - "sqlalchemy.orm.RelationshipProperty", - }, - ), + "Relationship": (RELATIONSHIP, _RelFullNames), + "RelationshipProperty": (RELATIONSHIP, _RelFullNames), + "_RelationshipDeclared": (RELATIONSHIP, _RelFullNames), "registry": ( REGISTRY, { diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index b9f618af0d7..7cb536b2976 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -28,8 +28,8 @@ from .properties import MappedSQLExpression from .query import AliasOption from .relationships import _RelationshipArgumentType +from .relationships import _RelationshipDeclared from .relationships import _RelationshipSecondaryArgument -from .relationships import Relationship from .relationships import RelationshipProperty from .session import Session from .util import _ORMJoin @@ -950,7 +950,7 @@ def relationship( omit_join: Literal[None, False] = None, sync_backref: Optional[bool] = None, **kw: Any, -) -> Relationship[Any]: +) -> _RelationshipDeclared[Any]: """Provide a relationship between two mapped classes. This corresponds to a parent-child or associative table relationship. @@ -1756,7 +1756,7 @@ class that will be synchronized with this one. It is usually """ - return Relationship( + return _RelationshipDeclared( argument, secondary=secondary, uselist=uselist, diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py index 86af81cd6ef..c9005298d82 100644 --- a/lib/sqlalchemy/orm/base.py +++ b/lib/sqlalchemy/orm/base.py @@ -21,6 +21,7 @@ from typing import no_type_check from typing import Optional from typing import overload +from typing import Tuple from typing import Type from typing import TYPE_CHECKING from typing import TypeVar @@ -579,7 +580,7 @@ class InspectionAttr: """ - __slots__ = () + __slots__: Tuple[str, ...] = () is_selectable = False """Return True if this object is an instance of diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py index 8ab831002ab..39dd5401128 100644 --- a/lib/sqlalchemy/orm/exc.py +++ b/lib/sqlalchemy/orm/exc.py @@ -16,6 +16,7 @@ from typing import TYPE_CHECKING from typing import TypeVar +from .util import _mapper_property_as_plain_name from .. import exc as sa_exc from .. 
import util from ..exc import MultipleResultsFound # noqa @@ -191,8 +192,8 @@ def __init__( % ( util.clsname_as_plain_name(actual_strategy_type), requesting_property, - util.clsname_as_plain_name(applied_to_property_type), - util.clsname_as_plain_name(applies_to), + _mapper_property_as_plain_name(applied_to_property_type), + _mapper_property_as_plain_name(applies_to), ), ) diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index 2f090588fe6..36336e7a2c2 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -149,13 +149,17 @@ class ORMColumnDescription(TypedDict): class _IntrospectsAnnotations: __slots__ = () + @classmethod + def _mapper_property_name(cls) -> str: + return cls.__name__ + def found_in_pep593_annotated(self) -> Any: """return a copy of this object to use in declarative when the object is found inside of an Annotated object.""" raise NotImplementedError( - f"Use of the {self.__class__} construct inside of an " - f"Annotated object is not yet supported." + f"Use of the {self._mapper_property_name()!r} " + "construct inside of an Annotated object is not yet supported." ) def declarative_scan( @@ -181,7 +185,8 @@ def _raise_for_required(self, key: str, cls: Type[Any]) -> NoReturn: raise sa_exc.ArgumentError( f"Python typing annotation is required for attribute " f'"{cls.__name__}.{key}" when primary argument(s) for ' - f'"{self.__class__.__name__}" construct are None or not present' + f'"{self._mapper_property_name()}" ' + "construct are None or not present" ) diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index afff24c8ccc..b5e33ffdbb9 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -1755,19 +1755,17 @@ def declarative_scan( argument = extracted_mapped_annotation assert originating_module is not None - is_write_only = mapped_container is not None and issubclass( - mapped_container, WriteOnlyMapped - ) - if is_write_only: - self.lazy = "write_only" - self.strategy_key = (("lazy", self.lazy),) - - is_dynamic = mapped_container is not None and issubclass( - mapped_container, DynamicMapped - ) - if is_dynamic: - self.lazy = "dynamic" - self.strategy_key = (("lazy", self.lazy),) + if mapped_container is not None: + is_write_only = issubclass(mapped_container, WriteOnlyMapped) + is_dynamic = issubclass(mapped_container, DynamicMapped) + if is_write_only: + self.lazy = "write_only" + self.strategy_key = (("lazy", self.lazy),) + elif is_dynamic: + self.lazy = "dynamic" + self.strategy_key = (("lazy", self.lazy),) + else: + is_write_only = is_dynamic = False argument = de_optionalize_union_types(argument) @@ -3465,11 +3463,9 @@ def __call__(self, c: ClauseElement) -> bool: _remote_col_exclude = _ColInAnnotations("remote", "should_not_adapt") -class Relationship( # type: ignore +class Relationship( RelationshipProperty[_T], _DeclarativeMapped[_T], - WriteOnlyMapped[_T], # not compatible with Mapped[_T] - DynamicMapped[_T], # not compatible with Mapped[_T] ): """Describes an object property that holds a single item or list of items that correspond to a related database table. 
@@ -3487,3 +3483,18 @@ class Relationship( # type: ignore inherit_cache = True """:meta private:""" + + +class _RelationshipDeclared( # type: ignore[misc] + Relationship[_T], + WriteOnlyMapped[_T], # not compatible with Mapped[_T] + DynamicMapped[_T], # not compatible with Mapped[_T] +): + """Relationship subclass used implicitly for declarative mapping.""" + + inherit_cache = True + """:meta private:""" + + @classmethod + def _mapper_property_name(cls) -> str: + return "Relationship" diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index f8431386e4e..8e153e63dbd 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -2406,3 +2406,11 @@ def _extract_mapped_subtype( ) return annotated.__args__[0], annotated.__origin__ + + +def _mapper_property_as_plain_name(prop: Type[Any]) -> str: + if hasattr(prop, "_mapper_property_name"): + name = prop._mapper_property_name() + else: + name = None + return util.clsname_as_plain_name(prop, name) diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 396a039771d..4390ae18352 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -174,10 +174,11 @@ def string_or_unprintable(element: Any) -> str: return "unprintable element %r" % element -def clsname_as_plain_name(cls: Type[Any]) -> str: - return " ".join( - n.lower() for n in re.findall(r"([A-Z][a-z]+|SQL)", cls.__name__) - ) +def clsname_as_plain_name( + cls: Type[Any], use_name: Optional[str] = None +) -> str: + name = use_name or cls.__name__ + return " ".join(n.lower() for n in re.findall(r"([A-Z][a-z]+|SQL)", name)) def method_is_overridden( diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index 25e77811339..1a045ec1bfb 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -68,14 +68,18 @@ from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.orm import MappedAsDataclass +from sqlalchemy.orm import Relationship from sqlalchemy.orm import relationship from sqlalchemy.orm import remote from sqlalchemy.orm import Session from sqlalchemy.orm import undefer from sqlalchemy.orm import WriteOnlyMapped +from sqlalchemy.orm.attributes import CollectionAttributeImpl from sqlalchemy.orm.collections import attribute_keyed_dict from sqlalchemy.orm.collections import KeyFuncDict +from sqlalchemy.orm.dynamic import DynamicAttributeImpl from sqlalchemy.orm.properties import MappedColumn +from sqlalchemy.orm.writeonly import WriteOnlyAttributeImpl from sqlalchemy.schema import CreateTable from sqlalchemy.sql.base import _NoArg from sqlalchemy.sql.sqltypes import Enum @@ -1185,8 +1189,7 @@ class SomeRelated(decl_base): with expect_raises_message( NotImplementedError, - r"Use of the \ construct inside of an Annotated " + r"Use of the 'Relationship' construct inside of an Annotated " r"object is not yet supported.", ): @@ -2491,6 +2494,42 @@ class Base(DeclarativeBase): yield Base Base.registry.dispose() + @testing.combinations( + (Relationship, CollectionAttributeImpl), + (Mapped, CollectionAttributeImpl), + (WriteOnlyMapped, WriteOnlyAttributeImpl), + (DynamicMapped, DynamicAttributeImpl), + argnames="mapped_cls,implcls", + ) + def test_use_relationship(self, decl_base, mapped_cls, implcls): + """test #10611""" + + global B + + class B(decl_base): + __tablename__ = "b" + id: Mapped[int] = mapped_column(Integer, 
primary_key=True) + a_id: Mapped[int] = mapped_column(ForeignKey("a.id")) + + class A(decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True) + + # for future annotations support, need to write these + # directly in source code + if mapped_cls is Relationship: + bs: Relationship[List[B]] = relationship() + elif mapped_cls is Mapped: + bs: Mapped[List[B]] = relationship() + elif mapped_cls is WriteOnlyMapped: + bs: WriteOnlyMapped[List[B]] = relationship() + elif mapped_cls is DynamicMapped: + bs: DynamicMapped[List[B]] = relationship() + + decl_base.registry.configure() + assert isinstance(A.bs.impl, implcls) + def test_no_typing_in_rhs(self, decl_base): class A(decl_base): __tablename__ = "a" diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 4afa33c7316..175da290239 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -59,14 +59,18 @@ from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.orm import MappedAsDataclass +from sqlalchemy.orm import Relationship from sqlalchemy.orm import relationship from sqlalchemy.orm import remote from sqlalchemy.orm import Session from sqlalchemy.orm import undefer from sqlalchemy.orm import WriteOnlyMapped +from sqlalchemy.orm.attributes import CollectionAttributeImpl from sqlalchemy.orm.collections import attribute_keyed_dict from sqlalchemy.orm.collections import KeyFuncDict +from sqlalchemy.orm.dynamic import DynamicAttributeImpl from sqlalchemy.orm.properties import MappedColumn +from sqlalchemy.orm.writeonly import WriteOnlyAttributeImpl from sqlalchemy.schema import CreateTable from sqlalchemy.sql.base import _NoArg from sqlalchemy.sql.sqltypes import Enum @@ -1176,8 +1180,7 @@ class SomeRelated(decl_base): with expect_raises_message( NotImplementedError, - r"Use of the \ construct inside of an Annotated " + r"Use of the 'Relationship' construct inside of an Annotated " r"object is not yet supported.", ): @@ -2482,6 +2485,42 @@ class Base(DeclarativeBase): yield Base Base.registry.dispose() + @testing.combinations( + (Relationship, CollectionAttributeImpl), + (Mapped, CollectionAttributeImpl), + (WriteOnlyMapped, WriteOnlyAttributeImpl), + (DynamicMapped, DynamicAttributeImpl), + argnames="mapped_cls,implcls", + ) + def test_use_relationship(self, decl_base, mapped_cls, implcls): + """test #10611""" + + # anno only: global B + + class B(decl_base): + __tablename__ = "b" + id: Mapped[int] = mapped_column(Integer, primary_key=True) + a_id: Mapped[int] = mapped_column(ForeignKey("a.id")) + + class A(decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True) + + # for future annotations support, need to write these + # directly in source code + if mapped_cls is Relationship: + bs: Relationship[List[B]] = relationship() + elif mapped_cls is Mapped: + bs: Mapped[List[B]] = relationship() + elif mapped_cls is WriteOnlyMapped: + bs: WriteOnlyMapped[List[B]] = relationship() + elif mapped_cls is DynamicMapped: + bs: DynamicMapped[List[B]] = relationship() + + decl_base.registry.configure() + assert isinstance(A.bs.impl, implcls) + def test_no_typing_in_rhs(self, decl_base): class A(decl_base): __tablename__ = "a" diff --git a/test/typing/plain_files/orm/relationship.py b/test/typing/plain_files/orm/relationship.py index 6bfe19cc4e8..5caf57de7bd 100644 --- a/test/typing/plain_files/orm/relationship.py +++ b/test/typing/plain_files/orm/relationship.py @@ 
-21,6 +21,7 @@ from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.orm import registry +from sqlalchemy.orm import Relationship from sqlalchemy.orm import relationship from sqlalchemy.orm import Session @@ -29,11 +30,22 @@ class Base(DeclarativeBase): pass +class Group(Base): + __tablename__ = "group" + + id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[str] = mapped_column() + + addresses_style_one_anno_only: Mapped[List["User"]] + addresses_style_two_anno_only: Mapped[Set["User"]] + + class User(Base): __tablename__ = "user" id: Mapped[int] = mapped_column(primary_key=True) name: Mapped[str] = mapped_column() + group_id = mapped_column(ForeignKey("group.id")) # this currently doesnt generate an error. not sure how to get the # overloads to hit this one, nor am i sure i really want to do that @@ -58,6 +70,19 @@ class Address(Base): user_style_one: Mapped[User] = relationship() user_style_two: Mapped["User"] = relationship() + rel_style_one: Relationship[List["MoreMail"]] = relationship() + # everything works even if using Relationship instead of Mapped + # users should use Mapped though + rel_style_one_anno_only: Relationship[Set["MoreMail"]] + + +class MoreMail(Base): + __tablename__ = "address" + + id = mapped_column(Integer, primary_key=True) + aggress_id = mapped_column(ForeignKey("address.id")) + email: Mapped[str] + class SelfReferential(Base): """test for #9150""" @@ -100,6 +125,18 @@ class SelfReferential(Base): # EXPECTED_RE_TYPE: sqlalchemy.orm.attributes.InstrumentedAttribute\[builtins.set\*?\[relationship.Address\]\] reveal_type(User.addresses_style_two) + # EXPECTED_RE_TYPE: sqlalchemy.*.InstrumentedAttribute\[builtins.list\*?\[relationship.User\]\] + reveal_type(Group.addresses_style_one_anno_only) + + # EXPECTED_RE_TYPE: sqlalchemy.orm.attributes.InstrumentedAttribute\[builtins.set\*?\[relationship.User\]\] + reveal_type(Group.addresses_style_two_anno_only) + + # EXPECTED_RE_TYPE: sqlalchemy.*.InstrumentedAttribute\[builtins.list\*?\[relationship.MoreMail\]\] + reveal_type(Address.rel_style_one) + + # EXPECTED_RE_TYPE: sqlalchemy.*.InstrumentedAttribute\[builtins.set\*?\[relationship.MoreMail\]\] + reveal_type(Address.rel_style_one_anno_only) + mapper_registry: registry = registry() From ed9a4d9b1ed48a7d91dd2dafb31b97997a39d004 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 20 Mar 2024 22:18:35 +0100 Subject: [PATCH 161/544] A scalar subquery that returns bool is now correctly typed Fixes: #10937 Change-Id: Iba4986be14fefd4210b727ddb7ae7e9291ab7f7f (cherry picked from commit bf7289f9d4218275d32ce7cfcb24a8da3475d95d) --- lib/sqlalchemy/sql/_typing.py | 1 + test/typing/plain_files/orm/orm_querying.py | 10 ++++++++++ 2 files changed, 11 insertions(+) diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index ba5faffd4d6..c861bae6e0f 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -107,6 +107,7 @@ def dialect(self) -> Dialect: ... 
"_NOT_ENTITY", int, str, + bool, "datetime", "date", "time", diff --git a/test/typing/plain_files/orm/orm_querying.py b/test/typing/plain_files/orm/orm_querying.py index 3251147dd68..83e0fefabbc 100644 --- a/test/typing/plain_files/orm/orm_querying.py +++ b/test/typing/plain_files/orm/orm_querying.py @@ -3,6 +3,7 @@ from sqlalchemy import ColumnElement from sqlalchemy import ForeignKey from sqlalchemy import orm +from sqlalchemy import ScalarSelect from sqlalchemy import select from sqlalchemy.orm import aliased from sqlalchemy.orm import DeclarativeBase @@ -134,3 +135,12 @@ def where_criteria(cls_: type[A]) -> ColumnElement[bool]: orm.with_loader_criteria(A, lambda cls: cls.data == "some data") orm.with_loader_criteria(A, where_criteria) + + +def test_10937() -> None: + stmt: ScalarSelect[bool] = select(A.id == B.id).scalar_subquery() + stmt1: ScalarSelect[bool] = select(A.id > 0).scalar_subquery() + stmt2: ScalarSelect[int] = select(A.id + 2).scalar_subquery() + stmt3: ScalarSelect[str] = select(A.data + B.data).scalar_subquery() + + select(stmt, stmt2, stmt3, stmt1) From 8b1bc05d444351c53daa42e827848fbecfb50e7e Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 21 Mar 2024 02:13:22 -0400 Subject: [PATCH 162/544] ensure ARRAY.__init__ documents before Comparator also add note for zero_indexes to generic and PG ARRAY types References: https://github.com/sqlalchemy/sqlalchemy/discussions/11100 Change-Id: I2087da695787a930f325cfb2fa4156d19c8e8f31 (cherry picked from commit 7c70ab8c6b7b9ce1c566862c4ca0438e0b0e9131) --- doc/build/core/type_basics.rst | 4 +- doc/build/dialects/postgresql.rst | 2 +- lib/sqlalchemy/dialects/postgresql/array.py | 75 ++++++------- lib/sqlalchemy/sql/sqltypes.py | 110 ++++++++++---------- 4 files changed, 97 insertions(+), 94 deletions(-) diff --git a/doc/build/core/type_basics.rst b/doc/build/core/type_basics.rst index a8bb0f84afb..f3817fe0c99 100644 --- a/doc/build/core/type_basics.rst +++ b/doc/build/core/type_basics.rst @@ -259,7 +259,9 @@ its exact name in DDL with ``CREATE TABLE`` is issued. .. autoclass:: ARRAY - :members: + :members: __init__, Comparator + :member-order: bysource + .. autoclass:: BIGINT diff --git a/doc/build/dialects/postgresql.rst b/doc/build/dialects/postgresql.rst index e822d069ce6..2d377e3623e 100644 --- a/doc/build/dialects/postgresql.rst +++ b/doc/build/dialects/postgresql.rst @@ -458,7 +458,7 @@ construction arguments, are as follows: .. autoclass:: ARRAY :members: __init__, Comparator - + :member-order: bysource .. autoclass:: BIT diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index e88c27d2de7..1d63655ee05 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -183,8 +183,9 @@ class also mytable.c.data.contains([1, 2]) - The :class:`_postgresql.ARRAY` type may not be supported on all - PostgreSQL DBAPIs; it is currently known to work on psycopg2 only. + Indexed access is one-based by default, to match that of PostgreSQL; + for zero-based indexed access, set + :paramref:`_postgresql.ARRAY.zero_indexes`. Additionally, the :class:`_postgresql.ARRAY` type does not work directly in @@ -224,41 +225,6 @@ class SomeOrmClass(Base): """ - class Comparator(sqltypes.ARRAY.Comparator): - """Define comparison operations for :class:`_types.ARRAY`. 
- - Note that these operations are in addition to those provided - by the base :class:`.types.ARRAY.Comparator` class, including - :meth:`.types.ARRAY.Comparator.any` and - :meth:`.types.ARRAY.Comparator.all`. - - """ - - def contains(self, other, **kwargs): - """Boolean expression. Test if elements are a superset of the - elements of the argument array expression. - - kwargs may be ignored by this operator but are required for API - conformance. - """ - return self.operate(CONTAINS, other, result_type=sqltypes.Boolean) - - def contained_by(self, other): - """Boolean expression. Test if elements are a proper subset of the - elements of the argument array expression. - """ - return self.operate( - CONTAINED_BY, other, result_type=sqltypes.Boolean - ) - - def overlap(self, other): - """Boolean expression. Test if array has elements in common with - an argument array expression. - """ - return self.operate(OVERLAP, other, result_type=sqltypes.Boolean) - - comparator_factory = Comparator - def __init__( self, item_type: _TypeEngineArgument[Any], @@ -310,6 +276,41 @@ def __init__( self.dimensions = dimensions self.zero_indexes = zero_indexes + class Comparator(sqltypes.ARRAY.Comparator): + """Define comparison operations for :class:`_types.ARRAY`. + + Note that these operations are in addition to those provided + by the base :class:`.types.ARRAY.Comparator` class, including + :meth:`.types.ARRAY.Comparator.any` and + :meth:`.types.ARRAY.Comparator.all`. + + """ + + def contains(self, other, **kwargs): + """Boolean expression. Test if elements are a superset of the + elements of the argument array expression. + + kwargs may be ignored by this operator but are required for API + conformance. + """ + return self.operate(CONTAINS, other, result_type=sqltypes.Boolean) + + def contained_by(self, other): + """Boolean expression. Test if elements are a proper subset of the + elements of the argument array expression. + """ + return self.operate( + CONTAINED_BY, other, result_type=sqltypes.Boolean + ) + + def overlap(self, other): + """Boolean expression. Test if array has elements in common with + an argument array expression. + """ + return self.operate(OVERLAP, other, result_type=sqltypes.Boolean) + + comparator_factory = Comparator + @property def hashable(self): return self.as_tuple diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index cdfd0a7c8ad..1af3c5e339f 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -2766,23 +2766,23 @@ class ARRAY( dimension parameter will generally assume single-dimensional behaviors. SQL expressions of type :class:`_types.ARRAY` have support for "index" and - "slice" behavior. The Python ``[]`` operator works normally here, given - integer indexes or slices. Arrays default to 1-based indexing. - The operator produces binary expression + "slice" behavior. The ``[]`` operator produces expression constructs which will produce the appropriate SQL, both for SELECT statements:: select(mytable.c.data[5], mytable.c.data[2:7]) as well as UPDATE statements when the :meth:`_expression.Update.values` - method - is used:: + method is used:: mytable.update().values({ mytable.c.data[5]: 7, mytable.c.data[2:7]: [1, 2, 3] }) + Indexed access is one-based by default; + for zero-based index conversion, set :paramref:`_types.ARRAY.zero_indexes`. + The :class:`_types.ARRAY` type also provides for the operators :meth:`.types.ARRAY.Comparator.any` and :meth:`.types.ARRAY.Comparator.all`. 
The PostgreSQL-specific version of @@ -2827,6 +2827,56 @@ class SomeOrmClass(Base): """If True, Python zero-based indexes should be interpreted as one-based on the SQL expression side.""" + def __init__( + self, + item_type: _TypeEngineArgument[Any], + as_tuple: bool = False, + dimensions: Optional[int] = None, + zero_indexes: bool = False, + ): + """Construct an :class:`_types.ARRAY`. + + E.g.:: + + Column('myarray', ARRAY(Integer)) + + Arguments are: + + :param item_type: The data type of items of this array. Note that + dimensionality is irrelevant here, so multi-dimensional arrays like + ``INTEGER[][]``, are constructed as ``ARRAY(Integer)``, not as + ``ARRAY(ARRAY(Integer))`` or such. + + :param as_tuple=False: Specify whether return results + should be converted to tuples from lists. This parameter is + not generally needed as a Python list corresponds well + to a SQL array. + + :param dimensions: if non-None, the ARRAY will assume a fixed + number of dimensions. This impacts how the array is declared + on the database, how it goes about interpreting Python and + result values, as well as how expression behavior in conjunction + with the "getitem" operator works. See the description at + :class:`_types.ARRAY` for additional detail. + + :param zero_indexes=False: when True, index values will be converted + between Python zero-based and SQL one-based indexes, e.g. + a value of one will be added to all index values before passing + to the database. + + """ + if isinstance(item_type, ARRAY): + raise ValueError( + "Do not nest ARRAY types; ARRAY(basetype) " + "handles multi-dimensional arrays of basetype" + ) + if isinstance(item_type, type): + item_type = item_type() + self.item_type = item_type + self.as_tuple = as_tuple + self.dimensions = dimensions + self.zero_indexes = zero_indexes + class Comparator( Indexable.Comparator[Sequence[Any]], Concatenable.Comparator[Sequence[Any]], @@ -2981,56 +3031,6 @@ def all(self, other, operator=None): comparator_factory = Comparator - def __init__( - self, - item_type: _TypeEngineArgument[Any], - as_tuple: bool = False, - dimensions: Optional[int] = None, - zero_indexes: bool = False, - ): - """Construct an :class:`_types.ARRAY`. - - E.g.:: - - Column('myarray', ARRAY(Integer)) - - Arguments are: - - :param item_type: The data type of items of this array. Note that - dimensionality is irrelevant here, so multi-dimensional arrays like - ``INTEGER[][]``, are constructed as ``ARRAY(Integer)``, not as - ``ARRAY(ARRAY(Integer))`` or such. - - :param as_tuple=False: Specify whether return results - should be converted to tuples from lists. This parameter is - not generally needed as a Python list corresponds well - to a SQL array. - - :param dimensions: if non-None, the ARRAY will assume a fixed - number of dimensions. This impacts how the array is declared - on the database, how it goes about interpreting Python and - result values, as well as how expression behavior in conjunction - with the "getitem" operator works. See the description at - :class:`_types.ARRAY` for additional detail. - - :param zero_indexes=False: when True, index values will be converted - between Python zero-based and SQL one-based indexes, e.g. - a value of one will be added to all index values before passing - to the database. 
- - """ - if isinstance(item_type, ARRAY): - raise ValueError( - "Do not nest ARRAY types; ARRAY(basetype) " - "handles multi-dimensional arrays of basetype" - ) - if isinstance(item_type, type): - item_type = item_type() - self.item_type = item_type - self.as_tuple = as_tuple - self.dimensions = dimensions - self.zero_indexes = zero_indexes - @property def hashable(self): return self.as_tuple From 436cb7221ffca29fd7764a022b8f7b35abd272bd Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 19 Mar 2024 13:35:35 -0400 Subject: [PATCH 163/544] subqueryload invokes compile() on _OverrideBinds - do robust replace of bp Fixed regression from version 2.0.28 caused by the fix for :ticket:`11085` where the newer method of adjusting post-cache bound parameter values would interefere with the implementation for the :func:`_orm.subqueryload` loader option, which has some more legacy patterns in use internally, when the additional loader criteria feature were used with this loader option. Fixes: #11173 Change-Id: I88982fbcc809d516eb7c46a00fb807aab9c3a98e (cherry picked from commit b6f63a57ed878c1e157ecf86cb35d8b15cd7ea3b) --- doc/build/changelog/unreleased_20/11173.rst | 9 +++ lib/sqlalchemy/sql/compiler.py | 33 +++++++-- test/orm/test_relationship_criteria.py | 49 +++++++++++++ test/orm/test_subquery_relations.py | 78 +++++++++++++++++++++ 4 files changed, 164 insertions(+), 5 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11173.rst diff --git a/doc/build/changelog/unreleased_20/11173.rst b/doc/build/changelog/unreleased_20/11173.rst new file mode 100644 index 00000000000..900c6149d25 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11173.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, orm, regression + :tickets: 11173 + + Fixed regression from version 2.0.28 caused by the fix for :ticket:`11085` + where the newer method of adjusting post-cache bound parameter values would + interefere with the implementation for the :func:`_orm.subqueryload` loader + option, which has some more legacy patterns in use internally, when + the additional loader criteria feature were used with this loader option. diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 813d3fa0a05..c354ba83864 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -2360,17 +2360,18 @@ def visit_override_binds(self, override_binds, **kw): the compilation was already performed, and only the bound params should be swapped in at execution time. - However, the test suite has some tests that exercise compilation - on individual elements without using the cache key version, so here we - modify the bound parameter collection for the given compiler based on - the translation. + However, there are test cases that exericise this object, and + additionally the ORM subquery loader is known to feed in expressions + which include this construct into new queries (discovered in #11173), + so it has to do the right thing at compile time as well. """ # get SQL text first sqltext = override_binds.element._compiler_dispatch(self, **kw) - # then change binds after the fact. note that we don't try to + # for a test compile that is not for caching, change binds after the + # fact. note that we don't try to # swap the bindparam as we compile, because our element may be # elsewhere in the statement already (e.g. a subquery or perhaps a # CTE) and was already visited / compiled. 
See @@ -2381,14 +2382,36 @@ def visit_override_binds(self, override_binds, **kw): continue bp = self.binds[k] + # so this would work, just change the value of bp in place. + # but we dont want to mutate things outside. + # bp.value = override_binds.translate[bp.key] + # continue + + # instead, need to replace bp with new_bp or otherwise accommodate + # in all internal collections new_bp = bp._with_value( override_binds.translate[bp.key], maintain_key=True, required=False, ) + name = self.bind_names[bp] self.binds[k] = self.binds[name] = new_bp self.bind_names[new_bp] = name + self.bind_names.pop(bp, None) + + if bp in self.post_compile_params: + self.post_compile_params |= {new_bp} + if bp in self.literal_execute_params: + self.literal_execute_params |= {new_bp} + + ckbm_tuple = self._cache_key_bind_match + if ckbm_tuple: + ckbm, cksm = ckbm_tuple + for bp in bp._cloned_set: + if bp.key in cksm: + cb = cksm[bp.key] + ckbm[cb].append(new_bp) return sqltext diff --git a/test/orm/test_relationship_criteria.py b/test/orm/test_relationship_criteria.py index 4add92c1e72..96c178e5e22 100644 --- a/test/orm/test_relationship_criteria.py +++ b/test/orm/test_relationship_criteria.py @@ -2068,6 +2068,55 @@ def go(value): ), ) + @testing.combinations( + (selectinload,), + (subqueryload,), + (lazyload,), + (joinedload,), + argnames="opt", + ) + @testing.variation("use_in", [True, False]) + def test_opts_local_criteria_cachekey( + self, opt, user_address_fixture, use_in + ): + """test #11173""" + User, Address = user_address_fixture + + s = Session(testing.db, future=True) + + def go(value): + if use_in: + expr = ~Address.email_address.in_([value, "some_email"]) + else: + expr = Address.email_address != value + stmt = ( + select(User) + .options( + opt(User.addresses.and_(expr)), + ) + .order_by(User.id) + ) + result = s.execute(stmt) + return result + + for value in ( + "ed@wood.com", + "ed@lala.com", + "ed@wood.com", + "ed@lala.com", + ): + s.close() + result = go(value) + + eq_( + result.scalars().unique().all(), + ( + self._user_minus_edwood(*user_address_fixture) + if value == "ed@wood.com" + else self._user_minus_edlala(*user_address_fixture) + ), + ) + @testing.combinations( (joinedload, False), (lazyload, True), diff --git a/test/orm/test_subquery_relations.py b/test/orm/test_subquery_relations.py index 00564cfb656..538c77c0cee 100644 --- a/test/orm/test_subquery_relations.py +++ b/test/orm/test_subquery_relations.py @@ -3759,3 +3759,81 @@ def test_issue_6419(self): ), ) s.close() + + +class Issue11173Test(fixtures.DeclarativeMappedTest): + @classmethod + def setup_classes(cls): + Base = cls.DeclarativeBasic + + class SubItem(Base): + __tablename__ = "sub_items" + + id = Column(Integer, primary_key=True, autoincrement=True) + item_id = Column(Integer, ForeignKey("items.id")) + name = Column(String(50)) + number = Column(Integer) + + class Item(Base): + __tablename__ = "items" + + id = Column(Integer, primary_key=True, autoincrement=True) + name = Column(String(50)) + number = Column(Integer) + sub_items = relationship("SubItem", backref="item") + + @classmethod + def insert_data(cls, connection): + Item, SubItem = cls.classes("Item", "SubItem") + + with Session(connection) as sess: + number_of_items = 50 + number_of_sub_items = 5 + + items = [ + Item(name=f"Item:{i}", number=i) + for i in range(number_of_items) + ] + sess.add_all(items) + for item in items: + item.sub_items = [ + SubItem(name=f"SubItem:{item.id}:{i}", number=i) + for i in range(number_of_sub_items) + ] + sess.commit() + + 
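    # --- editorial note, not part of the original patch ---
    # the regression in #11173 appears when a query of this shape is run
    # repeatedly with a different criteria value each time, e.g.::
    #
    #     q = session.query(Item).options(
    #         subqueryload(Item.sub_items.and_(SubItem.number == value))
    #     )
    #
    # after the first run the compiled form comes from the statement cache,
    # so the subqueryload's embedded criteria must have its bound value
    # swapped in post-cache; the test below loops over several values and
    # asserts each run only sees the SubItem rows matching its own value.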
@testing.variation("use_in", [True, False]) + def test_multiple_queries(self, use_in): + Item, SubItem = self.classes("Item", "SubItem") + + for sub_item_number in (1, 2, 3): + s = fixture_session() + base_query = s.query(Item) + + base_query = base_query.filter(Item.number > 5, Item.number <= 10) + + if use_in: + base_query = base_query.options( + subqueryload( + Item.sub_items.and_( + SubItem.number.in_([sub_item_number, 18, 12]) + ) + ) + ) + else: + base_query = base_query.options( + subqueryload( + Item.sub_items.and_(SubItem.number == sub_item_number) + ) + ) + + items = list(base_query) + + eq_(len(items), 5) + + for item in items: + sub_items = list(item.sub_items) + eq_(len(sub_items), 1) + + for sub_item in sub_items: + eq_(sub_item.number, sub_item_number) From c7278af03a7565eddc3b197508a3b45bdc79d0c2 Mon Sep 17 00:00:00 2001 From: Thomas Stephenson Date: Wed, 21 Feb 2024 15:17:01 -0500 Subject: [PATCH 164/544] Add pg DOMAIN type reflection The PostgreSQL dialect now returns :class:`_postgresql.DOMAIN` instances when reflecting a column that has a domain as type. Previously the domain data type was returned instead. As part of this change, the domain reflection was improved to also return the collation of the text types. Fixes: #10693 Closes: #10729 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/10729 Pull-request-sha: adac164d191138265ecd64a28be91254a53a9c25 Change-Id: I8730840de2e7e9649067191430eefa086bcf5e7b (cherry picked from commit 0b6a54811d9cf4943ba2ae4b5a0eaa718b1e848e) --- doc/build/changelog/unreleased_20/10693.rst | 9 + lib/sqlalchemy/dialects/postgresql/base.py | 311 ++++++++++-------- .../dialects/postgresql/named_types.py | 20 +- .../dialects/postgresql/pg_catalog.py | 30 +- test/dialect/postgresql/test_reflection.py | 122 ++++++- test/dialect/postgresql/test_types.py | 214 +++++++++++- 6 files changed, 540 insertions(+), 166 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10693.rst diff --git a/doc/build/changelog/unreleased_20/10693.rst b/doc/build/changelog/unreleased_20/10693.rst new file mode 100644 index 00000000000..c5044b9aa9f --- /dev/null +++ b/doc/build/changelog/unreleased_20/10693.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: postgresql, reflection + :tickets: 10693 + + The PostgreSQL dialect now returns :class:`_postgresql.DOMAIN` instances + when reflecting a column that has a domain as type. Previously, the domain + data type was returned instead. As part of this change, the domain + reflection was improved to also return the collation of the text types. + Pull request courtesy of Thomas Stephenson. diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index ebb7d546db0..4ab3ca24d16 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -2776,6 +2776,8 @@ class ReflectedDomain(ReflectedNamedType): """The constraints defined in the domain, if any. The constraint are in order of evaluation by postgresql. 
""" + collation: Optional[str] + """The collation for the domain.""" class ReflectedEnum(ReflectedNamedType): @@ -3707,20 +3709,156 @@ def get_multi_columns( return columns.items() - def _get_columns_info(self, rows, domains, enums, schema): - array_type_pattern = re.compile(r"\[\]$") - attype_pattern = re.compile(r"\(.*\)") - charlen_pattern = re.compile(r"\(([\d,]+)\)") - args_pattern = re.compile(r"\((.*)\)") - args_split_pattern = re.compile(r"\s*,\s*") - - def _handle_array_type(attype): - return ( - # strip '[]' from integer[], etc. - array_type_pattern.sub("", attype), - attype.endswith("[]"), + _format_type_args_pattern = re.compile(r"\((.*)\)") + _format_type_args_delim = re.compile(r"\s*,\s*") + _format_array_spec_pattern = re.compile(r"((?:\[\])*)$") + + def _reflect_type( + self, + format_type: Optional[str], + domains: dict[str, ReflectedDomain], + enums: dict[str, ReflectedEnum], + type_description: str, + ) -> sqltypes.TypeEngine[Any]: + """ + Attempts to reconstruct a column type defined in ischema_names based + on the information available in the format_type. + + If the `format_type` cannot be associated with a known `ischema_names`, + it is treated as a reference to a known PostgreSQL named `ENUM` or + `DOMAIN` type. + """ + type_description = type_description or "unknown type" + if format_type is None: + util.warn( + "PostgreSQL format_type() returned NULL for %s" + % type_description + ) + return sqltypes.NULLTYPE + + attype_args_match = self._format_type_args_pattern.search(format_type) + if attype_args_match and attype_args_match.group(1): + attype_args = self._format_type_args_delim.split( + attype_args_match.group(1) ) + else: + attype_args = () + + match_array_dim = self._format_array_spec_pattern.search(format_type) + # Each "[]" in array specs corresponds to an array dimension + array_dim = len(match_array_dim.group(1) or "") // 2 + + # Remove all parameters and array specs from format_type to obtain an + # ischema_name candidate + attype = self._format_type_args_pattern.sub("", format_type) + attype = self._format_array_spec_pattern.sub("", attype) + + schema_type = self.ischema_names.get(attype.lower(), None) + args, kwargs = (), {} + + if attype == "numeric": + if len(attype_args) == 2: + precision, scale = map(int, attype_args) + args = (precision, scale) + + elif attype == "double precision": + args = (53,) + + elif attype == "integer": + args = () + + elif attype in ("timestamp with time zone", "time with time zone"): + kwargs["timezone"] = True + if len(attype_args) == 1: + kwargs["precision"] = int(attype_args[0]) + elif attype in ( + "timestamp without time zone", + "time without time zone", + "time", + ): + kwargs["timezone"] = False + if len(attype_args) == 1: + kwargs["precision"] = int(attype_args[0]) + + elif attype == "bit varying": + kwargs["varying"] = True + if len(attype_args) == 1: + charlen = int(attype_args[0]) + args = (charlen,) + + elif attype.startswith("interval"): + schema_type = INTERVAL + + field_match = re.match(r"interval (.+)", attype) + if field_match: + kwargs["fields"] = field_match.group(1) + + if len(attype_args) == 1: + kwargs["precision"] = int(attype_args[0]) + + else: + enum_or_domain_key = tuple(util.quoted_token_parser(attype)) + + if enum_or_domain_key in enums: + schema_type = ENUM + enum = enums[enum_or_domain_key] + + args = tuple(enum["labels"]) + kwargs["name"] = enum["name"] + + if not enum["visible"]: + kwargs["schema"] = enum["schema"] + args = tuple(enum["labels"]) + elif enum_or_domain_key in domains: + 
schema_type = DOMAIN + domain = domains[enum_or_domain_key] + + data_type = self._reflect_type( + domain["type"], + domains, + enums, + type_description="DOMAIN '%s'" % domain["name"], + ) + args = (domain["name"], data_type) + + kwargs["collation"] = domain["collation"] + kwargs["default"] = domain["default"] + kwargs["not_null"] = not domain["nullable"] + kwargs["create_type"] = False + + if domain["constraints"]: + # We only support a single constraint + check_constraint = domain["constraints"][0] + + kwargs["constraint_name"] = check_constraint["name"] + kwargs["check"] = check_constraint["check"] + + if not domain["visible"]: + kwargs["schema"] = domain["schema"] + + else: + try: + charlen = int(attype_args[0]) + args = (charlen, *attype_args[1:]) + except (ValueError, IndexError): + args = attype_args + + if not schema_type: + util.warn( + "Did not recognize type '%s' of %s" + % (attype, type_description) + ) + return sqltypes.NULLTYPE + + data_type = schema_type(*args, **kwargs) + if array_dim >= 1: + # postgres does not preserve dimensionality or size of array types. + data_type = _array.ARRAY(data_type) + + return data_type + + def _get_columns_info(self, rows, domains, enums, schema): columns = defaultdict(list) for row_dict in rows: # ensure that each table has an entry, even if it has no columns @@ -3731,131 +3869,28 @@ def _handle_array_type(attype): continue table_cols = columns[(schema, row_dict["table_name"])] - format_type = row_dict["format_type"] + coltype = self._reflect_type( + row_dict["format_type"], + domains, + enums, + type_description="column '%s'" % row_dict["name"], + ) + default = row_dict["default"] name = row_dict["name"] generated = row_dict["generated"] - identity = row_dict["identity_options"] - - if format_type is None: - no_format_type = True - attype = format_type = "no format_type()" - is_array = False - else: - no_format_type = False - - # strip (*) from character varying(5), timestamp(5) - # with time zone, geometry(POLYGON), etc. - attype = attype_pattern.sub("", format_type) - - # strip '[]' from integer[], etc. 
and check if an array - attype, is_array = _handle_array_type(attype) - - # strip quotes from case sensitive enum or domain names - enum_or_domain_key = tuple(util.quoted_token_parser(attype)) - nullable = not row_dict["not_null"] - charlen = charlen_pattern.search(format_type) - if charlen: - charlen = charlen.group(1) - args = args_pattern.search(format_type) - if args and args.group(1): - args = tuple(args_split_pattern.split(args.group(1))) - else: - args = () - kwargs = {} + if isinstance(coltype, DOMAIN): + if not default: + # domain can override the default value but + # cant set it to None + if coltype.default is not None: + default = coltype.default - if attype == "numeric": - if charlen: - prec, scale = charlen.split(",") - args = (int(prec), int(scale)) - else: - args = () - elif attype == "double precision": - args = (53,) - elif attype == "integer": - args = () - elif attype in ("timestamp with time zone", "time with time zone"): - kwargs["timezone"] = True - if charlen: - kwargs["precision"] = int(charlen) - args = () - elif attype in ( - "timestamp without time zone", - "time without time zone", - "time", - ): - kwargs["timezone"] = False - if charlen: - kwargs["precision"] = int(charlen) - args = () - elif attype == "bit varying": - kwargs["varying"] = True - if charlen: - args = (int(charlen),) - else: - args = () - elif attype.startswith("interval"): - field_match = re.match(r"interval (.+)", attype, re.I) - if charlen: - kwargs["precision"] = int(charlen) - if field_match: - kwargs["fields"] = field_match.group(1) - attype = "interval" - args = () - elif charlen: - args = (int(charlen),) - - while True: - # looping here to suit nested domains - if attype in self.ischema_names: - coltype = self.ischema_names[attype] - break - elif enum_or_domain_key in enums: - enum = enums[enum_or_domain_key] - coltype = ENUM - kwargs["name"] = enum["name"] - if not enum["visible"]: - kwargs["schema"] = enum["schema"] - args = tuple(enum["labels"]) - break - elif enum_or_domain_key in domains: - domain = domains[enum_or_domain_key] - attype = domain["type"] - attype, is_array = _handle_array_type(attype) - # strip quotes from case sensitive enum or domain names - enum_or_domain_key = tuple( - util.quoted_token_parser(attype) - ) - # A table can't override a not null on the domain, - # but can override nullable - nullable = nullable and domain["nullable"] - if domain["default"] and not default: - # It can, however, override the default - # value, but can't set it to null. - default = domain["default"] - continue - else: - coltype = None - break - - if coltype: - coltype = coltype(*args, **kwargs) - if is_array: - coltype = self.ischema_names["_array"](coltype) - elif no_format_type: - util.warn( - "PostgreSQL format_type() returned NULL for column '%s'" - % (name,) - ) - coltype = sqltypes.NULLTYPE - else: - util.warn( - "Did not recognize type '%s' of column '%s'" - % (attype, name) - ) - coltype = sqltypes.NULLTYPE + nullable = nullable and not coltype.not_null + + identity = row_dict["identity_options"] # If a zero byte or blank string depending on driver (is also # absent for older PG versions), then not a generated column. 
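[editorial illustration, not part of the patch] With the reflection changes above, a column whose type is a PostgreSQL domain is now reflected as a :class:`_postgresql.DOMAIN` wrapping the underlying type rather than as the underlying type alone; a minimal sketch, reusing the ``testtable`` / ``testdomain`` objects created in the test suite below and a hypothetical connection URL::

    from sqlalchemy import create_engine, inspect, MetaData, Table
    from sqlalchemy.dialects.postgresql import DOMAIN

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
    with engine.connect() as conn:
        table = Table("testtable", MetaData(), autoload_with=conn)
        domain = table.c.answer.type
        assert isinstance(domain, DOMAIN)      # previously reflected as INTEGER
        print(domain.name, domain.data_type)   # e.g. "testdomain" INTEGER

        # the new "collation" field also appears in Inspector.get_domains()
        print(inspect(conn).get_domains())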
@@ -4904,12 +4939,18 @@ def _domain_query(self, schema): pg_catalog.pg_namespace.c.nspname.label("schema"), con_sq.c.condefs, con_sq.c.connames, + pg_catalog.pg_collation.c.collname, ) .join( pg_catalog.pg_namespace, pg_catalog.pg_namespace.c.oid == pg_catalog.pg_type.c.typnamespace, ) + .outerjoin( + pg_catalog.pg_collation, + pg_catalog.pg_type.c.typcollation + == pg_catalog.pg_collation.c.oid, + ) .outerjoin( con_sq, pg_catalog.pg_type.c.oid == con_sq.c.contypid, @@ -4923,14 +4964,13 @@ def _domain_query(self, schema): @reflection.cache def _load_domains(self, connection, schema=None, **kw): - # Load data types for domains: result = connection.execute(self._domain_query(schema)) - domains = [] + domains: List[ReflectedDomain] = [] for domain in result.mappings(): # strip (30) from character varying(30) attype = re.search(r"([^\(]+)", domain["attype"]).group(1) - constraints = [] + constraints: List[ReflectedDomainConstraint] = [] if domain["connames"]: # When a domain has multiple CHECK constraints, they will # be tested in alphabetical order by name. @@ -4944,7 +4984,7 @@ def _load_domains(self, connection, schema=None, **kw): check = def_[7:-1] constraints.append({"name": name, "check": check}) - domain_rec = { + domain_rec: ReflectedDomain = { "name": domain["name"], "schema": domain["schema"], "visible": domain["visible"], @@ -4952,6 +4992,7 @@ def _load_domains(self, connection, schema=None, **kw): "nullable": domain["nullable"], "default": domain["default"], "constraints": constraints, + "collation": domain["collname"], } domains.append(domain_rec) diff --git a/lib/sqlalchemy/dialects/postgresql/named_types.py b/lib/sqlalchemy/dialects/postgresql/named_types.py index 56bec1dc732..16e5c867efc 100644 --- a/lib/sqlalchemy/dialects/postgresql/named_types.py +++ b/lib/sqlalchemy/dialects/postgresql/named_types.py @@ -416,10 +416,10 @@ def __init__( data_type: _TypeEngineArgument[Any], *, collation: Optional[str] = None, - default: Optional[Union[str, elements.TextClause]] = None, + default: Union[elements.TextClause, str, None] = None, constraint_name: Optional[str] = None, not_null: Optional[bool] = None, - check: Optional[str] = None, + check: Union[elements.TextClause, str, None] = None, create_type: bool = True, **kw: Any, ): @@ -463,7 +463,7 @@ def __init__( self.default = default self.collation = collation self.constraint_name = constraint_name - self.not_null = not_null + self.not_null = bool(not_null) if check is not None: check = coercions.expect(roles.DDLExpressionRole, check) self.check = check @@ -474,6 +474,20 @@ def __init__( def __test_init__(cls): return cls("name", sqltypes.Integer) + def adapt(self, impl, **kw): + if self.default: + kw["default"] = self.default + if self.constraint_name is not None: + kw["constraint_name"] = self.constraint_name + if self.not_null: + kw["not_null"] = self.not_null + if self.check is not None: + kw["check"] = str(self.check) + if self.create_type: + kw["create_type"] = self.create_type + + return super().adapt(impl, **kw) + class CreateEnumType(schema._CreateDropBase): __visit_name__ = "create_enum_type" diff --git a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py index 7b44bc93f7b..9b5562c13fc 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py +++ b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py @@ -77,7 +77,7 @@ def process(value): RELKINDS_ALL_TABLE_LIKE = RELKINDS_TABLE + RELKINDS_VIEW + RELKINDS_MAT_VIEW # tables -pg_catalog_meta = MetaData() +pg_catalog_meta = 
MetaData(schema="pg_catalog") pg_namespace = Table( "pg_namespace", @@ -85,7 +85,6 @@ def process(value): Column("oid", OID), Column("nspname", NAME), Column("nspowner", OID), - schema="pg_catalog", ) pg_class = Table( @@ -120,7 +119,6 @@ def process(value): Column("relispartition", Boolean, info={"server_version": (10,)}), Column("relrewrite", OID, info={"server_version": (11,)}), Column("reloptions", ARRAY(Text)), - schema="pg_catalog", ) pg_type = Table( @@ -155,7 +153,6 @@ def process(value): Column("typndims", Integer), Column("typcollation", OID, info={"server_version": (9, 1)}), Column("typdefault", Text), - schema="pg_catalog", ) pg_index = Table( @@ -182,7 +179,6 @@ def process(value): Column("indoption", INT2VECTOR), Column("indexprs", PG_NODE_TREE), Column("indpred", PG_NODE_TREE), - schema="pg_catalog", ) pg_attribute = Table( @@ -209,7 +205,6 @@ def process(value): Column("attislocal", Boolean), Column("attinhcount", Integer), Column("attcollation", OID, info={"server_version": (9, 1)}), - schema="pg_catalog", ) pg_constraint = Table( @@ -235,7 +230,6 @@ def process(value): Column("connoinherit", Boolean, info={"server_version": (9, 2)}), Column("conkey", ARRAY(SmallInteger)), Column("confkey", ARRAY(SmallInteger)), - schema="pg_catalog", ) pg_sequence = Table( @@ -249,7 +243,6 @@ def process(value): Column("seqmin", BigInteger), Column("seqcache", BigInteger), Column("seqcycle", Boolean), - schema="pg_catalog", info={"server_version": (10,)}, ) @@ -260,7 +253,6 @@ def process(value): Column("adrelid", OID), Column("adnum", SmallInteger), Column("adbin", PG_NODE_TREE), - schema="pg_catalog", ) pg_description = Table( @@ -270,7 +262,6 @@ def process(value): Column("classoid", OID), Column("objsubid", Integer), Column("description", Text(collation="C")), - schema="pg_catalog", ) pg_enum = Table( @@ -280,7 +271,6 @@ def process(value): Column("enumtypid", OID), Column("enumsortorder", Float(), info={"server_version": (9, 1)}), Column("enumlabel", NAME), - schema="pg_catalog", ) pg_am = Table( @@ -290,5 +280,21 @@ def process(value): Column("amname", NAME), Column("amhandler", REGPROC, info={"server_version": (9, 6)}), Column("amtype", CHAR, info={"server_version": (9, 6)}), - schema="pg_catalog", +) + +pg_collation = Table( + "pg_collation", + pg_catalog_meta, + Column("oid", OID, info={"server_version": (9, 3)}), + Column("collname", NAME), + Column("collnamespace", OID), + Column("collowner", OID), + Column("collprovider", CHAR, info={"server_version": (10,)}), + Column("collisdeterministic", Boolean, info={"server_version": (12,)}), + Column("collencoding", Integer), + Column("collcollate", Text), + Column("collctype", Text), + Column("colliculocale", Text), + Column("collicurules", Text, info={"server_version": (16,)}), + Column("collversion", Text, info={"server_version": (10,)}), ) diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index dd6c8aa88ee..3d29a89de7b 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -23,6 +23,7 @@ from sqlalchemy import UniqueConstraint from sqlalchemy.dialects.postgresql import ARRAY from sqlalchemy.dialects.postgresql import base as postgresql +from sqlalchemy.dialects.postgresql import DOMAIN from sqlalchemy.dialects.postgresql import ExcludeConstraint from sqlalchemy.dialects.postgresql import INTEGER from sqlalchemy.dialects.postgresql import INTERVAL @@ -408,25 +409,24 @@ class DomainReflectionTest(fixtures.TestBase, 
AssertsExecutionResults): def setup_test_class(cls): with testing.db.begin() as con: for ddl in [ - 'CREATE SCHEMA "SomeSchema"', + 'CREATE SCHEMA IF NOT EXISTS "SomeSchema"', "CREATE DOMAIN testdomain INTEGER NOT NULL DEFAULT 42", "CREATE DOMAIN test_schema.testdomain INTEGER DEFAULT 0", "CREATE TYPE testtype AS ENUM ('test')", "CREATE DOMAIN enumdomain AS testtype", "CREATE DOMAIN arraydomain AS INTEGER[]", + "CREATE DOMAIN arraydomain_2d AS INTEGER[][]", + "CREATE DOMAIN arraydomain_3d AS INTEGER[][][]", 'CREATE DOMAIN "SomeSchema"."Quoted.Domain" INTEGER DEFAULT 0', - "CREATE DOMAIN nullable_domain AS TEXT CHECK " + 'CREATE DOMAIN nullable_domain AS TEXT COLLATE "C" CHECK ' "(VALUE IN('FOO', 'BAR'))", "CREATE DOMAIN not_nullable_domain AS TEXT NOT NULL", "CREATE DOMAIN my_int AS int CONSTRAINT b_my_int_one CHECK " "(VALUE > 1) CONSTRAINT a_my_int_two CHECK (VALUE < 42) " "CHECK(VALUE != 22)", ]: - try: - con.exec_driver_sql(ddl) - except exc.DBAPIError as e: - if "already exists" not in str(e): - raise e + con.exec_driver_sql(ddl) + con.exec_driver_sql( "CREATE TABLE testtable (question integer, answer " "testdomain)" @@ -446,7 +446,12 @@ def setup_test_class(cls): ) con.exec_driver_sql( - "CREATE TABLE array_test (id integer, data arraydomain)" + "CREATE TABLE array_test (" + "id integer, " + "datas arraydomain, " + "datass arraydomain_2d, " + "datasss arraydomain_3d" + ")" ) con.exec_driver_sql( @@ -473,6 +478,8 @@ def teardown_test_class(cls): con.exec_driver_sql("DROP TYPE testtype") con.exec_driver_sql("DROP TABLE array_test") con.exec_driver_sql("DROP DOMAIN arraydomain") + con.exec_driver_sql("DROP DOMAIN arraydomain_2d") + con.exec_driver_sql("DROP DOMAIN arraydomain_3d") con.exec_driver_sql('DROP DOMAIN "SomeSchema"."Quoted.Domain"') con.exec_driver_sql('DROP SCHEMA "SomeSchema"') @@ -489,7 +496,9 @@ def test_table_is_reflected(self, connection): {"question", "answer"}, "Columns of reflected table didn't equal expected columns", ) - assert isinstance(table.c.answer.type, Integer) + assert isinstance(table.c.answer.type, DOMAIN) + assert table.c.answer.type.name, "testdomain" + assert isinstance(table.c.answer.type.data_type, Integer) def test_nullable_from_domain(self, connection): metadata = MetaData() @@ -514,18 +523,36 @@ def test_domain_is_reflected(self, connection): def test_enum_domain_is_reflected(self, connection): metadata = MetaData() table = Table("enum_test", metadata, autoload_with=connection) - eq_(table.c.data.type.enums, ["test"]) + assert isinstance(table.c.data.type, DOMAIN) + eq_(table.c.data.type.data_type.enums, ["test"]) def test_array_domain_is_reflected(self, connection): metadata = MetaData() table = Table("array_test", metadata, autoload_with=connection) - eq_(table.c.data.type.__class__, ARRAY) - eq_(table.c.data.type.item_type.__class__, INTEGER) + + def assert_is_integer_array_domain(domain, name): + # Postgres does not persist the dimensionality of the array. 
+ # It's always treated as integer[] + assert isinstance(domain, DOMAIN) + assert domain.name == name + assert isinstance(domain.data_type, ARRAY) + assert isinstance(domain.data_type.item_type, INTEGER) + + array_domain = table.c.datas.type + assert_is_integer_array_domain(array_domain, "arraydomain") + + array_domain_2d = table.c.datass.type + assert_is_integer_array_domain(array_domain_2d, "arraydomain_2d") + + array_domain_3d = table.c.datasss.type + assert_is_integer_array_domain(array_domain_3d, "arraydomain_3d") def test_quoted_remote_schema_domain_is_reflected(self, connection): metadata = MetaData() table = Table("quote_test", metadata, autoload_with=connection) - eq_(table.c.data.type.__class__, INTEGER) + assert isinstance(table.c.data.type, DOMAIN) + assert table.c.data.type.name, "Quoted.Domain" + assert isinstance(table.c.data.type.data_type, Integer) def test_table_is_reflected_test_schema(self, connection): metadata = MetaData() @@ -603,6 +630,27 @@ def all_domains(self): "type": "integer[]", "default": None, "constraints": [], + "collation": None, + }, + { + "visible": True, + "name": "arraydomain_2d", + "schema": "public", + "nullable": True, + "type": "integer[]", + "default": None, + "constraints": [], + "collation": None, + }, + { + "visible": True, + "name": "arraydomain_3d", + "schema": "public", + "nullable": True, + "type": "integer[]", + "default": None, + "constraints": [], + "collation": None, }, { "visible": True, @@ -612,6 +660,7 @@ def all_domains(self): "type": "testtype", "default": None, "constraints": [], + "collation": None, }, { "visible": True, @@ -626,6 +675,7 @@ def all_domains(self): # autogenerated name by pg {"check": "VALUE <> 22", "name": "my_int_check"}, ], + "collation": None, }, { "visible": True, @@ -635,6 +685,7 @@ def all_domains(self): "type": "text", "default": None, "constraints": [], + "collation": "default", }, { "visible": True, @@ -651,6 +702,7 @@ def all_domains(self): "name": "nullable_domain_check", } ], + "collation": "C", }, { "visible": True, @@ -660,6 +712,7 @@ def all_domains(self): "type": "integer", "default": "42", "constraints": [], + "collation": None, }, ], "test_schema": [ @@ -671,6 +724,7 @@ def all_domains(self): "type": "integer", "default": "0", "constraints": [], + "collation": None, } ], "SomeSchema": [ @@ -682,13 +736,20 @@ def all_domains(self): "type": "integer", "default": "0", "constraints": [], + "collation": None, } ], } def test_inspect_domains(self, connection): inspector = inspect(connection) - eq_(inspector.get_domains(), self.all_domains["public"]) + domains = inspector.get_domains() + + domain_names = {d["name"] for d in domains} + expect_domain_names = {d["name"] for d in self.all_domains["public"]} + eq_(domain_names, expect_domain_names) + + eq_(domains, self.all_domains["public"]) def test_inspect_domains_schema(self, connection): inspector = inspect(connection) @@ -705,7 +766,38 @@ def test_inspect_domains_star(self, connection): all_ = [d for dl in self.all_domains.values() for d in dl] all_ += inspector.get_domains("information_schema") exp = sorted(all_, key=lambda d: (d["schema"], d["name"])) - eq_(inspector.get_domains("*"), exp) + domains = inspector.get_domains("*") + + eq_(domains, exp) + + +class ArrayReflectionTest(fixtures.TablesTest): + __only_on__ = "postgresql >= 10" + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table( + "array_table", + metadata, + Column("id", INTEGER, primary_key=True), + Column("datas", ARRAY(INTEGER)), + Column("datass", 
ARRAY(INTEGER, dimensions=2)), + Column("datasss", ARRAY(INTEGER, dimensions=3)), + ) + + def test_array_table_is_reflected(self, connection): + metadata = MetaData() + table = Table("array_table", metadata, autoload_with=connection) + + def assert_is_integer_array(data_type): + assert isinstance(data_type, ARRAY) + # posgres treats all arrays as one-dimensional arrays + assert isinstance(data_type.item_type, INTEGER) + + assert_is_integer_array(table.c.datas.type) + assert_is_integer_array(table.c.datass.type) + assert_is_integer_array(table.c.datasss.type) class ReflectionTest( diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index 08479b445f5..65c5fdbf7f6 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -73,6 +73,7 @@ from sqlalchemy.dialects.postgresql import TSRANGE from sqlalchemy.dialects.postgresql import TSTZMULTIRANGE from sqlalchemy.dialects.postgresql import TSTZRANGE +from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.dialects.postgresql.ranges import MultiRange from sqlalchemy.exc import CompileError from sqlalchemy.exc import DBAPIError @@ -531,6 +532,7 @@ def make_type(**kw): "check": r"VALUE ~ '[^@]+@[^@]+\.[^@]+'::text", } ], + "collation": "default", } ], ) @@ -1075,7 +1077,7 @@ def test_standalone_enum(self, connection, metadata): connection, "fourfivesixtype" ) - def test_reflection(self, metadata, connection): + def test_enum_type_reflection(self, metadata, connection): etype = Enum( "four", "five", "six", name="fourfivesixtype", metadata=metadata ) @@ -1229,6 +1231,212 @@ def test_generic_w_some_other_variant(self, metadata, connection): ] +class DomainTest( + AssertsCompiledSQL, fixtures.TestBase, AssertsExecutionResults +): + __backend__ = True + __only_on__ = "postgresql > 8.3" + + def test_domain_type_reflection(self, metadata, connection): + positive_int = DOMAIN( + "positive_int", Integer(), check="value > 0", not_null=True + ) + my_str = DOMAIN("my_string", Text(), collation="C", default="~~") + Table( + "table", + metadata, + Column("value", positive_int), + Column("str", my_str), + ) + + metadata.create_all(connection) + m2 = MetaData() + t2 = Table("table", m2, autoload_with=connection) + + vt = t2.c.value.type + is_true(isinstance(vt, DOMAIN)) + is_true(isinstance(vt.data_type, Integer)) + eq_(vt.name, "positive_int") + eq_(str(vt.check), "VALUE > 0") + is_(vt.default, None) + is_(vt.collation, None) + is_true(vt.constraint_name is not None) + is_true(vt.not_null) + is_false(vt.create_type) + + st = t2.c.str.type + is_true(isinstance(st, DOMAIN)) + is_true(isinstance(st.data_type, Text)) + eq_(st.name, "my_string") + is_(st.check, None) + is_true("~~" in st.default) + eq_(st.collation, "C") + is_(st.constraint_name, None) + is_false(st.not_null) + is_false(st.create_type) + + def test_domain_create_table(self, metadata, connection): + metadata = self.metadata + Email = DOMAIN( + name="email", + data_type=Text, + check=r"VALUE ~ '[^@]+@[^@]+\.[^@]+'", + ) + PosInt = DOMAIN( + name="pos_int", + data_type=Integer, + not_null=True, + check=r"VALUE > 0", + ) + t1 = Table( + "table", + metadata, + Column("id", Integer, primary_key=True), + Column("email", Email), + Column("number", PosInt), + ) + t1.create(connection) + t1.create(connection, checkfirst=True) # check the create + connection.execute( + t1.insert(), {"email": "test@example.com", "number": 42} + ) + connection.execute(t1.insert(), {"email": "a@b.c", "number": 1}) + connection.execute( + 
t1.insert(), {"email": "example@gmail.co.uk", "number": 99} + ) + eq_( + connection.execute(t1.select().order_by(t1.c.id)).fetchall(), + [ + (1, "test@example.com", 42), + (2, "a@b.c", 1), + (3, "example@gmail.co.uk", 99), + ], + ) + + @testing.combinations( + tuple( + [ + DOMAIN( + name="mytype", + data_type=Text, + check=r"VALUE ~ '[^@]+@[^@]+\.[^@]+'", + create_type=True, + ), + ] + ), + tuple( + [ + DOMAIN( + name="mytype", + data_type=Text, + check=r"VALUE ~ '[^@]+@[^@]+\.[^@]+'", + create_type=False, + ), + ] + ), + argnames="domain", + ) + def test_create_drop_domain_with_table(self, connection, metadata, domain): + table = Table("e1", metadata, Column("e1", domain)) + + def _domain_names(): + return {d["name"] for d in inspect(connection).get_domains()} + + assert "mytype" not in _domain_names() + + if domain.create_type: + table.create(connection) + assert "mytype" in _domain_names() + else: + with expect_raises(exc.ProgrammingError): + table.create(connection) + connection.rollback() + + domain.create(connection) + assert "mytype" in _domain_names() + table.create(connection) + + table.drop(connection) + if domain.create_type: + assert "mytype" not in _domain_names() + + @testing.combinations( + (Integer, "value > 0", 4), + (String, "value != ''", "hello world"), + ( + UUID, + "value != '{00000000-0000-0000-0000-000000000000}'", + uuid.uuid4(), + ), + ( + DateTime, + "value >= '2020-01-01T00:00:00'", + datetime.datetime.fromisoformat("2021-01-01T00:00:00.000"), + ), + argnames="domain_datatype, domain_check, value", + ) + def test_domain_roundtrip( + self, metadata, connection, domain_datatype, domain_check, value + ): + table = Table( + "domain_roundtrip_test", + metadata, + Column("id", Integer, primary_key=True), + Column( + "value", + DOMAIN("valuedomain", domain_datatype, check=domain_check), + ), + ) + table.create(connection) + + connection.execute(table.insert(), {"value": value}) + + results = connection.execute( + table.select().order_by(table.c.id) + ).fetchall() + eq_(results, [(1, value)]) + + @testing.combinations( + (DOMAIN("pos_int", Integer, check="VALUE > 0", not_null=True), 4, -4), + ( + DOMAIN("email", String, check=r"VALUE ~ '[^@]+@[^@]+\.[^@]+'"), + "e@xample.com", + "fred", + ), + argnames="domain,pass_value,fail_value", + ) + def test_check_constraint( + self, metadata, connection, domain, pass_value, fail_value + ): + table = Table("table", metadata, Column("value", domain)) + table.create(connection) + + connection.execute(table.insert(), {"value": pass_value}) + + # psycopg/psycopg2 raise IntegrityError, while pg8000 raises + # ProgrammingError + with expect_raises(exc.DatabaseError): + connection.execute(table.insert(), {"value": fail_value}) + + @testing.combinations( + (DOMAIN("nullable_domain", Integer, not_null=True), 1), + (DOMAIN("non_nullable_domain", Integer, not_null=False), 1), + argnames="domain,pass_value", + ) + def test_domain_nullable(self, metadata, connection, domain, pass_value): + table = Table("table", metadata, Column("value", domain)) + table.create(connection) + connection.execute(table.insert(), {"value": pass_value}) + + if domain.not_null: + # psycopg/psycopg2 raise IntegrityError, while pg8000 raises + # ProgrammingError + with expect_raises(exc.DatabaseError): + connection.execute(table.insert(), {"value": None}) + else: + connection.execute(table.insert(), {"value": None}) + + class DomainDDLEventTest(DDLEventWCreateHarness, fixtures.TestBase): __backend__ = True @@ -1557,6 +1765,10 @@ def test_reflection(self, metadata, 
connection): t1.create(connection) m2 = MetaData() t2 = Table("t1", m2, autoload_with=connection) + + eq_(t1.c.c1.type.__class__, postgresql.TIME) + eq_(t1.c.c4.type.__class__, postgresql.TIMESTAMP) + eq_(t2.c.c1.type.precision, None) eq_(t2.c.c2.type.precision, 5) eq_(t2.c.c3.type.precision, 5) From 39f4b9a75497eae3589501a3790f9fb7866dc557 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 23 Mar 2024 10:55:55 -0400 Subject: [PATCH 165/544] changelog fixes Change-Id: I1e1b752660d2186647c15f2b19e8eece720f29cb (cherry picked from commit 438f09c82a295343e4211df7a31582e829ecde35) --- doc/build/changelog/unreleased_20/10693.rst | 2 +- doc/build/changelog/unreleased_20/11176.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/changelog/unreleased_20/10693.rst b/doc/build/changelog/unreleased_20/10693.rst index c5044b9aa9f..914703f3cff 100644 --- a/doc/build/changelog/unreleased_20/10693.rst +++ b/doc/build/changelog/unreleased_20/10693.rst @@ -1,5 +1,5 @@ .. change:: - :tags: postgresql, reflection + :tags: postgresql, usecase :tickets: 10693 The PostgreSQL dialect now returns :class:`_postgresql.DOMAIN` instances diff --git a/doc/build/changelog/unreleased_20/11176.rst b/doc/build/changelog/unreleased_20/11176.rst index cc35ab1d543..be9b1ecf770 100644 --- a/doc/build/changelog/unreleased_20/11176.rst +++ b/doc/build/changelog/unreleased_20/11176.rst @@ -1,5 +1,5 @@ .. change:: - :tag: bug, sql, regression + :tags: bug, sql, regression :tickets: 11176 Fixed regression from the 1.4 series where the refactor of the From 4bc12e6abda97386b2eb66aff21312d010e57e1d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 23 Mar 2024 13:02:46 -0400 Subject: [PATCH 166/544] use asyncio.Runner for tests Backported to SQLAlchemy 2.0 an improvement to the test suite with regards to how asyncio related tests are run, now using the newer Python 3.11 ``asyncio.Runner`` or a backported equivalent, rather than relying on the previous implementation based on ``asyncio.get_running_loop()``. This should hopefully prevent issues with large suite runs on CPU loaded hardware where the event loop seems to become corrupted, leading to cascading failures. Fixes: #11187 Change-Id: I867b2478b9ba3a152fbfef380650eb987527ba46 --- doc/build/changelog/unreleased_20/11187.rst | 12 +++ lib/sqlalchemy/testing/asyncio.py | 17 ++-- lib/sqlalchemy/testing/plugin/pytestplugin.py | 6 ++ lib/sqlalchemy/util/_concurrency_py3k.py | 79 ++++++++++++------- lib/sqlalchemy/util/concurrency.py | 47 +++++++++-- 5 files changed, 121 insertions(+), 40 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11187.rst diff --git a/doc/build/changelog/unreleased_20/11187.rst b/doc/build/changelog/unreleased_20/11187.rst new file mode 100644 index 00000000000..be16ef301e5 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11187.rst @@ -0,0 +1,12 @@ +.. change:: + :tags: bug, tests + :tickets: 11187 + + Backported to SQLAlchemy 2.0 an improvement to the test suite with regards + to how asyncio related tests are run, now using the newer Python 3.11 + ``asyncio.Runner`` or a backported equivalent, rather than relying on the + previous implementation based on ``asyncio.get_running_loop()``. This + should hopefully prevent issues with large suite runs on CPU loaded + hardware where the event loop seems to become corrupted, leading to + cascading failures. 
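[editorial illustration, not part of the patch] The pattern being adopted, shown with the stdlib ``asyncio.Runner`` available on Python 3.11+; the backported ``_Runner`` added in this patch exposes the same ``run()`` / ``get_loop()`` / ``close()`` surface::

    import asyncio

    async def ping() -> str:
        await asyncio.sleep(0)
        return "ok"

    # a single Runner owns one event loop for the whole session, rather than
    # looking up or reusing whatever loop get_event_loop() would return
    with asyncio.Runner() as runner:
        assert runner.run(ping()) == "ok"
        assert runner.get_loop().is_running() is False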
+ diff --git a/lib/sqlalchemy/testing/asyncio.py b/lib/sqlalchemy/testing/asyncio.py index 17dc861c95a..f71ca57fe57 100644 --- a/lib/sqlalchemy/testing/asyncio.py +++ b/lib/sqlalchemy/testing/asyncio.py @@ -24,16 +24,21 @@ import inspect from . import config -from ..util.concurrency import _util_async_run -from ..util.concurrency import _util_async_run_coroutine_function +from ..util.concurrency import _AsyncUtil # may be set to False if the # --disable-asyncio flag is passed to the test runner. ENABLE_ASYNCIO = True +_async_util = _AsyncUtil() # it has lazy init so just always create one + + +def _shutdown(): + """called when the test finishes""" + _async_util.close() def _run_coroutine_function(fn, *args, **kwargs): - return _util_async_run_coroutine_function(fn, *args, **kwargs) + return _async_util.run(fn, *args, **kwargs) def _assume_async(fn, *args, **kwargs): @@ -50,7 +55,7 @@ def _assume_async(fn, *args, **kwargs): if not ENABLE_ASYNCIO: return fn(*args, **kwargs) - return _util_async_run(fn, *args, **kwargs) + return _async_util.run_in_greenlet(fn, *args, **kwargs) def _maybe_async_provisioning(fn, *args, **kwargs): @@ -69,7 +74,7 @@ def _maybe_async_provisioning(fn, *args, **kwargs): return fn(*args, **kwargs) if config.any_async: - return _util_async_run(fn, *args, **kwargs) + return _async_util.run_in_greenlet(fn, *args, **kwargs) else: return fn(*args, **kwargs) @@ -89,7 +94,7 @@ def _maybe_async(fn, *args, **kwargs): is_async = config._current.is_async if is_async: - return _util_async_run(fn, *args, **kwargs) + return _async_util.run_in_greenlet(fn, *args, **kwargs) else: return fn(*args, **kwargs) diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py index b63e06359c5..1a4d4bb30a1 100644 --- a/lib/sqlalchemy/testing/plugin/pytestplugin.py +++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py @@ -182,6 +182,12 @@ def pytest_sessionfinish(session): collect_types.dump_stats(session.config.option.dump_pyannotate) +def pytest_unconfigure(config): + from sqlalchemy.testing import asyncio + + asyncio._shutdown() + + def pytest_collection_finish(session): if session.config.option.dump_pyannotate: from pyannotate_runtime import collect_types diff --git a/lib/sqlalchemy/util/_concurrency_py3k.py b/lib/sqlalchemy/util/_concurrency_py3k.py index 42ceb8122ee..defef1f6bf3 100644 --- a/lib/sqlalchemy/util/_concurrency_py3k.py +++ b/lib/sqlalchemy/util/_concurrency_py3k.py @@ -19,10 +19,14 @@ from typing import Optional from typing import TYPE_CHECKING from typing import TypeVar +from typing import Union from .langhelpers import memoized_property from .. 
import exc +from ..util import py311 +from ..util.typing import Literal from ..util.typing import Protocol +from ..util.typing import Self from ..util.typing import TypeGuard _T = TypeVar("_T") @@ -225,34 +229,6 @@ def __exit__(self, *arg: Any, **kw: Any) -> None: self.mutex.release() -def _util_async_run_coroutine_function( - fn: Callable[..., Coroutine[Any, Any, Any]], *args: Any, **kwargs: Any -) -> Any: - """for test suite/ util only""" - - loop = get_event_loop() - if loop.is_running(): - raise Exception( - "for async run coroutine we expect that no greenlet or event " - "loop is running when we start out" - ) - return loop.run_until_complete(fn(*args, **kwargs)) - - -def _util_async_run( - fn: Callable[..., Coroutine[Any, Any, Any]], *args: Any, **kwargs: Any -) -> Any: - """for test suite/ util only""" - - loop = get_event_loop() - if not loop.is_running(): - return loop.run_until_complete(greenlet_spawn(fn, *args, **kwargs)) - else: - # allow for a wrapped test function to call another - assert isinstance(getcurrent(), _AsyncIoGreenlet) - return fn(*args, **kwargs) - - def get_event_loop() -> asyncio.AbstractEventLoop: """vendor asyncio.get_event_loop() for python 3.7 and above. @@ -265,3 +241,50 @@ def get_event_loop() -> asyncio.AbstractEventLoop: # avoid "During handling of the above exception, another exception..." pass return asyncio.get_event_loop_policy().get_event_loop() + + +if py311: + _Runner = asyncio.Runner +else: + + class _Runner: # type: ignore[no-redef] + """Runner implementation for test only""" + + _loop: Union[None, asyncio.AbstractEventLoop, Literal[False]] + + def __init__(self) -> None: + self._loop = None + + def __enter__(self) -> Self: + self._lazy_init() + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + self.close() + + def close(self) -> None: + if self._loop: + try: + self._loop.run_until_complete( + self._loop.shutdown_asyncgens() + ) + finally: + self._loop.close() + self._loop = False + + def get_loop(self) -> asyncio.AbstractEventLoop: + """Return embedded event loop.""" + self._lazy_init() + assert self._loop + return self._loop + + def run(self, coro: Coroutine[Any, Any, _T]) -> _T: + self._lazy_init() + assert self._loop + return self._loop.run_until_complete(coro) + + def _lazy_init(self) -> None: + if self._loop is False: + raise RuntimeError("Runner is closed") + if self._loop is None: + self._loop = asyncio.new_event_loop() diff --git a/lib/sqlalchemy/util/concurrency.py b/lib/sqlalchemy/util/concurrency.py index 65a62052125..96ba416cabf 100644 --- a/lib/sqlalchemy/util/concurrency.py +++ b/lib/sqlalchemy/util/concurrency.py @@ -10,6 +10,10 @@ import asyncio # noqa import typing +from typing import Any +from typing import Callable +from typing import Coroutine +from typing import TypeVar have_greenlet = False greenlet_error = None @@ -26,12 +30,43 @@ from ._concurrency_py3k import greenlet_spawn as greenlet_spawn from ._concurrency_py3k import is_exit_exception as is_exit_exception from ._concurrency_py3k import AsyncAdaptedLock as AsyncAdaptedLock - from ._concurrency_py3k import ( - _util_async_run as _util_async_run, - ) # noqa: F401 - from ._concurrency_py3k import ( - _util_async_run_coroutine_function as _util_async_run_coroutine_function, # noqa: F401, E501 - ) + from ._concurrency_py3k import _Runner + +_T = TypeVar("_T") + + +class _AsyncUtil: + """Asyncio util for test suite/ util only""" + + def __init__(self) -> None: + if have_greenlet: + self.runner = _Runner() + + def run( + self, + fn: 
Callable[..., Coroutine[Any, Any, _T]], + *args: Any, + **kwargs: Any, + ) -> _T: + """Run coroutine on the loop""" + return self.runner.run(fn(*args, **kwargs)) + + def run_in_greenlet( + self, fn: Callable[..., _T], *args: Any, **kwargs: Any + ) -> _T: + """Run sync function in greenlet. Support nested calls""" + if have_greenlet: + if self.runner.get_loop().is_running(): + return fn(*args, **kwargs) + else: + return self.runner.run(greenlet_spawn(fn, *args, **kwargs)) + else: + return fn(*args, **kwargs) + + def close(self) -> None: + if have_greenlet: + self.runner.close() + if not typing.TYPE_CHECKING and not have_greenlet: From 8a3ba1163cdbe1e5fffd6b62b00d61925b8be200 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 23 Mar 2024 16:59:40 -0400 Subject: [PATCH 167/544] typing fix the most recent greenlet/asyncio commit introduced a typing error that only comes up under python3.10 due to the non-presence of asyncio.Runner in that version. do an intricate dance here along with another python-version-specific thing observed around the greenlet import. Change-Id: I1b220ab8ea633cdf43ad7e8abe826f758858b62a --- lib/sqlalchemy/util/_concurrency_py3k.py | 4 ++-- lib/sqlalchemy/util/concurrency.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/util/_concurrency_py3k.py b/lib/sqlalchemy/util/_concurrency_py3k.py index defef1f6bf3..5717d970617 100644 --- a/lib/sqlalchemy/util/_concurrency_py3k.py +++ b/lib/sqlalchemy/util/_concurrency_py3k.py @@ -243,11 +243,11 @@ def get_event_loop() -> asyncio.AbstractEventLoop: return asyncio.get_event_loop_policy().get_event_loop() -if py311: +if not TYPE_CHECKING and py311: _Runner = asyncio.Runner else: - class _Runner: # type: ignore[no-redef] + class _Runner: """Runner implementation for test only""" _loop: Union[None, asyncio.AbstractEventLoop, Literal[False]] diff --git a/lib/sqlalchemy/util/concurrency.py b/lib/sqlalchemy/util/concurrency.py index 96ba416cabf..de6195de8f1 100644 --- a/lib/sqlalchemy/util/concurrency.py +++ b/lib/sqlalchemy/util/concurrency.py @@ -18,7 +18,7 @@ have_greenlet = False greenlet_error = None try: - import greenlet # type: ignore # noqa: F401 + import greenlet # type: ignore[import-untyped,unused-ignore] # noqa: F401,E501 except ImportError as e: greenlet_error = str(e) pass From b304ef2808ba30ce9f7f250830a670be7f3058f5 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 23 Mar 2024 17:46:25 -0400 Subject: [PATCH 168/544] - 2.0.29 --- doc/build/changelog/changelog_20.rst | 129 +++++++++++++++++++- doc/build/changelog/unreleased_20/10611.rst | 8 -- doc/build/changelog/unreleased_20/10693.rst | 9 -- doc/build/changelog/unreleased_20/11055.rst | 8 -- doc/build/changelog/unreleased_20/11091.rst | 13 -- doc/build/changelog/unreleased_20/11130.rst | 9 -- doc/build/changelog/unreleased_20/11157.rst | 11 -- doc/build/changelog/unreleased_20/11160.rst | 26 ---- doc/build/changelog/unreleased_20/11173.rst | 9 -- doc/build/changelog/unreleased_20/11176.rst | 12 -- doc/build/changelog/unreleased_20/11187.rst | 12 -- doc/build/conf.py | 4 +- 12 files changed, 130 insertions(+), 120 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/10611.rst delete mode 100644 doc/build/changelog/unreleased_20/10693.rst delete mode 100644 doc/build/changelog/unreleased_20/11055.rst delete mode 100644 doc/build/changelog/unreleased_20/11091.rst delete mode 100644 doc/build/changelog/unreleased_20/11130.rst delete mode 100644 doc/build/changelog/unreleased_20/11157.rst delete mode 100644 
doc/build/changelog/unreleased_20/11160.rst delete mode 100644 doc/build/changelog/unreleased_20/11173.rst delete mode 100644 doc/build/changelog/unreleased_20/11176.rst delete mode 100644 doc/build/changelog/unreleased_20/11187.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 8dc3bb9c762..b1617fe844a 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,134 @@ .. changelog:: :version: 2.0.29 - :include_notes_from: unreleased_20 + :released: March 23, 2024 + + .. change:: + :tags: bug, orm + :tickets: 10611 + + Fixed Declarative issue where typing a relationship using + :class:`_orm.Relationship` rather than :class:`_orm.Mapped` would + inadvertently pull in the "dynamic" relationship loader strategy for that + attribute. + + .. change:: + :tags: postgresql, usecase + :tickets: 10693 + + The PostgreSQL dialect now returns :class:`_postgresql.DOMAIN` instances + when reflecting a column that has a domain as type. Previously, the domain + data type was returned instead. As part of this change, the domain + reflection was improved to also return the collation of the text types. + Pull request courtesy of Thomas Stephenson. + + .. change:: + :tags: bug, typing + :tickets: 11055 + + Fixed typing issue allowing asyncio ``run_sync()`` methods to correctly + type the parameters according to the callable that was passed, making use + of :pep:`612` ``ParamSpec`` variables. Pull request courtesy Francisco R. + Del Roio. + + .. change:: + :tags: bug, orm + :tickets: 11091 + + Fixed issue in ORM annotated declarative where using + :func:`_orm.mapped_column()` with an :paramref:`_orm.mapped_column.index` + or :paramref:`_orm.mapped_column.unique` setting of False would be + overridden by an incoming ``Annotated`` element that featured that + parameter set to ``True``, even though the immediate + :func:`_orm.mapped_column()` element is more specific and should take + precedence. The logic to reconcile the booleans has been enhanced to + accommodate a local value of ``False`` as still taking precedence over an + incoming ``True`` value from the annotated element. + + .. change:: + :tags: usecase, orm + :tickets: 11130 + + Added support for the :pep:`695` ``TypeAliasType`` construct as well as the + python 3.12 native ``type`` keyword to work with ORM Annotated Declarative + form when using these constructs to link to a :pep:`593` ``Annotated`` + container, allowing the resolution of the ``Annotated`` to proceed when + these constructs are used in a :class:`_orm.Mapped` typing container. + + .. change:: + :tags: bug, engine + :tickets: 11157 + + Fixed issue in :ref:`engine_insertmanyvalues` feature where using a primary + key column with an "inline execute" default generator such as an explicit + :class:`.Sequence` with an explcit schema name, while at the same time + using the + :paramref:`_engine.Connection.execution_options.schema_translate_map` + feature would fail to render the sequence or the parameters properly, + leading to errors. + + .. change:: + :tags: bug, engine + :tickets: 11160 + + Made a change to the adjustment made in version 2.0.10 for :ticket:`9618`, + which added the behavior of reconciling RETURNING rows from a bulk INSERT + to the parameters that were passed to it. 
This behavior included a + comparison of already-DB-converted bound parameter values against returned + row values that was not always "symmetrical" for SQL column types such as + UUIDs, depending on specifics of how different DBAPIs receive such values + versus how they return them, necessitating the need for additional + "sentinel value resolver" methods on these column types. Unfortunately + this broke third party column types such as UUID/GUID types in libraries + like SQLModel which did not implement this special method, raising an error + "Can't match sentinel values in result set to parameter sets". Rather than + attempt to further explain and document this implementation detail of the + "insertmanyvalues" feature including a public version of the new + method, the approach is intead revised to no longer need this extra + conversion step, and the logic that does the comparison now works on the + pre-converted bound parameter value compared to the post-result-processed + value, which should always be of a matching datatype. In the unusual case + that a custom SQL column type that also happens to be used in a "sentinel" + column for bulk INSERT is not receiving and returning the same value type, + the "Can't match" error will be raised, however the mitigation is + straightforward in that the same Python datatype should be passed as that + returned. + + .. change:: + :tags: bug, orm, regression + :tickets: 11173 + + Fixed regression from version 2.0.28 caused by the fix for :ticket:`11085` + where the newer method of adjusting post-cache bound parameter values would + interefere with the implementation for the :func:`_orm.subqueryload` loader + option, which has some more legacy patterns in use internally, when + the additional loader criteria feature were used with this loader option. + + .. change:: + :tags: bug, sql, regression + :tickets: 11176 + + Fixed regression from the 1.4 series where the refactor of the + :meth:`_types.TypeEngine.with_variant` method introduced at + :ref:`change_6980` failed to accommodate for the ``.copy()`` method, which + will lose the variant mappings that are set up. This becomes an issue for + the very specific case of a "schema" type, which includes types such as + :class:`.Enum` and :class:`.ARRAY`, when they are then used in the context + of an ORM Declarative mapping with mixins where copying of types comes into + play. The variant mapping is now copied as well. + + .. change:: + :tags: bug, tests + :tickets: 11187 + + Backported to SQLAlchemy 2.0 an improvement to the test suite with regards + to how asyncio related tests are run, now using the newer Python 3.11 + ``asyncio.Runner`` or a backported equivalent, rather than relying on the + previous implementation based on ``asyncio.get_running_loop()``. This + should hopefully prevent issues with large suite runs on CPU loaded + hardware where the event loop seems to become corrupted, leading to + cascading failures. + .. changelog:: :version: 2.0.28 diff --git a/doc/build/changelog/unreleased_20/10611.rst b/doc/build/changelog/unreleased_20/10611.rst deleted file mode 100644 index 2627e4d37c8..00000000000 --- a/doc/build/changelog/unreleased_20/10611.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10611 - - Fixed Declarative issue where typing a relationship using - :class:`_orm.Relationship` rather than :class:`_orm.Mapped` would - inadvertently pull in the "dynamic" relationship loader strategy for that - attribute. 
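As an illustrative sketch of the mapping pattern the 10611 entry above refers to (the ``User`` and ``Address`` names and table setup are hypothetical, not part of the patch)::

    from typing import List

    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship


    class Base(DeclarativeBase):
        pass


    class Address(Base):
        __tablename__ = "address"

        id: Mapped[int] = mapped_column(primary_key=True)
        user_id: Mapped[int] = mapped_column(ForeignKey("user_account.id"))


    class User(Base):
        __tablename__ = "user_account"

        id: Mapped[int] = mapped_column(primary_key=True)

        # typing the attribute with Mapped[...] uses the default loader strategy
        addresses: Mapped[List[Address]] = relationship()

        # the issue: typing it with Relationship[...] instead of Mapped[...]
        # would inadvertently select the "dynamic" loader strategy
        # addresses: Relationship[List[Address]] = relationship()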
diff --git a/doc/build/changelog/unreleased_20/10693.rst b/doc/build/changelog/unreleased_20/10693.rst deleted file mode 100644 index 914703f3cff..00000000000 --- a/doc/build/changelog/unreleased_20/10693.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: postgresql, usecase - :tickets: 10693 - - The PostgreSQL dialect now returns :class:`_postgresql.DOMAIN` instances - when reflecting a column that has a domain as type. Previously, the domain - data type was returned instead. As part of this change, the domain - reflection was improved to also return the collation of the text types. - Pull request courtesy of Thomas Stephenson. diff --git a/doc/build/changelog/unreleased_20/11055.rst b/doc/build/changelog/unreleased_20/11055.rst deleted file mode 100644 index 8784d7aec11..00000000000 --- a/doc/build/changelog/unreleased_20/11055.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, typing - :tickets: 11055 - - Fixed typing issue allowing asyncio ``run_sync()`` methods to correctly - type the parameters according to the callable that was passed, making use - of :pep:`612` ``ParamSpec`` variables. Pull request courtesy Francisco R. - Del Roio. diff --git a/doc/build/changelog/unreleased_20/11091.rst b/doc/build/changelog/unreleased_20/11091.rst deleted file mode 100644 index 30f2fbcd355..00000000000 --- a/doc/build/changelog/unreleased_20/11091.rst +++ /dev/null @@ -1,13 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11091 - - Fixed issue in ORM annotated declarative where using - :func:`_orm.mapped_column()` with an :paramref:`_orm.mapped_column.index` - or :paramref:`_orm.mapped_column.unique` setting of False would be - overridden by an incoming ``Annotated`` element that featured that - parameter set to ``True``, even though the immediate - :func:`_orm.mapped_column()` element is more specific and should take - precedence. The logic to reconcile the booleans has been enhanced to - accommodate a local value of ``False`` as still taking precedence over an - incoming ``True`` value from the annotated element. diff --git a/doc/build/changelog/unreleased_20/11130.rst b/doc/build/changelog/unreleased_20/11130.rst deleted file mode 100644 index 80fbe08dd2b..00000000000 --- a/doc/build/changelog/unreleased_20/11130.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: usecase, orm - :tickets: 11130 - - Added support for the :pep:`695` ``TypeAliasType`` construct as well as the - python 3.12 native ``type`` keyword to work with ORM Annotated Declarative - form when using these constructs to link to a :pep:`593` ``Annotated`` - container, allowing the resolution of the ``Annotated`` to proceed when - these constructs are used in a :class:`_orm.Mapped` typing container. diff --git a/doc/build/changelog/unreleased_20/11157.rst b/doc/build/changelog/unreleased_20/11157.rst deleted file mode 100644 index 8f1e85c348d..00000000000 --- a/doc/build/changelog/unreleased_20/11157.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: bug, engine - :tickets: 11157 - - Fixed issue in :ref:`engine_insertmanyvalues` feature where using a primary - key column with an "inline execute" default generator such as an explicit - :class:`.Sequence` with an explcit schema name, while at the same time - using the - :paramref:`_engine.Connection.execution_options.schema_translate_map` - feature would fail to render the sequence or the parameters properly, - leading to errors. 
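As an illustrative sketch of the configuration described in the 11157 entry above (the table, sequence and schema names as well as the database URL are placeholders)::

    from sqlalchemy import (
        Column,
        Integer,
        MetaData,
        Sequence,
        String,
        Table,
        create_engine,
    )

    metadata = MetaData()

    data = Table(
        "data",
        metadata,
        Column(
            "id",
            Integer,
            Sequence("data_id_seq", schema="alt_schema"),
            primary_key=True,
        ),
        Column("value", String(50)),
    )

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    with engine.connect() as conn:
        conn = conn.execution_options(
            schema_translate_map={"alt_schema": "other_schema"}
        )
        # executemany with RETURNING makes use of "insertmanyvalues"; the
        # schema-qualified sequence is now rendered correctly under the
        # schema_translate_map option
        result = conn.execute(
            data.insert().returning(data.c.id),
            [{"value": "one"}, {"value": "two"}],
        )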
diff --git a/doc/build/changelog/unreleased_20/11160.rst b/doc/build/changelog/unreleased_20/11160.rst deleted file mode 100644 index 1c8ae3a2a74..00000000000 --- a/doc/build/changelog/unreleased_20/11160.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. change:: - :tags: bug, engine - :tickets: 11160 - - Made a change to the adjustment made in version 2.0.10 for :ticket:`9618`, - which added the behavior of reconciling RETURNING rows from a bulk INSERT - to the parameters that were passed to it. This behavior included a - comparison of already-DB-converted bound parameter values against returned - row values that was not always "symmetrical" for SQL column types such as - UUIDs, depending on specifics of how different DBAPIs receive such values - versus how they return them, necessitating the need for additional - "sentinel value resolver" methods on these column types. Unfortunately - this broke third party column types such as UUID/GUID types in libraries - like SQLModel which did not implement this special method, raising an error - "Can't match sentinel values in result set to parameter sets". Rather than - attempt to further explain and document this implementation detail of the - "insertmanyvalues" feature including a public version of the new - method, the approach is intead revised to no longer need this extra - conversion step, and the logic that does the comparison now works on the - pre-converted bound parameter value compared to the post-result-processed - value, which should always be of a matching datatype. In the unusual case - that a custom SQL column type that also happens to be used in a "sentinel" - column for bulk INSERT is not receiving and returning the same value type, - the "Can't match" error will be raised, however the mitigation is - straightforward in that the same Python datatype should be passed as that - returned. diff --git a/doc/build/changelog/unreleased_20/11173.rst b/doc/build/changelog/unreleased_20/11173.rst deleted file mode 100644 index 900c6149d25..00000000000 --- a/doc/build/changelog/unreleased_20/11173.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm, regression - :tickets: 11173 - - Fixed regression from version 2.0.28 caused by the fix for :ticket:`11085` - where the newer method of adjusting post-cache bound parameter values would - interefere with the implementation for the :func:`_orm.subqueryload` loader - option, which has some more legacy patterns in use internally, when - the additional loader criteria feature were used with this loader option. diff --git a/doc/build/changelog/unreleased_20/11176.rst b/doc/build/changelog/unreleased_20/11176.rst deleted file mode 100644 index be9b1ecf770..00000000000 --- a/doc/build/changelog/unreleased_20/11176.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, sql, regression - :tickets: 11176 - - Fixed regression from the 1.4 series where the refactor of the - :meth:`_types.TypeEngine.with_variant` method introduced at - :ref:`change_6980` failed to accommodate for the ``.copy()`` method, which - will lose the variant mappings that are set up. This becomes an issue for - the very specific case of a "schema" type, which includes types such as - :class:`.Enum` and :class:`.ARRAY`, when they are then used in the context - of an ORM Declarative mapping with mixins where copying of types comes into - play. The variant mapping is now copied as well. 
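As an illustrative sketch of the kind of mapping affected by the 11176 entry above: a schema type such as :class:`.Enum` combined with :meth:`_types.TypeEngine.with_variant`, declared on a mixin so that the column and its type are copied onto each mapped subclass (all names are hypothetical)::

    from sqlalchemy import Enum, String
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    status_type = Enum("new", "done", name="status_enum").with_variant(
        String(20), "sqlite"
    )


    class Base(DeclarativeBase):
        pass


    class HasStatus:
        # declarative mixin columns are copied via .copy() onto each mapped
        # subclass; the variant mapping is now carried along as well
        status: Mapped[str] = mapped_column(status_type)


    class Job(HasStatus, Base):
        __tablename__ = "job"

        id: Mapped[int] = mapped_column(primary_key=True)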
diff --git a/doc/build/changelog/unreleased_20/11187.rst b/doc/build/changelog/unreleased_20/11187.rst deleted file mode 100644 index be16ef301e5..00000000000 --- a/doc/build/changelog/unreleased_20/11187.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, tests - :tickets: 11187 - - Backported to SQLAlchemy 2.0 an improvement to the test suite with regards - to how asyncio related tests are run, now using the newer Python 3.11 - ``asyncio.Runner`` or a backported equivalent, rather than relying on the - previous implementation based on ``asyncio.get_running_loop()``. This - should hopefully prevent issues with large suite runs on CPU loaded - hardware where the event loop seems to become corrupted, leading to - cascading failures. - diff --git a/doc/build/conf.py b/doc/build/conf.py index d91e3a31e38..db6ee5c8f9c 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.28" +release = "2.0.29" -release_date = "March 4, 2024" +release_date = "March 23, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From cc257e77ac091f4886d13a748d37b5c00c54ace9 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 23 Mar 2024 17:53:46 -0400 Subject: [PATCH 169/544] Version 2.0.30 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index b1617fe844a..7678463b438 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.30 + :include_notes_from: unreleased_20 + .. changelog:: :version: 2.0.29 :released: March 23, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 9e983d07fca..1d5bfd5d3d1 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.29" +__version__ = "2.0.30" def __go(lcls: Any) -> None: From bb43af053dc1a4edc11a9c5d245e7f68c2dfa6a2 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 24 Mar 2024 11:43:33 -0400 Subject: [PATCH 170/544] remove restore_asyncio hack we have backported the asnycio.Runner from 2.1 so we dont need this hack anymore; it fails if greenlet is not installed Change-Id: I9e2ce09f9987ba5a4a78881475d85b9fa24e5bb4 --- test/base/test_tutorials.py | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/test/base/test_tutorials.py b/test/base/test_tutorials.py index 7543b1c100c..d86322e12ee 100644 --- a/test/base/test_tutorials.py +++ b/test/base/test_tutorials.py @@ -1,6 +1,5 @@ from __future__ import annotations -import asyncio import doctest import logging import os @@ -178,19 +177,8 @@ def test_orm_queryguide_columns(self): def test_orm_quickstart(self): self._run_doctest("orm/quickstart.rst") - @config.fixture(scope="class") - def restore_asyncio(self): - # NOTE: this is required since test_asyncio will remove the global - # loop. 
2.1 uses runners that don't require this hack - yield - ep = asyncio.get_event_loop_policy() - try: - ep.get_event_loop() - except RuntimeError: - ep.set_event_loop(ep.new_event_loop()) - @requires.greenlet - def test_asyncio(self, restore_asyncio): + def test_asyncio(self): try: make_url("sqlite+aiosqlite://").get_dialect().import_dbapi() except ImportError: From fb26ede122daa1e4b856d46389b84b1914434e1b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 28 Mar 2024 09:59:44 -0400 Subject: [PATCH 171/544] use a full ConnectionCharacteristic for logging_token Fixed issue in the :paramref:`_engine.Connection.execution_options.logging_token` option, where changing the value of ``logging_token`` on a connection that has already logged messages would not be updated to reflect the new logging token. This in particular prevented the use of :meth:`_orm.Session.connection` to change the option on the connection, since the BEGIN logging message would already have been emitted. Fixes: #11210 Change-Id: I0ddade3778215259a6eacde3a67e09d30bc3257b (cherry picked from commit a124a593c86325389a92903d2b61f40c34f6d6e2) --- doc/build/changelog/unreleased_20/11210.rst | 11 +++ lib/sqlalchemy/engine/base.py | 10 +-- lib/sqlalchemy/engine/characteristics.py | 78 ++++++++++++++++++++- lib/sqlalchemy/engine/default.py | 13 ++-- test/engine/test_logging.py | 37 ++++++++++ 5 files changed, 136 insertions(+), 13 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11210.rst diff --git a/doc/build/changelog/unreleased_20/11210.rst b/doc/build/changelog/unreleased_20/11210.rst new file mode 100644 index 00000000000..088f07d61ba --- /dev/null +++ b/doc/build/changelog/unreleased_20/11210.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, engine + :tickets: 11210 + + Fixed issue in the + :paramref:`_engine.Connection.execution_options.logging_token` option, + where changing the value of ``logging_token`` on a connection that has + already logged messages would not be updated to reflect the new logging + token. This in particular prevented the use of + :meth:`_orm.Session.connection` to change the option on the connection, + since the BEGIN logging message would already have been emitted. 
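As an illustrative sketch of the behavior addressed above, mirroring the new ``test_logging_token_option_connection_updates`` test further down in this patch (token names are placeholders)::

    from sqlalchemy import create_engine

    engine = create_engine("sqlite://", echo=True)

    with engine.connect() as conn:
        conn.execution_options(logging_token="step_one")
        conn.exec_driver_sql("select 1")  # logged with a "[step_one]" prefix

        # previously, updating the token on a connection that had already
        # logged messages had no effect; the new value is now picked up
        conn.execution_options(logging_token="step_two")
        conn.exec_driver_sql("select 1")  # logged with a "[step_two]" prefix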
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 403ec452b9a..83d1cc1317a 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -174,13 +174,9 @@ def __init__( if self._has_events or self.engine._has_events: self.dispatch.engine_connect(self) - @util.memoized_property - def _message_formatter(self) -> Any: - if "logging_token" in self._execution_options: - token = self._execution_options["logging_token"] - return lambda msg: "[%s] %s" % (token, msg) - else: - return None + # this can be assigned differently via + # characteristics.LoggingTokenCharacteristic + _message_formatter: Any = None def _log_info(self, message: str, *arg: Any, **kw: Any) -> None: fmt = self._message_formatter diff --git a/lib/sqlalchemy/engine/characteristics.py b/lib/sqlalchemy/engine/characteristics.py index 7dd3a2f31e3..97b17fbdfb6 100644 --- a/lib/sqlalchemy/engine/characteristics.py +++ b/lib/sqlalchemy/engine/characteristics.py @@ -12,6 +12,7 @@ from typing import ClassVar if typing.TYPE_CHECKING: + from .base import Connection from .interfaces import DBAPIConnection from .interfaces import Dialect @@ -44,13 +45,30 @@ class ConnectionCharacteristic(abc.ABC): def reset_characteristic( self, dialect: Dialect, dbapi_conn: DBAPIConnection ) -> None: - """Reset the characteristic on the connection to its default value.""" + """Reset the characteristic on the DBAPI connection to its default + value.""" @abc.abstractmethod def set_characteristic( self, dialect: Dialect, dbapi_conn: DBAPIConnection, value: Any ) -> None: - """set characteristic on the connection to a given value.""" + """set characteristic on the DBAPI connection to a given value.""" + + def set_connection_characteristic( + self, + dialect: Dialect, + conn: Connection, + dbapi_conn: DBAPIConnection, + value: Any, + ) -> None: + """set characteristic on the :class:`_engine.Connection` to a given + value. + + .. versionadded:: 2.0.30 - added to support elements that are local + to the :class:`_engine.Connection` itself. + + """ + self.set_characteristic(dialect, dbapi_conn, value) @abc.abstractmethod def get_characteristic( @@ -61,8 +79,22 @@ def get_characteristic( """ + def get_connection_characteristic( + self, dialect: Dialect, conn: Connection, dbapi_conn: DBAPIConnection + ) -> Any: + """Given a :class:`_engine.Connection`, get the current value of the + characteristic. + + .. versionadded:: 2.0.30 - added to support elements that are local + to the :class:`_engine.Connection` itself. + + """ + return self.get_characteristic(dialect, dbapi_conn) + class IsolationLevelCharacteristic(ConnectionCharacteristic): + """Manage the isolation level on a DBAPI connection""" + transactional: ClassVar[bool] = True def reset_characteristic( @@ -79,3 +111,45 @@ def get_characteristic( self, dialect: Dialect, dbapi_conn: DBAPIConnection ) -> Any: return dialect.get_isolation_level(dbapi_conn) + + +class LoggingTokenCharacteristic(ConnectionCharacteristic): + """Manage the 'logging_token' option of a :class:`_engine.Connection`. + + .. 
versionadded:: 2.0.30 + + """ + + transactional: ClassVar[bool] = False + + def reset_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection + ) -> None: + pass + + def set_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection, value: Any + ) -> None: + raise NotImplementedError() + + def set_connection_characteristic( + self, + dialect: Dialect, + conn: Connection, + dbapi_conn: DBAPIConnection, + value: Any, + ) -> None: + if value: + conn._message_formatter = lambda msg: "[%s] %s" % (value, msg) + else: + del conn._message_formatter + + def get_characteristic( + self, dialect: Dialect, dbapi_conn: DBAPIConnection + ) -> Any: + raise NotImplementedError() + + def get_connection_characteristic( + self, dialect: Dialect, conn: Connection, dbapi_conn: DBAPIConnection + ) -> Any: + return conn._execution_options.get("logging_token", None) diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 90cafe4f4ba..b8eacc032ed 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -168,7 +168,10 @@ class DefaultDialect(Dialect): tuple_in_values = False connection_characteristics = util.immutabledict( - {"isolation_level": characteristics.IsolationLevelCharacteristic()} + { + "isolation_level": characteristics.IsolationLevelCharacteristic(), + "logging_token": characteristics.LoggingTokenCharacteristic(), + } ) engine_config_types: Mapping[str, Any] = util.immutabledict( @@ -660,7 +663,7 @@ def _set_connection_characteristics(self, connection, characteristics): if connection.in_transaction(): trans_objs = [ (name, obj) - for name, obj, value in characteristic_values + for name, obj, _ in characteristic_values if obj.transactional ] if trans_objs: @@ -673,8 +676,10 @@ def _set_connection_characteristics(self, connection, characteristics): ) dbapi_connection = connection.connection.dbapi_connection - for name, characteristic, value in characteristic_values: - characteristic.set_characteristic(self, dbapi_connection, value) + for _, characteristic, value in characteristic_values: + characteristic.set_connection_characteristic( + self, connection, dbapi_connection, value + ) connection.connection._connection_record.finalize_callback.append( functools.partial(self._reset_characteristics, characteristics) ) diff --git a/test/engine/test_logging.py b/test/engine/test_logging.py index a498ec85c83..119d5533201 100644 --- a/test/engine/test_logging.py +++ b/test/engine/test_logging.py @@ -990,6 +990,43 @@ def test_logging_token_option_connection(self, token_engine): c2.close() c3.close() + def test_logging_token_option_connection_updates(self, token_engine): + """test #11210""" + + eng = token_engine + + c1 = eng.connect().execution_options(logging_token="my_name_1") + + self._assert_token_in_execute(c1, "my_name_1") + + c1.execution_options(logging_token="my_name_2") + + self._assert_token_in_execute(c1, "my_name_2") + + c1.execution_options(logging_token=None) + + self._assert_no_tokens_in_execute(c1) + + c1.close() + + def test_logging_token_option_not_transactional(self, token_engine): + """test #11210""" + + eng = token_engine + + c1 = eng.connect() + + with c1.begin(): + self._assert_no_tokens_in_execute(c1) + + c1.execution_options(logging_token="my_name_1") + + self._assert_token_in_execute(c1, "my_name_1") + + self._assert_token_in_execute(c1, "my_name_1") + + c1.close() + def test_logging_token_option_engine(self, token_engine): eng = token_engine From 1a1ef60885ed948cdb3e2d84c97a007e7988fe6b Mon Sep 17 
00:00:00 2001 From: Mike Bayer Date: Mon, 1 Apr 2024 19:28:01 -0400 Subject: [PATCH 172/544] new flake8-builtins adds a code we dont want A005 "he module is shadowing a Python builtin module " Change-Id: I9c7464e8f0c32df76d4c455e502b8bc7f94aa038 (cherry picked from commit c3f8bd1c27fd5e376e88533542aa6fd669c58067) --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 45b6c47914c..c977ae7a986 100644 --- a/setup.cfg +++ b/setup.cfg @@ -105,7 +105,7 @@ enable-extensions = G # E203 is due to https://github.com/PyCQA/pycodestyle/issues/373 ignore = - A003, + A003,A005 D, E203,E305,E701,E704,E711,E712,E721,E722,E741, N801,N802,N806, From 7564f819d9bde2eb3276f99c411dad85559e1de7 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 1 Apr 2024 17:54:22 -0400 Subject: [PATCH 173/544] set up is_from_statement and others for FromStatement Added new attribute :attr:`_orm.ORMExecuteState.is_from_statement`, to detect statements of the form ``select().from_statement()``, and also enhanced ``FromStatement`` to set :attr:`_orm.ORMExecuteState.is_select`, :attr:`_orm.ORMExecuteState.is_insert`, :attr:`_orm.ORMExecuteState.is_update`, and :attr:`_orm.ORMExecuteState.is_delete` according to the element that is sent to the :meth:`_sql.Select.from_statement` method itself. Fixes: #11220 Change-Id: I3bf9e7e22fa2955d772b3b6ad636ed93a60916ae (cherry picked from commit d3222a31b8df97a454b37a32881dd484a06e5742) --- doc/build/changelog/unreleased_20/11220.rst | 11 ++ lib/sqlalchemy/orm/context.py | 6 + lib/sqlalchemy/orm/session.py | 53 +++++++- lib/sqlalchemy/sql/base.py | 1 + test/orm/test_events.py | 133 ++++++++++++++++---- 5 files changed, 175 insertions(+), 29 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11220.rst diff --git a/doc/build/changelog/unreleased_20/11220.rst b/doc/build/changelog/unreleased_20/11220.rst new file mode 100644 index 00000000000..4f04cbf23da --- /dev/null +++ b/doc/build/changelog/unreleased_20/11220.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, orm + :tickets: 11220 + + Added new attribute :attr:`_orm.ORMExecuteState.is_from_statement`, to + detect statements of the form ``select().from_statement()``, and also + enhanced ``FromStatement`` to set :attr:`_orm.ORMExecuteState.is_select`, + :attr:`_orm.ORMExecuteState.is_insert`, + :attr:`_orm.ORMExecuteState.is_update`, and + :attr:`_orm.ORMExecuteState.is_delete` according to the element that is + sent to the :meth:`_sql.Select.from_statement` method itself. 
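As an illustrative sketch of how the new attribute can be consulted from a :meth:`_orm.SessionEvents.do_orm_execute` hook (the ``User`` entity referenced in the comment is hypothetical)::

    from sqlalchemy import event
    from sqlalchemy.orm import Session

    session = Session()


    @event.listens_for(session, "do_orm_execute")
    def _check_statement_flags(ctx):
        if ctx.is_from_statement:
            # e.g. for select(User).from_statement(
            #     insert(User).values(name="x").returning(User)
            # ), is_insert is True and is_select is False
            print(ctx.is_select, ctx.is_insert, ctx.is_update, ctx.is_delete)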
diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index 3056016e729..fcd01e65916 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -888,6 +888,8 @@ class FromStatement(GroupedElement, Generative, TypedReturnsRows[_TP]): ("_compile_options", InternalTraversal.dp_has_cache_key) ] + is_from_statement = True + def __init__( self, entities: Iterable[_ColumnsClauseArgument[Any]], @@ -905,6 +907,10 @@ def __init__( ] self.element = element self.is_dml = element.is_dml + self.is_select = element.is_select + self.is_delete = element.is_delete + self.is_insert = element.is_insert + self.is_update = element.is_update self._label_style = ( element._label_style if is_select_base(element) else None ) diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index 3eba5aaf411..acc6895e86f 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -575,22 +575,67 @@ def is_executemany(self) -> bool: @property def is_select(self) -> bool: - """return True if this is a SELECT operation.""" + """return True if this is a SELECT operation. + + .. versionchanged:: 2.0.30 - the attribute is also True for a + :meth:`_sql.Select.from_statement` construct that is itself against + a :class:`_sql.Select` construct, such as + ``select(Entity).from_statement(select(..))`` + + """ return self.statement.is_select + @property + def is_from_statement(self) -> bool: + """return True if this operation is a + :meth:`_sql.Select.from_statement` operation. + + This is independent from :attr:`_orm.ORMExecuteState.is_select`, as a + ``select().from_statement()`` construct can be used with + INSERT/UPDATE/DELETE RETURNING types of statements as well. + :attr:`_orm.ORMExecuteState.is_select` will only be set if the + :meth:`_sql.Select.from_statement` is itself against a + :class:`_sql.Select` construct. + + .. versionadded:: 2.0.30 + + """ + return self.statement.is_from_statement + @property def is_insert(self) -> bool: - """return True if this is an INSERT operation.""" + """return True if this is an INSERT operation. + + .. versionchanged:: 2.0.30 - the attribute is also True for a + :meth:`_sql.Select.from_statement` construct that is itself against + a :class:`_sql.Insert` construct, such as + ``select(Entity).from_statement(insert(..))`` + + """ return self.statement.is_dml and self.statement.is_insert @property def is_update(self) -> bool: - """return True if this is an UPDATE operation.""" + """return True if this is an UPDATE operation. + + .. versionchanged:: 2.0.30 - the attribute is also True for a + :meth:`_sql.Select.from_statement` construct that is itself against + a :class:`_sql.Update` construct, such as + ``select(Entity).from_statement(update(..))`` + + """ return self.statement.is_dml and self.statement.is_update @property def is_delete(self) -> bool: - """return True if this is a DELETE operation.""" + """return True if this is a DELETE operation. + + .. 
versionchanged:: 2.0.30 - the attribute is also True for a + :meth:`_sql.Select.from_statement` construct that is itself against + a :class:`_sql.Delete` construct, such as + ``select(Entity).from_statement(delete(..))`` + + """ return self.statement.is_dml and self.statement.is_delete @property diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 5eb32e30dd4..1a65b653ea2 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -1029,6 +1029,7 @@ class Executable(roles.StatementRole): ] is_select = False + is_from_statement = False is_update = False is_insert = False is_text = False diff --git a/test/orm/test_events.py b/test/orm/test_events.py index 3af6aad86aa..5e1672b526b 100644 --- a/test/orm/test_events.py +++ b/test/orm/test_events.py @@ -385,6 +385,8 @@ def do_orm_execute(ctx): bind_mapper=ctx.bind_mapper, all_mappers=ctx.all_mappers, is_select=ctx.is_select, + is_from_statement=ctx.is_from_statement, + is_insert=ctx.is_insert, is_update=ctx.is_update, is_delete=ctx.is_delete, is_orm_statement=ctx.is_orm_statement, @@ -421,6 +423,8 @@ def test_non_orm_statements(self, stmt, is_select): bind_mapper=None, all_mappers=[], is_select=is_select, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=False, @@ -451,6 +455,8 @@ def test_all_mappers_accessor_one(self): bind_mapper=inspect(User), all_mappers=[inspect(User), inspect(Address)], is_select=True, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=True, @@ -475,6 +481,8 @@ def test_all_mappers_accessor_two(self): bind_mapper=None, all_mappers=[], is_select=True, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=False, @@ -501,6 +509,8 @@ def test_all_mappers_accessor_three(self): bind_mapper=inspect(User), all_mappers=[inspect(User)], # Address not in results is_select=True, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=True, @@ -531,6 +541,8 @@ def test_select_flags(self): bind_mapper=inspect(User), all_mappers=[inspect(User)], is_select=True, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=True, @@ -542,6 +554,54 @@ def test_select_flags(self): bind_mapper=inspect(User), all_mappers=[inspect(User)], is_select=True, + is_from_statement=False, + is_insert=False, + is_update=False, + is_delete=False, + is_orm_statement=True, + is_relationship_load=False, + is_column_load=True, + lazy_loaded_from=None, + ), + ], + ) + + def test_select_from_statement_flags(self): + User, Address = self.classes("User", "Address") + + sess = Session(testing.db, future=True) + + canary = self._flag_fixture(sess) + + s1 = select(User).filter_by(id=7) + u1 = sess.execute(select(User).from_statement(s1)).scalar_one() + + sess.expire(u1) + + eq_(u1.name, "jack") + + eq_( + canary.mock_calls, + [ + call.options( + bind_mapper=inspect(User), + all_mappers=[inspect(User)], + is_select=True, + is_from_statement=True, + is_insert=False, + is_update=False, + is_delete=False, + is_orm_statement=True, + is_relationship_load=False, + is_column_load=False, + lazy_loaded_from=None, + ), + call.options( + bind_mapper=inspect(User), + all_mappers=[inspect(User)], + is_select=True, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=True, @@ -570,6 +630,8 @@ def test_lazyload_flags(self): bind_mapper=inspect(User), all_mappers=[inspect(User)], 
is_select=True, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=True, @@ -581,6 +643,8 @@ def test_lazyload_flags(self): bind_mapper=inspect(Address), all_mappers=[inspect(Address)], is_select=True, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=True, @@ -611,6 +675,8 @@ def test_selectinload_flags(self): bind_mapper=inspect(User), all_mappers=[inspect(User)], is_select=True, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=True, @@ -622,6 +688,8 @@ def test_selectinload_flags(self): bind_mapper=inspect(Address), all_mappers=[inspect(Address)], is_select=True, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=True, @@ -652,6 +720,8 @@ def test_subqueryload_flags(self): bind_mapper=inspect(User), all_mappers=[inspect(User)], is_select=True, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=True, @@ -663,6 +733,8 @@ def test_subqueryload_flags(self): bind_mapper=inspect(Address), all_mappers=[inspect(Address), inspect(User)], is_select=True, + is_from_statement=False, + is_insert=False, is_update=False, is_delete=False, is_orm_statement=True, @@ -673,24 +745,44 @@ def test_subqueryload_flags(self): ], ) - def test_update_delete_flags(self): + @testing.variation( + "stmt_type", + [ + ("insert", testing.requires.insert_returning), + ("update", testing.requires.update_returning), + ("delete", testing.requires.delete_returning), + ], + ) + @testing.variation("from_stmt", [True, False]) + def test_update_delete_flags(self, stmt_type, from_stmt): User, Address = self.classes("User", "Address") sess = Session(testing.db, future=True) canary = self._flag_fixture(sess) - sess.execute( - delete(User) - .filter_by(id=18) - .execution_options(synchronize_session="evaluate") - ) - sess.execute( - update(User) - .filter_by(id=18) - .values(name="eighteen") - .execution_options(synchronize_session="evaluate") - ) + if stmt_type.delete: + stmt = ( + delete(User) + .filter_by(id=18) + .execution_options(synchronize_session="evaluate") + ) + elif stmt_type.update: + stmt = ( + update(User) + .filter_by(id=18) + .values(name="eighteen") + .execution_options(synchronize_session="evaluate") + ) + elif stmt_type.insert: + stmt = insert(User).values(name="eighteen") + else: + stmt_type.fail() + + if from_stmt: + stmt = select(User).from_statement(stmt.returning(User)) + + sess.execute(stmt) eq_( canary.mock_calls, @@ -699,19 +791,10 @@ def test_update_delete_flags(self): bind_mapper=inspect(User), all_mappers=[inspect(User)], is_select=False, - is_update=False, - is_delete=True, - is_orm_statement=True, - is_relationship_load=False, - is_column_load=False, - lazy_loaded_from=None, - ), - call.options( - bind_mapper=inspect(User), - all_mappers=[inspect(User)], - is_select=False, - is_update=True, - is_delete=False, + is_from_statement=bool(from_stmt), + is_insert=stmt_type.insert, + is_update=stmt_type.update, + is_delete=stmt_type.delete, is_orm_statement=True, is_relationship_load=False, is_column_load=False, From 725d52d1df205635fd0a7cd622e3d19ec8f09b88 Mon Sep 17 00:00:00 2001 From: wouter bolsterlee Date: Thu, 4 Apr 2024 14:15:07 -0400 Subject: [PATCH 174/544] typing: annotate Exists.select() to return Select[bool] MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes: #11231 A query of the form: ``` sql SELECT EXISTS ( SELECT 1 
FROM ... WHERE ... ) ``` … returns a boolean. Closes: #11233 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11233 Pull-request-sha: 1bec1cac731eb42e097948f84ae3d0ef133f8a9a Change-Id: I407a3bd9ed21a180c6c3ff02250aa0a9fbe502d7 (cherry picked from commit ceb9e021cd5df3aa7f3beed2c9564d5f182bf8b6) --- lib/sqlalchemy/sql/selectable.py | 2 +- test/typing/plain_files/sql/common_sql_element.py | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 65978f6646c..5a7663628a3 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -6648,7 +6648,7 @@ def _regroup( assert isinstance(return_value, SelectStatementGrouping) return return_value - def select(self) -> Select[Any]: + def select(self) -> Select[Tuple[bool]]: r"""Return a SELECT of this :class:`_expression.Exists`. e.g.:: diff --git a/test/typing/plain_files/sql/common_sql_element.py b/test/typing/plain_files/sql/common_sql_element.py index bc9faca96b4..fb0add31d81 100644 --- a/test/typing/plain_files/sql/common_sql_element.py +++ b/test/typing/plain_files/sql/common_sql_element.py @@ -98,6 +98,11 @@ def core_expr(email: str) -> SQLColumnExpression[bool]: # EXPECTED_TYPE: Select[Tuple[int]] reveal_type(stmt2) +stmt3 = select(User.id).exists().select() + +# EXPECTED_TYPE: Select[Tuple[bool]] +reveal_type(stmt3) + receives_str_col_expr(User.email) receives_str_col_expr(User.email + "some expr") From 0dc731c331c1324d3264223987eb4ca62575a030 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 4 Apr 2024 20:56:39 +0200 Subject: [PATCH 175/544] Highlite composide mode that's more type checker friently Change-Id: I9c7d79f31ab5e7a7f63aca4ba42c93f346acdefe References: #11232 (cherry picked from commit 585a582db0c3a3271659bd48e13abe42eb67ac13) --- doc/build/changelog/changelog_20.rst | 2 +- doc/build/orm/composites.rst | 39 ++++++++++++++++++---------- 2 files changed, 26 insertions(+), 15 deletions(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 7678463b438..973a480fe23 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -126,7 +126,7 @@ :ref:`change_6980` failed to accommodate for the ``.copy()`` method, which will lose the variant mappings that are set up. This becomes an issue for the very specific case of a "schema" type, which includes types such as - :class:`.Enum` and :class:`.ARRAY`, when they are then used in the context + :class:`.Enum` and :class:`_types.ARRAY`, when they are then used in the context of an ORM Declarative mapping with mixins where copying of types comes into play. The variant mapping is now copied as well. diff --git a/doc/build/orm/composites.rst b/doc/build/orm/composites.rst index b0ddb9ea488..2fc62cbfd01 100644 --- a/doc/build/orm/composites.rst +++ b/doc/build/orm/composites.rst @@ -63,6 +63,12 @@ of the columns to be generated, in this case the names; the def __repr__(self): return f"Vertex(start={self.start}, end={self.end})" +.. tip:: In the example above the columns that represent the composites + (``x1``, ``y1``, etc.) are also accessible on the class but are not + correctly understood by type checkers. + If accessing the single columns is important they can be explicitly declared, + as shown in :ref:`composite_with_typing`. + The above mapping would correspond to a CREATE TABLE statement as: .. 
sourcecode:: pycon+sql @@ -184,12 +190,13 @@ using a :func:`_orm.mapped_column` construct, a :class:`_schema.Column`, or the string name of an existing mapped column. The following examples illustrate an equivalent mapping as that of the main section above. -* Map columns directly, then pass to composite +Map columns directly, then pass to composite +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - Here we pass the existing :func:`_orm.mapped_column` instances to the - :func:`_orm.composite` construct, as in the non-annotated example below - where we also pass the ``Point`` class as the first argument to - :func:`_orm.composite`:: +Here we pass the existing :func:`_orm.mapped_column` instances to the +:func:`_orm.composite` construct, as in the non-annotated example below +where we also pass the ``Point`` class as the first argument to +:func:`_orm.composite`:: from sqlalchemy import Integer from sqlalchemy.orm import mapped_column, composite @@ -207,11 +214,14 @@ illustrate an equivalent mapping as that of the main section above. start = composite(Point, x1, y1) end = composite(Point, x2, y2) -* Map columns directly, pass attribute names to composite +.. _composite_with_typing: + +Map columns directly, pass attribute names to composite +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - We can write the same example above using more annotated forms where we have - the option to pass attribute names to :func:`_orm.composite` instead of - full column constructs:: +We can write the same example above using more annotated forms where we have +the option to pass attribute names to :func:`_orm.composite` instead of +full column constructs:: from sqlalchemy.orm import mapped_column, composite, Mapped @@ -228,12 +238,13 @@ illustrate an equivalent mapping as that of the main section above. start: Mapped[Point] = composite("x1", "y1") end: Mapped[Point] = composite("x2", "y2") -* Imperative mapping and imperative table +Imperative mapping and imperative table +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - When using :ref:`imperative table ` or - fully :ref:`imperative ` mappings, we have access - to :class:`_schema.Column` objects directly. These may be passed to - :func:`_orm.composite` as well, as in the imperative example below:: +When using :ref:`imperative table ` or +fully :ref:`imperative ` mappings, we have access +to :class:`_schema.Column` objects directly. These may be passed to +:func:`_orm.composite` as well, as in the imperative example below:: mapper_registry.map_imperatively( Vertex, From 817f1429021ea6a15a163f2760c3dd13e8d24bec Mon Sep 17 00:00:00 2001 From: Stefan Wojcik <5014112+yawhide@users.noreply.github.com> Date: Mon, 8 Apr 2024 15:23:19 -0400 Subject: [PATCH 176/544] Update links from initd.org to psycopg.org (#11244) (cherry picked from commit ac7d70dea89dfaf8e061bc8dd03a1ed7825069fc) --- doc/build/changelog/migration_12.rst | 2 +- lib/sqlalchemy/dialects/postgresql/psycopg2.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/changelog/migration_12.rst b/doc/build/changelog/migration_12.rst index 454b17f12a5..cd21d087910 100644 --- a/doc/build/changelog/migration_12.rst +++ b/doc/build/changelog/migration_12.rst @@ -1586,7 +1586,7 @@ Support for Batch Mode / Fast Execution Helpers The psycopg2 ``cursor.executemany()`` method has been identified as performing poorly, particularly with INSERT statements. 
To alleviate this, psycopg2 -has added `Fast Execution Helpers `_ +has added `Fast Execution Helpers `_ which rework statements into fewer server round trips by sending multiple DML statements in batch. SQLAlchemy 1.2 now includes support for these helpers to be used transparently whenever the :class:`_engine.Engine` makes use diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index 9bf2e493361..6c492a5b250 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -242,7 +242,7 @@ Modern versions of psycopg2 include a feature known as `Fast Execution Helpers \ -`_, which +`_, which have been shown in benchmarking to improve psycopg2's executemany() performance, primarily with INSERT statements, by at least an order of magnitude. From 7d1ff20b0f4a460085e9eff2a6bcc5d021c625ff Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 10 Apr 2024 10:28:28 -0400 Subject: [PATCH 177/544] improve distinct() docs differentiate more clearly between distinct() and select().distinct(). Change-Id: Id5eae749393e5898ae501b2462ec4c2c54262e2f (cherry picked from commit da639af16f77118bc17bbf5cf78fe41dd1818168) --- lib/sqlalchemy/sql/_elements_constructors.py | 21 +++++++++++++------- lib/sqlalchemy/sql/selectable.py | 21 +++++++++++++++++--- 2 files changed, 32 insertions(+), 10 deletions(-) diff --git a/lib/sqlalchemy/sql/_elements_constructors.py b/lib/sqlalchemy/sql/_elements_constructors.py index 77cc2a8021d..51d8ac39995 100644 --- a/lib/sqlalchemy/sql/_elements_constructors.py +++ b/lib/sqlalchemy/sql/_elements_constructors.py @@ -1090,16 +1090,23 @@ def desc( def distinct(expr: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: """Produce an column-expression-level unary ``DISTINCT`` clause. - This applies the ``DISTINCT`` keyword to an individual column - expression, and is typically contained within an aggregate function, - as in:: + This applies the ``DISTINCT`` keyword to an **individual column + expression** (e.g. not the whole statement), and renders **specifically + in that column position**; this is used for containment within + an aggregate function, as in:: from sqlalchemy import distinct, func - stmt = select(func.count(distinct(users_table.c.name))) + stmt = select(users_table.c.id, func.count(distinct(users_table.c.name))) + + The above would produce an statement resembling:: - The above would produce an expression resembling:: + SELECT user.id, count(DISTINCT user.name) FROM user - SELECT COUNT(DISTINCT name) FROM user + .. tip:: The :func:`_sql.distinct` function does **not** apply DISTINCT + to the full SELECT statement, instead applying a DISTINCT modifier + to **individual column expressions**. For general ``SELECT DISTINCT`` + support, use the + :meth:`_sql.Select.distinct` method on :class:`_sql.Select`. The :func:`.distinct` function is also available as a column-level method, e.g. 
:meth:`_expression.ColumnElement.distinct`, as in:: @@ -1122,7 +1129,7 @@ def distinct(expr: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: :data:`.func` - """ + """ # noqa: E501 return UnaryExpression._create_distinct(expr) diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 65978f6646c..0f19810ccbb 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -5972,11 +5972,26 @@ def having(self, *having: _ColumnExpressionArgument[bool]) -> Self: @_generative def distinct(self, *expr: _ColumnExpressionArgument[Any]) -> Self: r"""Return a new :func:`_expression.select` construct which - will apply DISTINCT to its columns clause. + will apply DISTINCT to the SELECT statement overall. + + E.g.:: + + from sqlalchemy import select + stmt = select(users_table.c.id, users_table.c.name).distinct() + + The above would produce an statement resembling:: + + SELECT DISTINCT user.id, user.name FROM user + + The method also accepts an ``*expr`` parameter which produces the + PostgreSQL dialect-specific ``DISTINCT ON`` expression. Using this + parameter on other backends which don't support this syntax will + raise an error. :param \*expr: optional column expressions. When present, - the PostgreSQL dialect will render a ``DISTINCT ON (>)`` - construct. + the PostgreSQL dialect will render a ``DISTINCT ON ()`` + construct. A deprecation warning and/or :class:`_exc.CompileError` + will be raised on other backends. .. deprecated:: 1.4 Using \*expr in other dialects is deprecated and will raise :class:`_exc.CompileError` in a future version. From ceb465d09e60c95f269aac6f740ef0ecaae368ed Mon Sep 17 00:00:00 2001 From: Francisco Del Roio Date: Fri, 5 Apr 2024 12:05:51 -0400 Subject: [PATCH 178/544] Fix typing issue in `MetaData.reflect()` with asyncio. Fixed typing regression caused by PR :ticket:`11055` in version 2.0.29 that attempted to add ``ParamSpec`` to the asyncio ``run_sync()`` methods, where using :meth:`_asyncio.AsyncConnection.run_sync` with meth:`_schema.MetaData.reflect` would fail on mypy due to a bug. See https://github.com/python/mypy/issues/17093 for details. Pull request courtesy of Francisco R. Del Roio Fixes: #11200 Closes: #11201 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11201 Pull-request-sha: 49e10e0d2a7bbadf471212033e25b7616b99c092 Change-Id: Ie2ebaebd1bc1ee1b865b78561cb6cb8937e85eca (cherry picked from commit 40fc02d93f3f8b4d9ae2f7bf987f5f965a761dd4) --- doc/build/changelog/unreleased_20/11200.rst | 10 ++++++ lib/sqlalchemy/sql/schema.py | 32 +++++++++++++++++++ .../typing/plain_files/ext/asyncio/engines.py | 24 ++++++++++++++ 3 files changed, 66 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11200.rst diff --git a/doc/build/changelog/unreleased_20/11200.rst b/doc/build/changelog/unreleased_20/11200.rst new file mode 100644 index 00000000000..61ab6506b1c --- /dev/null +++ b/doc/build/changelog/unreleased_20/11200.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, typing, regression + :tickets: 11200 + + Fixed typing regression caused by PR :ticket:`11055` in version 2.0.29 that + attempted to add ``ParamSpec`` to the asyncio ``run_sync()`` methods, where + using :meth:`_asyncio.AsyncConnection.run_sync` with + :meth:`_schema.MetaData.reflect` would fail on mypy due to a bug. + See https://github.com/python/mypy/issues/17093 for details. + Pull request courtesy of Francisco R. 
Del Roio diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 6d5f941786a..aa359fdbbd7 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -5616,6 +5616,38 @@ def sorted_tables(self) -> List[Table]: sorted(self.tables.values(), key=lambda t: t.key) # type: ignore ) + # overload needed to work around mypy this mypy + # https://github.com/python/mypy/issues/17093 + @overload + def reflect( + self, + bind: Engine, + schema: Optional[str] = ..., + views: bool = ..., + only: Union[ + _typing_Sequence[str], Callable[[str, MetaData], bool], None + ] = ..., + extend_existing: bool = ..., + autoload_replace: bool = ..., + resolve_fks: bool = ..., + **dialect_kwargs: Any, + ) -> None: ... + + @overload + def reflect( + self, + bind: Connection, + schema: Optional[str] = ..., + views: bool = ..., + only: Union[ + _typing_Sequence[str], Callable[[str, MetaData], bool], None + ] = ..., + extend_existing: bool = ..., + autoload_replace: bool = ..., + resolve_fks: bool = ..., + **dialect_kwargs: Any, + ) -> None: ... + @util.preload_module("sqlalchemy.engine.reflection") def reflect( self, diff --git a/test/typing/plain_files/ext/asyncio/engines.py b/test/typing/plain_files/ext/asyncio/engines.py index 1f7843082a9..7c93466e0bf 100644 --- a/test/typing/plain_files/ext/asyncio/engines.py +++ b/test/typing/plain_files/ext/asyncio/engines.py @@ -1,6 +1,8 @@ from typing import Any from sqlalchemy import Connection +from sqlalchemy import Enum +from sqlalchemy import MetaData from sqlalchemy import select from sqlalchemy import text from sqlalchemy.ext.asyncio import create_async_engine @@ -71,3 +73,25 @@ async def asyncio() -> None: ce.statement cc = select(1).compile(conn) cc.statement + + async with e.connect() as conn: + metadata = MetaData() + + await conn.run_sync(metadata.create_all) + await conn.run_sync(metadata.reflect) + await conn.run_sync(metadata.drop_all) + + # Just to avoid creating new constructs manually: + for _, table in metadata.tables.items(): + await conn.run_sync(table.create) + await conn.run_sync(table.drop) + + # Indexes: + for index in table.indexes: + await conn.run_sync(index.create) + await conn.run_sync(index.drop) + + # Test for enum types: + enum = Enum("a", "b") + await conn.run_sync(enum.create) + await conn.run_sync(enum.drop) From 8f5d0584e4871c014b598d71746c700ba030413b Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 11 Apr 2024 21:24:54 +0200 Subject: [PATCH 179/544] Document how to configure the cursor_factory on psycopg Change-Id: I117a0600c31dde721c99891caaa43937458e78d9 Refereinces: #8978 (cherry picked from commit 497c4a2c22be2e5c2319acf56e11d3037a552064) --- lib/sqlalchemy/dialects/postgresql/psycopg.py | 32 +++++++++++++++++ test/dialect/postgresql/test_dialect.py | 35 +++++++++++++++++++ 2 files changed, 67 insertions(+) diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg.py b/lib/sqlalchemy/dialects/postgresql/psycopg.py index 90177a43ceb..a1ad0fc6821 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg.py @@ -50,6 +50,38 @@ dialect shares most of its behavior with the ``psycopg2`` dialect. Further documentation is available there. +Using a different Cursor class +------------------------------ + +One of the differences between ``psycopg`` and the older ``psycopg2`` +is how bound parameters are handled: ``psycopg2`` would bind them +client side, while ``psycopg`` by default will bind them server side. 
+ +It's possible to configure ``psycopg`` to do client side binding by +specifying the ``cursor_factory`` to be ``ClientCursor`` when creating +the engine:: + + from psycopg import ClientCursor + + client_side_engine = create_engine( + "postgresql+psycopg://...", + connect_args={"cursor_factory": ClientCursor}, + ) + +Similarly when using an async engine the ``AsyncClientCursor`` can be +specified:: + + from psycopg import AsyncClientCursor + + client_side_engine = create_async_engine( + "postgresql+psycopg://...", + connect_args={"cursor_factory": AsyncClientCursor}, + ) + +.. seealso:: + + `Client-side-binding cursors `_ + """ # noqa from __future__ import annotations diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py index 40718ee2dff..eae1b55d6e9 100644 --- a/test/dialect/postgresql/test_dialect.py +++ b/test/dialect/postgresql/test_dialect.py @@ -1376,6 +1376,7 @@ def test_notice_logging(self): conn.exec_driver_sql("SELECT note('another note')") finally: trans.rollback() + conn.close() finally: log.removeHandler(buf) log.setLevel(lev) @@ -1720,3 +1721,37 @@ def test_get_dialect(self): def test_async_version(self): e = create_engine("postgresql+psycopg_async://") is_true(isinstance(e.dialect, psycopg_dialect.PGDialectAsync_psycopg)) + + @testing.skip_if(lambda c: c.db.dialect.is_async) + def test_client_side_cursor(self, testing_engine): + from psycopg import ClientCursor + + engine = testing_engine( + options={"connect_args": {"cursor_factory": ClientCursor}} + ) + + with engine.connect() as c: + res = c.execute(select(1, 2, 3)).one() + eq_(res, (1, 2, 3)) + with c.connection.driver_connection.cursor() as cursor: + is_true(isinstance(cursor, ClientCursor)) + + @config.async_test + @testing.skip_if(lambda c: not c.db.dialect.is_async) + async def test_async_client_side_cursor(self, testing_engine): + from psycopg import AsyncClientCursor + + engine = testing_engine( + options={"connect_args": {"cursor_factory": AsyncClientCursor}}, + asyncio=True, + ) + + async with engine.connect() as c: + res = (await c.execute(select(1, 2, 3))).one() + eq_(res, (1, 2, 3)) + async with ( + await c.get_raw_connection() + ).driver_connection.cursor() as cursor: + is_true(isinstance(cursor, AsyncClientCursor)) + + await engine.dispose() From 354ce373f23f193488d6b73c42fd9b4fa93443a8 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 15 Apr 2024 21:52:38 +0200 Subject: [PATCH 180/544] Fix missing pythonpath in test that use subprocess Ensure the ``PYTHONPATH`` variable is properly initialized when using ``subprocess.run`` in the tests. Fixes: #11268 Change-Id: Ie2db656364931b3be9033dcaaf7a7c56b383ecca (cherry picked from commit b5cf61c504e6ff7cdceeb0ca376eb47a97b9da5a) --- doc/build/changelog/unreleased_20/11268.rst | 6 ++++++ test/sql/test_resultset.py | 8 +++++++- 2 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/11268.rst diff --git a/doc/build/changelog/unreleased_20/11268.rst b/doc/build/changelog/unreleased_20/11268.rst new file mode 100644 index 00000000000..40c1eb7bcca --- /dev/null +++ b/doc/build/changelog/unreleased_20/11268.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, test + :tickets: 11268 + + Ensure the ``PYTHONPATH`` variable is properly initialized when + using ``subprocess.run`` in the tests. 
diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py index 350e9542214..2fd16d46db0 100644 --- a/test/sql/test_resultset.py +++ b/test/sql/test_resultset.py @@ -528,8 +528,14 @@ def test_pickle_rows_other_process(self, connection, use_labels): "import sqlalchemy; import pickle; print([" f"r[0] for r in pickle.load(open('''{name}''', 'rb'))])" ) + parts = list(sys.path) + if os.environ.get("PYTHONPATH"): + parts.append(os.environ["PYTHONPATH"]) + pythonpath = os.pathsep.join(parts) proc = subprocess.run( - [sys.executable, "-c", code], stdout=subprocess.PIPE + [sys.executable, "-c", code], + stdout=subprocess.PIPE, + env={**os.environ, "PYTHONPATH": pythonpath}, ) exp = str([r[0] for r in result]).encode() eq_(proc.returncode, 0) From 31d0ab27519ceae4e1b12212c8a41fe290418ea0 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 17 Apr 2024 22:17:25 +0200 Subject: [PATCH 181/544] Improve docs formatting on automap, fix missing import Fixes: #11273 Change-Id: I872dcc0c2cf5093034e1590533b2e0d26602df7f References: #11267 (cherry picked from commit 82803016b5fcbc3225af87a43768dbea2be87582) --- lib/sqlalchemy/ext/automap.py | 153 +++++++++++++++++++++------------- 1 file changed, 93 insertions(+), 60 deletions(-) diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py index bf6a5f26909..d7920904bb0 100644 --- a/lib/sqlalchemy/ext/automap.py +++ b/lib/sqlalchemy/ext/automap.py @@ -64,7 +64,7 @@ # collection-based relationships are by default named # "_collection" u1 = session.query(User).first() - print (u1.address_collection) + print(u1.address_collection) Above, calling :meth:`.AutomapBase.prepare` while passing along the :paramref:`.AutomapBase.prepare.reflect` parameter indicates that the @@ -101,6 +101,7 @@ from sqlalchemy import create_engine, MetaData, Table, Column, ForeignKey from sqlalchemy.ext.automap import automap_base + engine = create_engine("sqlite:///mydatabase.db") # produce our own MetaData object @@ -108,13 +109,15 @@ # we can reflect it ourselves from a database, using options # such as 'only' to limit what tables we look at... - metadata.reflect(engine, only=['user', 'address']) + metadata.reflect(engine, only=["user", "address"]) # ... or just define our own Table objects with it (or combine both) - Table('user_order', metadata, - Column('id', Integer, primary_key=True), - Column('user_id', ForeignKey('user.id')) - ) + Table( + "user_order", + metadata, + Column("id", Integer, primary_key=True), + Column("user_id", ForeignKey("user.id")), + ) # we can then produce a set of mappings from this MetaData. Base = automap_base(metadata=metadata) @@ -123,8 +126,9 @@ Base.prepare() # mapped classes are ready - User, Address, Order = Base.classes.user, Base.classes.address,\ - Base.classes.user_order + User = Base.classes.user + Address = Base.classes.address + Order = Base.classes.user_order .. _automap_by_module: @@ -177,19 +181,20 @@ Base.metadata.create_all(e) + def module_name_for_table(cls, tablename, table): if table.schema is not None: return f"mymodule.{table.schema}" else: return f"mymodule.default" + Base = automap_base() Base.prepare(e, modulename_for_table=module_name_for_table) Base.prepare(e, schema="test_schema", modulename_for_table=module_name_for_table) Base.prepare(e, schema="test_schema_2", modulename_for_table=module_name_for_table) - The same named-classes are organized into a hierarchical collection available at :attr:`.AutomapBase.by_module`. 
This collection is traversed using the dot-separated name of a particular package/module down into the desired @@ -251,12 +256,13 @@ class name. # automap base Base = automap_base() + # pre-declare User for the 'user' table class User(Base): - __tablename__ = 'user' + __tablename__ = "user" # override schema elements like Columns - user_name = Column('name', String) + user_name = Column("name", String) # override relationships too, if desired. # we must use the same name that automap would use for the @@ -264,6 +270,7 @@ class User(Base): # generate for "address" address_collection = relationship("address", collection_class=set) + # reflect engine = create_engine("sqlite:///mydatabase.db") Base.prepare(autoload_with=engine) @@ -274,11 +281,11 @@ class User(Base): Address = Base.classes.address u1 = session.query(User).first() - print (u1.address_collection) + print(u1.address_collection) # the backref is still there: a1 = session.query(Address).first() - print (a1.user) + print(a1.user) Above, one of the more intricate details is that we illustrated overriding one of the :func:`_orm.relationship` objects that automap would have created. @@ -305,35 +312,49 @@ class User(Base): import re import inflect + def camelize_classname(base, tablename, table): - "Produce a 'camelized' class name, e.g. " + "Produce a 'camelized' class name, e.g." "'words_and_underscores' -> 'WordsAndUnderscores'" - return str(tablename[0].upper() + \ - re.sub(r'_([a-z])', lambda m: m.group(1).upper(), tablename[1:])) + return str( + tablename[0].upper() + + re.sub( + r"_([a-z])", + lambda m: m.group(1).upper(), + tablename[1:], + ) + ) + _pluralizer = inflect.engine() + + def pluralize_collection(base, local_cls, referred_cls, constraint): - "Produce an 'uncamelized', 'pluralized' class name, e.g. " + "Produce an 'uncamelized', 'pluralized' class name, e.g." "'SomeTerm' -> 'some_terms'" referred_name = referred_cls.__name__ - uncamelized = re.sub(r'[A-Z]', - lambda m: "_%s" % m.group(0).lower(), - referred_name)[1:] + uncamelized = re.sub( + r"[A-Z]", + lambda m: "_%s" % m.group(0).lower(), + referred_name, + )[1:] pluralized = _pluralizer.plural(uncamelized) return pluralized + from sqlalchemy.ext.automap import automap_base Base = automap_base() engine = create_engine("sqlite:///mydatabase.db") - Base.prepare(autoload_with=engine, - classname_for_table=camelize_classname, - name_for_collection_relationship=pluralize_collection - ) + Base.prepare( + autoload_with=engine, + classname_for_table=camelize_classname, + name_for_collection_relationship=pluralize_collection, + ) From the above mapping, we would now have classes ``User`` and ``Address``, where the collection from ``User`` to ``Address`` is called @@ -422,16 +443,21 @@ def pluralize_collection(base, local_cls, referred_cls, constraint): options along to all one-to-many relationships:: from sqlalchemy.ext.automap import generate_relationship + from sqlalchemy.orm import interfaces + - def _gen_relationship(base, direction, return_fn, - attrname, local_cls, referred_cls, **kw): + def _gen_relationship( + base, direction, return_fn, attrname, local_cls, referred_cls, **kw + ): if direction is interfaces.ONETOMANY: - kw['cascade'] = 'all, delete-orphan' - kw['passive_deletes'] = True + kw["cascade"] = "all, delete-orphan" + kw["passive_deletes"] = True # make use of the built-in function to actually return # the result. 
- return generate_relationship(base, direction, return_fn, - attrname, local_cls, referred_cls, **kw) + return generate_relationship( + base, direction, return_fn, attrname, local_cls, referred_cls, **kw + ) + from sqlalchemy.ext.automap import automap_base from sqlalchemy import create_engine @@ -440,8 +466,7 @@ def _gen_relationship(base, direction, return_fn, Base = automap_base() engine = create_engine("sqlite:///mydatabase.db") - Base.prepare(autoload_with=engine, - generate_relationship=_gen_relationship) + Base.prepare(autoload_with=engine, generate_relationship=_gen_relationship) Many-to-Many relationships -------------------------- @@ -482,18 +507,20 @@ def _gen_relationship(base, direction, return_fn, classes given as follows:: class Employee(Base): - __tablename__ = 'employee' + __tablename__ = "employee" id = Column(Integer, primary_key=True) type = Column(String(50)) __mapper_args__ = { - 'polymorphic_identity':'employee', 'polymorphic_on': type + "polymorphic_identity": "employee", + "polymorphic_on": type, } + class Engineer(Employee): - __tablename__ = 'engineer' - id = Column(Integer, ForeignKey('employee.id'), primary_key=True) + __tablename__ = "engineer" + id = Column(Integer, ForeignKey("employee.id"), primary_key=True) __mapper_args__ = { - 'polymorphic_identity':'engineer', + "polymorphic_identity": "engineer", } The foreign key from ``Engineer`` to ``Employee`` is used not for a @@ -508,25 +535,26 @@ class Engineer(Employee): SQLAlchemy can guess:: class Employee(Base): - __tablename__ = 'employee' + __tablename__ = "employee" id = Column(Integer, primary_key=True) type = Column(String(50)) __mapper_args__ = { - 'polymorphic_identity':'employee', 'polymorphic_on':type + "polymorphic_identity": "employee", + "polymorphic_on": type, } + class Engineer(Employee): - __tablename__ = 'engineer' - id = Column(Integer, ForeignKey('employee.id'), primary_key=True) - favorite_employee_id = Column(Integer, ForeignKey('employee.id')) + __tablename__ = "engineer" + id = Column(Integer, ForeignKey("employee.id"), primary_key=True) + favorite_employee_id = Column(Integer, ForeignKey("employee.id")) - favorite_employee = relationship(Employee, - foreign_keys=favorite_employee_id) + favorite_employee = relationship(Employee, foreign_keys=favorite_employee_id) __mapper_args__ = { - 'polymorphic_identity':'engineer', - 'inherit_condition': id == Employee.id + "polymorphic_identity": "engineer", + "inherit_condition": id == Employee.id, } Handling Simple Naming Conflicts @@ -564,15 +592,15 @@ def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): local_table = local_cls.__table__ if name in local_table.columns: newname = name + "_" - warnings.warn( - "Already detected name %s present. using %s" % - (name, newname)) + warnings.warn("Already detected name %s present. using %s" % (name, newname)) return newname return name - Base.prepare(autoload_with=engine, - name_for_scalar_relationship=name_for_scalar_relationship) + Base.prepare( + autoload_with=engine, + name_for_scalar_relationship=name_for_scalar_relationship, + ) Alternatively, we can change the name on the column side. 
The columns that are mapped can be modified using the technique described at @@ -581,12 +609,13 @@ def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): Base = automap_base() + class TableB(Base): - __tablename__ = 'table_b' - _table_a = Column('table_a', ForeignKey('table_a.id')) + __tablename__ = "table_b" + _table_a = Column("table_a", ForeignKey("table_a.id")) - Base.prepare(autoload_with=engine) + Base.prepare(autoload_with=engine) Using Automap with Explicit Declarations ======================================== @@ -603,26 +632,29 @@ class TableB(Base): Base = automap_base() + class User(Base): - __tablename__ = 'user' + __tablename__ = "user" id = Column(Integer, primary_key=True) name = Column(String) + class Address(Base): - __tablename__ = 'address' + __tablename__ = "address" id = Column(Integer, primary_key=True) email = Column(String) - user_id = Column(ForeignKey('user.id')) + user_id = Column(ForeignKey("user.id")) + # produce relationships Base.prepare() # mapping is complete, with "address_collection" and # "user" relationships - a1 = Address(email='u1') - a2 = Address(email='u2') + a1 = Address(email="u1") + a2 = Address(email="u2") u1 = User(address_collection=[a1, a2]) assert a1.user is u1 @@ -651,7 +683,8 @@ class Address(Base): @event.listens_for(Base.metadata, "column_reflect") def column_reflect(inspector, table, column_info): # set column.key = "attr_" - column_info['key'] = "attr_%s" % column_info['name'].lower() + column_info["key"] = "attr_%s" % column_info["name"].lower() + # run reflection Base.prepare(autoload_with=engine) From 005586fcc64078496d663c6261eb44c7135ff232 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 18 Apr 2024 18:17:21 -0400 Subject: [PATCH 182/544] consider propagate_to_loaders at application time Fixed regression from 1.4 where using :func:`_orm.defaultload` in conjunction with a non-propagating loader like :func:`_orm.contains_eager` would nonetheless propagate the :func:`_orm.contains_eager` to a lazy load operation, causing incorrect queries as this option is only intended to come from an original load. Fixes: #11292 Change-Id: I79928afa108970b523f2166c3190f7952eca73ed (cherry picked from commit 80399cefa1b16a8548ba0c997a1eda94b8e9db01) --- doc/build/changelog/unreleased_20/11292.rst | 11 ++ lib/sqlalchemy/orm/strategy_options.py | 16 ++- test/orm/test_default_strategies.py | 126 ++++++++++++++++++++ test/orm/test_options.py | 5 +- test/profiles.txt | 26 ++-- 5 files changed, 170 insertions(+), 14 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11292.rst diff --git a/doc/build/changelog/unreleased_20/11292.rst b/doc/build/changelog/unreleased_20/11292.rst new file mode 100644 index 00000000000..65fbdf719a0 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11292.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, orm, regression + :tickets: 11292 + + Fixed regression from 1.4 where using :func:`_orm.defaultload` in + conjunction with a non-propagating loader like :func:`_orm.contains_eager` + would nonetheless propagate the :func:`_orm.contains_eager` to a lazy load + operation, causing incorrect queries as this option is only intended to + come from an original load. 
+ + diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index 25c6332112f..31c3a54e323 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -98,6 +98,7 @@ def contains_eager( attr: _AttrType, alias: Optional[_FromClauseArgument] = None, _is_chain: bool = False, + _propagate_to_loaders: bool = False, ) -> Self: r"""Indicate that the given attribute should be eagerly loaded from columns stated manually in the query. @@ -158,7 +159,7 @@ def contains_eager( cloned = self._set_relationship_strategy( attr, {"lazy": "joined"}, - propagate_to_loaders=False, + propagate_to_loaders=_propagate_to_loaders, opts={"eager_from_alias": coerced_alias}, _reconcile_to_other=True if _is_chain else None, ) @@ -1146,7 +1147,20 @@ def _process( mapper_entities, raiseerr ) + # if the context has a current path, this is a lazy load + has_current_path = bool(compile_state.compile_options._current_path) + for loader in self.context: + # issue #11292 + # historically, propagate_to_loaders was only considered at + # object loading time, whether or not to carry along options + # onto an object's loaded state where it would be used by lazyload. + # however, the defaultload() option needs to propagate in case + # its sub-options propagate_to_loaders, but its sub-options + # that dont propagate should not be applied for lazy loaders. + # so we check again + if has_current_path and not loader.propagate_to_loaders: + continue loader.process_compile_state( self, compile_state, diff --git a/test/orm/test_default_strategies.py b/test/orm/test_default_strategies.py index 657875aa9d8..178b03fe6f6 100644 --- a/test/orm/test_default_strategies.py +++ b/test/orm/test_default_strategies.py @@ -1,11 +1,18 @@ import sqlalchemy as sa +from sqlalchemy import Column +from sqlalchemy import ForeignKey +from sqlalchemy import Integer +from sqlalchemy import select from sqlalchemy import testing from sqlalchemy import util +from sqlalchemy.orm import contains_eager from sqlalchemy.orm import defaultload from sqlalchemy.orm import joinedload from sqlalchemy.orm import relationship +from sqlalchemy.orm import Session from sqlalchemy.orm import subqueryload from sqlalchemy.testing import eq_ +from sqlalchemy.testing import fixtures from sqlalchemy.testing.assertions import expect_raises_message from sqlalchemy.testing.fixtures import fixture_session from test.orm import _fixtures @@ -738,3 +745,122 @@ def go(): eq_(a1.user, None) self.sql_count_(0, go) + + +class Issue11292Test(fixtures.DeclarativeMappedTest): + @classmethod + def setup_classes(cls): + Base = cls.DeclarativeBasic + + class Parent(Base): + __tablename__ = "parent" + + id = Column(Integer, primary_key=True) + + extension = relationship( + "Extension", back_populates="parent", uselist=False + ) + + class Child(Base): + __tablename__ = "child" + + id = Column(Integer, primary_key=True) + + extensions = relationship("Extension", back_populates="child") + + class Extension(Base): + __tablename__ = "extension" + + id = Column(Integer, primary_key=True) + parent_id = Column(Integer, ForeignKey(Parent.id)) + child_id = Column(Integer, ForeignKey(Child.id)) + + parent = relationship("Parent", back_populates="extension") + child = relationship("Child", back_populates="extensions") + + @classmethod + def insert_data(cls, connection): + Parent, Child, Extension = cls.classes("Parent", "Child", "Extension") + with Session(connection) as session: + for id_ in (1, 2, 3): + session.add(Parent(id=id_)) + 
session.add(Child(id=id_)) + session.add(Extension(id=id_, parent_id=id_, child_id=id_)) + session.commit() + + @testing.variation("load_as_option", [True, False]) + def test_defaultload_dont_propagate(self, load_as_option): + Parent, Child, Extension = self.classes("Parent", "Child", "Extension") + + session = fixture_session() + + # here, we want the defaultload() to go away on subsequent loads, + # becuase Parent.extension is propagate_to_loaders=False + query = ( + select(Parent) + .join(Extension) + .join(Child) + .options( + contains_eager(Parent.extension), + ( + defaultload(Parent.extension).options( + contains_eager(Extension.child) + ) + if load_as_option + else defaultload(Parent.extension).contains_eager( + Extension.child + ) + ), + ) + ) + + parents = session.scalars(query).all() + + eq_( + [(p.id, p.extension.id, p.extension.child.id) for p in parents], + [(1, 1, 1), (2, 2, 2), (3, 3, 3)], + ) + + session.expire_all() + + eq_( + [(p.id, p.extension.id, p.extension.child.id) for p in parents], + [(1, 1, 1), (2, 2, 2), (3, 3, 3)], + ) + + @testing.variation("load_as_option", [True, False]) + def test_defaultload_yes_propagate(self, load_as_option): + Parent, Child, Extension = self.classes("Parent", "Child", "Extension") + + session = fixture_session() + + # here, we want the defaultload() to go away on subsequent loads, + # becuase Parent.extension is propagate_to_loaders=False + query = select(Parent).options( + ( + defaultload(Parent.extension).options( + joinedload(Extension.child) + ) + if load_as_option + else defaultload(Parent.extension).joinedload(Extension.child) + ), + ) + + parents = session.scalars(query).all() + + eq_( + [(p.id, p.extension.id, p.extension.child.id) for p in parents], + [(1, 1, 1), (2, 2, 2), (3, 3, 3)], + ) + + session.expire_all() + + # this would be 9 without the joinedload + with self.assert_statement_count(testing.db, 6): + eq_( + [ + (p.id, p.extension.id, p.extension.child.id) + for p in parents + ], + [(1, 1, 1), (2, 2, 2), (3, 3, 3)], + ) diff --git a/test/orm/test_options.py b/test/orm/test_options.py index db9b51607c3..c6058a80b3b 100644 --- a/test/orm/test_options.py +++ b/test/orm/test_options.py @@ -419,7 +419,10 @@ def _option_fixture(self, *arg): # loader option works this way right now; the rest all use # defaultload() for the "chain" elements return strategy_options._generate_from_keys( - strategy_options.Load.contains_eager, arg, True, {} + strategy_options.Load.contains_eager, + arg, + True, + dict(_propagate_to_loaders=True), ) @testing.combinations( diff --git a/test/profiles.txt b/test/profiles.txt index d8226f4a894..370d895b627 100644 --- a/test/profiles.txt +++ b/test/profiles.txt @@ -1,15 +1,15 @@ # /home/classic/dev/sqlalchemy/test/profiles.txt # This file is written out on a per-environment basis. -# For each test in aaa_profiling, the corresponding function and +# For each test in aaa_profiling, the corresponding function and # environment is located within this file. If it doesn't exist, # the test is skipped. -# If a callcount does exist, it is compared to what we received. +# If a callcount does exist, it is compared to what we received. # assertions are raised if the counts do not match. -# -# To add a new callcount test, apply the function_call_count -# decorator and re-run the tests using the --write-profiles +# +# To add a new callcount test, apply the function_call_count +# decorator and re-run the tests using the --write-profiles # option - this file will be rewritten including the new count. 
-# +# # TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert @@ -209,15 +209,17 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove # TEST: test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching -test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 128 -test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 128 -test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 124 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 136 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 136 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 132 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_key_bound_branching x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_nocextensions 132 # TEST: test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching -test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 128 -test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 128 -test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 124 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 136 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_nocextensions 136 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_cextensions 132 +test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching x86_64_linux_cpython_3.12_sqlite_pysqlite_dbapiunicode_nocextensions 132 # TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline From 3d8643dc0787c2c21b358c13bff8e5c5fcc37961 Mon Sep 17 00:00:00 2001 From: gmanny Date: Wed, 24 Apr 2024 22:41:34 +0200 Subject: [PATCH 183/544] Changed some `declared_attr` code examples in the docs to return `mapped_column` to indicate that it's possible. 
(#11302) (cherry picked from commit 81c2503173fc674baa579a355e63e020969618af) --- doc/build/orm/dataclasses.rst | 4 ++-- doc/build/orm/extensions/mypy.rst | 2 +- doc/build/orm/mapping_api.rst | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/build/orm/dataclasses.rst b/doc/build/orm/dataclasses.rst index 2c45a4d0196..e737597cf14 100644 --- a/doc/build/orm/dataclasses.rst +++ b/doc/build/orm/dataclasses.rst @@ -787,8 +787,8 @@ example at :ref:`orm_declarative_mixins_relationships`:: class RefTargetMixin: @declared_attr - def target_id(cls): - return Column("target_id", ForeignKey("target.id")) + def target_id(cls) -> Mapped[int]: + return mapped_column("target_id", ForeignKey("target.id")) @declared_attr def target(cls): diff --git a/doc/build/orm/extensions/mypy.rst b/doc/build/orm/extensions/mypy.rst index 8275e94866b..afd34929af6 100644 --- a/doc/build/orm/extensions/mypy.rst +++ b/doc/build/orm/extensions/mypy.rst @@ -497,7 +497,7 @@ plugin that a particular class intends to serve as a declarative mixin:: class HasCompany: @declared_attr def company_id(cls) -> Mapped[int]: # uses Mapped - return Column(ForeignKey("company.id")) + return mapped_column(ForeignKey("company.id")) @declared_attr def company(cls) -> Mapped["Company"]: diff --git a/doc/build/orm/mapping_api.rst b/doc/build/orm/mapping_api.rst index 57ef5e00e0f..399111d6058 100644 --- a/doc/build/orm/mapping_api.rst +++ b/doc/build/orm/mapping_api.rst @@ -53,11 +53,11 @@ Class Mapping API class HasIdMixin: @declared_attr.cascading - def id(cls): + def id(cls) -> Mapped[int]: if has_inherited_table(cls): - return Column(ForeignKey("myclass.id"), primary_key=True) + return mapped_column(ForeignKey("myclass.id"), primary_key=True) else: - return Column(Integer, primary_key=True) + return mapped_column(Integer, primary_key=True) class MyClass(HasIdMixin, Base): From bbcf07a34a4f7fa70fa370b3444f63830871c1ec Mon Sep 17 00:00:00 2001 From: Pat Buxton <45275736+rad-pat@users.noreply.github.com> Date: Wed, 24 Apr 2024 21:48:02 +0100 Subject: [PATCH 184/544] Add Databend and Greenplum dialects (#11248) (cherry picked from commit 7adc7404acc691698e30c362a8ec03af2bd426fd) --- doc/build/dialects/index.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index b6c9c8e88d5..294095450f4 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -81,6 +81,8 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | CrateDB | crate-python_ | +------------------------------------------------+---------------------------------------+ +| Databend | databend-sqlalchemy_ | ++------------------------------------------------+---------------------------------------+ | EXASolution | sqlalchemy_exasol_ | +------------------------------------------------+---------------------------------------+ | Elasticsearch (readonly) | elasticsearch-dbapi_ | @@ -93,6 +95,8 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Google Sheets | gsheets_ | +------------------------------------------------+---------------------------------------+ +| Greenplum [2]_ | sqlalchemy-greenplum_ | ++------------------------------------------------+---------------------------------------+ | IBM DB2 and Informix | ibm-db-sa_ | 
+------------------------------------------------+---------------------------------------+ | IBM Netezza Performance Server [1]_ | nzalchemy_ | @@ -125,6 +129,7 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ .. [1] Supports version 1.3.x only at the moment. +.. [2] Supports version 1.4.x only at the moment. .. _openGauss-sqlalchemy: https://gitee.com/opengauss/openGauss-sqlalchemy .. _rockset-sqlalchemy: https://pypi.org/project/rockset-sqlalchemy @@ -156,3 +161,5 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _pyathena: https://github.com/laughingman7743/PyAthena/ .. _sqlalchemy-yugabytedb: https://pypi.org/project/sqlalchemy-yugabytedb/ .. _impyla: https://pypi.org/project/impyla/ +.. _databend-sqlalchemy: https://github.com/datafuselabs/databend-sqlalchemy +.. _sqlalchemy-greenplum: https://github.com/PlaidCloud/sqlalchemy-greenplum From 454533aacfc0f9af2332dfd9b9ffbf35c248decc Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 24 Apr 2024 22:30:21 +0200 Subject: [PATCH 185/544] Fix typing to support mypy 1.10 Change-Id: I77c0a04331a99c7be77c174721431a5601475dc3 (cherry picked from commit 859dda8f0b2874fcf7f080d15411336047b89a64) --- lib/sqlalchemy/ext/hybrid.py | 2 +- lib/sqlalchemy/sql/compiler.py | 2 +- lib/sqlalchemy/util/langhelpers.py | 6 +++--- lib/sqlalchemy/util/typing.py | 2 +- test/typing/plain_files/orm/typed_queries.py | 8 ++++---- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index 25b74d8d6e3..ee8d6a78184 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -1096,7 +1096,7 @@ def value(self, value): self.expr = _unwrap_classmethod(expr) self.custom_comparator = _unwrap_classmethod(custom_comparator) self.update_expr = _unwrap_classmethod(update_expr) - util.update_wrapper(self, fget) + util.update_wrapper(self, fget) # type: ignore[arg-type] @overload def __get__(self, instance: Any, owner: Literal[None]) -> Self: ... diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index c354ba83864..9aa06be25a3 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -7290,7 +7290,7 @@ def visit_user_defined(self, type_, **kw): class _SchemaForObjectCallable(Protocol): - def __call__(self, obj: Any) -> str: ... + def __call__(self, __obj: Any) -> str: ... 
class _BindNameForColProtocol(Protocol): diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 4390ae18352..00d5d0f2b38 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -308,10 +308,10 @@ def decorate(fn: _Fn) -> _Fn: ) decorated.__defaults__ = getattr(fn, "__func__", fn).__defaults__ - decorated.__wrapped__ = fn # type: ignore - return cast(_Fn, update_wrapper(decorated, fn)) + decorated.__wrapped__ = fn # type: ignore[attr-defined] + return update_wrapper(decorated, fn) # type: ignore[return-value] - return update_wrapper(decorate, target) + return update_wrapper(decorate, target) # type: ignore[return-value] def _update_argspec_defaults_into_env(spec, env): diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index 2d9e2250a8b..64619957a6b 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -337,7 +337,7 @@ def flatten_newtype(type_: NewType) -> Type[Any]: super_type = type_.__supertype__ while is_newtype(super_type): super_type = super_type.__supertype__ - return super_type + return super_type # type: ignore[return-value] def is_fwd_ref( diff --git a/test/typing/plain_files/orm/typed_queries.py b/test/typing/plain_files/orm/typed_queries.py index 7d8a2dd1a32..e5f513a7de9 100644 --- a/test/typing/plain_files/orm/typed_queries.py +++ b/test/typing/plain_files/orm/typed_queries.py @@ -97,7 +97,7 @@ def t_select_3() -> None: # awkwardnesses that aren't really worth it ua(id=1, name="foo") - # EXPECTED_TYPE: Type[User] + # EXPECTED_TYPE: type[User] reveal_type(ua) stmt = select(ua.id, ua.name).filter(User.id == 5) @@ -529,13 +529,13 @@ def t_aliased_fromclause() -> None: a4 = aliased(user_table) - # EXPECTED_TYPE: Type[User] + # EXPECTED_TYPE: type[User] reveal_type(a1) - # EXPECTED_TYPE: Type[User] + # EXPECTED_TYPE: type[User] reveal_type(a2) - # EXPECTED_TYPE: Type[User] + # EXPECTED_TYPE: type[User] reveal_type(a3) # EXPECTED_TYPE: FromClause From 1fc4894c0dedabbea9f8b722474f6e019ab86526 Mon Sep 17 00:00:00 2001 From: Gord Thompson Date: Thu, 25 Apr 2024 09:14:53 -0600 Subject: [PATCH 186/544] Add Databricks to external dialect list Change-Id: I155e274c6baaeb044f7fda76ba74a63ab9e8e4e3 (cherry picked from commit d1cda3482aeb4b7edbcd564dc3523b974848a02c) --- doc/build/dialects/index.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 294095450f4..d065bcf5b34 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -83,6 +83,8 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Databend | databend-sqlalchemy_ | +------------------------------------------------+---------------------------------------+ +| Databricks | databricks_ | ++------------------------------------------------+---------------------------------------+ | EXASolution | sqlalchemy_exasol_ | +------------------------------------------------+---------------------------------------+ | Elasticsearch (readonly) | elasticsearch-dbapi_ | @@ -163,3 +165,4 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _impyla: https://pypi.org/project/impyla/ .. _databend-sqlalchemy: https://github.com/datafuselabs/databend-sqlalchemy .. _sqlalchemy-greenplum: https://github.com/PlaidCloud/sqlalchemy-greenplum +.. 
_databricks: https://docs.databricks.com/en/dev-tools/sqlalchemy.html From 584094a4384e305093dfffc56648626b9659cdf7 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 24 Apr 2024 21:47:01 +0200 Subject: [PATCH 187/544] Improve typing to the count function. Improve typing to allow `'*'` and 1 in the count function. Fixes: #11316 Change-Id: Iaafdb779b6baa70504154099f0b9554c612a9ffa (cherry picked from commit 55fb04f10c0aeee7ace984dbe66642a1286594de) --- .gitignore | 1 + lib/sqlalchemy/sql/_typing.py | 6 ++++-- lib/sqlalchemy/sql/functions.py | 5 ++++- test/typing/plain_files/sql/functions.py | 7 +++++++ 4 files changed, 16 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 13b40c819ad..d2ee9a2f4ad 100644 --- a/.gitignore +++ b/.gitignore @@ -40,3 +40,4 @@ test/test_schema.db /db_idents.txt .DS_Store .vs +/scratch diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index c861bae6e0f..0d8f464467e 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -117,10 +117,12 @@ def dialect(self) -> Dialect: ... "Decimal", ) +_StarOrOne = Literal["*", 1] + _MAYBE_ENTITY = TypeVar( "_MAYBE_ENTITY", roles.ColumnsClauseRole, - Literal["*", 1], + _StarOrOne, Type[Any], Inspectable[_HasClauseElement[Any]], _HasClauseElement[Any], @@ -145,7 +147,7 @@ def dialect(self) -> Dialect: ... roles.TypedColumnsClauseRole[_T], roles.ColumnsClauseRole, "SQLCoreOperations[_T]", - Literal["*", 1], + _StarOrOne, Type[_T], Inspectable[_HasClauseElement[_T]], _HasClauseElement[_T], diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index afb2b1d9b99..8ef7f75bc21 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -69,6 +69,7 @@ from ._typing import _ColumnExpressionArgument from ._typing import _ColumnExpressionOrLiteralArgument from ._typing import _ColumnExpressionOrStrLabelArgument + from ._typing import _StarOrOne from ._typing import _TypeEngineArgument from .base import _EntityNamespace from .elements import ClauseElement @@ -1721,7 +1722,9 @@ class count(GenericFunction[int]): def __init__( self, - expression: Optional[_ColumnExpressionArgument[Any]] = None, + expression: Union[ + _ColumnExpressionArgument[Any], _StarOrOne, None + ] = None, **kwargs: Any, ): if expression is None: diff --git a/test/typing/plain_files/sql/functions.py b/test/typing/plain_files/sql/functions.py index 6a345fcf6ec..f657a48571a 100644 --- a/test/typing/plain_files/sql/functions.py +++ b/test/typing/plain_files/sql/functions.py @@ -1,8 +1,11 @@ """this file is generated by tools/generate_sql_functions.py""" +from typing import Tuple + from sqlalchemy import column from sqlalchemy import func from sqlalchemy import Integer +from sqlalchemy import Select from sqlalchemy import select from sqlalchemy import Sequence from sqlalchemy import String @@ -150,3 +153,7 @@ reveal_type(stmt23) # END GENERATED FUNCTION TYPING TESTS + +stmt_count: Select[Tuple[int, int, int]] = select( + func.count(), func.count("*"), func.count(1) +) From 6caf620f88cb5b11b13985f247a30e732b3fdeb6 Mon Sep 17 00:00:00 2001 From: Yossi Rozantsev <54272821+Apakottur@users.noreply.github.com> Date: Wed, 24 Apr 2024 16:15:30 -0400 Subject: [PATCH 188/544] Add missing overload to __add__ Add a missing `@overload` to the `__add__` operator. ### Description The `__add__` function is missing an overload that handles the rest of the cases, similar to the one that `__sub__` has a few lines later in the same file. 
This fix is taken from https://github.com/microsoft/pyright/issues/7743 ### Checklist This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #11307 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11307 Pull-request-sha: 961d87403a5f985fbd17e07bae490e8e97475158 Change-Id: I27784f79e8d4f8b7f09b17060186916c78cba0a3 (cherry picked from commit 18b5b8a5b4d40b8ed8695a4027cedaaafa04cff4) --- lib/sqlalchemy/sql/elements.py | 3 +++ test/typing/plain_files/sql/operators.py | 6 ++++++ 2 files changed, 9 insertions(+) diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index bafb5c77860..24f04fd7670 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -1065,6 +1065,9 @@ def __add__( other: Any, ) -> ColumnElement[str]: ... + @overload + def __add__(self, other: Any) -> ColumnElement[Any]: ... + def __add__(self, other: Any) -> ColumnElement[Any]: ... @overload diff --git a/test/typing/plain_files/sql/operators.py b/test/typing/plain_files/sql/operators.py index 2e2f31df9cf..dbd6f3d48f4 100644 --- a/test/typing/plain_files/sql/operators.py +++ b/test/typing/plain_files/sql/operators.py @@ -1,3 +1,4 @@ +import datetime as dt from decimal import Decimal from typing import Any from typing import List @@ -6,6 +7,7 @@ from sqlalchemy import BigInteger from sqlalchemy import column from sqlalchemy import ColumnElement +from sqlalchemy import func from sqlalchemy import Integer from sqlalchemy import select from sqlalchemy import String @@ -100,6 +102,10 @@ class A(Base): add1: "ColumnElement[int]" = A.id + A.id add2: "ColumnElement[int]" = A.id + 1 add3: "ColumnElement[int]" = 1 + A.id +add_date: "ColumnElement[dt.date]" = func.current_date() + dt.timedelta(days=1) +add_datetime: "ColumnElement[dt.datetime]" = ( + func.current_timestamp() + dt.timedelta(seconds=1) +) sub1: "ColumnElement[int]" = A.id - A.id sub2: "ColumnElement[int]" = A.id - 1 From c26ec67e1fe8431a3380c55512dbcf8c9dba9f56 Mon Sep 17 00:00:00 2001 From: Felix Zenk Date: Fri, 26 Apr 2024 21:25:19 +0200 Subject: [PATCH 189/544] Fix typo in sqlalchemy.event.api (#11325) (cherry picked from commit f0ed44e89ea83dc2f994105dcd0c471bcb54d608) --- lib/sqlalchemy/event/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/event/api.py b/lib/sqlalchemy/event/api.py index 4a39d10f406..230ec698667 100644 --- a/lib/sqlalchemy/event/api.py +++ b/lib/sqlalchemy/event/api.py @@ -132,7 +132,7 @@ def listens_for( The :func:`.listens_for` decorator is part of the primary interface for the SQLAlchemy event system, documented at :ref:`event_toplevel`. - This function generally shares the same kwargs as :func:`.listens`. + This function generally shares the same kwargs as :func:`.listen`. 
e.g.:: From 65c12025a1b1a37186d55c1f8f204560037ebe5f Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 27 Apr 2024 10:48:47 +0200 Subject: [PATCH 190/544] Fixing ci errors Change-Id: Ia1e3a8748a36dd3fa013707eae5ee4f97013d71b (cherry picked from commit d85289b35ee6c2683eef378f1ea2fdea7f401ed9) --- .github/workflows/create-wheels.yaml | 5 ++-- .github/workflows/run-on-pr.yaml | 4 ++-- .github/workflows/run-test.yaml | 25 +++++++++++++++++++- test/typing/plain_files/orm/typed_queries.py | 8 +++---- 4 files changed, 33 insertions(+), 9 deletions(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index 7fd142d225f..2ca2b981d81 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -27,7 +27,8 @@ jobs: - compiled os: - "windows-2022" - - "macos-12" + # TODO: macos-14 uses arm macs (only python 3.10+) - make arm wheel on it + - "macos-13" - "ubuntu-22.04" linux_archs: # this is only meaningful on linux. windows and macos ignore exclude all but one arch @@ -43,7 +44,7 @@ jobs: exclude: - os: "windows-2022" linux_archs: "aarch64" - - os: "macos-12" + - os: "macos-13" linux_archs: "aarch64" fail-fast: false diff --git a/.github/workflows/run-on-pr.yaml b/.github/workflows/run-on-pr.yaml index 0790c793304..aa67872e325 100644 --- a/.github/workflows/run-on-pr.yaml +++ b/.github/workflows/run-on-pr.yaml @@ -25,7 +25,7 @@ jobs: os: - "ubuntu-latest" python-version: - - "3.11" + - "3.12" build-type: - "cext" - "nocext" @@ -62,7 +62,7 @@ jobs: os: - "ubuntu-latest" python-version: - - "3.11" + - "3.12" tox-env: - mypy - lint diff --git a/.github/workflows/run-test.yaml b/.github/workflows/run-test.yaml index 6eae9e8bc72..1d384294a0a 100644 --- a/.github/workflows/run-test.yaml +++ b/.github/workflows/run-test.yaml @@ -29,6 +29,7 @@ jobs: - "ubuntu-latest" - "windows-latest" - "macos-latest" + - "macos-13" python-version: - "3.7" - "3.8" @@ -43,6 +44,7 @@ jobs: architecture: - x64 - x86 + - arm64 include: # autocommit tests fail on the ci for some reason @@ -51,11 +53,31 @@ jobs: - os: "ubuntu-latest" pytest-args: "--dbdriver pysqlite --dbdriver aiosqlite" + exclude: - # linux and osx do not have x86 python + # linux do not have x86 / arm64 python + - os: "ubuntu-latest" + architecture: x86 - os: "ubuntu-latest" + architecture: arm64 + # windows des not have arm64 python + - os: "windows-latest" + architecture: arm64 + # macos: latests uses arm macs. only 3.10+; no x86/x64 + - os: "macos-latest" architecture: x86 - os: "macos-latest" + architecture: x64 + - os: "macos-latest" + python-version: "3.7" + - os: "macos-latest" + python-version: "3.8" + - os: "macos-latest" + python-version: "3.9" + # macos 13: uses intel macs. 
no arm64, x86 + - os: "macos-13" + architecture: arm64 + - os: "macos-13" architecture: x86 # pypy does not have cext or x86 - python-version: "pypy-3.9" @@ -94,6 +116,7 @@ jobs: continue-on-error: ${{ matrix.python-version == 'pypy-3.9' }} run-test-arm64: + # Hopefully something native can be used at some point https://github.blog/changelog/2023-10-30-accelerate-your-ci-cd-with-arm-based-hosted-runners-in-github-actions/ name: test-arm64-${{ matrix.python-version }}-${{ matrix.build-type }}-${{ matrix.os }} runs-on: ubuntu-latest strategy: diff --git a/test/typing/plain_files/orm/typed_queries.py b/test/typing/plain_files/orm/typed_queries.py index e5f513a7de9..b1226da30fc 100644 --- a/test/typing/plain_files/orm/typed_queries.py +++ b/test/typing/plain_files/orm/typed_queries.py @@ -97,7 +97,7 @@ def t_select_3() -> None: # awkwardnesses that aren't really worth it ua(id=1, name="foo") - # EXPECTED_TYPE: type[User] + # EXPECTED_RE_TYPE: [tT]ype\[.*\.User\] reveal_type(ua) stmt = select(ua.id, ua.name).filter(User.id == 5) @@ -529,13 +529,13 @@ def t_aliased_fromclause() -> None: a4 = aliased(user_table) - # EXPECTED_TYPE: type[User] + # EXPECTED_RE_TYPE: [tT]ype\[.*\.User\] reveal_type(a1) - # EXPECTED_TYPE: type[User] + # EXPECTED_RE_TYPE: [tT]ype\[.*\.User\] reveal_type(a2) - # EXPECTED_TYPE: type[User] + # EXPECTED_RE_TYPE: [tT]ype\[.*\.User\] reveal_type(a3) # EXPECTED_TYPE: FromClause From 86e1e1de3f7d4c992b7ad6703e748073c91cb508 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 27 Apr 2024 13:06:03 +0200 Subject: [PATCH 191/544] improve fetchmany performance when using deque Change-Id: Id30e770eb44eafd3e939c4076b639e8e6962c54b (cherry picked from commit 319304e7c9e5c6c2e42513b81f85aa6b238495b5) --- lib/sqlalchemy/connectors/asyncio.py | 8 ++------ lib/sqlalchemy/engine/cursor.py | 22 ++++++++++++---------- 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/lib/sqlalchemy/connectors/asyncio.py b/lib/sqlalchemy/connectors/asyncio.py index 0b44f23a025..8dc198cf8e9 100644 --- a/lib/sqlalchemy/connectors/asyncio.py +++ b/lib/sqlalchemy/connectors/asyncio.py @@ -11,7 +11,6 @@ from __future__ import annotations import collections -import itertools from ..engine import AdaptedConnection from ..util.concurrency import asyncio @@ -114,11 +113,8 @@ def fetchone(self): def fetchmany(self, size=None): if size is None: size = self.arraysize - - rr = iter(self._rows) - retval = list(itertools.islice(rr, 0, size)) - self._rows = collections.deque(rr) - return retval + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] def fetchall(self): retval = list(self._rows) diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index 71767db74ed..a885aca8e3b 100644 --- a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ -1246,8 +1246,9 @@ def fetchmany(self, result, dbapi_cursor, size=None): if size is None: return self.fetchall(result, dbapi_cursor) - buf = list(self._rowbuffer) - lb = len(buf) + rb = self._rowbuffer + lb = len(rb) + close = False if size > lb: try: new = dbapi_cursor.fetchmany(size - lb) @@ -1255,13 +1256,15 @@ def fetchmany(self, result, dbapi_cursor, size=None): self.handle_exception(result, dbapi_cursor, e) else: if not new: - result._soft_close() + # defer closing since it may clear the row buffer + close = True else: - buf.extend(new) + rb.extend(new) - result = buf[0:size] - self._rowbuffer = collections.deque(buf[size:]) - return result + res = [rb.popleft() for _ in range(min(size, 
len(rb)))] + if close: + result._soft_close() + return res def fetchall(self, result, dbapi_cursor): try: @@ -1315,9 +1318,8 @@ def fetchmany(self, result, dbapi_cursor, size=None): if size is None: return self.fetchall(result, dbapi_cursor) - buf = list(self._rowbuffer) - rows = buf[0:size] - self._rowbuffer = collections.deque(buf[size:]) + rb = self._rowbuffer + rows = [rb.popleft() for _ in range(min(size, len(rb)))] if not rows: result._soft_close() return rows From 42ef15233cfda6c769e3be9d750551ac566f1966 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 27 Apr 2024 00:31:07 -0400 Subject: [PATCH 192/544] ensure intermediary mappers emit subclass IN Fixed issue in :func:`_orm.selectin_polymorhpic` loader option where the SELECT emitted would only accommodate for the child-most class among the result rows that were returned, leading intermediary-class attributes to be unloaded if there were no concrete instances of that intermediary-class present in the result. This issue only presented itself for multi-level inheritance hierarchies. Fixes: #11327 Change-Id: Iec88cc517613d031221a1c035c4cfb46db0154be (cherry picked from commit 37c598a41efd2609622b1ca6ee698dbe0ab5ac8b) --- doc/build/changelog/unreleased_20/11327.rst | 10 +++ lib/sqlalchemy/orm/loading.py | 47 +++++++++---- lib/sqlalchemy/orm/mapper.py | 1 + test/orm/inheritance/test_poly_loading.py | 78 ++++++++++++++++++++- 4 files changed, 119 insertions(+), 17 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11327.rst diff --git a/doc/build/changelog/unreleased_20/11327.rst b/doc/build/changelog/unreleased_20/11327.rst new file mode 100644 index 00000000000..f7169ad9803 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11327.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, orm + :tickets: 11327 + + Fixed issue in :func:`_orm.selectin_polymorhpic` loader option where the + SELECT emitted would only accommodate for the child-most class among the + result rows that were returned, leading intermediary-class attributes to be + unloaded if there were no concrete instances of that intermediary-class + present in the result. This issue only presented itself for multi-level + inheritance hierarchies. 
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py index 4e2cb8250fc..6176d72a67e 100644 --- a/lib/sqlalchemy/orm/loading.py +++ b/lib/sqlalchemy/orm/loading.py @@ -1010,21 +1010,38 @@ def _instance_processor( # loading does not apply assert only_load_props is None - callable_ = _load_subclass_via_in( - context, - path, - selectin_load_via, - _polymorphic_from, - option_entities, - ) - PostLoad.callable_for_path( - context, - load_path, - selectin_load_via.mapper, - selectin_load_via, - callable_, - selectin_load_via, - ) + if selectin_load_via.is_mapper: + _load_supers = [] + _endmost_mapper = selectin_load_via + while ( + _endmost_mapper + and _endmost_mapper is not _polymorphic_from + ): + _load_supers.append(_endmost_mapper) + _endmost_mapper = _endmost_mapper.inherits + else: + _load_supers = [selectin_load_via] + + for _selectinload_entity in _load_supers: + if PostLoad.path_exists( + context, load_path, _selectinload_entity + ): + continue + callable_ = _load_subclass_via_in( + context, + path, + _selectinload_entity, + _polymorphic_from, + option_entities, + ) + PostLoad.callable_for_path( + context, + load_path, + _selectinload_entity.mapper, + _selectinload_entity, + callable_, + _selectinload_entity, + ) post_load = PostLoad.for_context(context, load_path, only_load_props) diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index 0caed0e2fd0..3052710f825 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -3805,6 +3805,7 @@ def _subclass_load_via_in(self, entity, polymorphic_from): this subclass as a SELECT with IN. """ + strategy_options = util.preloaded.orm_strategy_options assert self.inherits diff --git a/test/orm/inheritance/test_poly_loading.py b/test/orm/inheritance/test_poly_loading.py index df286f0d35c..a768c32754a 100644 --- a/test/orm/inheritance/test_poly_loading.py +++ b/test/orm/inheritance/test_poly_loading.py @@ -735,6 +735,66 @@ def test_threelevel_selectin_to_inline_options(self): with self.assert_statement_count(testing.db, 0): eq_(result, [d(d_data="d1"), e(e_data="e1")]) + @testing.variation("include_intermediary_row", [True, False]) + def test_threelevel_load_only_3lev(self, include_intermediary_row): + """test issue #11327""" + + self._fixture_from_geometry( + { + "a": { + "subclasses": { + "b": {"subclasses": {"c": {}}}, + } + } + } + ) + + a, b, c = self.classes("a", "b", "c") + sess = fixture_session() + sess.add(c(a_data="a1", b_data="b1", c_data="c1")) + if include_intermediary_row: + sess.add(b(a_data="a1", b_data="b1")) + sess.commit() + + sess = fixture_session() + + pks = [] + c_pks = [] + with self.sql_execution_asserter(testing.db) as asserter: + + for obj in sess.scalars( + select(a) + .options(selectin_polymorphic(a, classes=[b, c])) + .order_by(a.id) + ): + assert "b_data" in obj.__dict__ + if isinstance(obj, c): + assert "c_data" in obj.__dict__ + c_pks.append(obj.id) + pks.append(obj.id) + + asserter.assert_( + CompiledSQL( + "SELECT a.id, a.type, a.a_data FROM a ORDER BY a.id", {} + ), + AllOf( + CompiledSQL( + "SELECT c.id AS c_id, b.id AS b_id, a.id AS a_id, " + "a.type AS a_type, c.c_data AS c_c_data FROM a JOIN b " + "ON a.id = b.id JOIN c ON b.id = c.id WHERE a.id IN " + "(__[POSTCOMPILE_primary_keys]) ORDER BY a.id", + [{"primary_keys": c_pks}], + ), + CompiledSQL( + "SELECT b.id AS b_id, a.id AS a_id, a.type AS a_type, " + "b.b_data AS b_b_data FROM a JOIN b ON a.id = b.id " + "WHERE a.id IN (__[POSTCOMPILE_primary_keys]) " + "ORDER BY a.id", + 
[{"primary_keys": pks}], + ), + ), + ) + @testing.combinations((True,), (False,)) def test_threelevel_selectin_to_inline_awkward_alias_options( self, use_aliased_class @@ -752,7 +812,9 @@ def test_threelevel_selectin_to_inline_awkward_alias_options( a, b, c, d, e = self.classes("a", "b", "c", "d", "e") sess = fixture_session() - sess.add_all([d(d_data="d1"), e(e_data="e1")]) + sess.add_all( + [d(c_data="c1", d_data="d1"), e(c_data="c2", e_data="e1")] + ) sess.commit() from sqlalchemy import select @@ -840,6 +902,15 @@ def test_threelevel_selectin_to_inline_awkward_alias_options( {}, ), AllOf( + # note this query is added due to the fix made in + # #11327 + CompiledSQL( + "SELECT c.id AS c_id, a.id AS a_id, a.type AS a_type, " + "c.c_data AS c_c_data FROM a JOIN c ON a.id = c.id " + "WHERE a.id IN (__[POSTCOMPILE_primary_keys]) " + "ORDER BY a.id", + [{"primary_keys": [1, 2]}], + ), CompiledSQL( "SELECT d.id AS d_id, c.id AS c_id, a.id AS a_id, " "a.type AS a_type, d.d_data AS d_d_data FROM a " @@ -860,7 +931,10 @@ def test_threelevel_selectin_to_inline_awkward_alias_options( ) with self.assert_statement_count(testing.db, 0): - eq_(result, [d(d_data="d1"), e(e_data="e1")]) + eq_( + result, + [d(c_data="c1", d_data="d1"), e(c_data="c2", e_data="e1")], + ) def test_partial_load_no_invoke_eagers(self): # test issue #4199 From 2f48773061694d9b02044751e6a63a478ac24bd3 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 28 Apr 2024 13:39:08 -0400 Subject: [PATCH 193/544] only consider column / relationship attrs for subclass IN Fixed issue in :func:`_orm.selectin_polymorhpic` loader option where attributes defined with :func:`_orm.composite` on a superclass would cause an internal exception on load. Define the prop for :class:`.PropRegistry` as a :class:`.StrategizedProperty`; we dont make path registries for descriptor props like synonyms, composites, etc. Fixes: #11291 Change-Id: I6f16844d2483dc86ab402b0b8b1f09561498aa1f (cherry picked from commit f4a0ff730cc753d4d6f947959c6551fd10d7d699) --- doc/build/changelog/unreleased_20/11291.rst | 8 +++++ lib/sqlalchemy/orm/mapper.py | 2 +- lib/sqlalchemy/orm/path_registry.py | 25 ++++++++------- test/orm/inheritance/test_poly_loading.py | 35 ++++++++++++--------- 4 files changed, 44 insertions(+), 26 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11291.rst diff --git a/doc/build/changelog/unreleased_20/11291.rst b/doc/build/changelog/unreleased_20/11291.rst new file mode 100644 index 00000000000..e341ff8aff8 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11291.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, orm + :tickets: 11291 + + Fixed issue in :func:`_orm.selectin_polymorhpic` loader option where + attributes defined with :func:`_orm.composite` on a superclass would cause + an internal exception on load. + diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index 3052710f825..06e3884be63 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -3829,7 +3829,7 @@ def _subclass_load_via_in(self, entity, polymorphic_from): classes_to_include.add(m) m = m.inherits - for prop in self.attrs: + for prop in self.column_attrs + self.relationships: # skip prop keys that are not instrumented on the mapped class. 
# this is primarily the "_sa_polymorphic_on" property that gets # created for an ad-hoc polymorphic_on SQL expression, issue #8704 diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py index 76484b3e68f..4ee8ac71b84 100644 --- a/lib/sqlalchemy/orm/path_registry.py +++ b/lib/sqlalchemy/orm/path_registry.py @@ -35,7 +35,7 @@ if TYPE_CHECKING: from ._typing import _InternalEntityType - from .interfaces import MapperProperty + from .interfaces import StrategizedProperty from .mapper import Mapper from .relationships import RelationshipProperty from .util import AliasedInsp @@ -57,13 +57,13 @@ def is_entity(path: PathRegistry) -> TypeGuard[AbstractEntityRegistry]: ... _SerializedPath = List[Any] _StrPathToken = str _PathElementType = Union[ - _StrPathToken, "_InternalEntityType[Any]", "MapperProperty[Any]" + _StrPathToken, "_InternalEntityType[Any]", "StrategizedProperty[Any]" ] # the representation is in fact # a tuple with alternating: -# [_InternalEntityType[Any], Union[str, MapperProperty[Any]], -# _InternalEntityType[Any], Union[str, MapperProperty[Any]], ...] +# [_InternalEntityType[Any], Union[str, StrategizedProperty[Any]], +# _InternalEntityType[Any], Union[str, StrategizedProperty[Any]], ...] # this might someday be a tuple of 2-tuples instead, but paths can be # chopped at odd intervals as well so this is less flexible _PathRepresentation = Tuple[_PathElementType, ...] @@ -71,7 +71,7 @@ def is_entity(path: PathRegistry) -> TypeGuard[AbstractEntityRegistry]: ... # NOTE: these names are weird since the array is 0-indexed, # the "_Odd" entries are at 0, 2, 4, etc _OddPathRepresentation = Sequence["_InternalEntityType[Any]"] -_EvenPathRepresentation = Sequence[Union["MapperProperty[Any]", str]] +_EvenPathRepresentation = Sequence[Union["StrategizedProperty[Any]", str]] log = logging.getLogger(__name__) @@ -197,7 +197,9 @@ def __getitem__( ) -> AbstractEntityRegistry: ... @overload - def __getitem__(self, entity: MapperProperty[Any]) -> PropRegistry: ... + def __getitem__( + self, entity: StrategizedProperty[Any] + ) -> PropRegistry: ... def __getitem__( self, @@ -206,7 +208,7 @@ def __getitem__( int, slice, _InternalEntityType[Any], - MapperProperty[Any], + StrategizedProperty[Any], ], ) -> Union[ TokenRegistry, @@ -225,7 +227,7 @@ def length(self) -> int: def pairs( self, ) -> Iterator[ - Tuple[_InternalEntityType[Any], Union[str, MapperProperty[Any]]] + Tuple[_InternalEntityType[Any], Union[str, StrategizedProperty[Any]]] ]: odd_path = cast(_OddPathRepresentation, self.path) even_path = cast(_EvenPathRepresentation, odd_path) @@ -531,15 +533,16 @@ class PropRegistry(PathRegistry): inherit_cache = True is_property = True - prop: MapperProperty[Any] + prop: StrategizedProperty[Any] mapper: Optional[Mapper[Any]] entity: Optional[_InternalEntityType[Any]] def __init__( - self, parent: AbstractEntityRegistry, prop: MapperProperty[Any] + self, parent: AbstractEntityRegistry, prop: StrategizedProperty[Any] ): + # restate this path in terms of the - # given MapperProperty's parent. + # given StrategizedProperty's parent. 
insp = cast("_InternalEntityType[Any]", parent[-1]) natural_parent: AbstractEntityRegistry = parent diff --git a/test/orm/inheritance/test_poly_loading.py b/test/orm/inheritance/test_poly_loading.py index a768c32754a..58cf7b54271 100644 --- a/test/orm/inheritance/test_poly_loading.py +++ b/test/orm/inheritance/test_poly_loading.py @@ -1470,18 +1470,10 @@ def test_wp(self, mapping_fixture, connection): class CompositeAttributesTest(fixtures.TestBase): - @testing.fixture - def mapping_fixture(self, registry, connection): - Base = registry.generate_base() - class BaseCls(Base): - __tablename__ = "base" - id = Column( - Integer, primary_key=True, test_needs_autoincrement=True - ) - type = Column(String(50)) - - __mapper_args__ = {"polymorphic_on": type} + @testing.fixture(params=("base", "sub")) + def mapping_fixture(self, request, registry, connection): + Base = registry.generate_base() class XYThing: def __init__(self, x, y): @@ -1501,13 +1493,28 @@ def __eq__(self, other): def __ne__(self, other): return not self.__eq__(other) + class BaseCls(Base): + __tablename__ = "base" + id = Column( + Integer, primary_key=True, test_needs_autoincrement=True + ) + type = Column(String(50)) + + if request.param == "base": + comp1 = composite( + XYThing, Column("x1", Integer), Column("y1", Integer) + ) + + __mapper_args__ = {"polymorphic_on": type} + class A(ComparableEntity, BaseCls): __tablename__ = "a" id = Column(ForeignKey(BaseCls.id), primary_key=True) thing1 = Column(String(50)) - comp1 = composite( - XYThing, Column("x1", Integer), Column("y1", Integer) - ) + if request.param == "sub": + comp1 = composite( + XYThing, Column("x1", Integer), Column("y1", Integer) + ) __mapper_args__ = { "polymorphic_identity": "a", From 368d88d1ac29db7b5d3933e37d43aebc06ad633b Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sun, 28 Apr 2024 12:01:05 +0200 Subject: [PATCH 194/544] Fix issue in bulk_save_objects Fixes issue in :meth:`_orm.Session.bulk_save_objects()` where it would write a wrong identity key when using ``return_defaults=True``. The wrong identity key could lead to an index error when entities are then pickled. Fixes: #11332 Change-Id: I8d095392ad03e8d3408e477476cd5de8a5bca2c0 (cherry picked from commit ade4bdfb0406fadff566aa8d39abe6aa29af521f) --- doc/build/changelog/unreleased_20/11332.rst | 7 ++++ lib/sqlalchemy/orm/bulk_persistence.py | 7 ++++ lib/sqlalchemy/orm/session.py | 39 ++++++++++++--------- test/orm/dml/test_bulk.py | 12 +++++++ test/orm/test_pickled.py | 11 ++++++ 5 files changed, 60 insertions(+), 16 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11332.rst diff --git a/doc/build/changelog/unreleased_20/11332.rst b/doc/build/changelog/unreleased_20/11332.rst new file mode 100644 index 00000000000..c8f748654c6 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11332.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, orm + :tickets: 11332 + + Fixes issue in :meth:`_orm.Session.bulk_save_objects` where it would write a + wrong identity key when using ``return_defaults=True``. + The wrong identity key could lead to an index error when entities are then pickled. 
diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py index 5d2558d9530..2ed6a4beaac 100644 --- a/lib/sqlalchemy/orm/bulk_persistence.py +++ b/lib/sqlalchemy/orm/bulk_persistence.py @@ -76,6 +76,7 @@ def _bulk_insert( mapper: Mapper[_O], mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], session_transaction: SessionTransaction, + *, isstates: bool, return_defaults: bool, render_nulls: bool, @@ -89,6 +90,7 @@ def _bulk_insert( mapper: Mapper[_O], mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], session_transaction: SessionTransaction, + *, isstates: bool, return_defaults: bool, render_nulls: bool, @@ -101,6 +103,7 @@ def _bulk_insert( mapper: Mapper[_O], mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], session_transaction: SessionTransaction, + *, isstates: bool, return_defaults: bool, render_nulls: bool, @@ -218,6 +221,7 @@ def _bulk_insert( state.key = ( identity_cls, tuple([dict_[key] for key in identity_props]), + None, ) if use_orm_insert_stmt is not None: @@ -230,6 +234,7 @@ def _bulk_update( mapper: Mapper[Any], mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], session_transaction: SessionTransaction, + *, isstates: bool, update_changed_only: bool, use_orm_update_stmt: Literal[None] = ..., @@ -242,6 +247,7 @@ def _bulk_update( mapper: Mapper[Any], mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], session_transaction: SessionTransaction, + *, isstates: bool, update_changed_only: bool, use_orm_update_stmt: Optional[dml.Update] = ..., @@ -253,6 +259,7 @@ def _bulk_update( mapper: Mapper[Any], mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], session_transaction: SessionTransaction, + *, isstates: bool, update_changed_only: bool, use_orm_update_stmt: Optional[dml.Update] = None, diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index acc6895e86f..a4bf7c1cecf 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -4574,11 +4574,11 @@ def grouping_key( self._bulk_save_mappings( mapper, states, - isupdate, - True, - return_defaults, - update_changed_only, - False, + isupdate=isupdate, + isstates=True, + return_defaults=return_defaults, + update_changed_only=update_changed_only, + render_nulls=False, ) def bulk_insert_mappings( @@ -4657,11 +4657,11 @@ def bulk_insert_mappings( self._bulk_save_mappings( mapper, mappings, - False, - False, - return_defaults, - False, - render_nulls, + isupdate=False, + isstates=False, + return_defaults=return_defaults, + update_changed_only=False, + render_nulls=render_nulls, ) def bulk_update_mappings( @@ -4703,13 +4703,20 @@ def bulk_update_mappings( """ self._bulk_save_mappings( - mapper, mappings, True, False, False, False, False + mapper, + mappings, + isupdate=True, + isstates=False, + return_defaults=False, + update_changed_only=False, + render_nulls=False, ) def _bulk_save_mappings( self, mapper: Mapper[_O], mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]], + *, isupdate: bool, isstates: bool, return_defaults: bool, @@ -4726,17 +4733,17 @@ def _bulk_save_mappings( mapper, mappings, transaction, - isstates, - update_changed_only, + isstates=isstates, + update_changed_only=update_changed_only, ) else: bulk_persistence._bulk_insert( mapper, mappings, transaction, - isstates, - return_defaults, - render_nulls, + isstates=isstates, + return_defaults=return_defaults, + render_nulls=render_nulls, ) 
transaction.commit() diff --git a/test/orm/dml/test_bulk.py b/test/orm/dml/test_bulk.py index 62b435e9cbf..4d24a52eceb 100644 --- a/test/orm/dml/test_bulk.py +++ b/test/orm/dml/test_bulk.py @@ -2,6 +2,7 @@ from sqlalchemy import ForeignKey from sqlalchemy import Identity from sqlalchemy import insert +from sqlalchemy import inspect from sqlalchemy import Integer from sqlalchemy import String from sqlalchemy import testing @@ -147,6 +148,17 @@ def test_bulk_save_return_defaults(self, statement_type): if statement_type == "save_objects": eq_(objects[0].__dict__["id"], 1) + def test_bulk_save_objects_defaults_key(self): + User = self.classes.User + + pes = [User(name=f"foo{i}") for i in range(3)] + s = fixture_session() + s.bulk_save_objects(pes, return_defaults=True) + key = inspect(pes[0]).key + + s.commit() + eq_(inspect(s.get(User, 1)).key, key) + def test_bulk_save_mappings_preserve_order(self): (User,) = self.classes("User") diff --git a/test/orm/test_pickled.py b/test/orm/test_pickled.py index 96dec4a60b7..18904cc3861 100644 --- a/test/orm/test_pickled.py +++ b/test/orm/test_pickled.py @@ -654,6 +654,17 @@ def test_composite_column_mapped_collection(self): ) is_not_none(collections.collection_adapter(repickled.addresses)) + def test_bulk_save_objects_defaults_pickle(self): + "Test for #11332" + users = self.tables.users + + self.mapper_registry.map_imperatively(User, users) + pes = [User(name=f"foo{i}") for i in range(3)] + s = fixture_session() + s.bulk_save_objects(pes, return_defaults=True) + state = pickle.dumps(pes) + pickle.loads(state) + class OptionsTest(_Polymorphic): def test_options_of_type(self): From ee4120570899cad4896fc41893791156e729e4b3 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 30 Apr 2024 09:28:07 -0400 Subject: [PATCH 195/544] set unique=False on indexes References: https://github.com/sqlalchemy/sqlalchemy/discussions/11339 Change-Id: Ia4adc2d5911926fdd1896cc561d511bdd647732d (cherry picked from commit f00f34437d37f4776b323317432167ad5fe8413b) --- examples/versioned_history/history_meta.py | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/versioned_history/history_meta.py b/examples/versioned_history/history_meta.py index 3f26832b9ed..e4c102c0ad0 100644 --- a/examples/versioned_history/history_meta.py +++ b/examples/versioned_history/history_meta.py @@ -59,6 +59,7 @@ def _history_mapper(local_mapper): for idx in history_table.indexes: if idx.name is not None: idx.name += "_history" + idx.unique = False for orig_c, history_c in zip( local_mapper.local_table.c, history_table.c From 53dc9653fbdf5b68f540db7fdea875a6b8e07b41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edgar=20Ram=C3=ADrez-Mondrag=C3=B3n?= Date: Mon, 29 Apr 2024 21:53:07 -0400 Subject: [PATCH 196/544] Ignore all dunders when checking attributes in `sqlalchemy.util.langhelpers.TypingOnly` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixed an internal class that was testing for unexpected attributes to work correctly under upcoming Python 3.13. Pull request courtesy Edgar Ramírez-Mondragón. 
Fixes: #11334 Closes: #11335 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11335 Pull-request-sha: babd703e6e34b93722a54c3adf13aa792d3a03b3 Change-Id: Ia2e7392c9403e25266c7d30b987b577f49d008c0 (cherry picked from commit eb118e23a29a29469edb4c1927250f4b726de68e) --- doc/build/changelog/unreleased_20/11334.rst | 7 +++++++ lib/sqlalchemy/util/langhelpers.py | 15 ++++++--------- 2 files changed, 13 insertions(+), 9 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11334.rst diff --git a/doc/build/changelog/unreleased_20/11334.rst b/doc/build/changelog/unreleased_20/11334.rst new file mode 100644 index 00000000000..48f590c4ac4 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11334.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, installation + :tickets: 11334 + + Fixed an internal class that was testing for unexpected attributes to work + correctly under upcoming Python 3.13. Pull request courtesy Edgar + Ramírez-Mondragón. diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 00d5d0f2b38..5f4485a8f72 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -1958,6 +1958,9 @@ def attrsetter(attrname): return env["set"] +_dunders = re.compile("^__.+__$") + + class TypingOnly: """A mixin class that marks a class as 'typing only', meaning it has absolutely no methods, attributes, or runtime functionality whatsoever. @@ -1968,15 +1971,9 @@ class TypingOnly: def __init_subclass__(cls) -> None: if TypingOnly in cls.__bases__: - remaining = set(cls.__dict__).difference( - { - "__module__", - "__doc__", - "__slots__", - "__orig_bases__", - "__annotations__", - } - ) + remaining = { + name for name in cls.__dict__ if not _dunders.match(name) + } if remaining: raise AssertionError( f"Class {cls} directly inherits TypingOnly but has " From f505795a931f3aba1709cb0b731d6fbd74007b38 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 30 Apr 2024 15:41:04 -0400 Subject: [PATCH 197/544] ensure result_map objects collection is non-empty Fixed issue in cursor handling which affected handling of duplicate :class:`_sql.Column` or similar objcts in the columns clause of :func:`_sql.select`, both in combination with arbitary :func:`_sql.text()` clauses in the SELECT list, as well as when attempting to retrieve :meth:`_engine.Result.mappings` for the object, which would lead to an internal error. Fixes: #11306 Change-Id: I418073b2fdba86b2121b6d00eaa40b1805b69bb8 (cherry picked from commit fbb7172c69402d5f0776edc96d1c23a7cfabd3d0) --- doc/build/changelog/unreleased_20/11306.rst | 12 +++++ lib/sqlalchemy/engine/cursor.py | 1 + lib/sqlalchemy/sql/compiler.py | 9 +++- test/sql/test_resultset.py | 54 +++++++++++++++++++++ 4 files changed, 74 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11306.rst diff --git a/doc/build/changelog/unreleased_20/11306.rst b/doc/build/changelog/unreleased_20/11306.rst new file mode 100644 index 00000000000..c5d4ebfb70c --- /dev/null +++ b/doc/build/changelog/unreleased_20/11306.rst @@ -0,0 +1,12 @@ +.. change:: + :tags: bug, engine + :tickets: 11306 + + Fixed issue in cursor handling which affected handling of duplicate + :class:`_sql.Column` or similar objcts in the columns clause of + :func:`_sql.select`, both in combination with arbitary :func:`_sql.text()` + clauses in the SELECT list, as well as when attempting to retrieve + :meth:`_engine.Result.mappings` for the object, which would lead to an + internal error. 
+ + diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index a885aca8e3b..b83cb451543 100644 --- a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ -688,6 +688,7 @@ def _merge_textual_cols_by_position( % (num_ctx_cols, len(cursor_description)) ) seen = set() + for ( idx, colname, diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 9aa06be25a3..6d6d8278af6 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -2936,7 +2936,7 @@ def visit_function( **kwargs: Any, ) -> str: if add_to_result_map is not None: - add_to_result_map(func.name, func.name, (), func.type) + add_to_result_map(func.name, func.name, (func.name,), func.type) disp = getattr(self, "visit_%s_func" % func.name.lower(), None) @@ -4386,6 +4386,11 @@ def _add_to_result_map( objects: Tuple[Any, ...], type_: TypeEngine[Any], ) -> None: + + # note objects must be non-empty for cursor.py to handle the + # collection properly + assert objects + if keyname is None or keyname == "*": self._ordered_columns = False self._ad_hoc_textual = True @@ -4459,7 +4464,7 @@ def _label_select_column( _add_to_result_map = add_to_result_map def add_to_result_map(keyname, name, objects, type_): - _add_to_result_map(keyname, name, (), type_) + _add_to_result_map(keyname, name, (keyname,), type_) # if we redefined col_expr for type expressions, wrap the # callable with one that adds the original column to the targets diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py index 2fd16d46db0..93c5c892969 100644 --- a/test/sql/test_resultset.py +++ b/test/sql/test_resultset.py @@ -2573,6 +2573,60 @@ def test_keyed_accessor_column_is_repeated_multiple_times( eq_(row[6], "d3") eq_(row[7], "d3") + @testing.requires.duplicate_names_in_cursor_description + @testing.combinations((None,), (0,), (1,), (2,), argnames="pos") + @testing.variation("texttype", ["literal", "text"]) + def test_dupe_col_targeting(self, connection, pos, texttype): + """test #11306""" + + keyed2 = self.tables.keyed2 + col = keyed2.c.b + data_value = "b2" + + cols = [col, col, col] + expected = [data_value, data_value, data_value] + + if pos is not None: + if texttype.literal: + cols[pos] = literal_column("10") + elif texttype.text: + cols[pos] = text("10") + else: + texttype.fail() + + expected[pos] = 10 + + stmt = select(*cols) + + result = connection.execute(stmt) + + if texttype.text and pos is not None: + # when using text(), the name of the col is taken from + # cursor.description directly since we don't know what's + # inside a text() + key_for_text_col = result.cursor.description[pos][0] + elif texttype.literal and pos is not None: + # for literal_column(), we use the text + key_for_text_col = "10" + + eq_(result.all(), [tuple(expected)]) + + result = connection.execute(stmt).mappings() + if pos is None: + eq_(set(result.keys()), {"b", "b__1", "b__2"}) + eq_( + result.all(), + [{"b": data_value, "b__1": data_value, "b__2": data_value}], + ) + + else: + eq_(set(result.keys()), {"b", "b__1", key_for_text_col}) + + eq_( + result.all(), + [{"b": data_value, "b__1": data_value, key_for_text_col: 10}], + ) + def test_columnclause_schema_column_one(self, connection): # originally addressed by [ticket:2932], however liberalized # Column-targeting rules are deprecated From 3ade4dff677a3da100285c183d027c54d05466f9 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 2 May 2024 11:45:31 -0400 Subject: [PATCH 198/544] disable col deduping inside of Bundle Fixed issue 
where attribute key names in :class:`_orm.Bundle` would not be correct when using ORM enabled :class:`_sql.select` vs. :class:`_orm.Query`, when the statement contained duplicate column names. Fixed issue in typing for :class:`_orm.Bundle` where creating a nested :class:`_orm.Bundle` structure were not allowed. Fixes: #11347 Change-Id: I24b37c99f83068c668736caaaa06e69a6801ff50 (cherry picked from commit 7d6d7ef73a680d1502ac675b9ae53a6c335b723e) --- doc/build/changelog/unreleased_20/11347.rst | 13 +++++ lib/sqlalchemy/orm/context.py | 6 ++- lib/sqlalchemy/sql/_typing.py | 1 + lib/sqlalchemy/sql/selectable.py | 4 +- test/orm/test_bundle.py | 59 +++++++++++++++++++++ test/typing/plain_files/orm/orm_querying.py | 5 ++ 6 files changed, 85 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11347.rst diff --git a/doc/build/changelog/unreleased_20/11347.rst b/doc/build/changelog/unreleased_20/11347.rst new file mode 100644 index 00000000000..a0f9652065e --- /dev/null +++ b/doc/build/changelog/unreleased_20/11347.rst @@ -0,0 +1,13 @@ +.. change:: + :tags: bug, orm + :tickets: 11347 + + Fixed issue where attribute key names in :class:`_orm.Bundle` would not be + correct when using ORM enabled :class:`_sql.select` vs. + :class:`_orm.Query`, when the statement contained duplicate column names. + +.. change:: + :tags: bug, typing + + Fixed issue in typing for :class:`_orm.Bundle` where creating a nested + :class:`_orm.Bundle` structure were not allowed. diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index fcd01e65916..694e98ae953 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -446,7 +446,7 @@ def _column_naming_convention( ) -> _LabelConventionCallable: if legacy: - def name(col, col_name=None): + def name(col, col_name=None, cancel_dedupe=False): if col_name: return col_name else: @@ -3145,7 +3145,9 @@ def __init__( if is_current_entities: self._label_name = compile_state._label_convention( - column, col_name=orm_key + column, + col_name=orm_key, + cancel_dedupe=parent_bundle is not None, ) else: self._label_name = None diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index c861bae6e0f..f3bbf4cf29d 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -185,6 +185,7 @@ def dialect(self) -> Dialect: ... 
_HasClauseElement[_T], "SQLCoreOperations[_T]", roles.ExpressionElementRole[_T], + roles.TypedColumnsClauseRole[_T], Callable[[], "ColumnElement[_T]"], "LambdaElement", ] diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index aa823c16b1e..f33e0a41fb7 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -4547,6 +4547,7 @@ def _column_naming_convention( cls, label_style: SelectLabelStyle ) -> _LabelConventionCallable: table_qualified = label_style is LABEL_STYLE_TABLENAME_PLUS_COL + dedupe = label_style is not LABEL_STYLE_NONE pa = prefix_anon_map() @@ -4555,13 +4556,14 @@ def _column_naming_convention( def go( c: Union[ColumnElement[Any], TextClause], col_name: Optional[str] = None, + cancel_dedupe: bool = False, ) -> Optional[str]: if is_text_clause(c): return None elif TYPE_CHECKING: assert is_column_element(c) - if not dedupe: + if not dedupe or cancel_dedupe: name = c._proxy_key if name is None: name = "_no_label" diff --git a/test/orm/test_bundle.py b/test/orm/test_bundle.py index 6d613091def..81e789d1cfe 100644 --- a/test/orm/test_bundle.py +++ b/test/orm/test_bundle.py @@ -159,6 +159,65 @@ def test_c_attr(self): select(b1.c.d1, b1.c.d2), "SELECT data.d1, data.d2 FROM data" ) + @testing.variation("stmt_type", ["legacy", "newstyle"]) + def test_dupe_col_name(self, stmt_type): + """test #11347""" + Data = self.classes.Data + sess = fixture_session() + + b1 = Bundle("b1", Data.d1, Data.d3) + + if stmt_type.legacy: + row = ( + sess.query(Data.d1, Data.d2, b1) + .filter(Data.d1 == "d0d1") + .one() + ) + elif stmt_type.newstyle: + row = sess.execute( + select(Data.d1, Data.d2, b1).filter(Data.d1 == "d0d1") + ).one() + + eq_(row[2]._mapping, {"d1": "d0d1", "d3": "d0d3"}) + + @testing.variation("stmt_type", ["legacy", "newstyle"]) + def test_dupe_col_name_nested(self, stmt_type): + """test #11347""" + Data = self.classes.Data + sess = fixture_session() + + class DictBundle(Bundle): + def create_row_processor(self, query, procs, labels): + def proc(row): + return dict(zip(labels, (proc(row) for proc in procs))) + + return proc + + b1 = DictBundle("b1", Data.d1, Data.d3) + b2 = DictBundle("b2", Data.d2, Data.d3) + b3 = DictBundle("b3", Data.d2, Data.d3, b1, b2) + + if stmt_type.legacy: + row = ( + sess.query(Data.d1, Data.d2, b3) + .filter(Data.d1 == "d0d1") + .one() + ) + elif stmt_type.newstyle: + row = sess.execute( + select(Data.d1, Data.d2, b3).filter(Data.d1 == "d0d1") + ).one() + + eq_( + row[2], + { + "d2": "d0d2", + "d3": "d0d3", + "b1": {"d1": "d0d1", "d3": "d0d3"}, + "b2": {"d2": "d0d2", "d3": "d0d3"}, + }, + ) + def test_result(self): Data = self.classes.Data sess = fixture_session() diff --git a/test/typing/plain_files/orm/orm_querying.py b/test/typing/plain_files/orm/orm_querying.py index 83e0fefabbc..8f18e2fcc18 100644 --- a/test/typing/plain_files/orm/orm_querying.py +++ b/test/typing/plain_files/orm/orm_querying.py @@ -144,3 +144,8 @@ def test_10937() -> None: stmt3: ScalarSelect[str] = select(A.data + B.data).scalar_subquery() select(stmt, stmt2, stmt3, stmt1) + + +def test_bundles() -> None: + b1 = orm.Bundle("b1", A.id, A.data) + orm.Bundle("b2", A.id, A.data, b1) From 11b977fdfb1f137eeb3283bc2166cf02af565d23 Mon Sep 17 00:00:00 2001 From: Alc-Alc Date: Thu, 25 Apr 2024 15:42:34 -0400 Subject: [PATCH 199/544] improve pep-695 inference including Enum support Fixed issue in ORM Annotated Declarative where typing issue where literals defined using :pep:`695` type aliases would not work with inference of 
:class:`.Enum` datatypes. Pull request courtesy of Alc-Alc. Fixes: #11305 Closes: #11313 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11313 Pull-request-sha: 090f0d865c4129cffffbce6a6ce3db9b91602460 Change-Id: Iac63302ad74fd7018a34a50c80ec3aeb87dc94a4 (cherry picked from commit ce26cfa5d5253345a5f962359e5c742ea039c211) --- doc/build/changelog/unreleased_20/11305.rst | 7 ++++ lib/sqlalchemy/orm/decl_api.py | 36 +++++++++++-------- .../test_tm_future_annotations_sync.py | 35 +++++++++++++++--- test/orm/declarative/test_typed_mapping.py | 35 +++++++++++++++--- 4 files changed, 91 insertions(+), 22 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11305.rst diff --git a/doc/build/changelog/unreleased_20/11305.rst b/doc/build/changelog/unreleased_20/11305.rst new file mode 100644 index 00000000000..0a022c00de4 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11305.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, orm + :tickets: 11305 + + Fixed issue in ORM Annotated Declarative where typing issue where literals + defined using :pep:`695` type aliases would not work with inference of + :class:`.Enum` datatypes. Pull request courtesy of Alc-Alc. diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index 09128ea8fb4..91f9539befc 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -1232,31 +1232,39 @@ def update_type_annotation_map( def _resolve_type( self, python_type: _MatchedOnType ) -> Optional[sqltypes.TypeEngine[Any]]: - search: Iterable[Tuple[_MatchedOnType, Type[Any]]] + + python_type_to_check = python_type + while is_pep695(python_type_to_check): + python_type_to_check = python_type_to_check.__value__ + + check_is_pt = python_type is python_type_to_check + python_type_type: Type[Any] + search: Iterable[Tuple[_MatchedOnType, Type[Any]]] - if is_generic(python_type): - if is_literal(python_type): - python_type_type = cast("Type[Any]", python_type) + if is_generic(python_type_to_check): + if is_literal(python_type_to_check): + python_type_type = cast("Type[Any]", python_type_to_check) search = ( # type: ignore[assignment] (python_type, python_type_type), (Literal, python_type_type), ) else: - python_type_type = python_type.__origin__ + python_type_type = python_type_to_check.__origin__ search = ((python_type, python_type_type),) - elif is_newtype(python_type): - python_type_type = flatten_newtype(python_type) - search = ((python_type, python_type_type),) - elif is_pep695(python_type): - python_type_type = python_type.__value__ - flattened = None + elif is_newtype(python_type_to_check): + python_type_type = flatten_newtype(python_type_to_check) search = ((python_type, python_type_type),) + elif isinstance(python_type_to_check, type): + python_type_type = python_type_to_check + search = ( + (pt if check_is_pt else python_type, pt) + for pt in python_type_type.__mro__ + ) else: - python_type_type = cast("Type[Any]", python_type) - flattened = None - search = ((pt, pt) for pt in python_type_type.__mro__) + python_type_type = python_type_to_check # type: ignore[assignment] + search = ((python_type, python_type_type),) for pt, flattened in search: # we search through full __mro__ for types. however... 
diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index 1a045ec1bfb..2bdf340d4c0 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -111,8 +111,13 @@ class _SomeDict2(TypedDict): _StrTypeAlias: TypeAlias = str -_StrPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] -_UnionPep695: TypeAlias = str +_StrPep695: TypeAlias = str +_UnionPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] + +_Literal695: TypeAlias = Literal["to-do", "in-progress", "done"] +_Recursive695_0: TypeAlias = _Literal695 +_Recursive695_1: TypeAlias = _Recursive695_0 +_Recursive695_2: TypeAlias = _Recursive695_1 if compat.py312: exec( @@ -126,6 +131,11 @@ class _SomeDict2(TypedDict): str, mapped_column(info={"hi": "there"})] strtypalias_plain = Annotated[str, mapped_column(info={"hi": "there"})] + +type _Literal695 = Literal["to-do", "in-progress", "done"] +type _Recursive695_0 = _Literal695 +type _Recursive695_1 = _Recursive695_0 +type _Recursive695_2 = _Recursive695_1 """, globals(), ) @@ -838,9 +848,10 @@ def test_pep695_typealias_as_typemap_keys( class Test(decl_base): __tablename__ = "test" id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[_StrPep695] # type: ignore - structure: Mapped[_UnionPep695] # type: ignore + data: Mapped[_StrPep695] + structure: Mapped[_UnionPep695] + eq_(Test.__table__.c.data.type._type_affinity, String) eq_(Test.__table__.c.data.type.length, 30) is_(Test.__table__.c.structure.type._type_affinity, JSON) @@ -870,6 +881,22 @@ class MyClass(decl_base): eq_(MyClass.data_one.expression.info, {"hi": "there"}) + @testing.requires.python312 + def test_pep695_literal_defaults_to_enum(self, decl_base): + """test #11305.""" + + class Foo(decl_base): + __tablename__ = "footable" + + id: Mapped[int] = mapped_column(primary_key=True) + status: Mapped[_Literal695] + r2: Mapped[_Recursive695_2] + + for col in (Foo.__table__.c.status, Foo.__table__.c.r2): + is_true(isinstance(col.type, Enum)) + eq_(col.type.enums, ["to-do", "in-progress", "done"]) + is_(col.type.native_enum, False) + @testing.requires.python310 def test_we_got_all_attrs_test_annotated(self): argnames = _py_inspect.getfullargspec(mapped_column) diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 175da290239..6fb792b0ba0 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -102,8 +102,13 @@ class _SomeDict2(TypedDict): _StrTypeAlias: TypeAlias = str -_StrPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] -_UnionPep695: TypeAlias = str +_StrPep695: TypeAlias = str +_UnionPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] + +_Literal695: TypeAlias = Literal["to-do", "in-progress", "done"] +_Recursive695_0: TypeAlias = _Literal695 +_Recursive695_1: TypeAlias = _Recursive695_0 +_Recursive695_2: TypeAlias = _Recursive695_1 if compat.py312: exec( @@ -117,6 +122,11 @@ class _SomeDict2(TypedDict): str, mapped_column(info={"hi": "there"})] strtypalias_plain = Annotated[str, mapped_column(info={"hi": "there"})] + +type _Literal695 = Literal["to-do", "in-progress", "done"] +type _Recursive695_0 = _Literal695 +type _Recursive695_1 = _Recursive695_0 +type _Recursive695_2 = _Recursive695_1 """, globals(), ) @@ -829,9 +839,10 @@ def test_pep695_typealias_as_typemap_keys( class Test(decl_base): __tablename__ = "test" id: Mapped[int] = mapped_column(primary_key=True) - data: 
Mapped[_StrPep695] # type: ignore - structure: Mapped[_UnionPep695] # type: ignore + data: Mapped[_StrPep695] + structure: Mapped[_UnionPep695] + eq_(Test.__table__.c.data.type._type_affinity, String) eq_(Test.__table__.c.data.type.length, 30) is_(Test.__table__.c.structure.type._type_affinity, JSON) @@ -861,6 +872,22 @@ class MyClass(decl_base): eq_(MyClass.data_one.expression.info, {"hi": "there"}) + @testing.requires.python312 + def test_pep695_literal_defaults_to_enum(self, decl_base): + """test #11305.""" + + class Foo(decl_base): + __tablename__ = "footable" + + id: Mapped[int] = mapped_column(primary_key=True) + status: Mapped[_Literal695] + r2: Mapped[_Recursive695_2] + + for col in (Foo.__table__.c.status, Foo.__table__.c.r2): + is_true(isinstance(col.type, Enum)) + eq_(col.type.enums, ["to-do", "in-progress", "done"]) + is_(col.type.native_enum, False) + @testing.requires.python310 def test_we_got_all_attrs_test_annotated(self): argnames = _py_inspect.getfullargspec(mapped_column) From e793dca27eef6f68a45e1c634b9437be1f79e8c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sil=C3=A9n?= Date: Sat, 4 May 2024 11:06:32 +0300 Subject: [PATCH 200/544] Update index.rst to include MariaDB (#11337) Changing title of link to "dialects/mysql" to read "MySQL and MariaDB" to match the actual title of the page. (before link says just MySQL) (cherry picked from commit 000f1832700fc28cae5fe9f3d7356835095052bb) --- doc/build/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/index.rst b/doc/build/index.rst index 4ca88fa59c5..8214b867311 100644 --- a/doc/build/index.rst +++ b/doc/build/index.rst @@ -149,7 +149,7 @@ SQLAlchemy Documentation This section describes notes, options, and usage patterns regarding individual dialects. :doc:`PostgreSQL ` | - :doc:`MySQL ` | + :doc:`MySQL and MariaDB ` | :doc:`SQLite ` | :doc:`Oracle ` | :doc:`Microsoft SQL Server ` From 7a3dee5b2dc7d0b4e84d6d887817ba78b71034e6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 4 May 2024 10:09:30 +0200 Subject: [PATCH 201/544] Bump pypa/cibuildwheel from 2.16.5 to 2.17.0 (#11148) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.5 to 2.17.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.16.5...v2.17.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cherry picked from commit 8e1e980b50b0be71f641bca7d81d32fef6565612) --- .github/workflows/create-wheels.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index 2ca2b981d81..ae520cbbf35 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -74,7 +74,7 @@ jobs: - name: Build compiled wheels if: ${{ matrix.wheel_mode == 'compiled' }} - uses: pypa/cibuildwheel@v2.16.5 + uses: pypa/cibuildwheel@v2.17.0 env: CIBW_ARCHS_LINUX: ${{ matrix.linux_archs }} CIBW_BUILD: ${{ matrix.python }} From e2d4385eb7b03dab2f372b73c9f9a7522b798daa Mon Sep 17 00:00:00 2001 From: Mark Elliot <123787712+mark-thm@users.noreply.github.com> Date: Mon, 29 Apr 2024 17:50:10 -0400 Subject: [PATCH 202/544] Add overload for ColumnCollection.get(col, default) ### Description Fixes #11328 by adding an overload to ColumnCollection when a non-None default is provided. ### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [x] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. 
Closes: #11329 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11329 Pull-request-sha: 32db849e0df1db357df79df3a0dc2263a755d04e Change-Id: I8bef91c423fb7048ec8d4a7c99f70f0b1588c37a (cherry picked from commit ab6df37dad5cccbd0328e83ed55c7cfed91344cb) --- lib/sqlalchemy/orm/properties.py | 2 +- lib/sqlalchemy/sql/base.py | 30 ++++++++++-------- test/sql/test_utils.py | 10 ++++++ test/typing/plain_files/sql/misc.py | 37 ++++++++++++++++++++++ test/typing/plain_files/sql/selectables.py | 17 ---------- 5 files changed, 65 insertions(+), 31 deletions(-) create mode 100644 test/typing/plain_files/sql/misc.py delete mode 100644 test/typing/plain_files/sql/selectables.py diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index adee44a77e1..5c49222be15 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -689,7 +689,7 @@ def declarative_scan( supercls_mapper = class_mapper(decl_scan.inherits, False) colname = column.name if column.name is not None else key - column = self.column = supercls_mapper.local_table.c.get( # type: ignore # noqa: E501 + column = self.column = supercls_mapper.local_table.c.get( # type: ignore[assignment] # noqa: E501 colname, column ) diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 1a65b653ea2..8ad17e2c1a4 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -72,7 +72,6 @@ from .elements import ClauseList from .elements import ColumnClause # noqa from .elements import ColumnElement - from .elements import KeyedColumnElement from .elements import NamedColumn from .elements import SQLCoreOperations from .elements import TextClause @@ -1354,7 +1353,7 @@ class _SentinelColumnCharacterization(NamedTuple): _COLKEY = TypeVar("_COLKEY", Union[None, str], str) _COL_co = TypeVar("_COL_co", bound="ColumnElement[Any]", covariant=True) -_COL = TypeVar("_COL", bound="KeyedColumnElement[Any]") +_COL = TypeVar("_COL", bound="ColumnElement[Any]") class _ColumnMetrics(Generic[_COL_co]): @@ -1642,9 +1641,15 @@ def compare(self, other: ColumnCollection[Any, Any]) -> bool: def __eq__(self, other: Any) -> bool: return self.compare(other) + @overload + def get(self, key: str, default: None = None) -> Optional[_COL_co]: ... + + @overload + def get(self, key: str, default: _COL) -> Union[_COL_co, _COL]: ... 
+ def get( - self, key: str, default: Optional[_COL_co] = None - ) -> Optional[_COL_co]: + self, key: str, default: Optional[_COL] = None + ) -> Optional[Union[_COL_co, _COL]]: """Get a :class:`_sql.ColumnClause` or :class:`_schema.Column` object based on a string key name from this :class:`_expression.ColumnCollection`.""" @@ -1925,16 +1930,15 @@ class DedupeColumnCollection(ColumnCollection[str, _NAMEDCOL]): """ - def add( - self, column: ColumnElement[Any], key: Optional[str] = None + def add( # type: ignore[override] + self, column: _NAMEDCOL, key: Optional[str] = None ) -> None: - named_column = cast(_NAMEDCOL, column) - if key is not None and named_column.key != key: + if key is not None and column.key != key: raise exc.ArgumentError( "DedupeColumnCollection requires columns be under " "the same key as their .key" ) - key = named_column.key + key = column.key if key is None: raise exc.ArgumentError( @@ -1944,17 +1948,17 @@ def add( if key in self._index: existing = self._index[key][1] - if existing is named_column: + if existing is column: return - self.replace(named_column) + self.replace(column) # pop out memoized proxy_set as this # operation may very well be occurring # in a _make_proxy operation - util.memoized_property.reset(named_column, "proxy_set") + util.memoized_property.reset(column, "proxy_set") else: - self._append_new_column(key, named_column) + self._append_new_column(key, column) def _append_new_column(self, key: str, named_column: _NAMEDCOL) -> None: l = len(self._collection) diff --git a/test/sql/test_utils.py b/test/sql/test_utils.py index 74cf1eb4f2e..b741d5d8c0b 100644 --- a/test/sql/test_utils.py +++ b/test/sql/test_utils.py @@ -14,6 +14,7 @@ from sqlalchemy.sql import column from sqlalchemy.sql import ColumnElement from sqlalchemy.sql import roles +from sqlalchemy.sql import table from sqlalchemy.sql import util as sql_util from sqlalchemy.testing import assert_raises from sqlalchemy.testing import assert_raises_message @@ -174,3 +175,12 @@ def test_unwrap_order_by(self, expr, expected): for a, b in zip_longest(unwrapped, expected): assert a is not None and a.compare(b) + + def test_column_collection_get(self): + col_id = column("id", Integer) + col_alt = column("alt", Integer) + table1 = table("mytable", col_id) + + is_(table1.columns.get("id"), col_id) + is_(table1.columns.get("alt"), None) + is_(table1.columns.get("alt", col_alt), col_alt) diff --git a/test/typing/plain_files/sql/misc.py b/test/typing/plain_files/sql/misc.py new file mode 100644 index 00000000000..d598af06ef0 --- /dev/null +++ b/test/typing/plain_files/sql/misc.py @@ -0,0 +1,37 @@ +from typing import Any + +from sqlalchemy import column +from sqlalchemy import ColumnElement +from sqlalchemy import Integer +from sqlalchemy import literal +from sqlalchemy import table + + +def test_col_accessors() -> None: + t = table("t", column("a"), column("b"), column("c")) + + t.c.a + t.c["a"] + + t.c[2] + t.c[0, 1] + t.c[0, 1, "b", "c"] + t.c[(0, 1, "b", "c")] + + t.c[:-1] + t.c[0:2] + + +def test_col_get() -> None: + col_id = column("id", Integer) + col_alt = column("alt", Integer) + tbl = table("mytable", col_id) + + # EXPECTED_TYPE: Union[ColumnClause[Any], None] + reveal_type(tbl.c.get("id")) + # EXPECTED_TYPE: Union[ColumnClause[Any], None] + reveal_type(tbl.c.get("id", None)) + # EXPECTED_TYPE: Union[ColumnClause[Any], ColumnClause[int]] + reveal_type(tbl.c.get("alt", col_alt)) + col: ColumnElement[Any] = tbl.c.get("foo", literal("bar")) + print(col) diff --git 
a/test/typing/plain_files/sql/selectables.py b/test/typing/plain_files/sql/selectables.py deleted file mode 100644 index 7d31124587f..00000000000 --- a/test/typing/plain_files/sql/selectables.py +++ /dev/null @@ -1,17 +0,0 @@ -from sqlalchemy import column -from sqlalchemy import table - - -def test_col_accessors() -> None: - t = table("t", column("a"), column("b"), column("c")) - - t.c.a - t.c["a"] - - t.c[2] - t.c[0, 1] - t.c[0, 1, "b", "c"] - t.c[(0, 1, "b", "c")] - - t.c[:-1] - t.c[0:2] From 24c73f98aee62e02d75dcd30652dc429f16c330e Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 4 May 2024 11:23:52 +0200 Subject: [PATCH 203/544] Updated typing for self_group() Fixes: #10939 Closes: #11037 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11037 Pull-request-sha: 3ebf4db506ffef629f938f4f36fc76d6671b98e1 Change-Id: I22218286b0dac7bafaaf6955557e25f99a6aefe1 (cherry picked from commit 7173b047788f8a4230647bfc252037c6e227c708) --- lib/sqlalchemy/sql/elements.py | 59 +++++++++++++++++------- lib/sqlalchemy/sql/selectable.py | 35 +++++++------- test/typing/plain_files/sql/operators.py | 5 ++ 3 files changed, 66 insertions(+), 33 deletions(-) diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 24f04fd7670..0d753182969 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -77,6 +77,7 @@ from ..util import HasMemoized_ro_memoized_attribute from ..util import TypingOnly from ..util.typing import Literal +from ..util.typing import ParamSpec from ..util.typing import Self if typing.TYPE_CHECKING: @@ -1429,13 +1430,11 @@ def _non_anon_label(self) -> Optional[str]: _alt_names: Sequence[str] = () @overload - def self_group( - self: ColumnElement[_T], against: Optional[OperatorType] = None - ) -> ColumnElement[_T]: ... + def self_group(self, against: None = None) -> ColumnElement[_T]: ... @overload def self_group( - self: ColumnElement[Any], against: Optional[OperatorType] = None + self, against: Optional[OperatorType] = None ) -> ColumnElement[Any]: ... def self_group( @@ -2581,7 +2580,9 @@ def comparator(self): # be using this method. 
return self.type.comparator_factory(self) # type: ignore - def self_group(self, against=None): + def self_group( + self, against: Optional[OperatorType] = None + ) -> Union[Self, Grouping[Any]]: if against is operators.in_op: return Grouping(self) else: @@ -2786,7 +2787,9 @@ def append(self, clause): def _from_objects(self) -> List[FromClause]: return list(itertools.chain(*[c._from_objects for c in self.clauses])) - def self_group(self, against=None): + def self_group( + self, against: Optional[OperatorType] = None + ) -> Union[Self, Grouping[Any]]: if self.group and operators.is_precedent(self.operator, against): return Grouping(self) else: @@ -2809,7 +2812,9 @@ class OperatorExpression(ColumnElement[_T]): def is_comparison(self): return operators.is_comparison(self.operator) - def self_group(self, against=None): + def self_group( + self, against: Optional[OperatorType] = None + ) -> Union[Self, Grouping[_T]]: if ( self.group and operators.is_precedent(self.operator, against) @@ -3169,7 +3174,9 @@ def or_( def _select_iterable(self) -> _SelectIterable: return (self,) - def self_group(self, against=None): + def self_group( + self, against: Optional[OperatorType] = None + ) -> Union[Self, Grouping[bool]]: if not self.clauses: return self else: @@ -3252,7 +3259,7 @@ def _bind_param(self, operator, obj, type_=None, expanding=False): ] ) - def self_group(self, against=None): + def self_group(self, against: Optional[OperatorType] = None) -> Self: # Tuple is parenthesized by definition. return self @@ -3485,7 +3492,9 @@ def typed_expression(self): def wrapped_column_expression(self): return self.clause - def self_group(self, against=None): + def self_group( + self, against: Optional[OperatorType] = None + ) -> TypeCoerce[_T]: grouped = self.clause.self_group(against=against) if grouped is not self.clause: return TypeCoerce(grouped, self.type) @@ -3700,7 +3709,9 @@ def _negate(self): else: return ClauseElement._negate(self) - def self_group(self, against=None): + def self_group( + self, against: Optional[OperatorType] = None + ) -> Union[Self, Grouping[_T]]: if self.operator and operators.is_precedent(self.operator, against): return Grouping(self) else: @@ -3787,7 +3798,7 @@ def __init__(self, element, operator, negate): def wrapped_column_expression(self): return self.element - def self_group(self, against=None): + def self_group(self, against: Optional[OperatorType] = None) -> Self: return self def _negate(self): @@ -3987,8 +3998,8 @@ def __init__(self, start, stop, step, _name=None): ) self.type = type_api.NULLTYPE - def self_group(self, against=None): - assert against is operator.getitem + def self_group(self, against: Optional[OperatorType] = None) -> Self: + assert against is operator.getitem # type: ignore[comparison-overlap] return self @@ -4006,7 +4017,7 @@ class GroupedElement(DQLDMLClauseElement): element: ClauseElement - def self_group(self, against=None): + def self_group(self, against: Optional[OperatorType] = None) -> Self: return self def _ungroup(self): @@ -4070,6 +4081,12 @@ def __setstate__(self, state): self.element = state["element"] self.type = state["type"] + if TYPE_CHECKING: + + def self_group( + self, against: Optional[OperatorType] = None + ) -> Self: ... + class _OverrideBinds(Grouping[_T]): """used by cache_key->_apply_params_to_element to allow compilation / @@ -4570,6 +4587,9 @@ def _make_proxy( return c.key, c +_PS = ParamSpec("_PS") + + class Label(roles.LabeledColumnExprRole[_T], NamedColumn[_T]): """Represents a column label (AS). 
@@ -4667,13 +4687,18 @@ def _order_by_label_element(self): def element(self) -> ColumnElement[_T]: return self._element.self_group(against=operators.as_) - def self_group(self, against=None): + def self_group(self, against: Optional[OperatorType] = None) -> Label[_T]: return self._apply_to_inner(self._element.self_group, against=against) def _negate(self): return self._apply_to_inner(self._element._negate) - def _apply_to_inner(self, fn, *arg, **kw): + def _apply_to_inner( + self, + fn: Callable[_PS, ColumnElement[_T]], + *arg: _PS.args, + **kw: _PS.kwargs, + ) -> Label[_T]: sub_element = fn(*arg, **kw) if sub_element is not self._element: return Label(self.name, sub_element, type_=self.type) diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index f33e0a41fb7..143d67b58d3 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -1242,7 +1242,6 @@ def is_derived_from(self, fromclause: Optional[FromClause]) -> bool: def self_group( self, against: Optional[OperatorType] = None ) -> FromGrouping: - ... return FromGrouping(self) @util.preload_module("sqlalchemy.sql.util") @@ -2889,6 +2888,12 @@ def __getstate__(self) -> Dict[str, FromClause]: def __setstate__(self, state: Dict[str, FromClause]) -> None: self.element = state["element"] + if TYPE_CHECKING: + + def self_group( + self, against: Optional[OperatorType] = None + ) -> Self: ... + class NamedFromGrouping(FromGrouping, NamedFromClause): """represent a grouping of a named FROM clause @@ -2899,6 +2904,12 @@ class NamedFromGrouping(FromGrouping, NamedFromClause): inherit_cache = True + if TYPE_CHECKING: + + def self_group( + self, against: Optional[OperatorType] = None + ) -> Self: ... + class TableClause(roles.DMLTableRole, Immutable, NamedFromClause): """Represents a minimal "table" construct. @@ -3312,6 +3323,12 @@ def _column_types(self) -> List[TypeEngine[Any]]: def __clause_element__(self) -> ScalarValues: return self + if TYPE_CHECKING: + + def self_group( + self, against: Optional[OperatorType] = None + ) -> Self: ... + class SelectBase( roles.SelectStatementRole, @@ -3684,7 +3701,6 @@ def select_statement(self) -> _SB: return self.element def self_group(self, against: Optional[OperatorType] = None) -> Self: - ... return self if TYPE_CHECKING: @@ -6325,7 +6341,6 @@ def _needs_parens_for_grouping(self) -> bool: def self_group( self, against: Optional[OperatorType] = None ) -> Union[SelectStatementGrouping[Self], Self]: - ... """Return a 'grouping' construct as per the :class:`_expression.ClauseElement` specification. @@ -6517,19 +6532,7 @@ def where(self, crit: _ColumnExpressionArgument[bool]) -> Self: self.element = cast("Select[Any]", self.element).where(crit) return self - @overload - def self_group( - self: ScalarSelect[Any], against: Optional[OperatorType] = None - ) -> ScalarSelect[Any]: ... - - @overload - def self_group( - self: ColumnElement[Any], against: Optional[OperatorType] = None - ) -> ColumnElement[Any]: ... 
- - def self_group( - self, against: Optional[OperatorType] = None - ) -> ColumnElement[Any]: + def self_group(self, against: Optional[OperatorType] = None) -> Self: return self if TYPE_CHECKING: diff --git a/test/typing/plain_files/sql/operators.py b/test/typing/plain_files/sql/operators.py index dbd6f3d48f4..d52461d41f1 100644 --- a/test/typing/plain_files/sql/operators.py +++ b/test/typing/plain_files/sql/operators.py @@ -154,3 +154,8 @@ class A(Base): # op functions t1 = operators.eq(A.id, 1) select().where(t1) + +# EXPECTED_TYPE: BinaryExpression[Any] +reveal_type(col.op("->>")("field")) +# EXPECTED_TYPE: Union[BinaryExpression[Any], Grouping[Any]] +reveal_type(col.op("->>")("field").self_group()) From cbc2b9c99b2ba60d71f06bb0249e907f8f72fda7 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 4 May 2024 10:38:48 -0400 Subject: [PATCH 204/544] revise approach for bundle deduping Revise the approach from 7d6d7ef73 to make a special case for Bundle-targeted columns entirely, and don't involve the _label_convention() callable. Add tests for select() with tablename labeling convention. Fixes: #11347 Change-Id: I1d15523de5709d45b2b69bc17724831ac3425791 (cherry picked from commit 83f8dd53e362c3ea7562c0076add044740d2c4cc) --- lib/sqlalchemy/orm/context.py | 18 +++-- lib/sqlalchemy/sql/selectable.py | 3 +- test/orm/test_bundle.py | 109 +++++++++++++++++++++++++++---- 3 files changed, 110 insertions(+), 20 deletions(-) diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index 694e98ae953..c2cb54e191c 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -446,7 +446,7 @@ def _column_naming_convention( ) -> _LabelConventionCallable: if legacy: - def name(col, col_name=None, cancel_dedupe=False): + def name(col, col_name=None): if col_name: return col_name else: @@ -3050,7 +3050,10 @@ def __init__( if not is_current_entities or column._is_text_clause: self._label_name = None else: - self._label_name = compile_state._label_convention(column) + if parent_bundle: + self._label_name = column._proxy_key + else: + self._label_name = compile_state._label_convention(column) if parent_bundle: parent_bundle._entities.append(self) @@ -3144,11 +3147,12 @@ def __init__( self.raw_column_index = raw_column_index if is_current_entities: - self._label_name = compile_state._label_convention( - column, - col_name=orm_key, - cancel_dedupe=parent_bundle is not None, - ) + if parent_bundle: + self._label_name = orm_key if orm_key else column._proxy_key + else: + self._label_name = compile_state._label_convention( + column, col_name=orm_key + ) else: self._label_name = None diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index f33e0a41fb7..be8be8e3add 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -4556,14 +4556,13 @@ def _column_naming_convention( def go( c: Union[ColumnElement[Any], TextClause], col_name: Optional[str] = None, - cancel_dedupe: bool = False, ) -> Optional[str]: if is_text_clause(c): return None elif TYPE_CHECKING: assert is_column_element(c) - if not dedupe or cancel_dedupe: + if not dedupe: name = c._proxy_key if name is None: name = "_no_label" diff --git a/test/orm/test_bundle.py b/test/orm/test_bundle.py index 81e789d1cfe..a1bd399a4cb 100644 --- a/test/orm/test_bundle.py +++ b/test/orm/test_bundle.py @@ -3,6 +3,7 @@ from sqlalchemy import func from sqlalchemy import Integer from sqlalchemy import select +from sqlalchemy import SelectLabelStyle from sqlalchemy import String 
from sqlalchemy import testing from sqlalchemy import tuple_ @@ -159,29 +160,68 @@ def test_c_attr(self): select(b1.c.d1, b1.c.d2), "SELECT data.d1, data.d2 FROM data" ) - @testing.variation("stmt_type", ["legacy", "newstyle"]) - def test_dupe_col_name(self, stmt_type): + @testing.variation( + "stmt_type", ["legacy", "newstyle", "newstyle_w_label_conv"] + ) + @testing.variation("col_type", ["orm", "core"]) + def test_dupe_col_name(self, stmt_type, col_type): """test #11347""" Data = self.classes.Data sess = fixture_session() - b1 = Bundle("b1", Data.d1, Data.d3) + if col_type.orm: + b1 = Bundle("b1", Data.d1, Data.d3) + cols = Data.d1, Data.d2 + elif col_type.core: + data_table = self.tables.data + b1 = Bundle("b1", data_table.c.d1, data_table.c.d3) + cols = data_table.c.d1, data_table.c.d2 + else: + col_type.fail() if stmt_type.legacy: row = ( - sess.query(Data.d1, Data.d2, b1) + sess.query(cols[0], cols[1], b1) .filter(Data.d1 == "d0d1") .one() ) elif stmt_type.newstyle: row = sess.execute( - select(Data.d1, Data.d2, b1).filter(Data.d1 == "d0d1") + select(cols[0], cols[1], b1).filter(Data.d1 == "d0d1") ).one() + elif stmt_type.newstyle_w_label_conv: + row = sess.execute( + select(cols[0], cols[1], b1) + .filter(Data.d1 == "d0d1") + .set_label_style( + SelectLabelStyle.LABEL_STYLE_TABLENAME_PLUS_COL + ) + ).one() + else: + stmt_type.fail() + + if stmt_type.newstyle_w_label_conv: + # decision is made here that even if a SELECT with the + # "tablename_plus_colname" label style, within a Bundle we still + # use straight column name, even though the overall row + # uses tablename_colname + eq_( + row._mapping, + {"data_d1": "d0d1", "data_d2": "d0d2", "b1": ("d0d1", "d0d3")}, + ) + else: + eq_( + row._mapping, + {"d1": "d0d1", "d2": "d0d2", "b1": ("d0d1", "d0d3")}, + ) eq_(row[2]._mapping, {"d1": "d0d1", "d3": "d0d3"}) - @testing.variation("stmt_type", ["legacy", "newstyle"]) - def test_dupe_col_name_nested(self, stmt_type): + @testing.variation( + "stmt_type", ["legacy", "newstyle", "newstyle_w_label_conv"] + ) + @testing.variation("col_type", ["orm", "core"]) + def test_dupe_col_name_nested(self, stmt_type, col_type): """test #11347""" Data = self.classes.Data sess = fixture_session() @@ -193,9 +233,18 @@ def proc(row): return proc - b1 = DictBundle("b1", Data.d1, Data.d3) - b2 = DictBundle("b2", Data.d2, Data.d3) - b3 = DictBundle("b3", Data.d2, Data.d3, b1, b2) + if col_type.core: + data_table = self.tables.data + + b1 = DictBundle("b1", data_table.c.d1, data_table.c.d3) + b2 = DictBundle("b2", data_table.c.d2, data_table.c.d3) + b3 = DictBundle("b3", data_table.c.d2, data_table.c.d3, b1, b2) + elif col_type.orm: + b1 = DictBundle("b1", Data.d1, Data.d3) + b2 = DictBundle("b2", Data.d2, Data.d3) + b3 = DictBundle("b3", Data.d2, Data.d3, b1, b2) + else: + col_type.fail() if stmt_type.legacy: row = ( @@ -207,7 +256,45 @@ def proc(row): row = sess.execute( select(Data.d1, Data.d2, b3).filter(Data.d1 == "d0d1") ).one() - + elif stmt_type.newstyle_w_label_conv: + row = sess.execute( + select(Data.d1, Data.d2, b3) + .filter(Data.d1 == "d0d1") + .set_label_style( + SelectLabelStyle.LABEL_STYLE_TABLENAME_PLUS_COL + ) + ).one() + else: + stmt_type.fail() + + if stmt_type.newstyle_w_label_conv: + eq_( + row._mapping, + { + "data_d1": "d0d1", + "data_d2": "d0d2", + "b3": { + "d2": "d0d2", + "d3": "d0d3", + "b1": {"d1": "d0d1", "d3": "d0d3"}, + "b2": {"d2": "d0d2", "d3": "d0d3"}, + }, + }, + ) + else: + eq_( + row._mapping, + { + "d1": "d0d1", + "d2": "d0d2", + "b3": { + "d2": "d0d2", + "d3": "d0d3", 
+ "b1": {"d1": "d0d1", "d3": "d0d3"}, + "b2": {"d2": "d0d2", "d3": "d0d3"}, + }, + }, + ) eq_( row[2], { From de3dd737f1602112f1d5fb701d552136e15b4139 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 5 May 2024 13:15:57 -0400 Subject: [PATCH 205/544] typo fixes Change-Id: I8f3a1d755d03e6c11fa1f783f111977b7ddc22fb (cherry picked from commit 96a50e381ed97dfa92900e3212e1598bc99123a7) --- doc/build/changelog/unreleased_20/11200.rst | 11 +++++------ doc/build/changelog/unreleased_20/11220.rst | 4 ++-- doc/build/changelog/unreleased_20/11291.rst | 2 +- doc/build/changelog/unreleased_20/11327.rst | 2 +- doc/build/changelog/unreleased_20/11332.rst | 7 ++++--- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/doc/build/changelog/unreleased_20/11200.rst b/doc/build/changelog/unreleased_20/11200.rst index 61ab6506b1c..e600d1a149a 100644 --- a/doc/build/changelog/unreleased_20/11200.rst +++ b/doc/build/changelog/unreleased_20/11200.rst @@ -2,9 +2,8 @@ :tags: bug, typing, regression :tickets: 11200 - Fixed typing regression caused by PR :ticket:`11055` in version 2.0.29 that - attempted to add ``ParamSpec`` to the asyncio ``run_sync()`` methods, where - using :meth:`_asyncio.AsyncConnection.run_sync` with - :meth:`_schema.MetaData.reflect` would fail on mypy due to a bug. - See https://github.com/python/mypy/issues/17093 for details. - Pull request courtesy of Francisco R. Del Roio + Fixed typing regression caused by :ticket:`11055` in version 2.0.29 that + added ``ParamSpec`` to the asyncio ``run_sync()`` methods, where using + :meth:`_asyncio.AsyncConnection.run_sync` with + :meth:`_schema.MetaData.reflect` would fail on mypy due to a mypy issue. + Pull request courtesy of Francisco R. Del Roio. diff --git a/doc/build/changelog/unreleased_20/11220.rst b/doc/build/changelog/unreleased_20/11220.rst index 4f04cbf23da..f58a624f10d 100644 --- a/doc/build/changelog/unreleased_20/11220.rst +++ b/doc/build/changelog/unreleased_20/11220.rst @@ -2,8 +2,8 @@ :tags: bug, orm :tickets: 11220 - Added new attribute :attr:`_orm.ORMExecuteState.is_from_statement`, to - detect statements of the form ``select().from_statement()``, and also + Added new attribute :attr:`_orm.ORMExecuteState.is_from_statement` to + detect statements created using :meth:`_sql.Select.from_statement`, and enhanced ``FromStatement`` to set :attr:`_orm.ORMExecuteState.is_select`, :attr:`_orm.ORMExecuteState.is_insert`, :attr:`_orm.ORMExecuteState.is_update`, and diff --git a/doc/build/changelog/unreleased_20/11291.rst b/doc/build/changelog/unreleased_20/11291.rst index e341ff8aff8..c676c9c40a6 100644 --- a/doc/build/changelog/unreleased_20/11291.rst +++ b/doc/build/changelog/unreleased_20/11291.rst @@ -2,7 +2,7 @@ :tags: bug, orm :tickets: 11291 - Fixed issue in :func:`_orm.selectin_polymorhpic` loader option where + Fixed issue in :func:`_orm.selectin_polymorphic` loader option where attributes defined with :func:`_orm.composite` on a superclass would cause an internal exception on load. 
diff --git a/doc/build/changelog/unreleased_20/11327.rst b/doc/build/changelog/unreleased_20/11327.rst index f7169ad9803..c5fe3e15463 100644 --- a/doc/build/changelog/unreleased_20/11327.rst +++ b/doc/build/changelog/unreleased_20/11327.rst @@ -2,7 +2,7 @@ :tags: bug, orm :tickets: 11327 - Fixed issue in :func:`_orm.selectin_polymorhpic` loader option where the + Fixed issue in :func:`_orm.selectin_polymorphic` loader option where the SELECT emitted would only accommodate for the child-most class among the result rows that were returned, leading intermediary-class attributes to be unloaded if there were no concrete instances of that intermediary-class diff --git a/doc/build/changelog/unreleased_20/11332.rst b/doc/build/changelog/unreleased_20/11332.rst index c8f748654c6..2c23dc6de15 100644 --- a/doc/build/changelog/unreleased_20/11332.rst +++ b/doc/build/changelog/unreleased_20/11332.rst @@ -2,6 +2,7 @@ :tags: bug, orm :tickets: 11332 - Fixes issue in :meth:`_orm.Session.bulk_save_objects` where it would write a - wrong identity key when using ``return_defaults=True``. - The wrong identity key could lead to an index error when entities are then pickled. + Fixed issue in :meth:`_orm.Session.bulk_save_objects` where the form of the + identity key produced when using ``return_defaults=True`` would be + incorrect. This could lead to an errors during pickling as well as identity + map mismatches. From 403194a3979a55a5387311595210ce05c877e355 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 5 May 2024 13:52:32 -0400 Subject: [PATCH 206/544] - 2.0.30 --- doc/build/changelog/changelog_20.rst | 127 +++++++++++++++++++- doc/build/changelog/unreleased_20/11200.rst | 9 -- doc/build/changelog/unreleased_20/11210.rst | 11 -- doc/build/changelog/unreleased_20/11220.rst | 11 -- doc/build/changelog/unreleased_20/11268.rst | 6 - doc/build/changelog/unreleased_20/11291.rst | 8 -- doc/build/changelog/unreleased_20/11292.rst | 11 -- doc/build/changelog/unreleased_20/11305.rst | 7 -- doc/build/changelog/unreleased_20/11306.rst | 12 -- doc/build/changelog/unreleased_20/11327.rst | 10 -- doc/build/changelog/unreleased_20/11332.rst | 8 -- doc/build/changelog/unreleased_20/11334.rst | 7 -- doc/build/changelog/unreleased_20/11347.rst | 13 -- doc/build/conf.py | 4 +- 14 files changed, 128 insertions(+), 116 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/11200.rst delete mode 100644 doc/build/changelog/unreleased_20/11210.rst delete mode 100644 doc/build/changelog/unreleased_20/11220.rst delete mode 100644 doc/build/changelog/unreleased_20/11268.rst delete mode 100644 doc/build/changelog/unreleased_20/11291.rst delete mode 100644 doc/build/changelog/unreleased_20/11292.rst delete mode 100644 doc/build/changelog/unreleased_20/11305.rst delete mode 100644 doc/build/changelog/unreleased_20/11306.rst delete mode 100644 doc/build/changelog/unreleased_20/11327.rst delete mode 100644 doc/build/changelog/unreleased_20/11332.rst delete mode 100644 doc/build/changelog/unreleased_20/11334.rst delete mode 100644 doc/build/changelog/unreleased_20/11347.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 973a480fe23..b273976eb87 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,132 @@ .. changelog:: :version: 2.0.30 - :include_notes_from: unreleased_20 + :released: May 5, 2024 + + .. 
change:: + :tags: bug, typing, regression + :tickets: 11200 + + Fixed typing regression caused by :ticket:`11055` in version 2.0.29 that + added ``ParamSpec`` to the asyncio ``run_sync()`` methods, where using + :meth:`_asyncio.AsyncConnection.run_sync` with + :meth:`_schema.MetaData.reflect` would fail on mypy due to a mypy issue. + Pull request courtesy of Francisco R. Del Roio. + + .. change:: + :tags: bug, engine + :tickets: 11210 + + Fixed issue in the + :paramref:`_engine.Connection.execution_options.logging_token` option, + where changing the value of ``logging_token`` on a connection that has + already logged messages would not be updated to reflect the new logging + token. This in particular prevented the use of + :meth:`_orm.Session.connection` to change the option on the connection, + since the BEGIN logging message would already have been emitted. + + .. change:: + :tags: bug, orm + :tickets: 11220 + + Added new attribute :attr:`_orm.ORMExecuteState.is_from_statement` to + detect statements created using :meth:`_sql.Select.from_statement`, and + enhanced ``FromStatement`` to set :attr:`_orm.ORMExecuteState.is_select`, + :attr:`_orm.ORMExecuteState.is_insert`, + :attr:`_orm.ORMExecuteState.is_update`, and + :attr:`_orm.ORMExecuteState.is_delete` according to the element that is + sent to the :meth:`_sql.Select.from_statement` method itself. + + .. change:: + :tags: bug, test + :tickets: 11268 + + Ensure the ``PYTHONPATH`` variable is properly initialized when + using ``subprocess.run`` in the tests. + + .. change:: + :tags: bug, orm + :tickets: 11291 + + Fixed issue in :func:`_orm.selectin_polymorphic` loader option where + attributes defined with :func:`_orm.composite` on a superclass would cause + an internal exception on load. + + + .. change:: + :tags: bug, orm, regression + :tickets: 11292 + + Fixed regression from 1.4 where using :func:`_orm.defaultload` in + conjunction with a non-propagating loader like :func:`_orm.contains_eager` + would nonetheless propagate the :func:`_orm.contains_eager` to a lazy load + operation, causing incorrect queries as this option is only intended to + come from an original load. + + + + .. change:: + :tags: bug, orm + :tickets: 11305 + + Fixed issue in ORM Annotated Declarative where typing issue where literals + defined using :pep:`695` type aliases would not work with inference of + :class:`.Enum` datatypes. Pull request courtesy of Alc-Alc. + + .. change:: + :tags: bug, engine + :tickets: 11306 + + Fixed issue in cursor handling which affected handling of duplicate + :class:`_sql.Column` or similar objcts in the columns clause of + :func:`_sql.select`, both in combination with arbitary :func:`_sql.text()` + clauses in the SELECT list, as well as when attempting to retrieve + :meth:`_engine.Result.mappings` for the object, which would lead to an + internal error. + + + + .. change:: + :tags: bug, orm + :tickets: 11327 + + Fixed issue in :func:`_orm.selectin_polymorphic` loader option where the + SELECT emitted would only accommodate for the child-most class among the + result rows that were returned, leading intermediary-class attributes to be + unloaded if there were no concrete instances of that intermediary-class + present in the result. This issue only presented itself for multi-level + inheritance hierarchies. + + .. change:: + :tags: bug, orm + :tickets: 11332 + + Fixed issue in :meth:`_orm.Session.bulk_save_objects` where the form of the + identity key produced when using ``return_defaults=True`` would be + incorrect. 
This could lead to an errors during pickling as well as identity + map mismatches. + + .. change:: + :tags: bug, installation + :tickets: 11334 + + Fixed an internal class that was testing for unexpected attributes to work + correctly under upcoming Python 3.13. Pull request courtesy Edgar + Ramírez-Mondragón. + + .. change:: + :tags: bug, orm + :tickets: 11347 + + Fixed issue where attribute key names in :class:`_orm.Bundle` would not be + correct when using ORM enabled :class:`_sql.select` vs. + :class:`_orm.Query`, when the statement contained duplicate column names. + + .. change:: + :tags: bug, typing + + Fixed issue in typing for :class:`_orm.Bundle` where creating a nested + :class:`_orm.Bundle` structure were not allowed. .. changelog:: :version: 2.0.29 diff --git a/doc/build/changelog/unreleased_20/11200.rst b/doc/build/changelog/unreleased_20/11200.rst deleted file mode 100644 index e600d1a149a..00000000000 --- a/doc/build/changelog/unreleased_20/11200.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, typing, regression - :tickets: 11200 - - Fixed typing regression caused by :ticket:`11055` in version 2.0.29 that - added ``ParamSpec`` to the asyncio ``run_sync()`` methods, where using - :meth:`_asyncio.AsyncConnection.run_sync` with - :meth:`_schema.MetaData.reflect` would fail on mypy due to a mypy issue. - Pull request courtesy of Francisco R. Del Roio. diff --git a/doc/build/changelog/unreleased_20/11210.rst b/doc/build/changelog/unreleased_20/11210.rst deleted file mode 100644 index 088f07d61ba..00000000000 --- a/doc/build/changelog/unreleased_20/11210.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: bug, engine - :tickets: 11210 - - Fixed issue in the - :paramref:`_engine.Connection.execution_options.logging_token` option, - where changing the value of ``logging_token`` on a connection that has - already logged messages would not be updated to reflect the new logging - token. This in particular prevented the use of - :meth:`_orm.Session.connection` to change the option on the connection, - since the BEGIN logging message would already have been emitted. diff --git a/doc/build/changelog/unreleased_20/11220.rst b/doc/build/changelog/unreleased_20/11220.rst deleted file mode 100644 index f58a624f10d..00000000000 --- a/doc/build/changelog/unreleased_20/11220.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11220 - - Added new attribute :attr:`_orm.ORMExecuteState.is_from_statement` to - detect statements created using :meth:`_sql.Select.from_statement`, and - enhanced ``FromStatement`` to set :attr:`_orm.ORMExecuteState.is_select`, - :attr:`_orm.ORMExecuteState.is_insert`, - :attr:`_orm.ORMExecuteState.is_update`, and - :attr:`_orm.ORMExecuteState.is_delete` according to the element that is - sent to the :meth:`_sql.Select.from_statement` method itself. diff --git a/doc/build/changelog/unreleased_20/11268.rst b/doc/build/changelog/unreleased_20/11268.rst deleted file mode 100644 index 40c1eb7bcca..00000000000 --- a/doc/build/changelog/unreleased_20/11268.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: bug, test - :tickets: 11268 - - Ensure the ``PYTHONPATH`` variable is properly initialized when - using ``subprocess.run`` in the tests. diff --git a/doc/build/changelog/unreleased_20/11291.rst b/doc/build/changelog/unreleased_20/11291.rst deleted file mode 100644 index c676c9c40a6..00000000000 --- a/doc/build/changelog/unreleased_20/11291.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
change:: - :tags: bug, orm - :tickets: 11291 - - Fixed issue in :func:`_orm.selectin_polymorphic` loader option where - attributes defined with :func:`_orm.composite` on a superclass would cause - an internal exception on load. - diff --git a/doc/build/changelog/unreleased_20/11292.rst b/doc/build/changelog/unreleased_20/11292.rst deleted file mode 100644 index 65fbdf719a0..00000000000 --- a/doc/build/changelog/unreleased_20/11292.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: bug, orm, regression - :tickets: 11292 - - Fixed regression from 1.4 where using :func:`_orm.defaultload` in - conjunction with a non-propagating loader like :func:`_orm.contains_eager` - would nonetheless propagate the :func:`_orm.contains_eager` to a lazy load - operation, causing incorrect queries as this option is only intended to - come from an original load. - - diff --git a/doc/build/changelog/unreleased_20/11305.rst b/doc/build/changelog/unreleased_20/11305.rst deleted file mode 100644 index 0a022c00de4..00000000000 --- a/doc/build/changelog/unreleased_20/11305.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11305 - - Fixed issue in ORM Annotated Declarative where typing issue where literals - defined using :pep:`695` type aliases would not work with inference of - :class:`.Enum` datatypes. Pull request courtesy of Alc-Alc. diff --git a/doc/build/changelog/unreleased_20/11306.rst b/doc/build/changelog/unreleased_20/11306.rst deleted file mode 100644 index c5d4ebfb70c..00000000000 --- a/doc/build/changelog/unreleased_20/11306.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, engine - :tickets: 11306 - - Fixed issue in cursor handling which affected handling of duplicate - :class:`_sql.Column` or similar objcts in the columns clause of - :func:`_sql.select`, both in combination with arbitary :func:`_sql.text()` - clauses in the SELECT list, as well as when attempting to retrieve - :meth:`_engine.Result.mappings` for the object, which would lead to an - internal error. - - diff --git a/doc/build/changelog/unreleased_20/11327.rst b/doc/build/changelog/unreleased_20/11327.rst deleted file mode 100644 index c5fe3e15463..00000000000 --- a/doc/build/changelog/unreleased_20/11327.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11327 - - Fixed issue in :func:`_orm.selectin_polymorphic` loader option where the - SELECT emitted would only accommodate for the child-most class among the - result rows that were returned, leading intermediary-class attributes to be - unloaded if there were no concrete instances of that intermediary-class - present in the result. This issue only presented itself for multi-level - inheritance hierarchies. diff --git a/doc/build/changelog/unreleased_20/11332.rst b/doc/build/changelog/unreleased_20/11332.rst deleted file mode 100644 index 2c23dc6de15..00000000000 --- a/doc/build/changelog/unreleased_20/11332.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11332 - - Fixed issue in :meth:`_orm.Session.bulk_save_objects` where the form of the - identity key produced when using ``return_defaults=True`` would be - incorrect. This could lead to an errors during pickling as well as identity - map mismatches. diff --git a/doc/build/changelog/unreleased_20/11334.rst b/doc/build/changelog/unreleased_20/11334.rst deleted file mode 100644 index 48f590c4ac4..00000000000 --- a/doc/build/changelog/unreleased_20/11334.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. 
change:: - :tags: bug, installation - :tickets: 11334 - - Fixed an internal class that was testing for unexpected attributes to work - correctly under upcoming Python 3.13. Pull request courtesy Edgar - Ramírez-Mondragón. diff --git a/doc/build/changelog/unreleased_20/11347.rst b/doc/build/changelog/unreleased_20/11347.rst deleted file mode 100644 index a0f9652065e..00000000000 --- a/doc/build/changelog/unreleased_20/11347.rst +++ /dev/null @@ -1,13 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11347 - - Fixed issue where attribute key names in :class:`_orm.Bundle` would not be - correct when using ORM enabled :class:`_sql.select` vs. - :class:`_orm.Query`, when the statement contained duplicate column names. - -.. change:: - :tags: bug, typing - - Fixed issue in typing for :class:`_orm.Bundle` where creating a nested - :class:`_orm.Bundle` structure were not allowed. diff --git a/doc/build/conf.py b/doc/build/conf.py index db6ee5c8f9c..9193ba8ac71 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.29" +release = "2.0.30" -release_date = "March 23, 2024" +release_date = "May 5, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 4e5bdf800f3e1946a1d78375d950bd7b02b3f1ce Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 5 May 2024 14:04:18 -0400 Subject: [PATCH 207/544] Version 2.0.31 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index b273976eb87..4b3c9b90005 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.31 + :include_notes_from: unreleased_20 + .. 
changelog:: :version: 2.0.30 :released: May 5, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 1d5bfd5d3d1..0751c2482ff 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.30" +__version__ = "2.0.31" def __go(lcls: Any) -> None: From 401533e218c5e7950d65489863e0207d4787e50d Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 4 May 2024 22:12:28 +0200 Subject: [PATCH 208/544] Improve implementation of server side cursor in asyncpg Change-Id: I36d0ff5ccea7fbf46cabcfeae1492b9a90b7f68b (cherry picked from commit e3571e1d4b4d34a250886a8967a9b1339f0c68a7) --- lib/sqlalchemy/dialects/postgresql/asyncpg.py | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index df2656de2a8..12e711f52e2 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -171,7 +171,7 @@ from __future__ import annotations -import collections +from collections import deque import decimal import json as _py_json import re @@ -611,23 +611,21 @@ class AsyncAdapt_asyncpg_ss_cursor(AsyncAdapt_asyncpg_cursor): def __init__(self, adapt_connection): super().__init__(adapt_connection) - self._rowbuffer = None + self._rowbuffer = deque() def close(self): self._cursor = None - self._rowbuffer = None + self._rowbuffer.clear() def _buffer_rows(self): + assert self._cursor is not None new_rows = self._adapt_connection.await_(self._cursor.fetch(50)) - self._rowbuffer = collections.deque(new_rows) + self._rowbuffer.extend(new_rows) def __aiter__(self): return self async def __anext__(self): - if not self._rowbuffer: - self._buffer_rows() - while True: while self._rowbuffer: yield self._rowbuffer.popleft() @@ -650,21 +648,19 @@ def fetchmany(self, size=None): if not self._rowbuffer: self._buffer_rows() - buf = list(self._rowbuffer) - lb = len(buf) + assert self._cursor is not None + rb = self._rowbuffer + lb = len(rb) if size > lb: - buf.extend( + rb.extend( self._adapt_connection.await_(self._cursor.fetch(size - lb)) ) - result = buf[0:size] - self._rowbuffer = collections.deque(buf[size:]) - return result + return [rb.popleft() for _ in range(min(size, len(rb)))] def fetchall(self): - ret = list(self._rowbuffer) + list( - self._adapt_connection.await_(self._all()) - ) + ret = list(self._rowbuffer) + ret.extend(self._adapt_connection.await_(self._all())) self._rowbuffer.clear() return ret From 3848aa957870ad84d79aabe90c1309d4e5426844 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 6 May 2024 23:10:46 +0200 Subject: [PATCH 209/544] Add name parameter to with_polymorphic. Added missing parameter :paramref:`_orm.with_polymorphic.name` that allows specifying the name of returned :class:`_orm.AliasedClass`. 
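As a minimal sketch of the new parameter (the mapping below is an
illustrative single-table hierarchy, not the test fixtures; the alias
name "poly" is arbitrary):

```python
from sqlalchemy import select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from sqlalchemy.orm import with_polymorphic


class Base(DeclarativeBase):
    pass


class Person(Base):
    __tablename__ = "person"
    id: Mapped[int] = mapped_column(primary_key=True)
    type: Mapped[str]
    __mapper_args__ = {
        "polymorphic_identity": "person",
        "polymorphic_on": "type",
    }


class Engineer(Person):
    __mapper_args__ = {"polymorphic_identity": "engineer"}


# name= sets the name of the returned AliasedClass; it also becomes
# the key under which the entity appears in Result.mappings()
poly = with_polymorphic(Person, [Engineer], name="poly")
stmt = select(poly)
```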
Fixes: #11361 Change-Id: I1eae550452526d85da1377207c5fa5e93ac673c3 (cherry picked from commit 02001e9458802ebb512a140aa24e663b364dc3ad) --- doc/build/changelog/unreleased_20/11361.rst | 6 ++++++ lib/sqlalchemy/orm/_orm_constructors.py | 6 ++++++ lib/sqlalchemy/orm/util.py | 2 ++ test/orm/inheritance/test_polymorphic_rel.py | 8 ++++++++ test/orm/test_cache_key.py | 17 +++++------------ 5 files changed, 27 insertions(+), 12 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11361.rst diff --git a/doc/build/changelog/unreleased_20/11361.rst b/doc/build/changelog/unreleased_20/11361.rst new file mode 100644 index 00000000000..bd9fe1d3ff4 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11361.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: usecase, orm + :tickets: 11361 + + Added missing parameter :paramref:`_orm.with_polymorphic.name` that + allows specifying the name of returned :class:`_orm.AliasedClass`. diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index 7cb536b2976..3403c39e29f 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -2317,6 +2317,7 @@ def with_polymorphic( aliased: bool = False, innerjoin: bool = False, adapt_on_names: bool = False, + name: Optional[str] = None, _use_mapper_path: bool = False, ) -> AliasedClass[_O]: """Produce an :class:`.AliasedClass` construct which specifies @@ -2388,6 +2389,10 @@ def with_polymorphic( .. versionadded:: 1.4.33 + :param name: Name given to the generated :class:`.AliasedClass`. + + .. versionadded:: 2.0.31 + """ return AliasedInsp._with_polymorphic_factory( base, @@ -2398,6 +2403,7 @@ def with_polymorphic( adapt_on_names=adapt_on_names, aliased=aliased, innerjoin=innerjoin, + name=name, _use_mapper_path=_use_mapper_path, ) diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 8e153e63dbd..ad2b69ce313 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -1067,6 +1067,7 @@ def _with_polymorphic_factory( aliased: bool = False, innerjoin: bool = False, adapt_on_names: bool = False, + name: Optional[str] = None, _use_mapper_path: bool = False, ) -> AliasedClass[_O]: primary_mapper = _class_to_mapper(base) @@ -1087,6 +1088,7 @@ def _with_polymorphic_factory( return AliasedClass( base, selectable, + name=name, with_polymorphic_mappers=mappers, adapt_on_names=adapt_on_names, with_polymorphic_discriminator=polymorphic_on, diff --git a/test/orm/inheritance/test_polymorphic_rel.py b/test/orm/inheritance/test_polymorphic_rel.py index 0b358f8894b..1216aa0106f 100644 --- a/test/orm/inheritance/test_polymorphic_rel.py +++ b/test/orm/inheritance/test_polymorphic_rel.py @@ -2060,6 +2060,14 @@ def test_correlation_three(self): [(e3.name,)], ) + def test_with_polymorphic_named(self): + session = fixture_session() + poly = with_polymorphic(Person, "*", name="poly_name") + + res = session.execute(select(poly)).mappings() + eq_(res.keys(), ["poly_name"]) + eq_(len(res.all()), 5) + class PolymorphicTest(_PolymorphicTestBase, _Polymorphic): def test_joined_aliasing_unrelated_subuqery(self): diff --git a/test/orm/test_cache_key.py b/test/orm/test_cache_key.py index ff70e4718b5..4bd353b84fd 100644 --- a/test/orm/test_cache_key.py +++ b/test/orm/test_cache_key.py @@ -643,15 +643,9 @@ def test_wpoly_cache_keys(self): self._run_cache_key_fixture( lambda: ( inspect(Person), - inspect( - aliased(Person, me_stmt), - ), - inspect( - aliased(Person, meb_stmt), - ), - inspect( - with_polymorphic(Person, [Manager, 
Engineer]), - ), + inspect(aliased(Person, me_stmt)), + inspect(aliased(Person, meb_stmt)), + inspect(with_polymorphic(Person, [Manager, Engineer])), # aliased=True is the same as flat=True for default selectable inspect( with_polymorphic( @@ -695,9 +689,7 @@ def test_wpoly_cache_keys(self): aliased=True, ), ), - inspect( - with_polymorphic(Person, [Manager, Engineer, Boss]), - ), + inspect(with_polymorphic(Person, [Manager, Engineer, Boss])), inspect( with_polymorphic( Person, @@ -712,6 +704,7 @@ def test_wpoly_cache_keys(self): polymorphic_on=literal_column("bar"), ), ), + inspect(with_polymorphic(Person, "*", name="foo")), ), compare_values=True, ) From 039c684331c387718339f1a735c0cb22aa00faa9 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 7 May 2024 18:03:51 -0400 Subject: [PATCH 210/544] setup JoinedDispatch to support pickling Fixed issue where a :class:`.MetaData` collection would not be serializable, if an :class:`.Enum` or :class:`.Boolean` datatype were present which had been adapted. This specific scenario in turn could occur when using the :class:`.Enum` or :class:`.Boolean` within ORM Annotated Declarative form where type objects frequently get copied. Fixes: #11365 Change-Id: Iaaa64baad79c41075d37cf53dade744d79e600a3 (cherry picked from commit 93cfb49572ac56bc320a09b82285bf8ef8cdff57) --- doc/build/changelog/unreleased_20/11365.rst | 9 +++++++++ lib/sqlalchemy/event/base.py | 7 +++++++ test/sql/test_types.py | 9 ++++++++- 3 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/11365.rst diff --git a/doc/build/changelog/unreleased_20/11365.rst b/doc/build/changelog/unreleased_20/11365.rst new file mode 100644 index 00000000000..d2b353e9123 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11365.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, orm + :tickets: 11365 + + Fixed issue where a :class:`.MetaData` collection would not be + serializable, if an :class:`.Enum` or :class:`.Boolean` datatype were + present which had been adapted. This specific scenario in turn could occur + when using the :class:`.Enum` or :class:`.Boolean` within ORM Annotated + Declarative form where type objects frequently get copied. diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py index 1f52e2eb799..434886316f0 100644 --- a/lib/sqlalchemy/event/base.py +++ b/lib/sqlalchemy/event/base.py @@ -198,6 +198,10 @@ def _join(self, other: _DispatchCommon[_ET]) -> _JoinedDispatcher[_ET]: {"__slots__": self._event_names}, ) self.__class__._joined_dispatch_cls = cls + + # establish pickle capability by adding it to this module + globals()[cls.__name__] = cls + return self._joined_dispatch_cls(self, other) def __reduce__(self) -> Union[str, Tuple[Any, ...]]: @@ -398,6 +402,9 @@ def __init__( self.parent = parent self._instance_cls = self.local._instance_cls + def __reduce__(self) -> Any: + return (self.__class__, (self.local, self.parent)) + def __getattr__(self, name: str) -> _JoinedListener[_ET]: # Assign _JoinedListeners as attributes on demand # to reduce startup time for new dispatch objects. 
diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 0127004438c..5214ebac53c 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -507,15 +507,22 @@ class PickleTypesTest(fixtures.TestBase): ("Big", BigInteger()), ("Num", Numeric()), ("Flo", Float()), + ("Enu", Enum("one", "two", "three")), ("Dat", DateTime()), ("Dat", Date()), ("Tim", Time()), ("Lar", LargeBinary()), ("Pic", PickleType()), ("Int", Interval()), + argnames="name,type_", id_="ar", ) - def test_pickle_types(self, name, type_): + @testing.variation("use_adapt", [True, False]) + def test_pickle_types(self, name, type_, use_adapt): + + if use_adapt: + type_ = type_.copy() + column_type = Column(name, type_) meta = MetaData() Table("foo", meta, column_type) From 713c83bae03402cf6917d6da091e5232dda01df6 Mon Sep 17 00:00:00 2001 From: roche-quentin Date: Wed, 8 May 2024 06:48:09 -0400 Subject: [PATCH 211/544] Add ``SET DEFAULT`` reflection option Added missing foreign key reflection option ``SET DEFAULT`` in the MySQL and MariaDB dialects. Fixes: #11285 Closes: #11368 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11368 Pull-request-sha: dbd9c239c60b8f4f2be66967825ee15c1f7941b0 Change-Id: If61d3365fc4271432d5591d1b50e10f4a1da9290 (cherry picked from commit 323a7dcb5e70ae555e771beb63e3a58158f003a2) --- doc/build/changelog/unreleased_20/11285.rst | 7 +++++++ lib/sqlalchemy/dialects/mysql/reflection.py | 2 +- test/dialect/mysql/test_reflection.py | 4 ++-- 3 files changed, 10 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11285.rst diff --git a/doc/build/changelog/unreleased_20/11285.rst b/doc/build/changelog/unreleased_20/11285.rst new file mode 100644 index 00000000000..a965799c172 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11285.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: usecase, reflection, mysql + :tickets: 11285 + + Added missing foreign key reflection option ``SET DEFAULT`` + in the MySQL and MariaDB dialects. + Pull request courtesy of Quentin Roche. diff --git a/lib/sqlalchemy/dialects/mysql/reflection.py b/lib/sqlalchemy/dialects/mysql/reflection.py index c764e8ccc7f..d7622c5463d 100644 --- a/lib/sqlalchemy/dialects/mysql/reflection.py +++ b/lib/sqlalchemy/dialects/mysql/reflection.py @@ -505,7 +505,7 @@ def _prep_regexes(self): # # unique constraints come back as KEYs kw = quotes.copy() - kw["on"] = "RESTRICT|CASCADE|SET NULL|NO ACTION" + kw["on"] = "RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT" self._re_fk_constraint = _re_compile( r" " r"CONSTRAINT +" diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py index 79e7198ef3d..4fa472ce1ae 100644 --- a/test/dialect/mysql/test_reflection.py +++ b/test/dialect/mysql/test_reflection.py @@ -1557,7 +1557,7 @@ def test_fk_reflection(self): " CONSTRAINT `addresses_user_id_fkey` " "FOREIGN KEY (`user_id`) " "REFERENCES `users` (`id`) " - "ON DELETE CASCADE ON UPDATE SET NULL" + "ON DELETE SET DEFAULT ON UPDATE SET NULL" ) eq_( m.groups(), @@ -1567,7 +1567,7 @@ def test_fk_reflection(self): "`users`", "`id`", None, - "CASCADE", + "SET DEFAULT", "SET NULL", ), ) From 951646fbd4c13b49ff2fe44a307e1c0d4a6a872c Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 9 May 2024 22:21:11 +0200 Subject: [PATCH 212/544] Add ``insert_default`` to ``Column``. Added :paramref:`_schema.Column.insert_default` as an alias of :paramref:`_schema.Column.default` for compatibility with func:`_orm.mapped_column`. 
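As a minimal sketch of the alias with this change applied (table and
column names here are arbitrary):

```python
from sqlalchemy import Column, Integer, MetaData, String, Table

metadata = MetaData()

# insert_default= is accepted as a synonym for default=; both populate
# Column.default, the Python-side value used when an INSERT omits the
# column.  When both are given, insert_default takes precedence.
t = Table(
    "example",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("status", String(20), insert_default="new"),
)

assert t.c.status.default.arg == "new"
```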
Fixes: #11374 Change-Id: I5509b6cbac7b37ac37430a88442b1319cc9c1024 (cherry picked from commit c87572b60cbcb869c41a7b4283a11c5c14ef048c) --- doc/build/changelog/unreleased_20/11374.rst | 7 ++ doc/build/faq/ormconfiguration.rst | 91 +++++++++++++++++++++ doc/build/orm/dataclasses.rst | 2 +- lib/sqlalchemy/orm/_orm_constructors.py | 25 ++++++ lib/sqlalchemy/sql/schema.py | 26 ++++-- test/sql/test_metadata.py | 34 ++++++-- 6 files changed, 172 insertions(+), 13 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11374.rst diff --git a/doc/build/changelog/unreleased_20/11374.rst b/doc/build/changelog/unreleased_20/11374.rst new file mode 100644 index 00000000000..d52da2e7670 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11374.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: schema, usecase + :tickets: 11374 + + Added :paramref:`_schema.Column.insert_default` as an alias of + :paramref:`_schema.Column.default` for compatibility with + :func:`_orm.mapped_column`. diff --git a/doc/build/faq/ormconfiguration.rst b/doc/build/faq/ormconfiguration.rst index 90d74d23ee9..bfcf117ae09 100644 --- a/doc/build/faq/ormconfiguration.rst +++ b/doc/build/faq/ormconfiguration.rst @@ -349,3 +349,94 @@ loads directly to primary key values just loaded. .. seealso:: :ref:`subquery_eager_loading` + +.. _defaults_default_factory_insert_default: + +What are ``default``, ``default_factory`` and ``insert_default`` and what should I use? +--------------------------------------------------------------------------------------- + +There's a bit of a clash in SQLAlchemy's API here due to the addition of PEP-681 +dataclass transforms, which is strict about its naming conventions. PEP-681 comes +into play if you are using :class:`_orm.MappedAsDataclass` as shown in :ref:`orm_declarative_native_dataclasses`. +If you are not using MappedAsDataclass, then it does not apply. + +Part One - Classic SQLAlchemy that is not using dataclasses +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +When **not** using :class:`_orm.MappedAsDataclass`, as has been the case for many years +in SQLAlchemy, the :func:`_orm.mapped_column` (and :class:`_schema.Column`) +construct supports a parameter :paramref:`_orm.mapped_column.default`. +This indicates a Python-side default (as opposed to a server side default that +would be part of your database's schema definition) that will take place when +an ``INSERT`` statement is emitted. This default can be **any** of a static Python value +like a string, **or** a Python callable function, **or** a SQLAlchemy SQL construct. +Full documentation for :paramref:`_orm.mapped_column.default` is at +:ref:`defaults_client_invoked_sql`. + +When using :paramref:`_orm.mapped_column.default` with an ORM mapping that is **not** +using :class:`_orm.MappedAsDataclass`, this default value /callable **does not show +up on your object when you first construct it**. It only takes place when SQLAlchemy +works up an ``INSERT`` statement for your object. + +A very important thing to note is that when using :func:`_orm.mapped_column` +(and :class:`_schema.Column`), the classic :paramref:`_orm.mapped_column.default` +parameter is also available under a new name, called +:paramref:`_orm.mapped_column.insert_default`. If you build a +:func:`_orm.mapped_column` and you are **not** using :class:`_orm.MappedAsDataclass`, the +:paramref:`_orm.mapped_column.default` and :paramref:`_orm.mapped_column.insert_default` +parameters are **synonymous**. 
+ +Part Two - Using Dataclasses support with MappedAsDataclass +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +When you **are** using :class:`_orm.MappedAsDataclass`, that is, the specific form +of mapping used at :ref:`orm_declarative_native_dataclasses`, the meaning of the +:paramref:`_orm.mapped_column.default` keyword changes. We recognize that it's not +ideal that this name changes its behavior, however there was no alternative as +PEP-681 requires :paramref:`_orm.mapped_column.default` to take on this meaning. + +When dataclasses are used, the :paramref:`_orm.mapped_column.default` parameter must +be used the way it's described at +`Python Dataclasses `_ - it refers +to a constant value like a string or a number, and **is applied to your object +immediately when constructed**. It is also at the moment also applied to the +:paramref:`_orm.mapped_column.default` parameter of :class:`_schema.Column` where +it would be used in an ``INSERT`` statement automatically even if not present +on the object. If you instead want to use a callable for your dataclass, +which will be applied to the object when constructed, you would use +:paramref:`_orm.mapped_column.default_factory`. + +To get access to the ``INSERT``-only behavior of :paramref:`_orm.mapped_column.default` +that is described in part one above, you would use the +:paramref:`_orm.mapped_column.insert_default` parameter instead. +:paramref:`_orm.mapped_column.insert_default` when dataclasses are used continues +to be a direct route to the Core-level "default" process where the parameter can +be a static value or callable. + +.. list-table:: Summary Chart + :header-rows: 1 + + * - Construct + - Works with dataclasses? + - Works without dataclasses? + - Accepts scalar? + - Accepts callable? + - Populates object immediately? + * - :paramref:`_orm.mapped_column.default` + - ✔ + - ✔ + - ✔ + - Only if no dataclasses + - Only if dataclasses + * - :paramref:`_orm.mapped_column.insert_default` + - ✔ + - ✔ + - ✔ + - ✔ + - ✖ + * - :paramref:`_orm.mapped_column.default_factory` + - ✔ + - ✖ + - ✖ + - ✔ + - Only if dataclasses diff --git a/doc/build/orm/dataclasses.rst b/doc/build/orm/dataclasses.rst index e737597cf14..910d6a21c55 100644 --- a/doc/build/orm/dataclasses.rst +++ b/doc/build/orm/dataclasses.rst @@ -18,7 +18,7 @@ attrs_ third party integration library. .. _orm_declarative_native_dataclasses: Declarative Dataclass Mapping -------------------------------- +----------------------------- SQLAlchemy :ref:`Annotated Declarative Table ` mappings may be augmented with an additional diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index 3403c39e29f..38ea2b2f25f 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -255,12 +255,28 @@ def mapped_column( be used instead**. This is necessary to disambiguate the callable from being interpreted as a dataclass level default. + .. seealso:: + + :ref:`defaults_default_factory_insert_default` + + :paramref:`_orm.mapped_column.insert_default` + + :paramref:`_orm.mapped_column.default_factory` + :param insert_default: Passed directly to the :paramref:`_schema.Column.default` parameter; will supersede the value of :paramref:`_orm.mapped_column.default` when present, however :paramref:`_orm.mapped_column.default` will always apply to the constructor default for a dataclasses mapping. + .. 
seealso:: + + :ref:`defaults_default_factory_insert_default` + + :paramref:`_orm.mapped_column.default` + + :paramref:`_orm.mapped_column.default_factory` + :param sort_order: An integer that indicates how this mapped column should be sorted compared to the others when the ORM is creating a :class:`_schema.Table`. Among mapped columns that have the same @@ -295,6 +311,15 @@ def mapped_column( specifies a default-value generation function that will take place as part of the ``__init__()`` method as generated by the dataclass process. + + .. seealso:: + + :ref:`defaults_default_factory_insert_default` + + :paramref:`_orm.mapped_column.default` + + :paramref:`_orm.mapped_column.insert_default` + :param compare: Specific to :ref:`orm_declarative_native_dataclasses`, indicates if this field should be included in comparison operations when generating the diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index aa359fdbbd7..eda4a97cc2d 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -60,6 +60,7 @@ from . import type_api from . import visitors from .base import _DefaultDescriptionTuple +from .base import _NoArg from .base import _NoneName from .base import _SentinelColumnCharacterization from .base import _SentinelDefaultCharacterization @@ -1514,7 +1515,8 @@ def __init__( name: Optional[str] = None, type_: Optional[_TypeEngineArgument[_T]] = None, autoincrement: _AutoIncrementType = "auto", - default: Optional[Any] = None, + default: Optional[Any] = _NoArg.NO_ARG, + insert_default: Optional[Any] = _NoArg.NO_ARG, doc: Optional[str] = None, key: Optional[str] = None, index: Optional[bool] = None, @@ -1751,6 +1753,11 @@ def __init__( :ref:`metadata_defaults_toplevel` + :param insert_default: An alias of :paramref:`.Column.default` + for compatibility with :func:`_orm.mapped_column`. + + .. versionadded: 2.0.31 + :param doc: optional String that can be used by the ORM or similar to document attributes on the Python side. 
This attribute does **not** render SQL comments; use the @@ -2104,12 +2111,19 @@ def __init__( # otherwise, add DDL-related events self._set_type(self.type) - if default is not None: - if not isinstance(default, (ColumnDefault, Sequence)): - default = ColumnDefault(default) + if insert_default is not _NoArg.NO_ARG: + resolved_default = insert_default + elif default is not _NoArg.NO_ARG: + resolved_default = default + else: + resolved_default = None + + if resolved_default is not None: + if not isinstance(resolved_default, (ColumnDefault, Sequence)): + resolved_default = ColumnDefault(resolved_default) - self.default = default - l_args.append(default) + self.default = resolved_default + l_args.append(resolved_default) else: self.default = None diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index a54a5fcc8d5..97c2f086458 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -751,13 +751,25 @@ def test_assorted_repr(self): comment="foo", ), "Column('foo', Integer(), table=None, primary_key=True, " - "nullable=False, onupdate=%s, default=%s, server_default=%s, " - "comment='foo')" - % ( - ColumnDefault(1), - ColumnDefault(42), - DefaultClause("42"), + f"nullable=False, onupdate={ColumnDefault(1)}, default=" + f"{ColumnDefault(42)}, server_default={DefaultClause('42')}, " + "comment='foo')", + ), + ( + Column( + "foo", + Integer, + primary_key=True, + nullable=False, + onupdate=1, + insert_default=42, + server_default="42", + comment="foo", ), + "Column('foo', Integer(), table=None, primary_key=True, " + f"nullable=False, onupdate={ColumnDefault(1)}, default=" + f"{ColumnDefault(42)}, server_default={DefaultClause('42')}, " + "comment='foo')", ), ( Table("bar", MetaData(), Column("x", String)), @@ -4691,6 +4703,16 @@ def test_column_default_onupdate_keyword_as_clause(self): assert c.onupdate.arg == target assert c.onupdate.column is c + def test_column_insert_default(self): + c = self._fixture(insert_default="y") + assert c.default.arg == "y" + + def test_column_insert_default_predecende_on_default(self): + c = self._fixture(insert_default="x", default="y") + assert c.default.arg == "x" + c = self._fixture(default="y", insert_default="x") + assert c.default.arg == "x" + class ColumnOptionsTest(fixtures.TestBase): def test_default_generators(self): From ca7d2e9a482fd0b983c6df3db3ce0b2f4111af1f Mon Sep 17 00:00:00 2001 From: Semyon Pupkov Date: Sat, 11 May 2024 08:41:06 -0400 Subject: [PATCH 213/544] Optimize has_intersection func Optimize `has_intersection` function. It uses in few places, but even so it might be optimized. New version: 1. Does not allocate new set 2. A bit of performance speedup ``` from sqlalchemy import util import timeit import functools a = {1, 2, 3} b = [2, 3, 4] t1 = timeit.Timer(functools.partial(util.has_intersection, a, b)) t2 = timeit.Timer(functools.partial(util.has_intersection2, a, b)) print("old", t1.timeit()) print("new", t2.timeit()) old 0.37196154199773446 new 0.29704541599494405 old 0.37331208398973104 new 0.29647241700149607 ``` ### Description ### Checklist This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. 
one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #11378 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11378 Pull-request-sha: 258bf1af7c73c83502eb49240a996f5846c6a0a9 Change-Id: Ic1ec1448641304eba4751f55f1e3c2b217f7f352 (cherry picked from commit 5b43687da6820884c75531e89d6347bf285a3b2c) --- lib/sqlalchemy/util/_collections.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index e3a8ad834a5..9755f8d99e1 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -16,6 +16,7 @@ from typing import Any from typing import Callable from typing import cast +from typing import Container from typing import Dict from typing import FrozenSet from typing import Generic @@ -425,15 +426,14 @@ def to_list(x: Any, default: Optional[List[Any]] = None) -> List[Any]: return list(x) -def has_intersection(set_, iterable): +def has_intersection(set_: Container[Any], iterable: Iterable[Any]) -> bool: r"""return True if any items of set\_ are present in iterable. Goes through special effort to ensure __hash__ is not called on items in iterable that don't support it. """ - # TODO: optimize, write in C, etc. - return bool(set_.intersection([i for i in iterable if i.__hash__])) + return any(i in set_ for i in iterable if i.__hash__) def to_set(x): From a0a52e79eec780206bc014f301d301f345ec57a0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 24 May 2024 10:58:02 -0400 Subject: [PATCH 214/544] Add test for issue 11412 Issue #10365 was found to also fix an issue where single-inheritance criteria would not be correctly applied to a subclass entity that only appeared in the ``select_from()`` list. Fixes: #11412 Change-Id: Ic865737a3d075fceee346eea8044345233038f72 (cherry picked from commit 61d227a7d4f7be7b1f6fa72171d01c60e571939e) --- doc/build/changelog/changelog_14.rst | 7 +++- doc/build/changelog/changelog_20.rst | 7 +++- test/orm/inheritance/test_single.py | 52 ++++++++++++++++++++++++++++ 3 files changed, 64 insertions(+), 2 deletions(-) diff --git a/doc/build/changelog/changelog_14.rst b/doc/build/changelog/changelog_14.rst index 1d6a3f775ae..47586bfd4f6 100644 --- a/doc/build/changelog/changelog_14.rst +++ b/doc/build/changelog/changelog_14.rst @@ -23,7 +23,7 @@ This document details individual issue-level changes made throughout .. change:: :tags: bug, orm - :tickets: 10365 + :tickets: 10365, 11412 Fixed bug where ORM :func:`_orm.with_loader_criteria` would not apply itself to a :meth:`_sql.Select.join` where the ON clause were given as a @@ -31,6 +31,11 @@ This document details individual issue-level changes made throughout This is a backport of the same issue fixed in version 2.0 for 2.0.22. + **update** - this was found to also fix an issue where + single-inheritance criteria would not be correctly applied to a + subclass entity that only appeared in the ``select_from()`` list, + see :ticket:`11412` + .. 
changelog:: :version: 1.4.51 :released: January 2, 2024 diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 4b3c9b90005..b0194baa5b8 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -972,12 +972,17 @@ .. change:: :tags: bug, orm - :tickets: 10365 + :tickets: 10365, 11412 Fixed bug where ORM :func:`_orm.with_loader_criteria` would not apply itself to a :meth:`_sql.Select.join` where the ON clause were given as a plain SQL comparison, rather than as a relationship target or similar. + **update** - this was found to also fix an issue where + single-inheritance criteria would not be correctly applied to a + subclass entity that only appeared in the ``select_from()`` list, + see :ticket:`11412` + .. change:: :tags: bug, sql :tickets: 10408 diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py index f45194f29c5..bfdf0b7bcfa 100644 --- a/test/orm/inheritance/test_single.py +++ b/test/orm/inheritance/test_single.py @@ -377,6 +377,58 @@ def test_select_from_aliased_w_subclass(self): "WHERE employees_1.type IN (__[POSTCOMPILE_type_1])", ) + @testing.combinations( + ( + lambda Engineer, Report: select(Report.report_id) + .select_from(Engineer) + .join(Engineer.reports), + ), + ( + lambda Engineer, Report: select(Report.report_id).select_from( + orm_join(Engineer, Report, Engineer.reports) + ), + ), + ( + lambda Engineer, Report: select(Report.report_id).join_from( + Engineer, Report, Engineer.reports + ), + ), + ( + lambda Engineer, Report: select(Report.report_id) + .select_from(Engineer) + .join(Report), + ), + argnames="stmt_fn", + ) + @testing.combinations(True, False, argnames="alias_engineer") + def test_select_col_only_from_w_join(self, stmt_fn, alias_engineer): + """test #11412 which seems to have been fixed by #10365""" + + Engineer = self.classes.Engineer + Report = self.classes.Report + + if alias_engineer: + Engineer = aliased(Engineer) + stmt = testing.resolve_lambda( + stmt_fn, Engineer=Engineer, Report=Report + ) + + if alias_engineer: + self.assert_compile( + stmt, + "SELECT reports.report_id FROM employees AS employees_1 " + "JOIN reports ON employees_1.employee_id = " + "reports.employee_id WHERE employees_1.type " + "IN (__[POSTCOMPILE_type_1])", + ) + else: + self.assert_compile( + stmt, + "SELECT reports.report_id FROM employees JOIN reports " + "ON employees.employee_id = reports.employee_id " + "WHERE employees.type IN (__[POSTCOMPILE_type_1])", + ) + @testing.combinations( ( lambda Engineer, Report: select(Report) From c2094b70e130e8390a82347290bb0817d48d14f6 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 4 May 2024 22:06:12 +0200 Subject: [PATCH 215/544] Use deque for the asnycio cursors Uniform the implementation of the asyncio adapted cursors to consistently use a deque as the internal row buffer. 
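Reduced to a standalone sketch (independent of any particular DBAPI
adapter; the names are illustrative only), the buffering pattern shared
by the adapted cursors is:

```python
from collections import deque


class BufferedRows:
    """deque-based row buffer: popleft() for fetchone(), a bounded
    popleft() loop for fetchmany(), list() plus clear() for fetchall()."""

    def __init__(self, rows):
        self._rows = deque(rows)

    def fetchone(self):
        return self._rows.popleft() if self._rows else None

    def fetchmany(self, size=1):
        rr = self._rows
        return [rr.popleft() for _ in range(min(size, len(rr)))]

    def fetchall(self):
        retval = list(self._rows)
        self._rows.clear()
        return retval


buf = BufferedRows([(1,), (2,), (3,)])
assert buf.fetchmany(2) == [(1,), (2,)]
assert buf.fetchall() == [(3,)]
```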
Change-Id: If074f06c672569dbdd326cdf7a75800c45336d66 --- lib/sqlalchemy/dialects/mysql/aiomysql.py | 21 +++++++++-------- lib/sqlalchemy/dialects/mysql/asyncmy.py | 20 ++++++++-------- lib/sqlalchemy/dialects/postgresql/asyncpg.py | 19 ++++++++------- lib/sqlalchemy/dialects/postgresql/psycopg.py | 23 ++++++++----------- lib/sqlalchemy/dialects/sqlite/aiosqlite.py | 20 ++++++++-------- 5 files changed, 49 insertions(+), 54 deletions(-) diff --git a/lib/sqlalchemy/dialects/mysql/aiomysql.py b/lib/sqlalchemy/dialects/mysql/aiomysql.py index 405fa82c8a5..45e226b94e0 100644 --- a/lib/sqlalchemy/dialects/mysql/aiomysql.py +++ b/lib/sqlalchemy/dialects/mysql/aiomysql.py @@ -27,6 +27,8 @@ """ # noqa +from collections import deque + from .pymysql import MySQLDialect_pymysql from ... import pool from ... import util @@ -57,7 +59,7 @@ def __init__(self, adapt_connection): # see https://github.com/aio-libs/aiomysql/issues/543 self._cursor = self.await_(cursor.__aenter__()) - self._rows = [] + self._rows = deque() @property def description(self): @@ -87,7 +89,7 @@ def close(self): # exhausting rows, which we already have done for sync cursor. # another option would be to emulate aiosqlite dialect and assign # cursor only if we are doing server side cursor operation. - self._rows[:] = [] + self._rows.clear() def execute(self, operation, parameters=None): return self.await_(self._execute_async(operation, parameters)) @@ -106,7 +108,7 @@ async def _execute_async(self, operation, parameters): # of that here since our default result is not async. # we could just as easily grab "_rows" here and be done with it # but this is safer. - self._rows = list(await self._cursor.fetchall()) + self._rows = deque(await self._cursor.fetchall()) return result async def _executemany_async(self, operation, seq_of_parameters): @@ -118,11 +120,11 @@ def setinputsizes(self, *inputsizes): def __iter__(self): while self._rows: - yield self._rows.pop(0) + yield self._rows.popleft() def fetchone(self): if self._rows: - return self._rows.pop(0) + return self._rows.popleft() else: return None @@ -130,13 +132,12 @@ def fetchmany(self, size=None): if size is None: size = self.arraysize - retval = self._rows[0:size] - self._rows[:] = self._rows[size:] - return retval + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] def fetchall(self): - retval = self._rows[:] - self._rows[:] = [] + retval = list(self._rows) + self._rows.clear() return retval diff --git a/lib/sqlalchemy/dialects/mysql/asyncmy.py b/lib/sqlalchemy/dialects/mysql/asyncmy.py index 7360044d20b..474eb626d36 100644 --- a/lib/sqlalchemy/dialects/mysql/asyncmy.py +++ b/lib/sqlalchemy/dialects/mysql/asyncmy.py @@ -25,6 +25,7 @@ """ # noqa +from collections import deque from contextlib import asynccontextmanager from .pymysql import MySQLDialect_pymysql @@ -56,7 +57,7 @@ def __init__(self, adapt_connection): cursor = self._connection.cursor() self._cursor = self.await_(cursor.__aenter__()) - self._rows = [] + self._rows = deque() @property def description(self): @@ -86,7 +87,7 @@ def close(self): # exhausting rows, which we already have done for sync cursor. # another option would be to emulate aiosqlite dialect and assign # cursor only if we are doing server side cursor operation. 
- self._rows[:] = [] + self._rows.clear() def execute(self, operation, parameters=None): return self.await_(self._execute_async(operation, parameters)) @@ -108,7 +109,7 @@ async def _execute_async(self, operation, parameters): # of that here since our default result is not async. # we could just as easily grab "_rows" here and be done with it # but this is safer. - self._rows = list(await self._cursor.fetchall()) + self._rows = deque(await self._cursor.fetchall()) return result async def _executemany_async(self, operation, seq_of_parameters): @@ -120,11 +121,11 @@ def setinputsizes(self, *inputsizes): def __iter__(self): while self._rows: - yield self._rows.pop(0) + yield self._rows.popleft() def fetchone(self): if self._rows: - return self._rows.pop(0) + return self._rows.popleft() else: return None @@ -132,13 +133,12 @@ def fetchmany(self, size=None): if size is None: size = self.arraysize - retval = self._rows[0:size] - self._rows[:] = self._rows[size:] - return retval + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] def fetchall(self): - retval = self._rows[:] - self._rows[:] = [] + retval = list(self._rows) + self._rows.clear() return retval diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 12e711f52e2..b00ce5a02da 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -487,7 +487,7 @@ class AsyncAdapt_asyncpg_cursor: def __init__(self, adapt_connection): self._adapt_connection = adapt_connection self._connection = adapt_connection._connection - self._rows = [] + self._rows = deque() self._cursor = None self.description = None self.arraysize = 1 @@ -495,7 +495,7 @@ def __init__(self, adapt_connection): self._invalidate_schema_cache_asof = 0 def close(self): - self._rows[:] = [] + self._rows.clear() def _handle_exception(self, error): self._adapt_connection._handle_exception(error) @@ -535,7 +535,7 @@ async def _prepare_and_execute(self, operation, parameters): self._cursor = await prepared_stmt.cursor(*parameters) self.rowcount = -1 else: - self._rows = await prepared_stmt.fetch(*parameters) + self._rows = deque(await prepared_stmt.fetch(*parameters)) status = prepared_stmt.get_statusmsg() reg = re.match( @@ -583,11 +583,11 @@ def setinputsizes(self, *inputsizes): def __iter__(self): while self._rows: - yield self._rows.pop(0) + yield self._rows.popleft() def fetchone(self): if self._rows: - return self._rows.pop(0) + return self._rows.popleft() else: return None @@ -595,13 +595,12 @@ def fetchmany(self, size=None): if size is None: size = self.arraysize - retval = self._rows[0:size] - self._rows[:] = self._rows[size:] - return retval + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] def fetchall(self): - retval = self._rows[:] - self._rows[:] = [] + retval = list(self._rows) + self._rows.clear() return retval diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg.py b/lib/sqlalchemy/dialects/postgresql/psycopg.py index a1ad0fc6821..b8c0087dd49 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg.py @@ -85,6 +85,7 @@ """ # noqa from __future__ import annotations +from collections import deque import logging import re from typing import cast @@ -564,7 +565,7 @@ class AsyncAdapt_psycopg_cursor: def __init__(self, cursor, await_) -> None: self._cursor = cursor self.await_ = await_ - self._rows = [] + self._rows = deque() def __getattr__(self, name): return 
getattr(self._cursor, name) @@ -591,24 +592,19 @@ def execute(self, query, params=None, **kw): # eq/ne if res and res.status == self._psycopg_ExecStatus.TUPLES_OK: rows = self.await_(self._cursor.fetchall()) - if not isinstance(rows, list): - self._rows = list(rows) - else: - self._rows = rows + self._rows = deque(rows) return result def executemany(self, query, params_seq): return self.await_(self._cursor.executemany(query, params_seq)) def __iter__(self): - # TODO: try to avoid pop(0) on a list while self._rows: - yield self._rows.pop(0) + yield self._rows.popleft() def fetchone(self): if self._rows: - # TODO: try to avoid pop(0) on a list - return self._rows.pop(0) + return self._rows.popleft() else: return None @@ -616,13 +612,12 @@ def fetchmany(self, size=None): if size is None: size = self._cursor.arraysize - retval = self._rows[0:size] - self._rows = self._rows[size:] - return retval + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] def fetchall(self): - retval = self._rows - self._rows = [] + retval = list(self._rows) + self._rows.clear() return retval diff --git a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py index 6c915634d11..796a80cf060 100644 --- a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py @@ -78,6 +78,7 @@ def do_begin(conn): """ # noqa import asyncio +from collections import deque from functools import partial from .base import SQLiteExecutionContext @@ -113,10 +114,10 @@ def __init__(self, adapt_connection): self.arraysize = 1 self.rowcount = -1 self.description = None - self._rows = [] + self._rows = deque() def close(self): - self._rows[:] = [] + self._rows.clear() def execute(self, operation, parameters=None): try: @@ -132,7 +133,7 @@ def execute(self, operation, parameters=None): self.lastrowid = self.rowcount = -1 if not self.server_side: - self._rows = self.await_(_cursor.fetchall()) + self._rows = deque(self.await_(_cursor.fetchall())) else: self.description = None self.lastrowid = _cursor.lastrowid @@ -161,11 +162,11 @@ def setinputsizes(self, *inputsizes): def __iter__(self): while self._rows: - yield self._rows.pop(0) + yield self._rows.popleft() def fetchone(self): if self._rows: - return self._rows.pop(0) + return self._rows.popleft() else: return None @@ -173,13 +174,12 @@ def fetchmany(self, size=None): if size is None: size = self.arraysize - retval = self._rows[0:size] - self._rows[:] = self._rows[size:] - return retval + rr = self._rows + return [rr.popleft() for _ in range(min(size, len(rr)))] def fetchall(self): - retval = self._rows[:] - self._rows[:] = [] + retval = list(self._rows) + self._rows.clear() return retval From 62c242b78dee306738a2cd22f548679e9818a1ac Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 26 May 2024 11:34:27 -0400 Subject: [PATCH 216/544] skip dunders for langhelper symbol redefine; update tox Set up full Python 3.13 support to the extent currently possible, repairing issues within internal language helpers as well as the serializer extension module. update tox for what will be a regular condition: greenlet is not available (and possibly other things like pymssql): 1. dont use "sqlalchemy[asyncio]" in pyproejct.toml as an extra; this goes out to pypi and ignores the local file 2. add py{3,38,39,...} qualifiers for asyncio deps in tox.ini. 
After many attempts I seem to have something that's fairly non-repetetive though I'd still prefer a single variable for this, somehow Fixes: #11417 Change-Id: Ib2ceccd9583d8776700f0da5b591906efcfe6e6f (cherry picked from commit 577b53c70993a496aa4149adc477e0732310dd7d) --- doc/build/changelog/unreleased_20/11417.rst | 7 +++ lib/sqlalchemy/ext/serializer.py | 42 ++++++-------- lib/sqlalchemy/util/langhelpers.py | 2 + setup.cfg | 2 +- test/orm/test_mapper.py | 4 +- tox.ini | 64 +++++++++++++++------ 6 files changed, 78 insertions(+), 43 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11417.rst diff --git a/doc/build/changelog/unreleased_20/11417.rst b/doc/build/changelog/unreleased_20/11417.rst new file mode 100644 index 00000000000..8e27d059237 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11417.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, general + :tickets: 11417 + + Set up full Python 3.13 support to the extent currently possible, repairing + issues within internal language helpers as well as the serializer extension + module. diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py index f21e997a227..130d2537474 100644 --- a/lib/sqlalchemy/ext/serializer.py +++ b/lib/sqlalchemy/ext/serializer.py @@ -82,10 +82,9 @@ __all__ = ["Serializer", "Deserializer", "dumps", "loads"] -def Serializer(*args, **kw): - pickler = pickle.Pickler(*args, **kw) +class Serializer(pickle.Pickler): - def persistent_id(obj): + def persistent_id(self, obj): # print "serializing:", repr(obj) if isinstance(obj, Mapper) and not obj.non_primary: id_ = "mapper:" + b64encode(pickle.dumps(obj.class_)) @@ -113,9 +112,6 @@ def persistent_id(obj): return None return id_ - pickler.persistent_id = persistent_id - return pickler - our_ids = re.compile( r"(mapperprop|mapper|mapper_selectable|table|column|" @@ -123,20 +119,23 @@ def persistent_id(obj): ) -def Deserializer(file, metadata=None, scoped_session=None, engine=None): - unpickler = pickle.Unpickler(file) +class Deserializer(pickle.Unpickler): + + def __init__(self, file, metadata=None, scoped_session=None, engine=None): + super().__init__(file) + self.metadata = metadata + self.scoped_session = scoped_session + self.engine = engine - def get_engine(): - if engine: - return engine - elif scoped_session and scoped_session().bind: - return scoped_session().bind - elif metadata and metadata.bind: - return metadata.bind + def get_engine(self): + if self.engine: + return self.engine + elif self.scoped_session and self.scoped_session().bind: + return self.scoped_session().bind else: return None - def persistent_load(id_): + def persistent_load(self, id_): m = our_ids.match(str(id_)) if not m: return None @@ -157,20 +156,17 @@ def persistent_load(id_): cls = pickle.loads(b64decode(mapper)) return class_mapper(cls).attrs[keyname] elif type_ == "table": - return metadata.tables[args] + return self.metadata.tables[args] elif type_ == "column": table, colname = args.split(":") - return metadata.tables[table].c[colname] + return self.metadata.tables[table].c[colname] elif type_ == "session": - return scoped_session() + return self.scoped_session() elif type_ == "engine": - return get_engine() + return self.get_engine() else: raise Exception("Unknown token: %s" % type_) - unpickler.persistent_load = persistent_load - return unpickler - def dumps(obj, protocol=pickle.HIGHEST_PROTOCOL): buf = BytesIO() diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 5f4485a8f72..72cb28d1122 100644 --- 
a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -1659,6 +1659,8 @@ def __init__( items: List[symbol] cls._items = items = [] for k, v in dict_.items(): + if re.match(r"^__.*__$", k): + continue if isinstance(v, int): sym = symbol(k, canonical=v) elif not k.startswith("_"): diff --git a/setup.cfg b/setup.cfg index c977ae7a986..f9dcc52667f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -38,7 +38,7 @@ package_dir = install_requires = importlib-metadata;python_version<"3.8" - greenlet != 0.4.17;(platform_machine=='aarch64' or (platform_machine=='ppc64le' or (platform_machine=='x86_64' or (platform_machine=='amd64' or (platform_machine=='AMD64' or (platform_machine=='win32' or platform_machine=='WIN32')))))) + greenlet != 0.4.17;(python_version<"3.13" and (platform_machine=='aarch64' or (platform_machine=='ppc64le' or (platform_machine=='x86_64' or (platform_machine=='amd64' or (platform_machine=='AMD64' or (platform_machine=='win32' or platform_machine=='WIN32'))))))) typing-extensions >= 4.6.0 [options.extras_require] diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py index 64d0ac9abde..4b3bb99c5b1 100644 --- a/test/orm/test_mapper.py +++ b/test/orm/test_mapper.py @@ -2010,12 +2010,12 @@ def _x(self): ) # object gracefully handles this condition - assert not hasattr(User.x, "__name__") + assert not hasattr(User.x, "foobar") assert not hasattr(User.x, "comparator") m.add_property("some_attr", column_property(users.c.name)) - assert not hasattr(User.x, "__name__") + assert not hasattr(User.x, "foobar") assert hasattr(User.x, "comparator") def test_synonym_of_non_property_raises(self): diff --git a/tox.ini b/tox.ini index 72e17cfb686..746432bebfc 100644 --- a/tox.ini +++ b/tox.ini @@ -2,6 +2,20 @@ [tox] envlist = py +[greenletextras] +extras= + asyncio + sqlite: aiosqlite + sqlite_file: aiosqlite + postgresql: postgresql_asyncpg + mysql: asyncmy + mysql: aiomysql + mssql: aioodbc + + # not greenlet, but tends to not have packaging until the py version + # has been fully released + mssql: mssql_pymssql + [testenv] cov_args=--cov=sqlalchemy --cov-report term --cov-append --cov-report xml --exclude-tag memory-intensive --exclude-tag timing-intensive -k "not aaa_profiling" @@ -14,25 +28,20 @@ usedevelop= cov: True extras= - sqlite: aiosqlite - sqlite_file: aiosqlite - sqlite_file: sqlcipher; python_version < '3.10' + py{3,37,38,39,310,311,312}: {[greenletextras]extras} + + py{37,38,39,310}-sqlite_file: sqlcipher postgresql: postgresql - postgresql: postgresql_asyncpg postgresql: postgresql_pg8000 postgresql: postgresql_psycopg mysql: mysql mysql: pymysql - mysql: asyncmy - mysql: aiomysql mysql: mariadb_connector oracle: oracle oracle: oracle_oracledb mssql: mssql - mssql: aioodbc - py{3,37,38,39,310,311}-mssql: mssql_pymssql install_command= # TODO: I can find no way to get pip / tox / anyone to have this @@ -45,8 +54,6 @@ deps= # tracked by https://github.com/pytest-dev/pytest-xdist/issues/907 pytest-xdist!=3.3.0 - py312: greenlet>=3.0.0a1 - dbapimain-sqlite: git+https://github.com/omnilib/aiosqlite.git\#egg=aiosqlite dbapimain-sqlite: git+https://github.com/coleifer/sqlcipher3.git\#egg=sqlcipher3 @@ -63,7 +70,7 @@ deps= dbapimain-oracle: git+https://github.com/oracle/python-cx_Oracle.git\#egg=cx_Oracle - py312-mssql: git+https://github.com/mkleehammer/pyodbc.git\#egg=pyodbc + py313-mssql: git+https://github.com/mkleehammer/pyodbc.git\#egg=pyodbc dbapimain-mssql: git+https://github.com/mkleehammer/pyodbc.git\#egg=pyodbc cov: pytest-cov @@ -100,8 +107,6 @@ 
setenv= WORKERS={env:TOX_WORKERS:-n4 --max-worker-restart=5} - - nocext: DISABLE_SQLALCHEMY_CEXT=1 cext: REQUIRE_SQLALCHEMY_CEXT=1 cov: COVERAGE={[testenv]cov_args} @@ -110,12 +115,16 @@ setenv= oracle: WORKERS={env:TOX_WORKERS:-n2 --max-worker-restart=5} oracle: ORACLE={env:TOX_ORACLE:--db oracle} + oracle: EXTRA_ORACLE_DRIVERS={env:EXTRA_ORACLE_DRIVERS:--dbdriver cx_oracle --dbdriver oracledb --dbdriver oracledb_async} + py{313,314}-oracle: EXTRA_ORACLE_DRIVERS={env:EXTRA_ORACLE_DRIVERS:--dbdriver cx_oracle --dbdriver oracledb} sqlite: SQLITE={env:TOX_SQLITE:--db sqlite} sqlite_file: SQLITE={env:TOX_SQLITE_FILE:--db sqlite_file} sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric --dbdriver aiosqlite} + py{313,314}-sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric} + sqlite-nogreenlet: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric} py{37,38,39}-sqlite_file: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite --dbdriver pysqlcipher} @@ -139,10 +148,11 @@ setenv= mysql-nogreenlet: EXTRA_MYSQL_DRIVERS={env:EXTRA_MYSQL_DRIVERS:--dbdriver mysqldb --dbdriver pymysql --dbdriver mariadbconnector} mssql: MSSQL={env:TOX_MSSQL:--db mssql} - py{3,37,38,39,310,311}-mssql: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver aioodbc --dbdriver pymssql} - py{3,37,38,39,310,311}-mssql-nogreenlet: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver pymssql} - py312-mssql: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver aioodbc} - py312-mssql-nogreenlet: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc} + mssql: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver aioodbc --dbdriver pymssql} + py{313,314}-mssql: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver aioodbc} + + mssql-nogreenlet: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver pymssql} + py{313,314}-mssql-nogreenlet: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc} oracle,mssql,sqlite_file: IDENTS=--write-idents db_idents.txt @@ -187,6 +197,9 @@ commands = # suddently appearing for it to be stable enough for CI # pyright +extras = + {[greenletextras]extras} + [testenv:mypy] deps= pytest>=7.0.0rc1,<8 @@ -195,6 +208,8 @@ deps= importlib_metadata; python_version < '3.8' mypy >= 1.2.0 patch==1.* +extras= + {[greenletextras]extras} commands = pytest {env:PYTEST_COLOR} -m mypy {posargs} @@ -205,6 +220,9 @@ deps= {[testenv:mypy]deps} pytest-cov +extras= + {[greenletextras]extras} + commands = pytest {env:PYTEST_COLOR} -m mypy {env:COVERAGE} {posargs} @@ -214,6 +232,10 @@ setenv= # thanks to https://julien.danjou.info/the-best-flake8-extensions/ [testenv:lint] basepython = python3 + +extras= + {[greenletextras]extras} + deps= flake8==6.1.0 flake8-import-order @@ -260,10 +282,15 @@ basepython = {[testenv:lint]basepython} deps = {[testenv:lint]deps} allowlist_externals = {[testenv:lint]allowlist_externals} commands = {[testenv:lint]commands} +extras = {[testenv:lint]extras} + # command run in the github action when cext are active. [testenv:github-cext] +extras= + {[greenletextras]extras} + deps = {[testenv]deps} .[aiosqlite] commands= @@ -272,6 +299,9 @@ commands= # command run in the github action when cext are not active. 
[testenv:github-nocext] +extras= + {[greenletextras]extras} + deps = {[testenv]deps} .[aiosqlite] commands= From 3807dceccbe2ec289d9480a5a8a5655d8e66afed Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 29 May 2024 22:03:17 +0200 Subject: [PATCH 217/544] Fix Over serialization Fixed issue when serializing an :func:`_sql.over` clause with unbounded range or rows. Fixes: #11422 Change-Id: I52a9f72205fd9c7ef5620596c83551e73d5cee5b (cherry picked from commit 9f4f84ffdc1be487930b00d0b190bd492d302ca1) --- doc/build/changelog/unreleased_20/11422.rst | 6 ++++ lib/sqlalchemy/sql/elements.py | 32 +++++++++++++------- test/ext/test_serializer.py | 33 ++++++++++++++++++--- 3 files changed, 56 insertions(+), 15 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11422.rst diff --git a/doc/build/changelog/unreleased_20/11422.rst b/doc/build/changelog/unreleased_20/11422.rst new file mode 100644 index 00000000000..bde78793382 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11422.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, sql + :tickets: 11422 + + Fixed issue when serializing an :func:`_sql.over` clause with + unbounded range or rows. diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 0d753182969..d828c4f1f9c 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -14,7 +14,7 @@ from __future__ import annotations from decimal import Decimal -from enum import IntEnum +from enum import Enum import itertools import operator import re @@ -4139,7 +4139,7 @@ def _gen_cache_key( return ck -class _OverRange(IntEnum): +class _OverRange(Enum): RANGE_UNBOUNDED = 0 RANGE_CURRENT = 1 @@ -4147,6 +4147,8 @@ class _OverRange(IntEnum): RANGE_UNBOUNDED = _OverRange.RANGE_UNBOUNDED RANGE_CURRENT = _OverRange.RANGE_CURRENT +_IntOrRange = Union[int, _OverRange] + class Over(ColumnElement[_T]): """Represent an OVER clause. 
@@ -4175,7 +4177,8 @@ class Over(ColumnElement[_T]): """The underlying expression object to which this :class:`.Over` object refers.""" - range_: Optional[typing_Tuple[int, int]] + range_: Optional[typing_Tuple[_IntOrRange, _IntOrRange]] + rows: Optional[typing_Tuple[_IntOrRange, _IntOrRange]] def __init__( self, @@ -4220,19 +4223,24 @@ def __reduce__(self): ) def _interpret_range( - self, range_: typing_Tuple[Optional[int], Optional[int]] - ) -> typing_Tuple[int, int]: + self, + range_: typing_Tuple[Optional[_IntOrRange], Optional[_IntOrRange]], + ) -> typing_Tuple[_IntOrRange, _IntOrRange]: if not isinstance(range_, tuple) or len(range_) != 2: raise exc.ArgumentError("2-tuple expected for range/rows") - lower: int - upper: int + r0, r1 = range_ + + lower: _IntOrRange + upper: _IntOrRange - if range_[0] is None: + if r0 is None: lower = RANGE_UNBOUNDED + elif isinstance(r0, _OverRange): + lower = r0 else: try: - lower = int(range_[0]) + lower = int(r0) except ValueError as err: raise exc.ArgumentError( "Integer or None expected for range value" @@ -4241,11 +4249,13 @@ def _interpret_range( if lower == 0: lower = RANGE_CURRENT - if range_[1] is None: + if r1 is None: upper = RANGE_UNBOUNDED + elif isinstance(r1, _OverRange): + upper = r1 else: try: - upper = int(range_[1]) + upper = int(r1) except ValueError as err: raise exc.ArgumentError( "Integer or None expected for range value" diff --git a/test/ext/test_serializer.py b/test/ext/test_serializer.py index a52c59e2d34..40544f3ba03 100644 --- a/test/ext/test_serializer.py +++ b/test/ext/test_serializer.py @@ -18,6 +18,7 @@ from sqlalchemy.orm import scoped_session from sqlalchemy.orm import sessionmaker from sqlalchemy.testing import AssertsCompiledSQL +from sqlalchemy.testing import combinations from sqlalchemy.testing import eq_ from sqlalchemy.testing import fixtures from sqlalchemy.testing.entities import ComparableEntity @@ -279,6 +280,34 @@ def test_unicode(self): dialect="default", ) + @combinations( + ( + lambda: func.max(users.c.name).over(range_=(None, 0)), + "max(users.name) OVER (RANGE BETWEEN UNBOUNDED " + "PRECEDING AND CURRENT ROW)", + ), + ( + lambda: func.max(users.c.name).over(range_=(0, None)), + "max(users.name) OVER (RANGE BETWEEN CURRENT " + "ROW AND UNBOUNDED FOLLOWING)", + ), + ( + lambda: func.max(users.c.name).over(rows=(None, 0)), + "max(users.name) OVER (ROWS BETWEEN UNBOUNDED " + "PRECEDING AND CURRENT ROW)", + ), + ( + lambda: func.max(users.c.name).over(rows=(0, None)), + "max(users.name) OVER (ROWS BETWEEN CURRENT " + "ROW AND UNBOUNDED FOLLOWING)", + ), + ) + def test_over(self, over_fn, sql): + o = over_fn() + self.assert_compile(o, sql) + ol = serializer.loads(serializer.dumps(o), users.metadata) + self.assert_compile(ol, sql) + class ColumnPropertyWParamTest( AssertsCompiledSQL, fixtures.DeclarativeMappedTest @@ -331,7 +360,3 @@ def test_deserailize_colprop(self): "CAST(left(test.some_id, :left_2) AS INTEGER) = :param_1", checkparams={"left_1": 6, "left_2": 6, "param_1": 123456}, ) - - -if __name__ == "__main__": - testing.main() From 163631b55db27ba4d1d4f8f53b90ced8eda44bd8 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 29 May 2024 22:18:50 +0200 Subject: [PATCH 218/544] Add missing function element methods Added missing methods :meth:`_sql.FunctionFilter.within_group` and :meth:`_sql.WithinGroup.filter` Fixes: #11423 Change-Id: I4bafd9e3cab5883b28b2b997269df239739a2212 (cherry picked from commit 57bba096599ff10be008283261054e46c9d08d0b) --- doc/build/changelog/unreleased_20/11423.rst | 6 
+++ lib/sqlalchemy/sql/elements.py | 44 +++++++++++++++++-- test/sql/test_functions.py | 35 +++++++++++++-- .../typing/plain_files/sql/functions_again.py | 24 +++++----- 4 files changed, 93 insertions(+), 16 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11423.rst diff --git a/doc/build/changelog/unreleased_20/11423.rst b/doc/build/changelog/unreleased_20/11423.rst new file mode 100644 index 00000000000..ed6f988460e --- /dev/null +++ b/doc/build/changelog/unreleased_20/11423.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, sql + :tickets: 11423 + + Added missing methods :meth:`_sql.FunctionFilter.within_group` + and :meth:`_sql.WithinGroup.filter` diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index d828c4f1f9c..51735fdc95f 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -4313,7 +4313,7 @@ class WithinGroup(ColumnElement[_T]): def __init__( self, - element: FunctionElement[_T], + element: Union[FunctionElement[_T], FunctionFilter[_T]], *order_by: _ColumnExpressionArgument[Any], ): self.element = element @@ -4327,7 +4327,14 @@ def __reduce__(self): tuple(self.order_by) if self.order_by is not None else () ) - def over(self, partition_by=None, order_by=None, range_=None, rows=None): + def over( + self, + *, + partition_by: Optional[_ByArgument] = None, + order_by: Optional[_ByArgument] = None, + rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, + range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, + ) -> Over[_T]: """Produce an OVER clause against this :class:`.WithinGroup` construct. @@ -4343,6 +4350,24 @@ def over(self, partition_by=None, order_by=None, range_=None, rows=None): rows=rows, ) + @overload + def filter(self) -> Self: ... + + @overload + def filter( + self, + __criterion0: _ColumnExpressionArgument[bool], + *criterion: _ColumnExpressionArgument[bool], + ) -> FunctionFilter[_T]: ... + + def filter( + self, *criterion: _ColumnExpressionArgument[bool] + ) -> Union[Self, FunctionFilter[_T]]: + """Produce a FILTER clause against this function.""" + if not criterion: + return self + return FunctionFilter(self, *criterion) + if not TYPE_CHECKING: @util.memoized_property @@ -4395,7 +4420,7 @@ class FunctionFilter(ColumnElement[_T]): def __init__( self, - func: FunctionElement[_T], + func: Union[FunctionElement[_T], WithinGroup[_T]], *criterion: _ColumnExpressionArgument[bool], ): self.func = func @@ -4465,6 +4490,19 @@ def over( rows=rows, ) + def within_group( + self, *order_by: _ColumnExpressionArgument[Any] + ) -> WithinGroup[_T]: + """Produce a WITHIN GROUP (ORDER BY expr) clause against + this function. 
+ """ + return WithinGroup(self, *order_by) + + def within_group_type( + self, within_group: WithinGroup[_T] + ) -> Optional[TypeEngine[_T]]: + return None + def self_group( self, against: Optional[OperatorType] = None ) -> Union[Self, Grouping[_T]]: diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py index c47601b7616..7782f215bcd 100644 --- a/test/sql/test_functions.py +++ b/test/sql/test_functions.py @@ -845,6 +845,18 @@ def test_funcfilter_windowing_rows(self): ) def test_funcfilter_within_group(self): + self.assert_compile( + select( + func.rank() + .filter(table1.c.name > "foo") + .within_group(table1.c.name) + ), + "SELECT rank() FILTER (WHERE mytable.name > :name_1) " + "WITHIN GROUP (ORDER BY mytable.name) " + "AS anon_1 FROM mytable", + ) + + def test_within_group(self): stmt = select( table1.c.myid, func.percentile_cont(0.5).within_group(table1.c.name), @@ -858,7 +870,7 @@ def test_funcfilter_within_group(self): {"percentile_cont_1": 0.5}, ) - def test_funcfilter_within_group_multi(self): + def test_within_group_multi(self): stmt = select( table1.c.myid, func.percentile_cont(0.5).within_group( @@ -874,7 +886,7 @@ def test_funcfilter_within_group_multi(self): {"percentile_cont_1": 0.5}, ) - def test_funcfilter_within_group_desc(self): + def test_within_group_desc(self): stmt = select( table1.c.myid, func.percentile_cont(0.5).within_group(table1.c.name.desc()), @@ -888,7 +900,7 @@ def test_funcfilter_within_group_desc(self): {"percentile_cont_1": 0.5}, ) - def test_funcfilter_within_group_w_over(self): + def test_within_group_w_over(self): stmt = select( table1.c.myid, func.percentile_cont(0.5) @@ -904,6 +916,23 @@ def test_funcfilter_within_group_w_over(self): {"percentile_cont_1": 0.5}, ) + def test_within_group_filter(self): + stmt = select( + table1.c.myid, + func.percentile_cont(0.5) + .within_group(table1.c.name) + .filter(table1.c.myid > 42), + ) + self.assert_compile( + stmt, + "SELECT mytable.myid, percentile_cont(:percentile_cont_1) " + "WITHIN GROUP (ORDER BY mytable.name) " + "FILTER (WHERE mytable.myid > :myid_1) " + "AS anon_1 " + "FROM mytable", + {"percentile_cont_1": 0.5, "myid_1": 42}, + ) + def test_incorrect_none_type(self): from sqlalchemy.sql.expression import FunctionElement diff --git a/test/typing/plain_files/sql/functions_again.py b/test/typing/plain_files/sql/functions_again.py index 1919218f58d..67888790f6b 100644 --- a/test/typing/plain_files/sql/functions_again.py +++ b/test/typing/plain_files/sql/functions_again.py @@ -18,7 +18,8 @@ class Foo(Base): c: Mapped[str] -func.row_number().over(order_by=Foo.a, partition_by=Foo.b.desc()) +# EXPECTED_TYPE: Over[Any] +reveal_type(func.row_number().over(order_by=Foo.a, partition_by=Foo.b.desc())) func.row_number().over(order_by=[Foo.a.desc(), Foo.b.desc()]) func.row_number().over(partition_by=[Foo.a.desc(), Foo.b.desc()]) func.row_number().over(order_by="a", partition_by=("a", "b")) @@ -29,17 +30,23 @@ class Foo(Base): reveal_type(func.row_number().filter()) # EXPECTED_TYPE: FunctionFilter[Any] reveal_type(func.row_number().filter(Foo.a > 0)) - +# EXPECTED_TYPE: FunctionFilter[Any] +reveal_type(func.row_number().within_group(Foo.a).filter(Foo.b < 0)) +# EXPECTED_TYPE: WithinGroup[Any] +reveal_type(func.row_number().within_group(Foo.a)) +# EXPECTED_TYPE: WithinGroup[Any] +reveal_type(func.row_number().filter(Foo.a > 0).within_group(Foo.a)) +# EXPECTED_TYPE: Over[Any] +reveal_type(func.row_number().filter(Foo.a > 0).over()) +# EXPECTED_TYPE: Over[Any] 
+reveal_type(func.row_number().within_group(Foo.a).over()) # test #10801 # EXPECTED_TYPE: max[int] reveal_type(func.max(Foo.b)) -stmt1 = select( - Foo.a, - func.min(Foo.b), -).group_by(Foo.a) +stmt1 = select(Foo.a, func.min(Foo.b)).group_by(Foo.a) # EXPECTED_TYPE: Select[Tuple[int, int]] reveal_type(stmt1) @@ -48,10 +55,7 @@ class Foo(Base): reveal_type(func.coalesce(Foo.c, "a", "b")) -stmt2 = select( - Foo.a, - func.coalesce(Foo.c, "a", "b"), -).group_by(Foo.a) +stmt2 = select(Foo.a, func.coalesce(Foo.c, "a", "b")).group_by(Foo.a) # EXPECTED_TYPE: Select[Tuple[int, str]] reveal_type(stmt2) From 13f87c2bdc2ef95cfc12f46b736fa0c4af26d337 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 29 May 2024 21:39:08 +0200 Subject: [PATCH 219/544] Make `FunctionFilter.filter` generative Fixed bug in :meth:`_sql.FunctionFilter.filter` that would mutate the existing function in-place. It now behaves like the rest of the SQLAlchemy API, returning a new instance instead of mutating the original one. Fixes: #11426 Change-Id: I46ffebaed82426cfb1623db066686cfb911055a1 (cherry picked from commit fe2ced9e79b9640f3ca135f8d3782dd41ca16782) --- doc/build/changelog/unreleased_20/11426.rst | 8 ++++++++ lib/sqlalchemy/sql/elements.py | 5 +++-- test/sql/test_functions.py | 11 +++++++++++ 3 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11426.rst diff --git a/doc/build/changelog/unreleased_20/11426.rst b/doc/build/changelog/unreleased_20/11426.rst new file mode 100644 index 00000000000..c9018b02f45 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11426.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, sql + :tickets: 11426 + + Fixed bug in :meth:`_sql.FunctionFilter.filter` that would mutate + the existing function in-place. It now behaves like the rest of the + SQLAlchemy API, returning a new instance instead of mutating the + original one. diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 0d753182969..3f8f9abbcb3 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -4356,7 +4356,7 @@ def _from_objects(self) -> List[FromClause]: ) -class FunctionFilter(ColumnElement[_T]): +class FunctionFilter(Generative, ColumnElement[_T]): """Represent a function FILTER clause. This is a special operator against aggregate and window functions, @@ -4389,8 +4389,9 @@ def __init__( *criterion: _ColumnExpressionArgument[bool], ): self.func = func - self.filter(*criterion) + self.filter.non_generative(self, *criterion) # type: ignore + @_generative def filter(self, *criterion: _ColumnExpressionArgument[bool]) -> Self: """Produce an additional FILTER against the function. 
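As a standalone illustration (separate from the patch itself; the ``scores`` table and its columns are invented for the example), the following sketch shows how the newly added :meth:`_sql.WithinGroup.filter` and :meth:`_sql.FunctionFilter.within_group` methods combine, along with the now-generative behavior of :meth:`_sql.FunctionFilter.filter`:

    from sqlalchemy import Column, Integer, MetaData, String, Table, func, select

    metadata = MetaData()
    scores = Table(
        "scores",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("player", String(50)),
        Column("points", Integer),
    )

    # WithinGroup.filter(): WITHIN GROUP (ORDER BY ...) first, FILTER added after
    expr1 = (
        func.percentile_cont(0.5)
        .within_group(scores.c.points)
        .filter(scores.c.points > 0)
    )

    # FunctionFilter.within_group(): FILTER first, WITHIN GROUP added after
    expr2 = (
        func.rank()
        .filter(scores.c.player != "banned")
        .within_group(scores.c.points)
    )

    # FunctionFilter.filter() is generative: ff keeps its single criterion,
    # ff2 is a new object carrying both
    ff = func.count(scores.c.id).filter(scores.c.points > 10)
    ff2 = ff.filter(scores.c.player == "alice")

    print(select(expr1, expr2, ff, ff2))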
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py index c47601b7616..c324c8f33ab 100644 --- a/test/sql/test_functions.py +++ b/test/sql/test_functions.py @@ -844,6 +844,17 @@ def test_funcfilter_windowing_rows(self): "AS anon_1 FROM mytable", ) + def test_funcfilter_more_criteria(self): + ff = func.rank().filter(table1.c.name > "foo") + ff2 = ff.filter(table1.c.myid == 1) + self.assert_compile( + select(ff, ff2), + "SELECT rank() FILTER (WHERE mytable.name > :name_1) AS anon_1, " + "rank() FILTER (WHERE mytable.name > :name_1 AND " + "mytable.myid = :myid_1) AS anon_2 FROM mytable", + {"name_1": "foo", "myid_1": 1}, + ) + def test_funcfilter_within_group(self): stmt = select( table1.c.myid, From 8e7a649e9a5e61f19e2f3125eea8d9a7cd090d0d Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sun, 2 Jun 2024 11:51:31 +0200 Subject: [PATCH 220/544] Document InstanceState modified and expired. Fixes: #11431 Change-Id: Iff762b0c14db5b8851ea8fa20f4304c0cc6657de (cherry picked from commit c1cc8793388214351ab0ccead9616c80458c5bb9) --- lib/sqlalchemy/orm/state.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py index 03b81f90405..9dfd7f64fe9 100644 --- a/lib/sqlalchemy/orm/state.py +++ b/lib/sqlalchemy/orm/state.py @@ -98,7 +98,7 @@ def __call__( @inspection._self_inspects class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]): - """tracks state information at the instance level. + """Tracks state information at the instance level. The :class:`.InstanceState` is a key object used by the SQLAlchemy ORM in order to track the state of an object; @@ -148,7 +148,14 @@ class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]): committed_state: Dict[str, Any] modified: bool = False + """When ``True`` the object was modified.""" expired: bool = False + """When ``True`` the object is :term:`expired`. + + .. seealso:: + + :ref:`session_expire` + """ _deleted: bool = False _load_pending: bool = False _orphaned_outside_of_session: bool = False @@ -169,11 +176,12 @@ def _instance_dict(self): expired_attributes: Set[str] """The set of keys which are 'expired' to be loaded by - the manager's deferred scalar loader, assuming no pending - changes. + the manager's deferred scalar loader, assuming no pending + changes. - see also the ``unmodified`` collection which is intersected - against this set when a refresh operation occurs.""" + See also the ``unmodified`` collection which is intersected + against this set when a refresh operation occurs. + """ callables: Dict[str, Callable[[InstanceState[_O], PassiveFlag], Any]] """A namespace where a per-state loader callable can be associated. @@ -228,7 +236,6 @@ def transient(self) -> bool: def pending(self) -> bool: """Return ``True`` if the object is :term:`pending`. - .. seealso:: :ref:`session_object_states` From 6eda3cae02b0944bd5adb47145ca11404ec25f44 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 3 Jun 2024 14:50:25 -0400 Subject: [PATCH 221/544] expand entity_isa to include simple "isa" in poly case Fixed issue where the :func:`_orm.selectinload` and :func:`_orm.subqueryload` loader options would fail to take effect when made against an inherited subclass that itself included a subclass-specific :paramref:`_orm.Mapper.with_polymorphic` setting. 
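For illustration only (a minimal mapping loosely modeled on the new test fixture; the class and attribute names are invented), the scenario addressed here looks roughly like the following, where a :func:`_orm.selectinload` option is applied to a query against a subclass that itself declares ``polymorphic_load="selectin"``:

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine, select
    from sqlalchemy.orm import DeclarativeBase, Session, relationship, selectinload

    class Base(DeclarativeBase):
        pass

    class B(Base):
        __tablename__ = "b"
        id = Column(Integer, primary_key=True)
        a_id = Column(ForeignKey("a.id"))

    class A(Base):
        __tablename__ = "a"
        id = Column(Integer, primary_key=True)
        type = Column(String(10))
        bs = relationship(B)
        __mapper_args__ = {"polymorphic_on": "type", "polymorphic_identity": "a"}

    class ASub(A):
        __tablename__ = "asub"
        id = Column(ForeignKey("a.id"), primary_key=True)
        sub_data = Column(String(10))
        __mapper_args__ = {
            "polymorphic_load": "selectin",  # subclass-specific polymorphic setting
            "polymorphic_identity": "asub",
        }

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(ASub(sub_data="sub", bs=[B(), B()]))
        session.commit()

        # previously the selectinload() option could be silently ignored for
        # this query, falling back to lazy loads of .bs; it now takes effect
        for obj in session.scalars(select(ASub).options(selectinload(A.bs))):
            print(obj.sub_data, len(obj.bs))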
Fixes: #11446 Change-Id: I2df3ebedbe4aa9da58af99d7729e5f3052ad6abc (cherry picked from commit 63a903b918343ca312aaded93b7e9af7a88fa3a8) --- doc/build/changelog/unreleased_20/11446.rst | 8 + lib/sqlalchemy/orm/util.py | 2 +- test/orm/inheritance/test_assorted_poly.py | 175 ++++++++++++++++++++ 3 files changed, 184 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/11446.rst diff --git a/doc/build/changelog/unreleased_20/11446.rst b/doc/build/changelog/unreleased_20/11446.rst new file mode 100644 index 00000000000..747230b869f --- /dev/null +++ b/doc/build/changelog/unreleased_20/11446.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, orm + :tickets: 11446 + + Fixed issue where the :func:`_orm.selectinload` and + :func:`_orm.subqueryload` loader options would fail to take effect when + made against an inherited subclass that itself included a subclass-specific + :paramref:`_orm.Mapper.with_polymorphic` setting. diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index ad2b69ce313..0ab3536dddc 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -2154,7 +2154,7 @@ def _entity_isa(given: _InternalEntityType[Any], mapper: Mapper[Any]) -> bool: mapper ) elif given.with_polymorphic_mappers: - return mapper in given.with_polymorphic_mappers + return mapper in given.with_polymorphic_mappers or given.isa(mapper) else: return given.isa(mapper) diff --git a/test/orm/inheritance/test_assorted_poly.py b/test/orm/inheritance/test_assorted_poly.py index 49d90f6c437..ab06dbaea3d 100644 --- a/test/orm/inheritance/test_assorted_poly.py +++ b/test/orm/inheritance/test_assorted_poly.py @@ -32,6 +32,7 @@ from sqlalchemy.orm import selectinload from sqlalchemy.orm import Session from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm import subqueryload from sqlalchemy.orm import with_polymorphic from sqlalchemy.orm.interfaces import MANYTOONE from sqlalchemy.testing import AssertsCompiledSQL @@ -3148,3 +3149,177 @@ def test_big_query(self, query_type, use_criteria): head, UnitHead(managers=expected_managers), ) + + +@testing.combinations( + (2,), + (3,), + id_="s", + argnames="num_levels", +) +@testing.combinations( + ("with_poly_star",), + ("inline",), + ("selectin",), + ("none",), + id_="s", + argnames="wpoly_type", +) +class SubclassWithPolyEagerLoadTest(fixtures.DeclarativeMappedTest): + """test #11446""" + + @classmethod + def setup_classes(cls): + Base = cls.DeclarativeBasic + + class B(Base): + __tablename__ = "b" + id = Column(Integer, primary_key=True) + a_id = Column(ForeignKey("a.id")) + + class A(Base): + __tablename__ = "a" + + id = Column(Integer, primary_key=True) + type = Column(String(10)) + bs = relationship("B") + + if cls.wpoly_type == "selectin": + __mapper_args__ = {"polymorphic_on": "type"} + elif cls.wpoly_type == "inline": + __mapper_args__ = {"polymorphic_on": "type"} + elif cls.wpoly_type == "with_poly_star": + __mapper_args__ = { + "with_polymorphic": "*", + "polymorphic_on": "type", + } + else: + __mapper_args__ = {"polymorphic_on": "type"} + + class ASub(A): + __tablename__ = "asub" + id = Column(ForeignKey("a.id"), primary_key=True) + sub_data = Column(String(10)) + + if cls.wpoly_type == "selectin": + __mapper_args__ = { + "polymorphic_load": "selectin", + "polymorphic_identity": "asub", + } + elif cls.wpoly_type == "inline": + __mapper_args__ = { + "polymorphic_load": "inline", + "polymorphic_identity": "asub", + } + elif cls.wpoly_type == "with_poly_star": + __mapper_args__ = { + "with_polymorphic": "*", + 
"polymorphic_identity": "asub", + } + else: + __mapper_args__ = {"polymorphic_identity": "asub"} + + if cls.num_levels == 3: + + class ASubSub(ASub): + __tablename__ = "asubsub" + id = Column(ForeignKey("asub.id"), primary_key=True) + sub_sub_data = Column(String(10)) + + if cls.wpoly_type == "selectin": + __mapper_args__ = { + "polymorphic_load": "selectin", + "polymorphic_identity": "asubsub", + } + elif cls.wpoly_type == "inline": + __mapper_args__ = { + "polymorphic_load": "inline", + "polymorphic_identity": "asubsub", + } + elif cls.wpoly_type == "with_poly_star": + __mapper_args__ = { + "with_polymorphic": "*", + "polymorphic_identity": "asubsub", + } + else: + __mapper_args__ = {"polymorphic_identity": "asubsub"} + + @classmethod + def insert_data(cls, connection): + if cls.num_levels == 3: + ASubSub, B = cls.classes("ASubSub", "B") + + with Session(connection) as sess: + sess.add_all( + [ + ASubSub( + sub_data="sub", + sub_sub_data="subsub", + bs=[B(), B(), B()], + ) + for i in range(3) + ] + ) + + sess.commit() + else: + ASub, B = cls.classes("ASub", "B") + + with Session(connection) as sess: + sess.add_all( + [ + ASub(sub_data="sub", bs=[B(), B(), B()]) + for i in range(3) + ] + ) + sess.commit() + + @testing.variation("query_from", ["aliased_class", "class_", "parent"]) + @testing.combinations(selectinload, subqueryload, argnames="loader_fn") + def test_thing(self, query_from, loader_fn): + + A = self.classes.A + + if self.num_levels == 2: + target = self.classes.ASub + elif self.num_levels == 3: + target = self.classes.ASubSub + + if query_from.aliased_class: + asub_alias = aliased(target) + query = select(asub_alias).options(loader_fn(asub_alias.bs)) + elif query_from.class_: + query = select(target).options(loader_fn(A.bs)) + elif query_from.parent: + query = select(A).options(loader_fn(A.bs)) + + s = fixture_session() + + # NOTE: this is likely a different bug - setting + # polymorphic_load to "inline" and loading from the parent does not + # descend to the ASubSub subclass; however "selectin" setting + # **does**. this is inconsistent + if ( + query_from.parent + and self.wpoly_type == "inline" + and self.num_levels == 3 + ): + # this should ideally be "2" + expected_q = 5 + + elif query_from.parent and self.wpoly_type == "none": + expected_q = 5 + elif query_from.parent and self.wpoly_type == "selectin": + expected_q = 3 + else: + expected_q = 2 + + with self.assert_statement_count(testing.db, expected_q): + for obj in s.scalars(query): + # test both that with_polymorphic loaded + eq_(obj.sub_data, "sub") + if self.num_levels == 3: + eq_(obj.sub_sub_data, "subsub") + + # as well as the collection eagerly loaded + assert obj.bs From 9a74a282d0cb5a924322b9ad4b07e6196b55612a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 4 Jun 2024 10:56:26 -0400 Subject: [PATCH 222/544] add additional contextual path info when splicing eager joins Fixed very old issue involving the :paramref:`_orm.joinedload.innerjoin` parameter where making use of this parameter mixed into a query that also included joined eager loads along a self-referential or other cyclical relationship, along with complicating factors like inner joins added for secondary tables and such, would have the chance of splicing a particular inner join to the wrong part of the query. Additional state has been added to the internal method that does this splice to make a better decision as to where splicing should proceed. 
Fixes: #11449 Change-Id: Ie8f0e8d9bb7958baac33c7c2231e4afae15cf5b1 (cherry picked from commit c4c57237b76f3992a62c6eb5c23fd4e1919f1e4a) --- doc/build/changelog/unreleased_20/11449.rst | 12 ++ lib/sqlalchemy/orm/strategies.py | 32 +++- lib/sqlalchemy/orm/util.py | 2 +- test/orm/test_eager_relations.py | 174 ++++++++++++++++++++ 4 files changed, 215 insertions(+), 5 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11449.rst diff --git a/doc/build/changelog/unreleased_20/11449.rst b/doc/build/changelog/unreleased_20/11449.rst new file mode 100644 index 00000000000..f7974cfd76f --- /dev/null +++ b/doc/build/changelog/unreleased_20/11449.rst @@ -0,0 +1,12 @@ +.. change:: + :tags: bug, orm + :tickets: 11449 + + Fixed very old issue involving the :paramref:`_orm.joinedload.innerjoin` + parameter where making use of this parameter mixed into a query that also + included joined eager loads along a self-referential or other cyclical + relationship, along with complicating factors like inner joins added for + secondary tables and such, would have the chance of splicing a particular + inner join to the wrong part of the query. Additional state has been added + to the internal method that does this splice to make a better decision as + to where splicing should proceed. diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index 20c3b9cc6b0..00c6fcb6c1a 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -2506,7 +2506,7 @@ def _create_eager_join( or query_entity.entity_zero.represents_outer_join or (chained_from_outerjoin and isinstance(towrap, sql.Join)), _left_memo=self.parent, - _right_memo=self.mapper, + _right_memo=path[self.mapper], _extra_criteria=extra_join_criteria, ) else: @@ -2546,7 +2546,14 @@ def _create_eager_join( ) def _splice_nested_inner_join( - self, path, join_obj, clauses, onclause, extra_criteria, splicing=False + self, + path, + join_obj, + clauses, + onclause, + extra_criteria, + splicing=False, + detected_existing_path=None, ): # recursive fn to splice a nested join into an existing one. # splicing=False means this is the outermost call, and it @@ -2568,13 +2575,23 @@ def _splice_nested_inner_join( ) elif not isinstance(join_obj, orm_util._ORMJoin): if path[-2].isa(splicing): + + if detected_existing_path: + # TODO: refine this into a more efficient method + if not detected_existing_path.contains_mapper(splicing): + return None + elif path_registry.PathRegistry.coerce( + detected_existing_path[len(path) :] + ).contains_mapper(splicing): + return None + return orm_util._ORMJoin( join_obj, clauses.aliased_insp, onclause, isouter=False, _left_memo=splicing, - _right_memo=path[-1].mapper, + _right_memo=path[path[-1].mapper], _extra_criteria=extra_criteria, ) else: @@ -2586,7 +2603,12 @@ def _splice_nested_inner_join( clauses, onclause, extra_criteria, - join_obj._right_memo, + # NOTE: this is the one place _right_memo is consumed + splicing=( + join_obj._right_memo[-1].mapper + if join_obj._right_memo is not None + else None + ), ) if target_join is None: right_splice = False @@ -2597,7 +2619,9 @@ def _splice_nested_inner_join( onclause, extra_criteria, join_obj._left_memo, + detected_existing_path=join_obj._right_memo, ) + if target_join is None: # should only return None when recursively called, # e.g. 
splicing refers to a from obj diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 0ab3536dddc..9835f824470 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -1943,7 +1943,7 @@ def _splice_into_center(self, other): self.onclause, isouter=self.isouter, _left_memo=self._left_memo, - _right_memo=other._left_memo, + _right_memo=None, ) return _ORMJoin( diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py index 2e762c2d3cb..bc3d8f10c2c 100644 --- a/test/orm/test_eager_relations.py +++ b/test/orm/test_eager_relations.py @@ -41,6 +41,7 @@ from sqlalchemy.testing import is_not from sqlalchemy.testing import mock from sqlalchemy.testing.assertsql import CompiledSQL +from sqlalchemy.testing.assertsql import RegexSQL from sqlalchemy.testing.entities import ComparableEntity from sqlalchemy.testing.fixtures import fixture_session from sqlalchemy.testing.schema import Column @@ -3696,6 +3697,179 @@ def test_joined_across(self): self._assert_result(q) +class InnerJoinSplicingWSecondarySelfRefTest( + fixtures.MappedTest, testing.AssertsCompiledSQL +): + """test for issue 11449""" + + __dialect__ = "default" + __backend__ = True # exercise hardcore join nesting on backends + + @classmethod + def define_tables(cls, metadata): + Table( + "kind", + metadata, + Column("id", Integer, primary_key=True), + Column("name", String(50)), + ) + + Table( + "node", + metadata, + Column("id", Integer, primary_key=True), + Column("name", String(50)), + Column( + "common_node_id", Integer, ForeignKey("node.id"), nullable=True + ), + Column("kind_id", Integer, ForeignKey("kind.id"), nullable=False), + ) + Table( + "node_group", + metadata, + Column("id", Integer, primary_key=True), + Column("name", String(50)), + ) + Table( + "node_group_node", + metadata, + Column( + "node_group_id", + Integer, + ForeignKey("node_group.id"), + primary_key=True, + ), + Column( + "node_id", Integer, ForeignKey("node.id"), primary_key=True + ), + ) + + @classmethod + def setup_classes(cls): + class Kind(cls.Comparable): + pass + + class Node(cls.Comparable): + pass + + class NodeGroup(cls.Comparable): + pass + + class NodeGroupNode(cls.Comparable): + pass + + @classmethod + def insert_data(cls, connection): + kind = cls.tables.kind + connection.execute( + kind.insert(), [{"id": 1, "name": "a"}, {"id": 2, "name": "c"}] + ) + node = cls.tables.node + connection.execute( + node.insert(), + {"id": 1, "name": "nc", "kind_id": 2}, + ) + + connection.execute( + node.insert(), + {"id": 2, "name": "na", "kind_id": 1, "common_node_id": 1}, + ) + + node_group = cls.tables.node_group + node_group_node = cls.tables.node_group_node + + connection.execute(node_group.insert(), {"id": 1, "name": "group"}) + connection.execute( + node_group_node.insert(), + {"id": 1, "node_group_id": 1, "node_id": 2}, + ) + connection.commit() + + @testing.fixture(params=["common_nodes,kind", "kind,common_nodes"]) + def node_fixture(self, request): + Kind, Node, NodeGroup, NodeGroupNode = self.classes( + "Kind", "Node", "NodeGroup", "NodeGroupNode" + ) + kind, node, node_group, node_group_node = self.tables( + "kind", "node", "node_group", "node_group_node" + ) + self.mapper_registry.map_imperatively(Kind, kind) + + if request.param == "common_nodes,kind": + self.mapper_registry.map_imperatively( + Node, + node, + properties=dict( + common_node=relationship( + "Node", + remote_side=[node.c.id], + ), + kind=relationship(Kind, innerjoin=True, lazy="joined"), + ), + ) + elif request.param == 
"kind,common_nodes": + self.mapper_registry.map_imperatively( + Node, + node, + properties=dict( + kind=relationship(Kind, innerjoin=True, lazy="joined"), + common_node=relationship( + "Node", + remote_side=[node.c.id], + ), + ), + ) + + self.mapper_registry.map_imperatively( + NodeGroup, + node_group, + properties=dict( + nodes=relationship(Node, secondary="node_group_node") + ), + ) + self.mapper_registry.map_imperatively(NodeGroupNode, node_group_node) + + def test_select(self, node_fixture): + Kind, Node, NodeGroup, NodeGroupNode = self.classes( + "Kind", "Node", "NodeGroup", "NodeGroupNode" + ) + + session = fixture_session() + with self.sql_execution_asserter(testing.db) as asserter: + group = ( + session.scalars( + select(NodeGroup) + .where(NodeGroup.name == "group") + .options( + joinedload(NodeGroup.nodes).joinedload( + Node.common_node + ) + ) + ) + .unique() + .one_or_none() + ) + + eq_(group.nodes[0].common_node.kind.name, "c") + eq_(group.nodes[0].kind.name, "a") + + asserter.assert_( + RegexSQL( + r"SELECT .* FROM node_group " + r"LEFT OUTER JOIN \(node_group_node AS node_group_node_1 " + r"JOIN node AS node_2 " + r"ON node_2.id = node_group_node_1.node_id " + r"JOIN kind AS kind_\d ON kind_\d.id = node_2.kind_id\) " + r"ON node_group.id = node_group_node_1.node_group_id " + r"LEFT OUTER JOIN " + r"\(node AS node_1 JOIN kind AS kind_\d " + r"ON kind_\d.id = node_1.kind_id\) " + r"ON node_1.id = node_2.common_node_id " + r"WHERE node_group.name = :name_5" + ) + ) + + class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL): """test #2188""" From 228d98e43a0bba1f33cd46f92f14851ecccbb728 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 10 Jun 2024 22:59:49 -0400 Subject: [PATCH 223/544] include HasCTE traversal elements in TextualSelect Fixed caching issue where using the :meth:`.TextualSelect.add_cte` method of the :class:`.TextualSelect` construct would not set a correct cache key which distinguished between different CTE expressions. Fixes: #11471 Change-Id: Ia9ce2c8cfd128f0f130aa9b26448dc23d994c324 (cherry picked from commit faecebc9df2a57173ee720973ba44ada370b682f) --- doc/build/changelog/unreleased_14/11471.rst | 7 +++++++ lib/sqlalchemy/sql/selectable.py | 12 ++++++++---- test/sql/test_compare.py | 9 +++++++++ 3 files changed, 24 insertions(+), 4 deletions(-) create mode 100644 doc/build/changelog/unreleased_14/11471.rst diff --git a/doc/build/changelog/unreleased_14/11471.rst b/doc/build/changelog/unreleased_14/11471.rst new file mode 100644 index 00000000000..f669eabc789 --- /dev/null +++ b/doc/build/changelog/unreleased_14/11471.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, sql + :tickets: 11471 + + Fixed caching issue where using the :meth:`.TextualSelect.add_cte` method + of the :class:`.TextualSelect` construct would not set a correct cache key + which distinguished between different CTE expressions. 
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 8d6c986a325..c6e598504b3 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -6790,10 +6790,14 @@ class was renamed _label_style = LABEL_STYLE_NONE - _traverse_internals: _TraverseInternalsType = [ - ("element", InternalTraversal.dp_clauseelement), - ("column_args", InternalTraversal.dp_clauseelement_list), - ] + SupportsCloneAnnotations._clone_annotations_traverse_internals + _traverse_internals: _TraverseInternalsType = ( + [ + ("element", InternalTraversal.dp_clauseelement), + ("column_args", InternalTraversal.dp_clauseelement_list), + ] + + SupportsCloneAnnotations._clone_annotations_traverse_internals + + HasCTE._has_ctes_traverse_internals + ) _is_textual = True diff --git a/test/sql/test_compare.py b/test/sql/test_compare.py index 746058c679e..c1f6e7f1136 100644 --- a/test/sql/test_compare.py +++ b/test/sql/test_compare.py @@ -205,6 +205,15 @@ class CoreFixtures: bindparam("bar", type_=String) ), ), + lambda: ( + # test #11471 + text("select * from table") + .columns(a=Integer()) + .add_cte(table_b.select().cte()), + text("select * from table") + .columns(a=Integer()) + .add_cte(table_b.select().where(table_b.c.a > 5).cte()), + ), lambda: ( literal(1).op("+")(literal(1)), literal(1).op("-")(literal(1)), From 49ab8820b7931d3682dc2263f5f69fd032ca03bc Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 11 Jun 2024 09:16:26 -0400 Subject: [PATCH 224/544] loosen up hash_limit test hash_limit_string works by doing a modulus of a hash value so that the range of possible numbers is 0-N. however, there's a chance we might not populate every 0-N value in unusual cases on CI, even after iterating 500 times apparently. Loosen the change by making sure we got at least N/2 unique hash messages but not greater than N. Change-Id: I5cd2845697ec0a718ddca1c95fbc4867b06eabee (cherry picked from commit ef04a401100ff37915c281c412ed3d784565e429) --- test/base/test_warnings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/base/test_warnings.py b/test/base/test_warnings.py index ee286a7bc9e..069835ff9ec 100644 --- a/test/base/test_warnings.py +++ b/test/base/test_warnings.py @@ -36,7 +36,7 @@ def test_warn_deprecated_limited_cap(self): messages.add(message) eq_(len(printouts), occurrences) - eq_(len(messages), cap) + assert cap / 2 < len(messages) <= cap class ClsWarningTest(fixtures.TestBase): From 307cefe471cfb8de41bc9354cc00eb6e01598b0f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 11 Jun 2024 23:59:19 +0200 Subject: [PATCH 225/544] Bump pypa/cibuildwheel from 2.17.0 to 2.19.0 (#11474) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.17.0 to 2.19.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.17.0...v2.19.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cherry picked from commit b320b04af2eb247c3466ac446f138add5eddb3b6) --- .github/workflows/create-wheels.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index ae520cbbf35..8601af8e46f 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -74,7 +74,7 @@ jobs: - name: Build compiled wheels if: ${{ matrix.wheel_mode == 'compiled' }} - uses: pypa/cibuildwheel@v2.17.0 + uses: pypa/cibuildwheel@v2.19.0 env: CIBW_ARCHS_LINUX: ${{ matrix.linux_archs }} CIBW_BUILD: ${{ matrix.python }} From 0b99307e21b071405da3e686143b64041498acae Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 12 Jun 2024 12:42:29 -0400 Subject: [PATCH 226/544] open up async greenlet for third parties Modified the internal representation used for adapting asyncio calls to greenlets to allow for duck-typed compatibility with third party libraries that implement SQLAlchemy's "greenlet-to-asyncio" pattern directly. Running code within a greenlet that features the attribute ``__sqlalchemy_greenlet_provider__ = True`` will allow calls to :func:`sqlalchemy.util.await_only` directly. Change-Id: I79c67264e1a642b9a80d3b46dc64bdda80acf0aa (cherry picked from commit c1e2d9180a14c74495b712e08d8156b92f907ac0) --- .../unreleased_14/greenlet_compat.rst | 10 ++++ lib/sqlalchemy/util/_concurrency_py3k.py | 46 +++++++++---------- 2 files changed, 32 insertions(+), 24 deletions(-) create mode 100644 doc/build/changelog/unreleased_14/greenlet_compat.rst diff --git a/doc/build/changelog/unreleased_14/greenlet_compat.rst b/doc/build/changelog/unreleased_14/greenlet_compat.rst new file mode 100644 index 00000000000..d9eb51cd9c0 --- /dev/null +++ b/doc/build/changelog/unreleased_14/greenlet_compat.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: usecase, engine + + Modified the internal representation used for adapting asyncio calls to + greenlets to allow for duck-typed compatibility with third party libraries + that implement SQLAlchemy's "greenlet-to-asyncio" pattern directly. + Running code within a greenlet that features the attribute + ``__sqlalchemy_greenlet_provider__ = True`` will allow calls to + :func:`sqlalchemy.util.await_only` directly. 
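A minimal sketch of what such third-party integration might look like (hypothetical code, not part of SQLAlchemy; error propagation and cleanup are omitted, and the driver loop mirrors what ``greenlet_spawn()`` does internally):

    import asyncio

    import greenlet

    from sqlalchemy.util import await_only

    class MyGreenlet(greenlet.greenlet):
        # duck-typing hook: marks this greenlet as a valid context for
        # sqlalchemy.util.await_only()
        __sqlalchemy_greenlet_provider__ = True

    async def run_sync(fn, *args):
        # driver side: run fn inside a MyGreenlet whose parent is this
        # coroutine's greenlet; each awaitable handed back by await_only()
        # is awaited here and its result switched back in
        context = MyGreenlet(fn, greenlet.getcurrent())
        result = context.switch(*args)
        while not context.dead:
            result = context.switch(await result)
        return result

    def sync_style_code():
        # runs inside MyGreenlet; await_only() switches the coroutine out to
        # the driver above and resumes here with its result
        return await_only(asyncio.sleep(0.1, result="hello"))

    print(asyncio.run(run_sync(sync_style_code)))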
+ diff --git a/lib/sqlalchemy/util/_concurrency_py3k.py b/lib/sqlalchemy/util/_concurrency_py3k.py index 5717d970617..a19607cd01c 100644 --- a/lib/sqlalchemy/util/_concurrency_py3k.py +++ b/lib/sqlalchemy/util/_concurrency_py3k.py @@ -74,9 +74,10 @@ def is_exit_exception(e: BaseException) -> bool: class _AsyncIoGreenlet(greenlet): dead: bool + __sqlalchemy_greenlet_provider__ = True + def __init__(self, fn: Callable[..., Any], driver: greenlet): greenlet.__init__(self, fn, driver) - self.driver = driver if _has_gr_context: self.gr_context = driver.gr_context @@ -102,7 +103,7 @@ def _safe_cancel_awaitable(awaitable: Awaitable[Any]) -> None: def in_greenlet() -> bool: current = getcurrent() - return isinstance(current, _AsyncIoGreenlet) + return getattr(current, "__sqlalchemy_greenlet_provider__", False) def await_only(awaitable: Awaitable[_T]) -> _T: @@ -116,7 +117,7 @@ def await_only(awaitable: Awaitable[_T]) -> _T: """ # this is called in the context greenlet while running fn current = getcurrent() - if not isinstance(current, _AsyncIoGreenlet): + if not getattr(current, "__sqlalchemy_greenlet_provider__", False): _safe_cancel_awaitable(awaitable) raise exc.MissingGreenlet( @@ -128,7 +129,7 @@ def await_only(awaitable: Awaitable[_T]) -> _T: # a coroutine to run. Once the awaitable is done, the driver greenlet # switches back to this greenlet with the result of awaitable that is # then returned to the caller (or raised as error) - return current.driver.switch(awaitable) # type: ignore[no-any-return] + return current.parent.switch(awaitable) # type: ignore[no-any-return,attr-defined] # noqa: E501 def await_fallback(awaitable: Awaitable[_T]) -> _T: @@ -148,7 +149,7 @@ def await_fallback(awaitable: Awaitable[_T]) -> _T: # this is called in the context greenlet while running fn current = getcurrent() - if not isinstance(current, _AsyncIoGreenlet): + if not getattr(current, "__sqlalchemy_greenlet_provider__", False): loop = get_event_loop() if loop.is_running(): _safe_cancel_awaitable(awaitable) @@ -160,7 +161,7 @@ def await_fallback(awaitable: Awaitable[_T]) -> _T: ) return loop.run_until_complete(awaitable) - return current.driver.switch(awaitable) # type: ignore[no-any-return] + return current.parent.switch(awaitable) # type: ignore[no-any-return,attr-defined] # noqa: E501 async def greenlet_spawn( @@ -186,24 +187,21 @@ async def greenlet_spawn( # coroutine to wait. If the context is dead the function has # returned, and its result can be returned. switch_occurred = False - try: - result = context.switch(*args, **kwargs) - while not context.dead: - switch_occurred = True - try: - # wait for a coroutine from await_only and then return its - # result back to it. - value = await result - except BaseException: - # this allows an exception to be raised within - # the moderated greenlet so that it can continue - # its expected flow. - result = context.throw(*sys.exc_info()) - else: - result = context.switch(value) - finally: - # clean up to avoid cycle resolution by gc - del context.driver + result = context.switch(*args, **kwargs) + while not context.dead: + switch_occurred = True + try: + # wait for a coroutine from await_only and then return its + # result back to it. + value = await result + except BaseException: + # this allows an exception to be raised within + # the moderated greenlet so that it can continue + # its expected flow. 
+ result = context.throw(*sys.exc_info()) + else: + result = context.switch(value) + if _require_await and not switch_occurred: raise exc.AwaitRequired( "The current operation required an async execution but none was " From 9ae9b434ef4e19a9da37d7857e56f49cffafe36f Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 13 Jun 2024 10:04:01 -0400 Subject: [PATCH 227/544] strongly discourage noload Change-Id: I54a1c1d29b33091838b7f3fcd46eeec48bbd498b --- doc/build/orm/queryguide/relationships.rst | 3 +-- lib/sqlalchemy/orm/strategy_options.py | 8 ++++---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/doc/build/orm/queryguide/relationships.rst b/doc/build/orm/queryguide/relationships.rst index 30c8b1906fc..bf6f692b98a 100644 --- a/doc/build/orm/queryguide/relationships.rst +++ b/doc/build/orm/queryguide/relationships.rst @@ -1001,8 +1001,7 @@ Wildcard Loading Strategies --------------------------- Each of :func:`_orm.joinedload`, :func:`.subqueryload`, :func:`.lazyload`, -:func:`.selectinload`, -:func:`.noload`, and :func:`.raiseload` can be used to set the default +:func:`.selectinload`, and :func:`.raiseload` can be used to set the default style of :func:`_orm.relationship` loading for a particular query, affecting all :func:`_orm.relationship` -mapped attributes not otherwise diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index 31c3a54e323..974a1ff45f6 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -507,10 +507,10 @@ def noload(self, attr: _AttrType) -> Self: :func:`_orm.noload` applies to :func:`_orm.relationship` attributes only. - .. note:: Setting this loading strategy as the default strategy - for a relationship using the :paramref:`.orm.relationship.lazy` - parameter may cause issues with flushes, such if a delete operation - needs to load related objects and instead ``None`` was returned. + .. legacy:: The :func:`_orm.noload` option is **legacy**. As it + forces collections to be empty, which invariably leads to + non-intuitive and difficult to predict results. There are no + legitimate uses for this option in modern SQLAlchemy. .. seealso:: From 78b2ed2f49647c0be458e7bdc4b8147981aea22a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 16 Jun 2024 11:26:41 -0400 Subject: [PATCH 228/544] add greenlet support for py313 based on vstinners patch Change-Id: I3e852796b49fe51db51c030d03d17cfb6baa7484 (cherry picked from commit 5360cd115422d560b65456794cd700c7c87bfca6) --- tox.ini | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 746432bebfc..a5b82c034b0 100644 --- a/tox.ini +++ b/tox.ini @@ -28,7 +28,7 @@ usedevelop= cov: True extras= - py{3,37,38,39,310,311,312}: {[greenletextras]extras} + py{3,37,38,39,310,311,312,313}: {[greenletextras]extras} py{37,38,39,310}-sqlite_file: sqlcipher postgresql: postgresql @@ -54,6 +54,8 @@ deps= # tracked by https://github.com/pytest-dev/pytest-xdist/issues/907 pytest-xdist!=3.3.0 + py313: git+https://github.com/vstinner/greenlet@py313\#egg=greenlet + dbapimain-sqlite: git+https://github.com/omnilib/aiosqlite.git\#egg=aiosqlite dbapimain-sqlite: git+https://github.com/coleifer/sqlcipher3.git\#egg=sqlcipher3 From 30492d665ed7edc474d151f0854ed4837dcafaed Mon Sep 17 00:00:00 2001 From: Roman Druzhkov Date: Sun, 16 Jun 2024 14:48:30 -0400 Subject: [PATCH 229/544] Correct 'Session.is_modified' method documentation Correct 'Session.is_modified' method documentation. 
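For illustration only, a minimal sketch of the behavior the corrected wording
describes; the ``User`` mapping and in-memory engine below are illustrative
assumptions, not part of the change itself::

    from sqlalchemy import create_engine
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, Session

    class Base(DeclarativeBase):
        pass

    class User(Base):
        __tablename__ = "user_account"
        id: Mapped[int] = mapped_column(primary_key=True)
        name: Mapped[str]

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        user = User(name="original")
        session.add(user)
        session.flush()  # INSERT emitted; this value is now the flushed state

        user.name = "changed"
        assert session.is_modified(user)  # differs from the last flushed value

        session.flush()  # UPDATE emitted
        # compared against the newly flushed value, so False even though
        # nothing has been committed yet
        assert not session.is_modified(user)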
### Description Correct documentation according to discussion: https://github.com/sqlalchemy/sqlalchemy/discussions/11481#discussioncomment-9759171 ### Checklist This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #11501 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11501 Pull-request-sha: 16e949af544b4a1a94ccc5660e164fd2bca8be78 Change-Id: Ia8be0e3865190f0667de006c3006ecf646ef8422 --- lib/sqlalchemy/ext/asyncio/scoping.py | 2 +- lib/sqlalchemy/ext/asyncio/session.py | 2 +- lib/sqlalchemy/orm/scoping.py | 2 +- lib/sqlalchemy/orm/session.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/sqlalchemy/ext/asyncio/scoping.py b/lib/sqlalchemy/ext/asyncio/scoping.py index e879a1654e9..162f34eabfc 100644 --- a/lib/sqlalchemy/ext/asyncio/scoping.py +++ b/lib/sqlalchemy/ext/asyncio/scoping.py @@ -864,7 +864,7 @@ def is_modified( This method retrieves the history for each instrumented attribute on the instance and performs a comparison of the current - value to its previously committed value, if any. + value to its previously flushed or committed value, if any. It is in effect a more expensive and accurate version of checking for the given instance in the diff --git a/lib/sqlalchemy/ext/asyncio/session.py b/lib/sqlalchemy/ext/asyncio/session.py index c5fe469a0d4..473a8c1717c 100644 --- a/lib/sqlalchemy/ext/asyncio/session.py +++ b/lib/sqlalchemy/ext/asyncio/session.py @@ -1307,7 +1307,7 @@ def is_modified( This method retrieves the history for each instrumented attribute on the instance and performs a comparison of the current - value to its previously committed value, if any. + value to its previously flushed or committed value, if any. It is in effect a more expensive and accurate version of checking for the given instance in the diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py index 819616ae85f..283f4a0221f 100644 --- a/lib/sqlalchemy/orm/scoping.py +++ b/lib/sqlalchemy/orm/scoping.py @@ -1228,7 +1228,7 @@ def is_modified( This method retrieves the history for each instrumented attribute on the instance and performs a comparison of the current - value to its previously committed value, if any. + value to its previously flushed or committed value, if any. It is in effect a more expensive and accurate version of checking for the given instance in the diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index a4bf7c1cecf..c455ffac930 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -4761,7 +4761,7 @@ def is_modified( This method retrieves the history for each instrumented attribute on the instance and performs a comparison of the current - value to its previously committed value, if any. + value to its previously flushed or committed value, if any. 
It is in effect a more expensive and accurate version of checking for the given instance in the From 0e2a91163ea250b40dc33e4f6fa07996d8d14ce3 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 17 Jun 2024 22:45:16 -0400 Subject: [PATCH 230/544] restore declared_attr consumption for __table__ Fixed bug in ORM Declarative where the ``__table__`` directive could not be declared as a class function with :func:`_orm.declared_attr` on a superclass, including an ``__abstract__`` class as well as coming from the declarative base itself. This was a regression since 1.4 where this was working, and there were apparently no tests for this particular use case. Fixes: #11509 Change-Id: I82ef0f93d00cb7a43b0b1b16ea28f1a9a79eba3b (cherry picked from commit 30ec43440168fa79a4d45db64c387562ef8f97e6) --- doc/build/changelog/unreleased_20/11509.rst | 9 ++ lib/sqlalchemy/orm/decl_base.py | 40 ++++- test/orm/declarative/test_mixin.py | 154 ++++++++++++++++++++ 3 files changed, 197 insertions(+), 6 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11509.rst diff --git a/doc/build/changelog/unreleased_20/11509.rst b/doc/build/changelog/unreleased_20/11509.rst new file mode 100644 index 00000000000..1761c2bf7ad --- /dev/null +++ b/doc/build/changelog/unreleased_20/11509.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, orm, regression + :tickets: 11509 + + Fixed bug in ORM Declarative where the ``__table__`` directive could not be + declared as a class function with :func:`_orm.declared_attr` on a + superclass, including an ``__abstract__`` class as well as coming from the + declarative base itself. This was a regression since 1.4 where this was + working, and there were apparently no tests for this particular use case. diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index 96530c3ac47..1203c9cb36a 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -453,6 +453,7 @@ class _ClassScanMapperConfig(_MapperConfig): "tablename", "mapper_args", "mapper_args_fn", + "table_fn", "inherits", "single", "allow_dataclass_fields", @@ -759,7 +760,7 @@ def _scan_attributes(self) -> None: _include_dunders = self._include_dunders mapper_args_fn = None table_args = inherited_table_args = None - + table_fn = None tablename = None fixed_table = "__table__" in clsdict_view @@ -840,6 +841,22 @@ def _mapper_args_fn() -> Dict[str, Any]: ) if not tablename and (not class_mapped or check_decl): tablename = cls_as_Decl.__tablename__ + elif name == "__table__": + check_decl = _check_declared_props_nocascade( + obj, name, cls + ) + # if a @declared_attr using "__table__" is detected, + # wrap up a callable to look for "__table__" from + # the final concrete class when we set up a table. + # this was fixed by + # #11509, regression in 2.0 from version 1.4. 
+ if check_decl and not table_fn: + # don't even invoke __table__ until we're ready + def _table_fn() -> FromClause: + return cls_as_Decl.__table__ + + table_fn = _table_fn + elif name == "__table_args__": check_decl = _check_declared_props_nocascade( obj, name, cls @@ -856,9 +873,10 @@ def _mapper_args_fn() -> Dict[str, Any]: if base is not cls: inherited_table_args = True else: - # skip all other dunder names, which at the moment - # should only be __table__ - continue + # any other dunder names; should not be here + # as we have tested for all four names in + # _include_dunders + assert False elif class_mapped: if _is_declarative_props(obj) and not obj._quiet: util.warn( @@ -1031,6 +1049,7 @@ def _mapper_args_fn() -> Dict[str, Any]: self.table_args = table_args self.tablename = tablename self.mapper_args_fn = mapper_args_fn + self.table_fn = table_fn def _setup_dataclasses_transforms(self) -> None: dataclass_setup_arguments = self.dataclass_setup_arguments @@ -1687,7 +1706,11 @@ def _setup_table(self, table: Optional[FromClause] = None) -> None: manager = attributes.manager_of_class(cls) - if "__table__" not in clsdict_view and table is None: + if ( + self.table_fn is None + and "__table__" not in clsdict_view + and table is None + ): if hasattr(cls, "__table_cls__"): table_cls = cast( Type[Table], @@ -1733,7 +1756,12 @@ def _setup_table(self, table: Optional[FromClause] = None) -> None: ) else: if table is None: - table = cls_as_Decl.__table__ + if self.table_fn: + table = self.set_cls_attribute( + "__table__", self.table_fn() + ) + else: + table = cls_as_Decl.__table__ if declared_columns: for c in declared_columns: if not table.c.contains_column(c): diff --git a/test/orm/declarative/test_mixin.py b/test/orm/declarative/test_mixin.py index 2520eb846d7..d670e96dcbf 100644 --- a/test/orm/declarative/test_mixin.py +++ b/test/orm/declarative/test_mixin.py @@ -7,6 +7,7 @@ from sqlalchemy import func from sqlalchemy import Integer from sqlalchemy import MetaData +from sqlalchemy import schema from sqlalchemy import select from sqlalchemy import String from sqlalchemy import testing @@ -98,6 +99,159 @@ class Foo(Base): self.assert_compile(select(Foo), "SELECT foo.name, foo.id FROM foo") + @testing.variation("base_type", ["generate_base", "subclass"]) + @testing.variation("attrname", ["table", "tablename"]) + @testing.variation("position", ["base", "abstract"]) + @testing.variation("assert_no_extra_cols", [True, False]) + def test_declared_attr_on_base( + self, registry, base_type, attrname, position, assert_no_extra_cols + ): + """test #11509""" + + if position.abstract: + if base_type.generate_base: + SuperBase = registry.generate_base() + + class Base(SuperBase): + __abstract__ = True + if attrname.table: + + @declared_attr.directive + def __table__(cls): + return Table( + cls.__name__, + cls.registry.metadata, + Column("id", Integer, primary_key=True), + ) + + elif attrname.tablename: + + @declared_attr.directive + def __tablename__(cls): + return cls.__name__ + + else: + attrname.fail() + + elif base_type.subclass: + + class SuperBase(DeclarativeBase): + pass + + class Base(SuperBase): + __abstract__ = True + if attrname.table: + + @declared_attr.directive + def __table__(cls): + return Table( + cls.__name__, + cls.registry.metadata, + Column("id", Integer, primary_key=True), + ) + + elif attrname.tablename: + + @declared_attr.directive + def __tablename__(cls): + return cls.__name__ + + else: + attrname.fail() + + else: + base_type.fail() + else: + if base_type.generate_base: + + class 
Base: + if attrname.table: + + @declared_attr.directive + def __table__(cls): + return Table( + cls.__name__, + cls.registry.metadata, + Column("id", Integer, primary_key=True), + ) + + elif attrname.tablename: + + @declared_attr.directive + def __tablename__(cls): + return cls.__name__ + + else: + attrname.fail() + + Base = registry.generate_base(cls=Base) + elif base_type.subclass: + + class Base(DeclarativeBase): + if attrname.table: + + @declared_attr.directive + def __table__(cls): + return Table( + cls.__name__, + cls.registry.metadata, + Column("id", Integer, primary_key=True), + ) + + elif attrname.tablename: + + @declared_attr.directive + def __tablename__(cls): + return cls.__name__ + + else: + attrname.fail() + + else: + base_type.fail() + + if attrname.table and assert_no_extra_cols: + with expect_raises_message( + sa.exc.ArgumentError, + "Can't add additional column 'data' when specifying __table__", + ): + + class MyNopeClass(Base): + data = Column(String) + + return + + class MyClass(Base): + if attrname.tablename: + id = Column(Integer, primary_key=True) # noqa: A001 + + class MyOtherClass(Base): + if attrname.tablename: + id = Column(Integer, primary_key=True) # noqa: A001 + + t = Table( + "my_override", + Base.metadata, + Column("id", Integer, primary_key=True), + ) + + class MyOverrideClass(Base): + __table__ = t + + Base.registry.configure() + + # __table__ was assigned + assert isinstance(MyClass.__dict__["__table__"], schema.Table) + assert isinstance(MyOtherClass.__dict__["__table__"], schema.Table) + + eq_(MyClass.__table__.name, "MyClass") + eq_(MyClass.__table__.c.keys(), ["id"]) + + eq_(MyOtherClass.__table__.name, "MyOtherClass") + eq_(MyOtherClass.__table__.c.keys(), ["id"]) + + is_(MyOverrideClass.__table__, t) + def test_simple_wbase(self): class MyMixin: id = Column( From a6f2c4eb9486087b5e53000e118e2a489f7ecae1 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 27 Mar 2024 22:00:00 +0100 Subject: [PATCH 231/544] Improve the documentation of json.as method Mention that these method are more like ``type_coerce`` than ``cast``. Fixes: #11065 Change-Id: Ia5bd4f6d5f48be9557d0504f628202e1e6ddf6d1 (cherry picked from commit 312f2e017dfcd9f4d9132e76705bd8420a130fb4) --- lib/sqlalchemy/sql/sqltypes.py | 30 ++++++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index 1af3c5e339f..c1c2b1159aa 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -2516,7 +2516,10 @@ def _setup_getitem(self, index): return operator, index, self.type def as_boolean(self): - """Cast an indexed value as boolean. + """Consider an indexed value as boolean. + + This is similar to using :class:`_sql.type_coerce`, and will + usually not apply a ``CAST()``. e.g.:: @@ -2532,7 +2535,10 @@ def as_boolean(self): return self._binary_w_type(Boolean(), "as_boolean") def as_string(self): - """Cast an indexed value as string. + """Consider an indexed value as string. + + This is similar to using :class:`_sql.type_coerce`, and will + usually not apply a ``CAST()``. e.g.:: @@ -2549,7 +2555,10 @@ def as_string(self): return self._binary_w_type(Unicode(), "as_string") def as_integer(self): - """Cast an indexed value as integer. + """Consider an indexed value as integer. + + This is similar to using :class:`_sql.type_coerce`, and will + usually not apply a ``CAST()``. 
e.g.:: @@ -2565,7 +2574,10 @@ def as_integer(self): return self._binary_w_type(Integer(), "as_integer") def as_float(self): - """Cast an indexed value as float. + """Consider an indexed value as float. + + This is similar to using :class:`_sql.type_coerce`, and will + usually not apply a ``CAST()``. e.g.:: @@ -2581,7 +2593,10 @@ def as_float(self): return self._binary_w_type(Float(), "as_float") def as_numeric(self, precision, scale, asdecimal=True): - """Cast an indexed value as numeric/decimal. + """Consider an indexed value as numeric/decimal. + + This is similar to using :class:`_sql.type_coerce`, and will + usually not apply a ``CAST()``. e.g.:: @@ -2600,7 +2615,10 @@ def as_numeric(self, precision, scale, asdecimal=True): ) def as_json(self): - """Cast an indexed value as JSON. + """Consider an indexed value as JSON. + + This is similar to using :class:`_sql.type_coerce`, and will + usually not apply a ``CAST()``. e.g.:: From 2ef2c65db914c42cda6a79cc21b6acbe7550c7d8 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 18 Jun 2024 15:04:17 -0400 Subject: [PATCH 232/544] - 2.0.31 --- doc/build/changelog/changelog_20.rst | 98 ++++++++++++++++++++- doc/build/changelog/unreleased_20/11285.rst | 7 -- doc/build/changelog/unreleased_20/11361.rst | 6 -- doc/build/changelog/unreleased_20/11365.rst | 9 -- doc/build/changelog/unreleased_20/11374.rst | 7 -- doc/build/changelog/unreleased_20/11417.rst | 7 -- doc/build/changelog/unreleased_20/11422.rst | 6 -- doc/build/changelog/unreleased_20/11423.rst | 6 -- doc/build/changelog/unreleased_20/11426.rst | 8 -- doc/build/changelog/unreleased_20/11446.rst | 8 -- doc/build/changelog/unreleased_20/11449.rst | 12 --- doc/build/changelog/unreleased_20/11509.rst | 9 -- doc/build/conf.py | 4 +- 13 files changed, 99 insertions(+), 88 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/11285.rst delete mode 100644 doc/build/changelog/unreleased_20/11361.rst delete mode 100644 doc/build/changelog/unreleased_20/11365.rst delete mode 100644 doc/build/changelog/unreleased_20/11374.rst delete mode 100644 doc/build/changelog/unreleased_20/11417.rst delete mode 100644 doc/build/changelog/unreleased_20/11422.rst delete mode 100644 doc/build/changelog/unreleased_20/11423.rst delete mode 100644 doc/build/changelog/unreleased_20/11426.rst delete mode 100644 doc/build/changelog/unreleased_20/11446.rst delete mode 100644 doc/build/changelog/unreleased_20/11449.rst delete mode 100644 doc/build/changelog/unreleased_20/11509.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index b0194baa5b8..44a9480d410 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,103 @@ .. changelog:: :version: 2.0.31 - :include_notes_from: unreleased_20 + :released: June 18, 2024 + + .. change:: + :tags: usecase, reflection, mysql + :tickets: 11285 + + Added missing foreign key reflection option ``SET DEFAULT`` + in the MySQL and MariaDB dialects. + Pull request courtesy of Quentin Roche. + + .. change:: + :tags: usecase, orm + :tickets: 11361 + + Added missing parameter :paramref:`_orm.with_polymorphic.name` that + allows specifying the name of returned :class:`_orm.AliasedClass`. + + .. change:: + :tags: bug, orm + :tickets: 11365 + + Fixed issue where a :class:`.MetaData` collection would not be + serializable, if an :class:`.Enum` or :class:`.Boolean` datatype were + present which had been adapted. 
This specific scenario in turn could occur + when using the :class:`.Enum` or :class:`.Boolean` within ORM Annotated + Declarative form where type objects frequently get copied. + + .. change:: + :tags: schema, usecase + :tickets: 11374 + + Added :paramref:`_schema.Column.insert_default` as an alias of + :paramref:`_schema.Column.default` for compatibility with + :func:`_orm.mapped_column`. + + .. change:: + :tags: bug, general + :tickets: 11417 + + Set up full Python 3.13 support to the extent currently possible, repairing + issues within internal language helpers as well as the serializer extension + module. + + .. change:: + :tags: bug, sql + :tickets: 11422 + + Fixed issue when serializing an :func:`_sql.over` clause with + unbounded range or rows. + + .. change:: + :tags: bug, sql + :tickets: 11423 + + Added missing methods :meth:`_sql.FunctionFilter.within_group` + and :meth:`_sql.WithinGroup.filter` + + .. change:: + :tags: bug, sql + :tickets: 11426 + + Fixed bug in :meth:`_sql.FunctionFilter.filter` that would mutate + the existing function in-place. It now behaves like the rest of the + SQLAlchemy API, returning a new instance instead of mutating the + original one. + + .. change:: + :tags: bug, orm + :tickets: 11446 + + Fixed issue where the :func:`_orm.selectinload` and + :func:`_orm.subqueryload` loader options would fail to take effect when + made against an inherited subclass that itself included a subclass-specific + :paramref:`_orm.Mapper.with_polymorphic` setting. + + .. change:: + :tags: bug, orm + :tickets: 11449 + + Fixed very old issue involving the :paramref:`_orm.joinedload.innerjoin` + parameter where making use of this parameter mixed into a query that also + included joined eager loads along a self-referential or other cyclical + relationship, along with complicating factors like inner joins added for + secondary tables and such, would have the chance of splicing a particular + inner join to the wrong part of the query. Additional state has been added + to the internal method that does this splice to make a better decision as + to where splicing should proceed. + + .. change:: + :tags: bug, orm, regression + :tickets: 11509 + + Fixed bug in ORM Declarative where the ``__table__`` directive could not be + declared as a class function with :func:`_orm.declared_attr` on a + superclass, including an ``__abstract__`` class as well as coming from the + declarative base itself. This was a regression since 1.4 where this was + working, and there were apparently no tests for this particular use case. .. changelog:: :version: 2.0.30 diff --git a/doc/build/changelog/unreleased_20/11285.rst b/doc/build/changelog/unreleased_20/11285.rst deleted file mode 100644 index a965799c172..00000000000 --- a/doc/build/changelog/unreleased_20/11285.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: usecase, reflection, mysql - :tickets: 11285 - - Added missing foreign key reflection option ``SET DEFAULT`` - in the MySQL and MariaDB dialects. - Pull request courtesy of Quentin Roche. diff --git a/doc/build/changelog/unreleased_20/11361.rst b/doc/build/changelog/unreleased_20/11361.rst deleted file mode 100644 index bd9fe1d3ff4..00000000000 --- a/doc/build/changelog/unreleased_20/11361.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: usecase, orm - :tickets: 11361 - - Added missing parameter :paramref:`_orm.with_polymorphic.name` that - allows specifying the name of returned :class:`_orm.AliasedClass`. 
diff --git a/doc/build/changelog/unreleased_20/11365.rst b/doc/build/changelog/unreleased_20/11365.rst deleted file mode 100644 index d2b353e9123..00000000000 --- a/doc/build/changelog/unreleased_20/11365.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11365 - - Fixed issue where a :class:`.MetaData` collection would not be - serializable, if an :class:`.Enum` or :class:`.Boolean` datatype were - present which had been adapted. This specific scenario in turn could occur - when using the :class:`.Enum` or :class:`.Boolean` within ORM Annotated - Declarative form where type objects frequently get copied. diff --git a/doc/build/changelog/unreleased_20/11374.rst b/doc/build/changelog/unreleased_20/11374.rst deleted file mode 100644 index d52da2e7670..00000000000 --- a/doc/build/changelog/unreleased_20/11374.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: schema, usecase - :tickets: 11374 - - Added :paramref:`_schema.Column.insert_default` as an alias of - :paramref:`_schema.Column.default` for compatibility with - :func:`_orm.mapped_column`. diff --git a/doc/build/changelog/unreleased_20/11417.rst b/doc/build/changelog/unreleased_20/11417.rst deleted file mode 100644 index 8e27d059237..00000000000 --- a/doc/build/changelog/unreleased_20/11417.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, general - :tickets: 11417 - - Set up full Python 3.13 support to the extent currently possible, repairing - issues within internal language helpers as well as the serializer extension - module. diff --git a/doc/build/changelog/unreleased_20/11422.rst b/doc/build/changelog/unreleased_20/11422.rst deleted file mode 100644 index bde78793382..00000000000 --- a/doc/build/changelog/unreleased_20/11422.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 11422 - - Fixed issue when serializing an :func:`_sql.over` clause with - unbounded range or rows. diff --git a/doc/build/changelog/unreleased_20/11423.rst b/doc/build/changelog/unreleased_20/11423.rst deleted file mode 100644 index ed6f988460e..00000000000 --- a/doc/build/changelog/unreleased_20/11423.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 11423 - - Added missing methods :meth:`_sql.FunctionFilter.within_group` - and :meth:`_sql.WithinGroup.filter` diff --git a/doc/build/changelog/unreleased_20/11426.rst b/doc/build/changelog/unreleased_20/11426.rst deleted file mode 100644 index c9018b02f45..00000000000 --- a/doc/build/changelog/unreleased_20/11426.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 11426 - - Fixed bug in :meth:`_sql.FunctionFilter.filter` that would mutate - the existing function in-place. It now behaves like the rest of the - SQLAlchemy API, returning a new instance instead of mutating the - original one. diff --git a/doc/build/changelog/unreleased_20/11446.rst b/doc/build/changelog/unreleased_20/11446.rst deleted file mode 100644 index 747230b869f..00000000000 --- a/doc/build/changelog/unreleased_20/11446.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11446 - - Fixed issue where the :func:`_orm.selectinload` and - :func:`_orm.subqueryload` loader options would fail to take effect when - made against an inherited subclass that itself included a subclass-specific - :paramref:`_orm.Mapper.with_polymorphic` setting. 
diff --git a/doc/build/changelog/unreleased_20/11449.rst b/doc/build/changelog/unreleased_20/11449.rst deleted file mode 100644 index f7974cfd76f..00000000000 --- a/doc/build/changelog/unreleased_20/11449.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11449 - - Fixed very old issue involving the :paramref:`_orm.joinedload.innerjoin` - parameter where making use of this parameter mixed into a query that also - included joined eager loads along a self-referential or other cyclical - relationship, along with complicating factors like inner joins added for - secondary tables and such, would have the chance of splicing a particular - inner join to the wrong part of the query. Additional state has been added - to the internal method that does this splice to make a better decision as - to where splicing should proceed. diff --git a/doc/build/changelog/unreleased_20/11509.rst b/doc/build/changelog/unreleased_20/11509.rst deleted file mode 100644 index 1761c2bf7ad..00000000000 --- a/doc/build/changelog/unreleased_20/11509.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm, regression - :tickets: 11509 - - Fixed bug in ORM Declarative where the ``__table__`` directive could not be - declared as a class function with :func:`_orm.declared_attr` on a - superclass, including an ``__abstract__`` class as well as coming from the - declarative base itself. This was a regression since 1.4 where this was - working, and there were apparently no tests for this particular use case. diff --git a/doc/build/conf.py b/doc/build/conf.py index 9193ba8ac71..054fe873d85 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.30" +release = "2.0.31" -release_date = "May 5, 2024" +release_date = "June 18, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 03a2bc7e8b0a1e323f5deb2305d09071b36dc953 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 18 Jun 2024 17:39:11 -0400 Subject: [PATCH 233/544] Version 2.0.32 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 44a9480d410..ec885b1a488 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.32 + :include_notes_from: unreleased_20 + .. 
changelog:: :version: 2.0.31 :released: June 18, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 0751c2482ff..b15c99062a5 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.31" +__version__ = "2.0.32" def __go(lcls: Any) -> None: From 9553604fcc80fa175832433f4eff657ac95ae058 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 19 Jun 2024 11:44:54 -0400 Subject: [PATCH 234/544] pin setuptools below 69.3 and prepare for "build" for releases Change-Id: Ib70446cc3c7d7d8acb264ffa2237a0c7aac5a0f5 (cherry picked from commit f6283dd6d902fd0d8b5a7ecc6c37c4ebde4d93f3) --- pyproject.toml | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 780c9bf689a..0f53594dfaf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,11 @@ [build-system] - build-backend = "setuptools.build_meta" - requires = [ - "setuptools>=47", - "cython>=0.29.24; platform_python_implementation == 'CPython'", # Skip cython when using pypy - ] +build-backend = "setuptools.build_meta" +requires = [ + # avoid moving to https://github.com/pypa/setuptools/issues/3593 + # until we're ready + "setuptools>=61.0,<69.3", + "cython>=0.29.24; platform_python_implementation == 'CPython'", # Skip cython when using pypy +] [tool.black] line-length = 79 From 9524e4bffc9c8545fdb8698ef029c420374ac00f Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 19 Jun 2024 11:03:25 -0400 Subject: [PATCH 235/544] use literal execute for SQL Server frame parameters Fixed issue where SQL Server drivers don't support bound parameters when rendering the "frame specification" for a window function, e.g. "ROWS BETWEEN", etc. Fixes: #11514 Change-Id: I0664f4076a2a8266434a4670949b8b44cd261f44 (cherry picked from commit c088b6426f1d73efe7de3e42b3e86f8027076bc3) --- doc/build/changelog/unreleased_14/11514.rst | 8 ++++ lib/sqlalchemy/dialects/mssql/base.py | 4 ++ lib/sqlalchemy/testing/suite/test_select.py | 51 +++++++++++++++++++++ 3 files changed, 63 insertions(+) create mode 100644 doc/build/changelog/unreleased_14/11514.rst diff --git a/doc/build/changelog/unreleased_14/11514.rst b/doc/build/changelog/unreleased_14/11514.rst new file mode 100644 index 00000000000..81f0ddeddc0 --- /dev/null +++ b/doc/build/changelog/unreleased_14/11514.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, mssql + :tickets: 11514 + + Fixed issue where SQL Server drivers don't support bound parameters when + rendering the "frame specification" for a window function, e.g. "ROWS + BETWEEN", etc. 
+ diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 872f8584da4..ddee9a5a739 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -1988,6 +1988,10 @@ def __init__(self, *args, **kwargs): self.tablealiases = {} super().__init__(*args, **kwargs) + def _format_frame_clause(self, range_, **kw): + kw["literal_execute"] = True + return super()._format_frame_clause(range_, **kw) + def _with_legacy_schema_aliasing(fn): def decorate(self, *arg, **kw): if self.dialect.legacy_schema_aliasing: diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py index 866bf09cb5d..c31613fcf58 100644 --- a/lib/sqlalchemy/testing/suite/test_select.py +++ b/lib/sqlalchemy/testing/suite/test_select.py @@ -1886,3 +1886,54 @@ def test_is_or_is_not_distinct_from( len(result), expected_row_count_for_is_not, ) + + +class WindowFunctionTest(fixtures.TablesTest): + __requires__ = ("window_functions",) + + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table( + "some_table", + metadata, + Column("id", Integer, primary_key=True), + Column("col1", Integer), + Column("col2", Integer), + ) + + @classmethod + def insert_data(cls, connection): + connection.execute( + cls.tables.some_table.insert(), + [{"id": i, "col1": i, "col2": i * 5} for i in range(1, 50)], + ) + + def test_window(self, connection): + some_table = self.tables.some_table + rows = connection.execute( + select( + func.max(some_table.c.col2).over( + order_by=[some_table.c.col1.desc()] + ) + ).where(some_table.c.col1 < 20) + ).all() + + eq_(rows, [(95,) for i in range(19)]) + + def test_window_rows_between(self, connection): + some_table = self.tables.some_table + + # note the rows are part of the cache key right now, not handled + # as binds. this is issue #11515 + rows = connection.execute( + select( + func.max(some_table.c.col2).over( + order_by=[some_table.c.col1], + rows=(-5, 0), + ) + ) + ).all() + + eq_(rows, [(i,) for i in range(5, 250, 5)]) From d482abbd80cf3347ccad12098d30f5d264889829 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 23 Jun 2024 12:08:31 +0200 Subject: [PATCH 236/544] Bump pypa/cibuildwheel from 2.19.0 to 2.19.1 (#11491) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.19.0 to 2.19.1. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.19.0...v2.19.1) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cherry picked from commit a98eed3c6288b197a2d1f26daaac1a8bc194f81e) --- .github/workflows/create-wheels.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index 8601af8e46f..0e7c593ca1a 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -74,7 +74,7 @@ jobs: - name: Build compiled wheels if: ${{ matrix.wheel_mode == 'compiled' }} - uses: pypa/cibuildwheel@v2.19.0 + uses: pypa/cibuildwheel@v2.19.1 env: CIBW_ARCHS_LINUX: ${{ matrix.linux_archs }} CIBW_BUILD: ${{ matrix.python }} From b685bc7d211515711a3885fcdac4654de16db407 Mon Sep 17 00:00:00 2001 From: Andreas Motl Date: Sun, 23 Jun 2024 12:14:15 +0200 Subject: [PATCH 237/544] Documentation: Update package name for CrateDB dialect (#11503) The CrateDB SQLAlchemy dialect needs more love, so it was separated from the DBAPI HTTP driver. The new canonical package for the SQLAlchemy CrateDB dialect on PyPI is: https://pypi.org/project/sqlalchemy-cratedb/ (cherry picked from commit d4b28a4409ca233039896225f4e882a8b07e5b56) --- doc/build/dialects/index.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index d065bcf5b34..564656ec513 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -79,7 +79,7 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | CockroachDB | sqlalchemy-cockroachdb_ | +------------------------------------------------+---------------------------------------+ -| CrateDB | crate-python_ | +| CrateDB | sqlalchemy-cratedb_ | +------------------------------------------------+---------------------------------------+ | Databend | databend-sqlalchemy_ | +------------------------------------------------+---------------------------------------+ @@ -150,7 +150,7 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _sqlalchemy-monetdb: https://github.com/gijzelaerr/sqlalchemy-monetdb .. _snowflake-sqlalchemy: https://github.com/snowflakedb/snowflake-sqlalchemy .. _sqlalchemy-tds: https://github.com/m32/sqlalchemy-tds -.. _crate-python: https://github.com/crate/crate-python +.. _sqlalchemy-cratedb: https://github.com/crate/sqlalchemy-cratedb .. _sqlalchemy-access: https://pypi.org/project/sqlalchemy-access/ .. _elasticsearch-dbapi: https://github.com/preset-io/elasticsearch-dbapi/ .. 
_pydruid: https://github.com/druid-io/pydruid From 8bb309b75f2d021f2db1d9fa73ee35a444c06978 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 23 Jun 2024 10:18:47 -0400 Subject: [PATCH 238/544] fix default label style doc Change-Id: I793f7b62c6c0b551ab1957cabcff685885b6e51c (cherry picked from commit c43238252f96a1f9370d1bc7ff440897b751b2b8) --- lib/sqlalchemy/sql/selectable.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index c6e598504b3..a178458480b 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -3856,7 +3856,7 @@ def set_label_style(self, style: SelectLabelStyle) -> Self: :attr:`_sql.SelectLabelStyle.LABEL_STYLE_DISAMBIGUATE_ONLY`, :attr:`_sql.SelectLabelStyle.LABEL_STYLE_TABLENAME_PLUS_COL`, and :attr:`_sql.SelectLabelStyle.LABEL_STYLE_NONE`. The default style is - :attr:`_sql.SelectLabelStyle.LABEL_STYLE_TABLENAME_PLUS_COL`. + :attr:`_sql.SelectLabelStyle.LABEL_STYLE_DISAMBIGUATE_ONLY`. In modern SQLAlchemy, there is not generally a need to change the labeling style, as per-expression labels are more effectively used by From ac9047ec79e9c4ac5efebe83753c51f8165800bf Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 24 Jun 2024 15:07:41 -0400 Subject: [PATCH 239/544] create JoinedDispatcher subclasses up front Fixed additional issues in the event system triggered by unpickling of a :class:`.Enum` datatype, continuing from :ticket:`11365` and :ticket:`11360`, where dynamically generated elements of the event structure would not be present when unpickling in a new process. Fixes: #11530 Change-Id: Ie1f2b3453d4891051f8719f6d3f6703302d5a86e (cherry picked from commit dffd96e7545348d6d1830cdfc4fcf231237010d2) --- doc/build/changelog/unreleased_20/11530.rst | 8 ++ lib/sqlalchemy/event/base.py | 91 +++++++++++---------- test/sql/test_types.py | 57 +++++++++++++ 3 files changed, 111 insertions(+), 45 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11530.rst diff --git a/doc/build/changelog/unreleased_20/11530.rst b/doc/build/changelog/unreleased_20/11530.rst new file mode 100644 index 00000000000..30c60cd1524 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11530.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, events + :tickets: 11530 + + Fixed additional issues in the event system triggered by unpickling of a + :class:`.Enum` datatype, continuing from :ticket:`11365` and + :ticket:`11360`, where dynamically generated elements of the event + structure would not be present when unpickling in a new process. diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py index 434886316f0..cddfc982a6c 100644 --- a/lib/sqlalchemy/event/base.py +++ b/lib/sqlalchemy/event/base.py @@ -191,16 +191,7 @@ def _join(self, other: _DispatchCommon[_ET]) -> _JoinedDispatcher[_ET]: :class:`._Dispatch` objects. 
""" - if "_joined_dispatch_cls" not in self.__class__.__dict__: - cls = type( - "Joined%s" % self.__class__.__name__, - (_JoinedDispatcher,), - {"__slots__": self._event_names}, - ) - self.__class__._joined_dispatch_cls = cls - - # establish pickle capability by adding it to this module - globals()[cls.__name__] = cls + assert "_joined_dispatch_cls" in self.__class__.__dict__ return self._joined_dispatch_cls(self, other) @@ -332,6 +323,51 @@ def _create_dispatcher_class( else: dispatch_target_cls.dispatch = dispatcher(cls) + klass = type( + "Joined%s" % dispatch_cls.__name__, + (_JoinedDispatcher,), + {"__slots__": event_names}, + ) + dispatch_cls._joined_dispatch_cls = klass + + # establish pickle capability by adding it to this module + globals()[klass.__name__] = klass + + +class _JoinedDispatcher(_DispatchCommon[_ET]): + """Represent a connection between two _Dispatch objects.""" + + __slots__ = "local", "parent", "_instance_cls" + + local: _DispatchCommon[_ET] + parent: _DispatchCommon[_ET] + _instance_cls: Optional[Type[_ET]] + + def __init__( + self, local: _DispatchCommon[_ET], parent: _DispatchCommon[_ET] + ): + self.local = local + self.parent = parent + self._instance_cls = self.local._instance_cls + + def __reduce__(self) -> Any: + return (self.__class__, (self.local, self.parent)) + + def __getattr__(self, name: str) -> _JoinedListener[_ET]: + # Assign _JoinedListeners as attributes on demand + # to reduce startup time for new dispatch objects. + ls = getattr(self.local, name) + jl = _JoinedListener(self.parent, ls.name, ls) + setattr(self, ls.name, jl) + return jl + + def _listen(self, event_key: _EventKey[_ET], **kw: Any) -> None: + return self.parent._listen(event_key, **kw) + + @property + def _events(self) -> Type[_HasEventsDispatch[_ET]]: + return self.parent._events + class Events(_HasEventsDispatch[_ET]): """Define event listening functions for a particular target type.""" @@ -386,41 +422,6 @@ def _clear(cls) -> None: cls.dispatch._clear() -class _JoinedDispatcher(_DispatchCommon[_ET]): - """Represent a connection between two _Dispatch objects.""" - - __slots__ = "local", "parent", "_instance_cls" - - local: _DispatchCommon[_ET] - parent: _DispatchCommon[_ET] - _instance_cls: Optional[Type[_ET]] - - def __init__( - self, local: _DispatchCommon[_ET], parent: _DispatchCommon[_ET] - ): - self.local = local - self.parent = parent - self._instance_cls = self.local._instance_cls - - def __reduce__(self) -> Any: - return (self.__class__, (self.local, self.parent)) - - def __getattr__(self, name: str) -> _JoinedListener[_ET]: - # Assign _JoinedListeners as attributes on demand - # to reduce startup time for new dispatch objects. 
- ls = getattr(self.local, name) - jl = _JoinedListener(self.parent, ls.name, ls) - setattr(self, ls.name, jl) - return jl - - def _listen(self, event_key: _EventKey[_ET], **kw: Any) -> None: - return self.parent._listen(event_key, **kw) - - @property - def _events(self) -> Type[_HasEventsDispatch[_ET]]: - return self.parent._events - - class dispatcher(Generic[_ET]): """Descriptor used by target classes to deliver the _Dispatch class at the class level diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 5214ebac53c..36c6a74c27e 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -3,6 +3,10 @@ import importlib import operator import os +import pickle +import subprocess +import sys +from tempfile import mkstemp import sqlalchemy as sa from sqlalchemy import and_ @@ -531,6 +535,59 @@ def test_pickle_types(self, name, type_, use_adapt): loads(dumps(column_type)) loads(dumps(meta)) + @testing.combinations( + ("Str", String()), + ("Tex", Text()), + ("Uni", Unicode()), + ("Boo", Boolean()), + ("Dat", DateTime()), + ("Dat", Date()), + ("Tim", Time()), + ("Lar", LargeBinary()), + ("Pic", PickleType()), + ("Int", Interval()), + ("Enu", Enum("one", "two", "three")), + argnames="name,type_", + id_="ar", + ) + @testing.variation("use_adapt", [True, False]) + def test_pickle_types_other_process(self, name, type_, use_adapt): + """test for #11530 + + this does a full exec of python interpreter so the number of variations + here is reduced to just a single pickler, else each case takes + a full second. + + """ + + if use_adapt: + type_ = type_.copy() + + column_type = Column(name, type_) + meta = MetaData() + Table("foo", meta, column_type) + + for target in column_type, meta: + f, name = mkstemp("pkl") + with os.fdopen(f, "wb") as f: + pickle.dump(target, f) + + name = name.replace(os.sep, "/") + code = ( + "import sqlalchemy; import pickle; " + f"pickle.load(open('''{name}''', 'rb'))" + ) + parts = list(sys.path) + if os.environ.get("PYTHONPATH"): + parts.append(os.environ["PYTHONPATH"]) + pythonpath = os.pathsep.join(parts) + proc = subprocess.run( + [sys.executable, "-c", code], + env={**os.environ, "PYTHONPATH": pythonpath}, + ) + eq_(proc.returncode, 0) + os.unlink(name) + class _UserDefinedTypeFixture: @classmethod From 673152560e53d9f60ab97b1cdb15fa52e01a8831 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Fri, 15 Mar 2024 19:15:27 +0100 Subject: [PATCH 240/544] Warn if an engine bind left an open transaction Added a warning noting when an :meth:`_engine.ConnectionEvents.engine_connect` event may be leaving a transaction open, which can alter the behavior of a class:`_orm.Session` using such an engine as bind. On SQLAlchemy 2.1 :paramref:`_orm.Session.join_transaction_mode` will instead be ignored in all cases when the session bind is an :class:`_engine.Engine`. Fixes: #11163 Change-Id: I10147876d07352f2dab898d615e98a9acd6eb91b (cherry picked from commit 69a2d0903e427e99acceedcd4e29a17d0b012bbe) --- doc/build/changelog/unreleased_20/11163.rst | 11 ++++++ lib/sqlalchemy/orm/session.py | 11 ++++++ test/orm/test_transaction.py | 43 +++++++++++++++++++-- 3 files changed, 62 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11163.rst diff --git a/doc/build/changelog/unreleased_20/11163.rst b/doc/build/changelog/unreleased_20/11163.rst new file mode 100644 index 00000000000..da21b45378a --- /dev/null +++ b/doc/build/changelog/unreleased_20/11163.rst @@ -0,0 +1,11 @@ +.. 
change:: + :tags: orm + :tickets: 11163 + + Added a warning noting when an + :meth:`_engine.ConnectionEvents.engine_connect` event may be leaving + a transaction open, which can alter the behavior of a + :class:`_orm.Session` using such an engine as bind. + On SQLAlchemy 2.1 :paramref:`_orm.Session.join_transaction_mode` will + instead be ignored in all cases when the session bind is + an :class:`_engine.Engine`. diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index c455ffac930..eb81f16e01e 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -1211,6 +1211,17 @@ def _connection_for_bind( else: join_transaction_mode = "rollback_only" + if local_connect: + util.warn( + "The engine provided as bind produced a " + "connection that is already in a transaction. " + "This is usually caused by a core event, " + "such as 'engine_connect', that has left a " + "transaction open. The effective join " + "transaction mode used by this session is " + f"{join_transaction_mode!r}. To silence this " + "warning, do not leave transactions open" + ) if join_transaction_mode in ( "control_fully", "rollback_only", diff --git a/test/orm/test_transaction.py b/test/orm/test_transaction.py index e502a888330..47bcf69b571 100644 --- a/test/orm/test_transaction.py +++ b/test/orm/test_transaction.py @@ -108,7 +108,7 @@ def test_external_nested_transaction(self, connection_no_trans): trans.commit() assert len(sess.query(User).all()) == 1 - @testing.variation( + join_transaction_mode = testing.variation( "join_transaction_mode", [ "none", @@ -118,6 +118,8 @@ def test_external_nested_transaction(self, connection_no_trans): "rollback_only", ], ) + + @join_transaction_mode @testing.variation("operation", ["commit", "close", "rollback", "nothing"]) @testing.variation("external_state", ["none", "transaction", "savepoint"]) def test_join_transaction_modes( @@ -243,6 +245,36 @@ def test_join_transaction_modes( else: external_state.fail() + @join_transaction_mode + def test_join_transaction_mode_with_event(self, join_transaction_mode): + eng = engines.testing_engine() + + @event.listens_for(eng, "engine_connect") + def make_transaction(conn): + conn.begin() + + if join_transaction_mode.none: + s = Session(eng) + else: + s = Session(eng, join_transaction_mode=join_transaction_mode.name) + if ( + join_transaction_mode.none + or join_transaction_mode.conditional_savepoint + ): + with expect_warnings( + "The engine provided as bind produced a " + "connection that is already in a transaction. " + "This is usually caused by a core event, " + "such as 'engine_connect', that has left a " + "transaction open. The effective join " + "transaction mode used by this session is " + "'rollback_only'. 
To silence this " + "warning, do not leave transactions open" + ): + s.connection() + else: + s.connection() + def test_subtransaction_on_external_commit(self, connection_no_trans): users, User = self.tables.users, self.classes.User @@ -839,7 +871,10 @@ def test_execution_options_begin_transaction(self): return_value=mock.Mock( _is_future=False, execution_options=mock.Mock( - return_value=mock.Mock(_is_future=False) + return_value=mock.Mock( + _is_future=False, + in_transaction=mock.Mock(return_value=False), + ) ), ) ) @@ -857,7 +892,9 @@ def test_execution_options_begin_transaction(self): def test_execution_options_ignored_mid_transaction(self): bind = mock.Mock() - conn = mock.Mock(engine=bind) + conn = mock.Mock( + engine=bind, in_transaction=mock.Mock(return_value=False) + ) bind.connect = mock.Mock(return_value=conn) sess = Session(bind=bind) sess.execute(text("select 1")) From 860a3969e810546447a7cfff40209006f81cb98f Mon Sep 17 00:00:00 2001 From: Wouter Kayser Date: Sun, 23 Jun 2024 06:18:05 -0400 Subject: [PATCH 241/544] set type of type_of to be same as input argument Fixes: #11371 Fixes the of_type method so that it does not return a class with unset generic. See the original issue for a more detailed explanation. Closes: #11416 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11416 Pull-request-sha: ed8d0edebb6b84b9dcffcf24c52f113c37e7fedd Change-Id: I35637491d6d9c573825f6d13299712626dd521c5 (cherry picked from commit 7d8dfa10df3be8d138dd954708efca7d6ed0e503) --- lib/sqlalchemy/orm/attributes.py | 2 +- test/typing/plain_files/orm/relationship.py | 27 +++++++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index 5b16ce3d6b3..33cca564927 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -401,7 +401,7 @@ def adapt_to_entity(self, adapt_to_entity: AliasedInsp[Any]) -> Self: parententity=adapt_to_entity, ) - def of_type(self, entity: _EntityType[Any]) -> QueryableAttribute[_T]: + def of_type(self, entity: _EntityType[_T]) -> QueryableAttribute[_T]: return QueryableAttribute( self.class_, self.key, diff --git a/test/typing/plain_files/orm/relationship.py b/test/typing/plain_files/orm/relationship.py index 5caf57de7bd..683e347f19f 100644 --- a/test/typing/plain_files/orm/relationship.py +++ b/test/typing/plain_files/orm/relationship.py @@ -106,6 +106,30 @@ class SelfReferential(Base): ) +class Employee(Base): + __tablename__ = "employee" + id: Mapped[int] = mapped_column(primary_key=True) + team_id: Mapped[int] = mapped_column(ForeignKey("team.id")) + team: Mapped["Team"] = relationship(back_populates="employees") + + __mapper_args__ = { + "polymorphic_on": "type", + "polymorphic_identity": "employee", + } + + +class Team(Base): + __tablename__ = "team" + id: Mapped[int] = mapped_column(primary_key=True) + employees: Mapped[list[Employee]] = relationship("Employee") + + +class Engineer(Employee): + engineer_info: Mapped[str] + + __mapper_args__ = {"polymorphic_identity": "engineer"} + + if typing.TYPE_CHECKING: # EXPECTED_RE_TYPE: sqlalchemy.*.InstrumentedAttribute\[Union\[builtins.str, None\]\] reveal_type(User.extra) @@ -137,6 +161,9 @@ class SelfReferential(Base): # EXPECTED_RE_TYPE: sqlalchemy.*.InstrumentedAttribute\[builtins.set\*?\[relationship.MoreMail\]\] reveal_type(Address.rel_style_one_anno_only) + # EXPECTED_RE_TYPE: sqlalchemy.*.QueryableAttribute\[relationship.Engineer\] + reveal_type(Team.employees.of_type(Engineer)) + 
mapper_registry: registry = registry() From ec43cb2c4ba96f4aae0d049da1fa970b021224e1 Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Sun, 23 Jun 2024 05:37:24 -0400 Subject: [PATCH 242/544] Handle "SSL SYSCALL error: Success" in psycopg2 Added "SSL SYSCALL error: Success" to the list of exceptions that are considered a "disconnect" in psycopg2. Fixes: #11522 Closes: #11523 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11523 Pull-request-sha: 63ad54e49dc3daa459caa29da8cffcb3e47a3f8c Change-Id: I0db49d5c4db418a8e634f5370c76b99aaa3d3af6 (cherry picked from commit 03d2832fbfd053b6f58f0879e823920ca9d71cbb) --- doc/build/changelog/unreleased_20/11522.rst | 7 +++ .../dialects/postgresql/psycopg2.py | 56 +++++++++++-------- test/dialect/postgresql/test_dialect.py | 1 + 3 files changed, 41 insertions(+), 23 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11522.rst diff --git a/doc/build/changelog/unreleased_20/11522.rst b/doc/build/changelog/unreleased_20/11522.rst new file mode 100644 index 00000000000..279197a779b --- /dev/null +++ b/doc/build/changelog/unreleased_20/11522.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, postgresql + :tickets: 11522 + + It is now considered a pool-invalidating disconnect event when psycopg2 + throws an "SSL SYSCALL error: Success" error message, which can occur when + the SSL connection to Postgres is terminated abnormally. \ No newline at end of file diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index 6c492a5b250..fc05aca9078 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -844,33 +844,43 @@ def is_disconnect(self, e, connection, cursor): # checks based on strings. in the case that .closed # didn't cut it, fall back onto these. str_e = str(e).partition("\n")[0] - for msg in [ - # these error messages from libpq: interfaces/libpq/fe-misc.c - # and interfaces/libpq/fe-secure.c. - "terminating connection", - "closed the connection", - "connection not open", - "could not receive data from server", - "could not send data to server", - # psycopg2 client errors, psycopg2/connection.h, - # psycopg2/cursor.h - "connection already closed", - "cursor already closed", - # not sure where this path is originally from, it may - # be obsolete. It really says "losed", not "closed". - "losed the connection unexpectedly", - # these can occur in newer SSL - "connection has been closed unexpectedly", - "SSL error: decryption failed or bad record mac", - "SSL SYSCALL error: Bad file descriptor", - "SSL SYSCALL error: EOF detected", - "SSL SYSCALL error: Operation timed out", - "SSL SYSCALL error: Bad address", - ]: + for msg in self._is_disconnect_messages: idx = str_e.find(msg) if idx >= 0 and '"' not in str_e[:idx]: return True return False + @util.memoized_property + def _is_disconnect_messages(self): + return ( + # these error messages from libpq: interfaces/libpq/fe-misc.c + # and interfaces/libpq/fe-secure.c. + "terminating connection", + "closed the connection", + "connection not open", + "could not receive data from server", + "could not send data to server", + # psycopg2 client errors, psycopg2/connection.h, + # psycopg2/cursor.h + "connection already closed", + "cursor already closed", + # not sure where this path is originally from, it may + # be obsolete. It really says "losed", not "closed". 
+ "losed the connection unexpectedly", + # these can occur in newer SSL + "connection has been closed unexpectedly", + "SSL error: decryption failed or bad record mac", + "SSL SYSCALL error: Bad file descriptor", + "SSL SYSCALL error: EOF detected", + "SSL SYSCALL error: Operation timed out", + "SSL SYSCALL error: Bad address", + # This can occur in OpenSSL 1 when an unexpected EOF occurs. + # https://www.openssl.org/docs/man1.1.1/man3/SSL_get_error.html#BUGS + # It may also occur in newer OpenSSL for a non-recoverable I/O + # error as a result of a system call that does not set 'errno' + # in libc. + "SSL SYSCALL error: Success", + ) + dialect = PGDialect_psycopg2 diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py index eae1b55d6e9..3f55c085fb4 100644 --- a/test/dialect/postgresql/test_dialect.py +++ b/test/dialect/postgresql/test_dialect.py @@ -365,6 +365,7 @@ class Error(Exception): "SSL SYSCALL error: EOF detected", "SSL SYSCALL error: Operation timed out", "SSL SYSCALL error: Bad address", + "SSL SYSCALL error: Success", ]: eq_(dialect.is_disconnect(Error(error), None, None), True) From fa1d936177160b474fd4e57a0825a663bbf64691 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 26 Jun 2024 12:13:28 -0400 Subject: [PATCH 243/544] add additional test cases re: #11371 Just want to make sure aliased() and with_polymorphic() still work in the of_type() context here, since that's likely why we had Any for this parameter in the first place Change-Id: I0a2c4445bc3b91039b3446d31b4a02db28feaee7 (cherry picked from commit 9b631dff45bbf4539c78eb73529b960acda80efd) --- test/typing/plain_files/orm/relationship.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/test/typing/plain_files/orm/relationship.py b/test/typing/plain_files/orm/relationship.py index 683e347f19f..44090ad53b4 100644 --- a/test/typing/plain_files/orm/relationship.py +++ b/test/typing/plain_files/orm/relationship.py @@ -16,6 +16,7 @@ from sqlalchemy import Integer from sqlalchemy import select from sqlalchemy import Table +from sqlalchemy.orm import aliased from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import joinedload from sqlalchemy.orm import Mapped @@ -24,6 +25,7 @@ from sqlalchemy.orm import Relationship from sqlalchemy.orm import relationship from sqlalchemy.orm import Session +from sqlalchemy.orm import with_polymorphic class Base(DeclarativeBase): @@ -164,6 +166,15 @@ class Engineer(Employee): # EXPECTED_RE_TYPE: sqlalchemy.*.QueryableAttribute\[relationship.Engineer\] reveal_type(Team.employees.of_type(Engineer)) + # EXPECTED_RE_TYPE: sqlalchemy.*.QueryableAttribute\[relationship.Employee\] + reveal_type(Team.employees.of_type(aliased(Employee))) + + # EXPECTED_RE_TYPE: sqlalchemy.*.QueryableAttribute\[relationship.Engineer\] + reveal_type(Team.employees.of_type(aliased(Engineer))) + + # EXPECTED_RE_TYPE: sqlalchemy.*.QueryableAttribute\[relationship.Employee\] + reveal_type(Team.employees.of_type(with_polymorphic(Employee, [Engineer]))) + mapper_registry: registry = registry() From 0b84b9026ce9886b4ab54d0c2dd15984ba42ce7f Mon Sep 17 00:00:00 2001 From: lonkeknol Date: Thu, 27 Jun 2024 09:03:30 -0400 Subject: [PATCH 244/544] Docs: simplify language use for "Working with Transactions and the DBAPI" ### Description This is my first pull request to sqlalchemy. It changes the writing style of two paragraphs in the unified tutorial [here](https://docs.sqlalchemy.org/en/20/tutorial/dbapi_transactions.html#working-with-transactions-and-the-dbapi). 
My goals were to. 1. Make them easier to read 2. Not change the meaning of the text. 3. Get feedback on whether this type of contribution is considered useful for sqlalchemy. If this is a useful type of contribution, it might be good to discuss some general guidelines for me to adhere to as I continue. For instance: - Prefer using present simple tense - Remove superfluous words where possible - Keep the pull requests to one or two h2 sections at a time, to make the review easier ### Checklist This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. I'm curious to hear what you all think. **Have a nice day!** Closes: #11541 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11541 Pull-request-sha: 3179690e6a5b47de99a4486a7a15cffbfacd380b Change-Id: I9b47f6ce4fd00c44c4b0e19957acf250f5e46d2f (cherry picked from commit fc2cb4496d35c0b8bb7d59aa74b553f07210eded) --- doc/build/tutorial/dbapi_transactions.rst | 34 +++++++++++------------ 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/doc/build/tutorial/dbapi_transactions.rst b/doc/build/tutorial/dbapi_transactions.rst index ade14eb4fb3..a8895dd3c54 100644 --- a/doc/build/tutorial/dbapi_transactions.rst +++ b/doc/build/tutorial/dbapi_transactions.rst @@ -11,32 +11,32 @@ Working with Transactions and the DBAPI -With the :class:`_engine.Engine` object ready to go, we may now proceed -to dive into the basic operation of an :class:`_engine.Engine` and -its primary interactive endpoints, the :class:`_engine.Connection` and -:class:`_engine.Result`. We will additionally introduce the ORM's -:term:`facade` for these objects, known as the :class:`_orm.Session`. +With the :class:`_engine.Engine` object ready to go, we can +dive into the basic operation of an :class:`_engine.Engine` and +its primary endpoints, the :class:`_engine.Connection` and +:class:`_engine.Result`. We'll also introduce the ORM's :term:`facade` +for these objects, known as the :class:`_orm.Session`. .. container:: orm-header **Note to ORM readers** - When using the ORM, the :class:`_engine.Engine` is managed by another - object called the :class:`_orm.Session`. The :class:`_orm.Session` in - modern SQLAlchemy emphasizes a transactional and SQL execution pattern that - is largely identical to that of the :class:`_engine.Connection` discussed - below, so while this subsection is Core-centric, all of the concepts here - are essentially relevant to ORM use as well and is recommended for all ORM + When using the ORM, the :class:`_engine.Engine` is managed by the + :class:`_orm.Session`. 
The :class:`_orm.Session` in modern SQLAlchemy + emphasizes a transactional and SQL execution pattern that is largely + identical to that of the :class:`_engine.Connection` discussed below, + so while this subsection is Core-centric, all of the concepts here + are relevant to ORM use as well and is recommended for all ORM learners. The execution pattern used by the :class:`_engine.Connection` - will be contrasted with that of the :class:`_orm.Session` at the end + will be compared to the :class:`_orm.Session` at the end of this section. As we have yet to introduce the SQLAlchemy Expression Language that is the -primary feature of SQLAlchemy, we will make use of one simple construct within -this package called the :func:`_sql.text` construct, which allows us to write -SQL statements as **textual SQL**. Rest assured that textual SQL in -day-to-day SQLAlchemy use is by far the exception rather than the rule for most -tasks, even though it always remains fully available. +primary feature of SQLAlchemy, we'll use a simple construct within +this package called the :func:`_sql.text` construct, to write +SQL statements as **textual SQL**. Rest assured that textual SQL is the +exception rather than the rule in day-to-day SQLAlchemy use, but it's +always available. .. rst-class:: core-header From 522baa306fc788cf02acf29bf08e86a431a7050e Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 27 Jun 2024 18:17:47 -0400 Subject: [PATCH 245/544] cache key share; support correct traverse of 'of' Fixed caching issue where the :paramref:`_sql.Select.with_for_update.key_share` element of :meth:`_sql.Select.with_for_update` was not considered as part of the cache key, leading to incorrect caching if different variations of this parameter were used with an otherwise identical statement. Also repairs a traversal issue where the ``of`` element of ``ForUpdateArg`` when set to ``None`` cannot be compared against a non-None element because the traversal defines it as a clauselist. Traversal in this case is adjusted to accommodate for this case so that we dont need to create a risky-to-backport change to ``ForUpdateArg`` itself. 
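For illustration, two otherwise-identical statements differing only in
``key_share`` should now produce distinct cache keys. This is a minimal
sketch, not part of the patch; it relies on the internal
``_generate_cache_key()`` helper and a throwaway table, both used here
only for demonstration::

    from sqlalchemy import Column, Integer, MetaData, Table, select

    t = Table("t", MetaData(), Column("id", Integer, primary_key=True))

    stmt1 = select(t.c.id).with_for_update()
    stmt2 = select(t.c.id).with_for_update(key_share=True)

    # previously these compared equal, allowing one statement to reuse the
    # other's cached compiled form; with key_share included in traversal
    # they now differ
    assert stmt1._generate_cache_key() != stmt2._generate_cache_key()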
Fixes: #11544 Change-Id: Ie8a50716df06977af58b0c22a8c10e1b64d972b9 (cherry picked from commit 6d2f43e14f2fe25cdc811355b7bd6d11f8eee381) --- lib/sqlalchemy/sql/selectable.py | 1 + lib/sqlalchemy/sql/traversals.py | 2 ++ test/sql/test_compare.py | 15 +++++++++++++++ 3 files changed, 18 insertions(+) diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index a178458480b..aae1e47e090 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -3081,6 +3081,7 @@ class ForUpdateArg(ClauseElement): ("nowait", InternalTraversal.dp_boolean), ("read", InternalTraversal.dp_boolean), ("skip_locked", InternalTraversal.dp_boolean), + ("key_share", InternalTraversal.dp_boolean), ] of: Optional[Sequence[ClauseElement]] diff --git a/lib/sqlalchemy/sql/traversals.py b/lib/sqlalchemy/sql/traversals.py index 3ca3caf9e2c..8bb2939cb31 100644 --- a/lib/sqlalchemy/sql/traversals.py +++ b/lib/sqlalchemy/sql/traversals.py @@ -562,6 +562,8 @@ def compare( return False else: continue + elif right_child is None: + return False comparison = dispatch( left_attrname, left, left_child, right, right_child, **kw diff --git a/test/sql/test_compare.py b/test/sql/test_compare.py index c1f6e7f1136..2a7e41387bf 100644 --- a/test/sql/test_compare.py +++ b/test/sql/test_compare.py @@ -478,6 +478,21 @@ class CoreFixtures: select(table_a.c.a) .where(table_a.c.b == 5) .with_for_update(nowait=True), + select(table_a.c.a) + .where(table_a.c.b == 5) + .with_for_update(nowait=True, skip_locked=True), + select(table_a.c.a) + .where(table_a.c.b == 5) + .with_for_update(nowait=True, read=True), + select(table_a.c.a) + .where(table_a.c.b == 5) + .with_for_update(of=table_a.c.a), + select(table_a.c.a) + .where(table_a.c.b == 5) + .with_for_update(of=table_a.c.b), + select(table_a.c.a) + .where(table_a.c.b == 5) + .with_for_update(nowait=True, key_share=True), select(table_a.c.a).where(table_a.c.b == 5).correlate(table_b), select(table_a.c.a) .where(table_a.c.b == 5) From 416c27757a11034c202b408756f52baad9e23098 Mon Sep 17 00:00:00 2001 From: lonkeknol Date: Fri, 28 Jun 2024 12:27:33 -0400 Subject: [PATCH 246/544] Docs: simplify language in getting a connection & committing changes ### Description Simplifies language use in [Getting a Connection](https://docs.sqlalchemy.org/en/20/tutorial/dbapi_transactions.html#getting-a-connection) and [Committing Changes](https://docs.sqlalchemy.org/en/20/tutorial/dbapi_transactions.html#committing-changes) ### Checklist This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. 
**Have a nice day!** Closes: #11542 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11542 Pull-request-sha: d706e69fb6058d3483fce98cfacbbf36ca12d78e Change-Id: I7788f2a16a5127b3c9623f7b00f06f649b04e0fb (cherry picked from commit 82d14a7515187ad744037ca9017ced1782314854) --- doc/build/tutorial/dbapi_transactions.rst | 88 +++++++++++------------ 1 file changed, 42 insertions(+), 46 deletions(-) diff --git a/doc/build/tutorial/dbapi_transactions.rst b/doc/build/tutorial/dbapi_transactions.rst index a8895dd3c54..5525acfe510 100644 --- a/doc/build/tutorial/dbapi_transactions.rst +++ b/doc/build/tutorial/dbapi_transactions.rst @@ -45,17 +45,15 @@ always available. Getting a Connection --------------------- -The sole purpose of the :class:`_engine.Engine` object from a user-facing -perspective is to provide a unit of -connectivity to the database called the :class:`_engine.Connection`. When -working with the Core directly, the :class:`_engine.Connection` object -is how all interaction with the database is done. As the :class:`_engine.Connection` -represents an open resource against the database, we want to always limit -the scope of our use of this object to a specific context, and the best -way to do that is by using Python context manager form, also known as -`the with statement `_. -Below we illustrate "Hello World", using a textual SQL statement. Textual -SQL is emitted using a construct called :func:`_sql.text` that will be discussed +The purpose of the :class:`_engine.Engine` is to connect to the database by +providing a :class:`_engine.Connection` object. When working with the Core +directly, the :class:`_engine.Connection` object is how all interaction with the +database is done. Because the :class:`_engine.Connection` creates an open +resource against the database, we want to limit our use of this object to a +specific context. The best way to do that is with a Python context manager, also +known as `the with statement `_. +Below we use a textual SQL statement to show "Hello World". Textual SQL is +created with a construct called :func:`_sql.text` which we'll discuss in more detail later: .. sourcecode:: pycon+sql @@ -71,21 +69,21 @@ in more detail later: {stop}[('hello world',)] {execsql}ROLLBACK{stop} -In the above example, the context manager provided for a database connection -and also framed the operation inside of a transaction. The default behavior of -the Python DBAPI includes that a transaction is always in progress; when the -scope of the connection is :term:`released`, a ROLLBACK is emitted to end the -transaction. The transaction is **not committed automatically**; when we want -to commit data we normally need to call :meth:`_engine.Connection.commit` +In the example above, the context manager creates a database connection +and executes the operation in a transaction. The default behavior of +the Python DBAPI is that a transaction is always in progress; when the +connection is :term:`released`, a ROLLBACK is emitted to end the +transaction. The transaction is **not committed automatically**; if we want +to commit data we need to call :meth:`_engine.Connection.commit` as we'll see in the next section. .. tip:: "autocommit" mode is available for special cases. The section :ref:`dbapi_autocommit` discusses this. 
-The result of our SELECT was also returned in an object called -:class:`_engine.Result` that will be discussed later, however for the moment -we'll add that it's best to ensure this object is consumed within the -"connect" block, and is not passed along outside of the scope of our connection. +The result of our SELECT was returned in an object called +:class:`_engine.Result` that will be discussed later. For the moment +we'll add that it's best to use this object within the "connect" block, +and to not use it outside of the scope of our connection. .. rst-class:: core-header @@ -94,11 +92,11 @@ we'll add that it's best to ensure this object is consumed within the Committing Changes ------------------ -We just learned that the DBAPI connection is non-autocommitting. What if -we want to commit some data? We can alter our above example to create a -table and insert some data, and the transaction is then committed using -the :meth:`_engine.Connection.commit` method, invoked **inside** the block -where we acquired the :class:`_engine.Connection` object: +We just learned that the DBAPI connection doesn't commit automatically. +What if we want to commit some data? We can change our example above to create a +table, insert some data and then commit the transaction using +the :meth:`_engine.Connection.commit` method, **inside** the block +where we have the :class:`_engine.Connection` object: .. sourcecode:: pycon+sql @@ -119,24 +117,22 @@ where we acquired the :class:`_engine.Connection` object: COMMIT -Above, we emitted two SQL statements that are generally transactional, a -"CREATE TABLE" statement [1]_ and an "INSERT" statement that's parameterized -(the parameterization syntax above is discussed a few sections below in -:ref:`tutorial_multiple_parameters`). As we want the work we've done to be -committed within our block, we invoke the +Above, we execute two SQL statements, a "CREATE TABLE" statement [1]_ +and an "INSERT" statement that's parameterized (we discuss the parameterization syntax +later in :ref:`tutorial_multiple_parameters`). +To commit the work we've done in our block, we call the :meth:`_engine.Connection.commit` method which commits the transaction. After -we call this method inside the block, we can continue to run more SQL -statements and if we choose we may call :meth:`_engine.Connection.commit` -again for subsequent statements. SQLAlchemy refers to this style as **commit as +this, we can continue to run more SQL statements and call :meth:`_engine.Connection.commit` +again for those statements. SQLAlchemy refers to this style as **commit as you go**. -There is also another style of committing data, which is that we can declare -our "connect" block to be a transaction block up front. For this mode of -operation, we use the :meth:`_engine.Engine.begin` method to acquire the -connection, rather than the :meth:`_engine.Engine.connect` method. This method -will both manage the scope of the :class:`_engine.Connection` and also -enclose everything inside of a transaction with COMMIT at the end, assuming -a successful block, or ROLLBACK in case of exception raise. This style +There's also another style to commit data. We can declare +our "connect" block to be a transaction block up front. To do this, we use the +:meth:`_engine.Engine.begin` method to get the connection, rather than the +:meth:`_engine.Engine.connect` method. 
This method +will manage the scope of the :class:`_engine.Connection` and also +enclose everything inside of a transaction with either a COMMIT at the end +if the block was successful, or a ROLLBACK if an exception was raised. This style is known as **begin once**: .. sourcecode:: pycon+sql @@ -153,9 +149,9 @@ is known as **begin once**: COMMIT -"Begin once" style is often preferred as it is more succinct and indicates the -intention of the entire block up front. However, within this tutorial we will -normally use "commit as you go" style as it is more flexible for demonstration +You should mostly prefer the "begin once" style because it's shorter and shows the +intention of the entire block up front. However, in this tutorial we'll +use "commit as you go" style as it's more flexible for demonstration purposes. .. topic:: What's "BEGIN (implicit)"? @@ -169,8 +165,8 @@ purposes. .. [1] :term:`DDL` refers to the subset of SQL that instructs the database to create, modify, or remove schema-level constructs such as tables. DDL - such as "CREATE TABLE" is recommended to be within a transaction block that - ends with COMMIT, as many databases uses transactional DDL such that the + such as "CREATE TABLE" should be in a transaction block that + ends with COMMIT, as many databases use transactional DDL such that the schema changes don't take place until the transaction is committed. However, as we'll see later, we usually let SQLAlchemy run DDL sequences for us as part of a higher level operation where we don't generally need to worry From 2ee2e230b6cd1e71681e99dae0286188d0842780 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 28 Jun 2024 16:30:57 -0400 Subject: [PATCH 247/544] handle DBAPI error for fetchall() Fixed issue in "insertmanyvalues" feature where a particular call to ``cursor.fetchall()`` were not wrapped in SQLAlchemy's exception wrapper, which apparently can raise a database exception during fetch when using pyodbc. Fixes: #11532 Change-Id: Ic07d3e79dd597e18d87a56b45ddffa25e762beb9 (cherry picked from commit fb47dbbc74f59d0be3411d52bc27155095b50631) --- doc/build/changelog/unreleased_20/11532.rst | 8 ++++++++ lib/sqlalchemy/engine/base.py | 2 ++ lib/sqlalchemy/engine/default.py | 21 +++++++++++++++++++-- lib/sqlalchemy/engine/interfaces.py | 1 + lib/sqlalchemy/testing/fixtures/sql.py | 14 ++++++++++++-- test/sql/test_insert_exec.py | 21 +++++++++++++++++++++ 6 files changed, 63 insertions(+), 4 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11532.rst diff --git a/doc/build/changelog/unreleased_20/11532.rst b/doc/build/changelog/unreleased_20/11532.rst new file mode 100644 index 00000000000..141463d5835 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11532.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, engine + :tickets: 11532 + + Fixed issue in "insertmanyvalues" feature where a particular call to + ``cursor.fetchall()`` were not wrapped in SQLAlchemy's exception wrapper, + which apparently can raise a database exception during fetch when using + pyodbc. 
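A hedged sketch of the failure mode addressed here, modeled on the test
added further below; the DSN-style URL is a placeholder, and any
pyodbc-connected backend that supports "insertmanyvalues" with RETURNING
applies::

    from sqlalchemy import (
        Column, Integer, MetaData, String, Table, create_engine, exc,
    )

    metadata = MetaData()
    uniq = Table(
        "uniq_cons",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", String(50), unique=True),
    )

    engine = create_engine("mssql+pyodbc://scott:tiger@mydsn")  # placeholder
    metadata.create_all(engine)

    with engine.connect() as conn:
        stmt = uniq.insert().returning(uniq.c.id)
        try:
            # executemany with RETURNING goes through the insertmanyvalues
            # batches; an error raised while fetching the RETURNING rows is
            # now re-raised through SQLAlchemy's DBAPI exception wrapper
            conn.execute(stmt, [{"data": "the data"}, {"data": "the data"}])
        except exc.DBAPIError:
            conn.rollback()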
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 83d1cc1317a..983bdae037f 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -2031,6 +2031,7 @@ def _exec_insertmany_context( rowcount = 0 for imv_batch in dialect._deliver_insertmanyvalues_batches( + self, cursor, str_statement, effective_parameters, @@ -2051,6 +2052,7 @@ def _exec_insertmany_context( imv_batch.replaced_parameters, None, context, + is_sub_exec=True, ) sub_stmt = imv_batch.replaced_statement diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index b8eacc032ed..d42a3138bc1 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -58,6 +58,7 @@ from ..sql import dml from ..sql import expression from ..sql import type_api +from ..sql import util as sql_util from ..sql._typing import is_tuple_type from ..sql.base import _NoArg from ..sql.compiler import DDLCompiler @@ -762,7 +763,13 @@ def do_release_savepoint(self, connection, name): connection.execute(expression.ReleaseSavepointClause(name)) def _deliver_insertmanyvalues_batches( - self, cursor, statement, parameters, generic_setinputsizes, context + self, + connection, + cursor, + statement, + parameters, + generic_setinputsizes, + context, ): context = cast(DefaultExecutionContext, context) compiled = cast(SQLCompiler, context.compiled) @@ -813,7 +820,17 @@ def _deliver_insertmanyvalues_batches( if is_returning: - rows = context.fetchall_for_returning(cursor) + try: + rows = context.fetchall_for_returning(cursor) + except BaseException as be: + connection._handle_dbapi_exception( + be, + sql_util._long_statement(imv_batch.replaced_statement), + imv_batch.replaced_parameters, + None, + context, + is_sub_exec=True, + ) # I would have thought "is_returning: Final[bool]" # would have assured this but pylance thinks not diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index d1657b8b010..7a152c3305c 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -2147,6 +2147,7 @@ def do_recover_twophase(self, connection: Connection) -> List[Any]: def _deliver_insertmanyvalues_batches( self, + connection: Connection, cursor: DBAPICursor, statement: str, parameters: _DBAPIMultiExecuteParams, diff --git a/lib/sqlalchemy/testing/fixtures/sql.py b/lib/sqlalchemy/testing/fixtures/sql.py index 830fa276593..39e5b084465 100644 --- a/lib/sqlalchemy/testing/fixtures/sql.py +++ b/lib/sqlalchemy/testing/fixtures/sql.py @@ -470,12 +470,22 @@ def fetchall(self): return rows def _deliver_insertmanyvalues_batches( - cursor, statement, parameters, generic_setinputsizes, context + connection, + cursor, + statement, + parameters, + generic_setinputsizes, + context, ): if randomize_rows: cursor = RandomCursor(cursor) for batch in orig_dialect( - cursor, statement, parameters, generic_setinputsizes, context + connection, + cursor, + statement, + parameters, + generic_setinputsizes, + context, ): if warn_on_downgraded and batch.is_downgraded: util.warn("Batches were downgraded for sorted INSERT") diff --git a/test/sql/test_insert_exec.py b/test/sql/test_insert_exec.py index ebb0b23a5f6..f80b4c447ea 100644 --- a/test/sql/test_insert_exec.py +++ b/test/sql/test_insert_exec.py @@ -771,6 +771,27 @@ def define_tables(cls, metadata): Column("x_value", String(50)), Column("y_value", String(50)), ) + Table( + "uniq_cons", + metadata, + Column("id", Integer, primary_key=True), + Column("data", String(50), unique=True), + ) 
+ + @testing.variation("use_returning", [True, False]) + def test_returning_integrity_error(self, connection, use_returning): + """test for #11532""" + + stmt = self.tables.uniq_cons.insert() + if use_returning: + stmt = stmt.returning(self.tables.uniq_cons.c.id) + + # pymssql thought it would be funny to use OperationalError for + # a unique key violation. + with expect_raises((exc.IntegrityError, exc.OperationalError)): + connection.execute( + stmt, [{"data": "the data"}, {"data": "the data"}] + ) def test_insert_unicode_keys(self, connection): table = self.tables["Unitéble2"] From 88a7f05b62b6e7945fdbb076453b27f30e14e94b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 2 Jul 2024 13:57:47 -0400 Subject: [PATCH 248/544] call iter() on detached/transient dynamic session Fixed regression going back to 1.4 where accessing a collection using the "dynamic" strategy on a transient object and attempting to query would raise an internal error rather than the expected :class:`.NoResultFound` that occurred in 1.3. Fixes: #11562 Change-Id: I650305963a17592413520d8d1049c601761a0acc (cherry picked from commit d9d98eacca11490b7df878ef399b92fbb2df2f47) --- doc/build/changelog/unreleased_14/11562.rst | 8 ++++++ lib/sqlalchemy/orm/dynamic.py | 10 +++++--- test/orm/test_dynamic.py | 27 +++++++++++++++++++++ 3 files changed, 41 insertions(+), 4 deletions(-) create mode 100644 doc/build/changelog/unreleased_14/11562.rst diff --git a/doc/build/changelog/unreleased_14/11562.rst b/doc/build/changelog/unreleased_14/11562.rst new file mode 100644 index 00000000000..15ccd0df6d2 --- /dev/null +++ b/doc/build/changelog/unreleased_14/11562.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, orm, regression + :tickets: 11562 + + Fixed regression going back to 1.4 where accessing a collection using the + "dynamic" strategy on a transient object and attempting to query would + raise an internal error rather than the expected :class:`.NoResultFound` + that occurred in 1.3. 
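Sketch of the pattern restored here, following the new test below; the
mapped classes are placeholders standing in for any "dynamic" relationship
accessed on a transient (never-added) instance::

    from sqlalchemy import ForeignKey
    from sqlalchemy.exc import NoResultFound
    from sqlalchemy.orm import (
        DeclarativeBase, DynamicMapped, Mapped, mapped_column, relationship,
    )

    class Base(DeclarativeBase):
        pass

    class User(Base):
        __tablename__ = "user_account"
        id: Mapped[int] = mapped_column(primary_key=True)
        addresses: DynamicMapped["Address"] = relationship()

    class Address(Base):
        __tablename__ = "address"
        id: Mapped[int] = mapped_column(primary_key=True)
        user_id: Mapped[int] = mapped_column(ForeignKey("user_account.id"))

    u1 = User()  # transient; never added to a Session

    # querying the pending collection works again rather than raising an
    # internal error
    assert u1.addresses.all() == []
    assert u1.addresses.one_or_none() is None
    try:
        u1.addresses.one()
    except NoResultFound:
        pass  # expected, matching the 1.3 behavior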
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py index 7496e5c30da..ad1b239c13c 100644 --- a/lib/sqlalchemy/orm/dynamic.py +++ b/lib/sqlalchemy/orm/dynamic.py @@ -161,10 +161,12 @@ def _iter(self) -> Union[result.ScalarResult[_T], result.Result[_T]]: return result.IteratorResult( result.SimpleResultMetaData([self.attr.class_.__name__]), - self.attr._get_collection_history( # type: ignore[arg-type] - attributes.instance_state(self.instance), - PassiveFlag.PASSIVE_NO_INITIALIZE, - ).added_items, + iter( + self.attr._get_collection_history( + attributes.instance_state(self.instance), + PassiveFlag.PASSIVE_NO_INITIALIZE, + ).added_items + ), _source_supports_scalars=True, ).scalars() else: diff --git a/test/orm/test_dynamic.py b/test/orm/test_dynamic.py index cce3f8c18a8..465e29929e9 100644 --- a/test/orm/test_dynamic.py +++ b/test/orm/test_dynamic.py @@ -275,6 +275,33 @@ def my_filter(self, arg): use_default_dialect=True, ) + @testing.combinations( + ("all", []), + ("one", exc.NoResultFound), + ("one_or_none", None), + argnames="method, expected", + ) + @testing.variation("add_to_session", [True, False]) + def test_transient_raise( + self, user_address_fixture, method, expected, add_to_session + ): + """test 11562""" + User, Address = user_address_fixture() + + u1 = User(name="u1") + if add_to_session: + sess = fixture_session() + sess.add(u1) + + meth = getattr(u1.addresses, method) + if expected is exc.NoResultFound: + with expect_raises_message( + exc.NoResultFound, "No row was found when one was required" + ): + meth() + else: + eq_(meth(), expected) + def test_detached_raise(self, user_address_fixture): """so filtering on a detached dynamic list raises an error...""" From 021ebfd833082ca8987c2a5beb7667ea341d45d3 Mon Sep 17 00:00:00 2001 From: Sergio Oller Moreno Date: Tue, 2 Jul 2024 04:39:00 -0400 Subject: [PATCH 249/544] Fix table reflection on oracle <10.2 Fixed table reflection on Oracle 10.2 and older where compression options are not supported. Fixes: #11557 Closes: #11558 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11558 Pull-request-sha: 44e4f51a3616388f39b0f5edee3124d389765da3 Change-Id: I34c6a367bef158a0c6cf01f185e18392552b7cc7 (cherry picked from commit e4aa3bedf83a0325f747eca187e623f48f036c9d) --- doc/build/changelog/unreleased_20/11557.txt | 6 ++++++ lib/sqlalchemy/dialects/oracle/base.py | 12 ++++++++++-- 2 files changed, 16 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11557.txt diff --git a/doc/build/changelog/unreleased_20/11557.txt b/doc/build/changelog/unreleased_20/11557.txt new file mode 100644 index 00000000000..be270a6f251 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11557.txt @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, reflection, oracle + :tickets: 11557 + + Fixed table reflection on Oracle 10.2 and older where compression options + are not supported. 
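Sketch of a reflection call that exercises the adjusted query; the URL and
table name are placeholders, the relevant case being an Oracle server at
version 10.2 or older::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("oracle+cx_oracle://scott:tiger@legacydb")

    # on servers whose ALL_TABLES view lacks the COMPRESSION / COMPRESS_FOR
    # columns, the options query now selects NULL in their place rather
    # than failing
    print(inspect(engine).get_table_options("some_table"))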
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index a548b344997..a92bb5f844c 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -2036,8 +2036,16 @@ def _table_options_query( ): query = select( dictionary.all_tables.c.table_name, - dictionary.all_tables.c.compression, - dictionary.all_tables.c.compress_for, + ( + dictionary.all_tables.c.compression + if self._supports_table_compression + else sql.null().label("compression") + ), + ( + dictionary.all_tables.c.compress_for + if self._supports_table_compress_for + else sql.null().label("compress_for") + ), ).where(dictionary.all_tables.c.owner == owner) if has_filter_names: query = query.where( From 1fb4b28a1a5abd3a378e88f67f0982cc9ef1049b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Jul 2024 22:18:06 +0200 Subject: [PATCH 250/544] Bump pypa/cibuildwheel from 2.19.1 to 2.19.2 (#11561) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.19.1 to 2.19.2. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.19.1...v2.19.2) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cherry picked from commit fb388a18fb45025150b2c64cdb2a08694d993c97) --- .github/workflows/create-wheels.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index 0e7c593ca1a..4a0cd9e19be 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -74,7 +74,7 @@ jobs: - name: Build compiled wheels if: ${{ matrix.wheel_mode == 'compiled' }} - uses: pypa/cibuildwheel@v2.19.1 + uses: pypa/cibuildwheel@v2.19.2 env: CIBW_ARCHS_LINUX: ${{ matrix.linux_archs }} CIBW_BUILD: ${{ matrix.python }} From 24ff619094b675784ad8bb487527cc116ffc9200 Mon Sep 17 00:00:00 2001 From: Eric Atkin Date: Wed, 3 Jul 2024 16:05:04 -0400 Subject: [PATCH 251/544] Allow flat for join with name The :paramref:`_orm.aliased.name` parameter to :func:`_orm.aliased` may now be combined with the :paramref:`_orm.aliased.flat` parameter, producing per-table names based on a name-prefixed naming convention. Pull request courtesy Eric Atkin. Fixes: #11575 Closes: #11531 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11531 Pull-request-sha: f85535464be7b04d5f9745848d28f87dcd248b86 Change-Id: If79679c7a9598fffe99c033894b7dffecef13939 (cherry picked from commit 0e40962bf300bb26c873d00d80813a735fb7447f) --- doc/build/changelog/unreleased_20/11575.rst | 8 +++ lib/sqlalchemy/orm/_orm_constructors.py | 10 ++++ lib/sqlalchemy/sql/selectable.py | 20 ++++++-- test/orm/test_core_compilation.py | 55 +++++++++++++++++++++ test/sql/test_selectable.py | 28 +++++++++++ 5 files changed, 117 insertions(+), 4 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11575.rst diff --git a/doc/build/changelog/unreleased_20/11575.rst b/doc/build/changelog/unreleased_20/11575.rst new file mode 100644 index 00000000000..4eb56655fad --- /dev/null +++ b/doc/build/changelog/unreleased_20/11575.rst @@ -0,0 +1,8 @@ +.. 
change:: + :tags: usecase, orm + :tickets: 11575 + + The :paramref:`_orm.aliased.name` parameter to :func:`_orm.aliased` may now + be combined with the :paramref:`_orm.aliased.flat` parameter, producing + per-table names based on a name-prefixed naming convention. Pull request + courtesy Eric Atkin. diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index 38ea2b2f25f..783ca271d8f 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -2295,6 +2295,16 @@ def aliased( supported by all modern databases with regards to right-nested joins and generally produces more efficient queries. + When :paramref:`_orm.aliased.flat` is combined with + :paramref:`_orm.aliased.name`, the resulting joins will alias individual + tables using a naming scheme similar to ``_``. This + naming scheme is for visibility / debugging purposes only and the + specific scheme is subject to change without notice. + + .. versionadded:: 2.0.32 added support for combining + :paramref:`_orm.aliased.name` with :paramref:`_orm.aliased.flat`. + Previously, this would raise ``NotImplementedError``. + :param adapt_on_names: if True, more liberal "matching" will be used when mapping the mapped columns of the ORM entity to those of the given selectable - a name-based match will be performed if the diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index aae1e47e090..93b23f15e24 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -1516,11 +1516,23 @@ def _anonymous_fromclause( ) -> TODO_Any: sqlutil = util.preloaded.sql_util if flat: - if name is not None: - raise exc.ArgumentError("Can't send name argument with flat") + if isinstance(self.left, (FromGrouping, Join)): + left_name = name # will recurse + else: + if name and isinstance(self.left, NamedFromClause): + left_name = f"{name}_{self.left.name}" + else: + left_name = name + if isinstance(self.right, (FromGrouping, Join)): + right_name = name # will recurse + else: + if name and isinstance(self.right, NamedFromClause): + right_name = f"{name}_{self.right.name}" + else: + right_name = name left_a, right_a = ( - self.left._anonymous_fromclause(flat=True), - self.right._anonymous_fromclause(flat=True), + self.left._anonymous_fromclause(name=left_name, flat=flat), + self.right._anonymous_fromclause(name=right_name, flat=flat), ) adapter = sqlutil.ClauseAdapter(left_a).chain( sqlutil.ClauseAdapter(right_a) diff --git a/test/orm/test_core_compilation.py b/test/orm/test_core_compilation.py index 915c9747f8f..81aa760d9b2 100644 --- a/test/orm/test_core_compilation.py +++ b/test/orm/test_core_compilation.py @@ -2604,6 +2604,61 @@ def test_cte_recursive_handles_dupe_columns(self): "anon_1.primary_language FROM anon_1", ) + @testing.variation("named", [True, False]) + @testing.variation("flat", [True, False]) + def test_aliased_joined_entities(self, named, flat): + Company = self.classes.Company + Engineer = self.classes.Engineer + + if named: + e1 = aliased(Engineer, flat=flat, name="myengineer") + else: + e1 = aliased(Engineer, flat=flat) + + q = select(Company.name, e1.primary_language).join( + Company.employees.of_type(e1) + ) + + if not flat: + name = "anon_1" if not named else "myengineer" + + self.assert_compile( + q, + "SELECT companies.name, " + f"{name}.engineers_primary_language FROM companies " + "JOIN (SELECT people.person_id AS people_person_id, " + "people.company_id AS people_company_id, " + "people.name AS people_name, 
people.type AS people_type, " + "engineers.person_id AS engineers_person_id, " + "engineers.status AS engineers_status, " + "engineers.engineer_name AS engineers_engineer_name, " + "engineers.primary_language AS engineers_primary_language " + "FROM people JOIN engineers " + "ON people.person_id = engineers.person_id) AS " + f"{name} " + f"ON companies.company_id = {name}.people_company_id", + ) + elif named: + self.assert_compile( + q, + "SELECT companies.name, " + "myengineer_engineers.primary_language " + "FROM companies JOIN (people AS myengineer_people " + "JOIN engineers AS myengineer_engineers " + "ON myengineer_people.person_id = " + "myengineer_engineers.person_id) " + "ON companies.company_id = myengineer_people.company_id", + ) + else: + self.assert_compile( + q, + "SELECT companies.name, engineers_1.primary_language " + "FROM companies JOIN (people AS people_1 " + "JOIN engineers AS engineers_1 " + "ON people_1.person_id = engineers_1.person_id) " + "ON companies.company_id = people_1.company_id", + ) + class RawSelectTest(QueryTest, AssertsCompiledSQL): """older tests from test_query. Here, they are converted to use diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py index 0c0c23b8700..4a252930a38 100644 --- a/test/sql/test_selectable.py +++ b/test/sql/test_selectable.py @@ -2045,6 +2045,16 @@ def test_join_standalone_alias_flat(self): "a AS a_1 JOIN b AS b_1 ON a_1.a = b_1.b", ) + def test_join_alias_name_flat(self): + a = table("a", column("a")) + b = table("b", column("b")) + self.assert_compile( + a.join(b, a.c.a == b.c.b)._anonymous_fromclause( + name="foo", flat=True + ), + "a AS foo_a JOIN b AS foo_b ON foo_a.a = foo_b.b", + ) + def test_composed_join_alias_flat(self): a = table("a", column("a")) b = table("b", column("b")) @@ -2063,6 +2073,24 @@ def test_composed_join_alias_flat(self): "ON b_1.b = c_1.c", ) + def test_composed_join_alias_name_flat(self): + a = table("a", column("a")) + b = table("b", column("b")) + c = table("c", column("c")) + d = table("d", column("d")) + + j1 = a.join(b, a.c.a == b.c.b) + j2 = c.join(d, c.c.c == d.c.d) + + self.assert_compile( + j1.join(j2, b.c.b == c.c.c)._anonymous_fromclause( + name="foo", flat=True + ), + "a AS foo_a JOIN b AS foo_b ON foo_a.a = foo_b.b JOIN " + "(c AS foo_c JOIN d AS foo_d ON foo_c.c = foo_d.d) " + "ON foo_b.b = foo_c.c", + ) + def test_composed_join_alias(self): a = table("a", column("a")) b = table("b", column("b")) From 2db07e0c8ffb542df6140ca3e31596d0b33bfb00 Mon Sep 17 00:00:00 2001 From: opkna Date: Thu, 4 Jul 2024 22:13:10 +0200 Subject: [PATCH 252/544] Added valid types to server_onupdate (#11555) * Added valid types to server_onupdate and mapped_column kwargs mypy tests * Joined mapped_column test files * Set _ServerOnUpdateArgument to _ServerDefaultArgument Fixes: #11546 (cherry picked from commit f979aff468a4bdc32aad7b073583823cddf8f21c) --- lib/sqlalchemy/orm/_orm_constructors.py | 4 +- lib/sqlalchemy/sql/schema.py | 4 +- test/typing/plain_files/orm/mapped_column.py | 88 ++++++++++++++++++++ test/typing/plain_files/sql/core_ddl.py | 10 ++- 4 files changed, 102 insertions(+), 4 deletions(-) diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index 38ea2b2f25f..06561f8ddf6 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -70,7 +70,7 @@ from ..sql._typing import _TypeEngineArgument from ..sql.elements import ColumnElement from ..sql.schema import _ServerDefaultArgument - from ..sql.schema 
import FetchedValue + from ..sql.schema import _ServerOnUpdateArgument from ..sql.selectable import Alias from ..sql.selectable import Subquery @@ -127,7 +127,7 @@ def mapped_column( onupdate: Optional[Any] = None, insert_default: Optional[Any] = _NoArg.NO_ARG, server_default: Optional[_ServerDefaultArgument] = None, - server_onupdate: Optional[FetchedValue] = None, + server_onupdate: Optional[_ServerOnUpdateArgument] = None, active_history: bool = False, quote: Optional[bool] = None, system: bool = False, diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index eda4a97cc2d..a54252b2fbd 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -125,6 +125,8 @@ "FetchedValue", str, TextClause, ColumnElement[Any] ] +_ServerOnUpdateArgument = _ServerDefaultArgument + class SchemaConst(Enum): RETAIN_SCHEMA = 1 @@ -1528,7 +1530,7 @@ def __init__( onupdate: Optional[Any] = None, primary_key: bool = False, server_default: Optional[_ServerDefaultArgument] = None, - server_onupdate: Optional[FetchedValue] = None, + server_onupdate: Optional[_ServerOnUpdateArgument] = None, quote: Optional[bool] = None, system: bool = False, comment: Optional[str] = None, diff --git a/test/typing/plain_files/orm/mapped_column.py b/test/typing/plain_files/orm/mapped_column.py index 26f5722a6fc..81080a4faa5 100644 --- a/test/typing/plain_files/orm/mapped_column.py +++ b/test/typing/plain_files/orm/mapped_column.py @@ -1,13 +1,20 @@ from typing import Optional +from sqlalchemy import Boolean +from sqlalchemy import FetchedValue from sqlalchemy import ForeignKey +from sqlalchemy import func from sqlalchemy import Index from sqlalchemy import Integer +from sqlalchemy import literal_column from sqlalchemy import String +from sqlalchemy import text +from sqlalchemy import true from sqlalchemy import UniqueConstraint from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column +from sqlalchemy.sql.schema import SchemaConst class Base(DeclarativeBase): @@ -94,3 +101,84 @@ class X(Base): ) __table_args__ = (UniqueConstraint(a, b, name="uq1"), Index("ix1", c, d)) + + +mapped_column() +mapped_column( + init=True, + repr=True, + default=42, + compare=True, + kw_only=True, + primary_key=True, + deferred=True, + deferred_group="str", + deferred_raiseload=True, + use_existing_column=True, + name="str", + type_=Integer(), + doc="str", + key="str", + index=True, + unique=True, + info={"str": 42}, + active_history=True, + quote=True, + system=True, + comment="str", + sort_order=-1, + any_kwarg="str", + another_kwarg=42, +) + +mapped_column(default_factory=lambda: 1) +mapped_column(default_factory=lambda: "str") + +mapped_column(nullable=True) +mapped_column(nullable=SchemaConst.NULL_UNSPECIFIED) + +mapped_column(autoincrement=True) +mapped_column(autoincrement="auto") +mapped_column(autoincrement="ignore_fk") + +mapped_column(onupdate=1) +mapped_column(onupdate="str") + +mapped_column(insert_default=1) +mapped_column(insert_default="str") + +mapped_column(server_default=FetchedValue()) +mapped_column(server_default=true()) +mapped_column(server_default=func.now()) +mapped_column(server_default="NOW()") +mapped_column(server_default=text("NOW()")) +mapped_column(server_default=literal_column("false", Boolean)) + +mapped_column(server_onupdate=FetchedValue()) +mapped_column(server_onupdate=true()) +mapped_column(server_onupdate=func.now()) +mapped_column(server_onupdate="NOW()") +mapped_column(server_onupdate=text("NOW()")) 
+mapped_column(server_onupdate=literal_column("false", Boolean)) + +mapped_column( + default=None, + nullable=None, + primary_key=None, + deferred_group=None, + deferred_raiseload=None, + name=None, + type_=None, + doc=None, + key=None, + index=None, + unique=None, + info=None, + onupdate=None, + insert_default=None, + server_default=None, + server_onupdate=None, + quote=None, + comment=None, + any_kwarg=None, +) diff --git a/test/typing/plain_files/sql/core_ddl.py b/test/typing/plain_files/sql/core_ddl.py index b7e0ec5350f..549375d0af2 100644 --- a/test/typing/plain_files/sql/core_ddl.py +++ b/test/typing/plain_files/sql/core_ddl.py @@ -138,10 +138,18 @@ Column(Integer, server_default=literal_column("42", Integer), nullable=False) # server_onupdate -Column("name", server_onupdate=FetchedValue(), nullable=False) Column(server_onupdate=FetchedValue(), nullable=False) +Column(server_onupdate="now()", nullable=False) +Column("name", server_onupdate=FetchedValue(), nullable=False) Column("name", Integer, server_onupdate=FetchedValue(), nullable=False) +Column("name", Integer, server_onupdate=text("now()"), nullable=False) +Column(Boolean, nullable=False, server_default=true()) Column(Integer, server_onupdate=FetchedValue(), nullable=False) +Column(DateTime, server_onupdate="now()") +Column(DateTime, server_onupdate=text("now()")) +Column(DateTime, server_onupdate=FetchedValue()) +Column(Boolean, server_onupdate=literal_column("false", Boolean)) +Column(Integer, server_onupdate=literal_column("42", Integer), nullable=False) # TypeEngine.with_variant should accept both a TypeEngine instance and the Concrete Type Integer().with_variant(Integer, "mysql") From 5e495e16da87644bcb07aa76c9021d486053b81d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 6 Jul 2024 12:24:51 -0400 Subject: [PATCH 253/544] ensure intro to "functions have types" is not misleading make sure it's clear that there is normally not a return type for SQL functions Fixes: #11578 Change-Id: Ia0b66e7fe685dad427822345dd232eb47a0fc44f (cherry picked from commit e9d3e49601d011f9a3471921729728ca688e04b9) --- doc/build/tutorial/data_select.rst | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/doc/build/tutorial/data_select.rst b/doc/build/tutorial/data_select.rst index aa77539b97b..d9d51c7f51f 100644 --- a/doc/build/tutorial/data_select.rst +++ b/doc/build/tutorial/data_select.rst @@ -1410,11 +1410,18 @@ as opposed to the "return type" of a Python function. 
The SQL return type of any SQL function may be accessed, typically for debugging purposes, by referring to the :attr:`_functions.Function.type` -attribute:: +attribute; this will be pre-configured for a **select few** of extremely +common SQL functions, but for most SQL functions is the "null" datatype +if not otherwise specified:: + >>> # pre-configured SQL function (only a few dozen of these) >>> func.now().type DateTime() + >>> # arbitrary SQL function (all other SQL functions) + >>> func.run_some_calculation().type + NullType() + These SQL return types are significant when making use of the function expression in the context of a larger expression; that is, math operators will work better when the datatype of the expression is From 7831d6de0121ec25152d822bce4a4501ce64263b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 5 Jul 2024 09:30:10 -0400 Subject: [PATCH 254/544] alter the collation of string type for collate() Fixed issue where the :func:`_sql.collate` construct, which explicitly sets a collation for a given expression, would maintain collation settings for the underlying type object from the expression, causing SQL expressions to have both collations stated at once when used in further expressions for specific dialects that render explicit type casts, such as that of asyncpg. The :func:`_sql.collate` construct now assigns its own type to explicitly include the new collation, assuming it's a string type. Fixes: #11576 Change-Id: I6fc8904d2bcbc21f11bbca57e4a451ed0edbd879 (cherry picked from commit 35c178c405c44798810ceac540faf8385b4632c4) --- doc/build/changelog/unreleased_20/11576.rst | 11 +++ lib/sqlalchemy/sql/elements.py | 12 ++- lib/sqlalchemy/sql/sqltypes.py | 5 ++ lib/sqlalchemy/sql/type_api.py | 29 +++++-- test/sql/test_types.py | 88 +++++++++++++++++++++ 5 files changed, 137 insertions(+), 8 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11576.rst diff --git a/doc/build/changelog/unreleased_20/11576.rst b/doc/build/changelog/unreleased_20/11576.rst new file mode 100644 index 00000000000..93cfe3bf036 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11576.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, postgresql + :tickets: 11576 + + Fixed issue where the :func:`_sql.collate` construct, which explicitly sets + a collation for a given expression, would maintain collation settings for + the underlying type object from the expression, causing SQL expressions to + have both collations stated at once when used in further expressions for + specific dialects that render explicit type casts, such as that of asyncpg. + The :func:`_sql.collate` construct now assigns its own type to explicitly + include the new collation, assuming it's a string type. 
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index b56d5ebe1ae..5277c814927 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -5108,15 +5108,25 @@ class CollationClause(ColumnElement[str]): ] @classmethod + @util.preload_module("sqlalchemy.sql.sqltypes") def _create_collation_expression( cls, expression: _ColumnExpressionArgument[str], collation: str ) -> BinaryExpression[str]: + + sqltypes = util.preloaded.sql_sqltypes + expr = coercions.expect(roles.ExpressionElementRole[str], expression) + + if expr.type._type_affinity is sqltypes.String: + collate_type = expr.type._with_collation(collation) + else: + collate_type = expr.type + return BinaryExpression( expr, CollationClause(collation), operators.collate, - type_=expr.type, + type_=collate_type, ) def __init__(self, collation): diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index c1c2b1159aa..c20df07a6b8 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -217,6 +217,11 @@ def __init__( self.length = length self.collation = collation + def _with_collation(self, collation): + new_type = self.copy() + new_type.collation = collation + return new_type + def _resolve_for_literal(self, value): # I was SO PROUD of my regex trick, but we dont need it. # re.search(r"[^\u0000-\u007F]", value) diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 4233e7f16e8..e4f5f3f20a7 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -755,6 +755,10 @@ def _resolve_for_python_type( return self + def _with_collation(self, collation: str) -> Self: + """set up error handling for the collate expression""" + raise NotImplementedError("this datatype does not support collation") + @util.ro_memoized_property def _type_affinity(self) -> Optional[Type[TypeEngine[_T]]]: """Return a rudimental 'affinity' value expressing the general class @@ -1732,6 +1736,16 @@ def comparator_factory( # type: ignore # mypy properties bug {}, ) + def _copy_with_check(self) -> Self: + tt = self.copy() + if not isinstance(tt, self.__class__): + raise AssertionError( + "Type object %s does not properly " + "implement the copy() method, it must " + "return an object of type %s" % (self, self.__class__) + ) + return tt + def _gen_dialect_impl(self, dialect: Dialect) -> TypeEngine[_T]: if dialect.name in self._variant_mapping: adapted = dialect.type_descriptor( @@ -1746,16 +1760,17 @@ def _gen_dialect_impl(self, dialect: Dialect) -> TypeEngine[_T]: # to a copy of this TypeDecorator and return # that. 
typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect) - tt = self.copy() - if not isinstance(tt, self.__class__): - raise AssertionError( - "Type object %s does not properly " - "implement the copy() method, it must " - "return an object of type %s" % (self, self.__class__) - ) + tt = self._copy_with_check() tt.impl = tt.impl_instance = typedesc return tt + def _with_collation(self, collation: str) -> Self: + tt = self._copy_with_check() + tt.impl = tt.impl_instance = self.impl_instance._with_collation( + collation + ) + return tt + @util.ro_non_memoized_property def _type_affinity(self) -> Optional[Type[TypeEngine[Any]]]: return self.impl_instance._type_affinity diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 36c6a74c27e..999919c5f51 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -19,6 +19,7 @@ from sqlalchemy import cast from sqlalchemy import CHAR from sqlalchemy import CLOB +from sqlalchemy import collate from sqlalchemy import DATE from sqlalchemy import Date from sqlalchemy import DATETIME @@ -66,9 +67,11 @@ import sqlalchemy.dialects.oracle as oracle import sqlalchemy.dialects.postgresql as pg from sqlalchemy.engine import default +from sqlalchemy.engine import interfaces from sqlalchemy.schema import AddConstraint from sqlalchemy.schema import CheckConstraint from sqlalchemy.sql import column +from sqlalchemy.sql import compiler from sqlalchemy.sql import ddl from sqlalchemy.sql import elements from sqlalchemy.sql import null @@ -3365,6 +3368,91 @@ def test_control(self, connection): ], ) + @testing.fixture + def renders_bind_cast(self): + class MyText(Text): + render_bind_cast = True + + class MyCompiler(compiler.SQLCompiler): + def render_bind_cast(self, type_, dbapi_type, sqltext): + return f"""{sqltext}->BINDCAST->[{ + self.dialect.type_compiler_instance.process( + dbapi_type, identifier_preparer=self.preparer + ) + }]""" + + class MyDialect(default.DefaultDialect): + bind_typing = interfaces.BindTyping.RENDER_CASTS + colspecs = {Text: MyText} + statement_compiler = MyCompiler + + return MyDialect() + + @testing.combinations( + (lambda c1: c1.like("qpr"), "q LIKE :q_1->BINDCAST->[TEXT]"), + ( + lambda c2: c2.like("qpr"), + 'q LIKE :q_1->BINDCAST->[TEXT COLLATE "xyz"]', + ), + ( + # new behavior, a type with no collation passed into collate() + # now has a new type with that collation, so we get the collate + # on the right side bind-cast. previous to #11576 we'd only + # get TEXT for the bindcast. + lambda c1: collate(c1, "abc").like("qpr"), + '(q COLLATE abc) LIKE :param_1->BINDCAST->[TEXT COLLATE "abc"]', + ), + ( + lambda c2: collate(c2, "abc").like("qpr"), + '(q COLLATE abc) LIKE :param_1->BINDCAST->[TEXT COLLATE "abc"]', + ), + argnames="testcase,expected", + ) + @testing.variation("use_type_decorator", [True, False]) + def test_collate_type_interaction( + self, renders_bind_cast, testcase, expected, use_type_decorator + ): + """test #11576. + + This involves dialects that use the render_bind_cast feature only, + currently asycnpg and psycopg. However, the implementation of the + feature is mostly in Core, so a fixture dialect / compiler is used so + that the test is agnostic of those dialects. 
+ + """ + + if use_type_decorator: + + class MyTextThing(TypeDecorator): + cache_ok = True + impl = Text + + c1 = Column("q", MyTextThing()) + c2 = Column("q", MyTextThing(collation="xyz")) + else: + c1 = Column("q", Text()) + c2 = Column("q", Text(collation="xyz")) + + expr = testing.resolve_lambda(testcase, c1=c1, c2=c2) + if use_type_decorator: + assert isinstance(expr.left.type, MyTextThing) + self.assert_compile(expr, expected, dialect=renders_bind_cast) + + # original types still work, have not been modified + eq_(c1.type.collation, None) + eq_(c2.type.collation, "xyz") + + self.assert_compile( + c1.like("qpr"), + "q LIKE :q_1->BINDCAST->[TEXT]", + dialect=renders_bind_cast, + ) + self.assert_compile( + c2.like("qpr"), + 'q LIKE :q_1->BINDCAST->[TEXT COLLATE "xyz"]', + dialect=renders_bind_cast, + ) + def test_bind_adapt(self, connection): # test an untyped bind gets the left side's type From 0d5e8272e27c2fcdb13d51495f8ea9f55acf0d6b Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sun, 7 Jul 2024 11:56:56 +0200 Subject: [PATCH 255/544] Improve generated reflection in sqlite Fixed reflection of computed column in SQLite to properly account for complex expressions. Fixes: #11582 Change-Id: I8e9fdda3e47c04b376973ee245b3175374a08f56 (cherry picked from commit e67a0b77a82667e2199e333bae0606d143fa228e) --- doc/build/changelog/unreleased_14/11582.rst | 6 ++ lib/sqlalchemy/dialects/sqlite/base.py | 13 ++- test/dialect/test_sqlite.py | 98 +++++++++++++++++++++ 3 files changed, 116 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_14/11582.rst diff --git a/doc/build/changelog/unreleased_14/11582.rst b/doc/build/changelog/unreleased_14/11582.rst new file mode 100644 index 00000000000..935af9b2444 --- /dev/null +++ b/doc/build/changelog/unreleased_14/11582.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, reflection, sqlite + :tickets: 11582 + + Fixed reflection of computed column in SQLite to properly account + for complex expressions. diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 6db8214652a..8e3f7a560e0 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -2231,6 +2231,14 @@ def get_columns(self, connection, table_name, schema=None, **kw): tablesql = self._get_table_sql( connection, table_name, schema, **kw ) + # remove create table + match = re.match( + r"create table .*?\((.*)\)$", + tablesql.strip(), + re.DOTALL | re.IGNORECASE, + ) + assert match, f"create table not found in {tablesql}" + tablesql = match.group(1).strip() columns.append( self._get_column_info( @@ -2285,7 +2293,10 @@ def _get_column_info( if generated: sqltext = "" if tablesql: - pattern = r"[^,]*\s+AS\s+\(([^,]*)\)\s*(?:virtual|stored)?" + pattern = ( + r"[^,]*\s+GENERATED\s+ALWAYS\s+AS" + r"\s+\((.*)\)\s*(?:virtual|stored)?" 
+ ) match = re.search( re.escape(name) + pattern, tablesql, re.IGNORECASE ) diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index 07612480f2a..33be95e6d9e 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -53,6 +53,7 @@ from sqlalchemy.testing import expect_warnings from sqlalchemy.testing import fixtures from sqlalchemy.testing import is_ +from sqlalchemy.testing import is_true from sqlalchemy.testing import mock from sqlalchemy.types import Boolean from sqlalchemy.types import Date @@ -3566,3 +3567,100 @@ def test_get_temp_view_names(self, connection): eq_(res, ["sqlitetempview"]) finally: connection.exec_driver_sql("DROP VIEW sqlitetempview") + + +class ComputedReflectionTest(fixtures.TestBase): + __only_on__ = "sqlite" + __backend__ = True + + @classmethod + def setup_test_class(cls): + tables = [ + """CREATE TABLE test1 ( + s VARCHAR, + x VARCHAR GENERATED ALWAYS AS (s || 'x') + );""", + """CREATE TABLE test2 ( + s VARCHAR, + x VARCHAR GENERATED ALWAYS AS (s || 'x'), + y VARCHAR GENERATED ALWAYS AS (s || 'y') + );""", + """CREATE TABLE test3 ( + s VARCHAR, + x INTEGER GENERATED ALWAYS AS (INSTR(s, ",")) + );""", + """CREATE TABLE test4 ( + s VARCHAR, + x INTEGER GENERATED ALWAYS AS (INSTR(s, ",")), + y INTEGER GENERATED ALWAYS AS (INSTR(x, ",")));""", + """CREATE TABLE test5 ( + s VARCHAR, + x VARCHAR GENERATED ALWAYS AS (s || 'x') STORED + );""", + """CREATE TABLE test6 ( + s VARCHAR, + x VARCHAR GENERATED ALWAYS AS (s || 'x') STORED, + y VARCHAR GENERATED ALWAYS AS (s || 'y') STORED + );""", + """CREATE TABLE test7 ( + s VARCHAR, + x INTEGER GENERATED ALWAYS AS (INSTR(s, ",")) STORED + );""", + """CREATE TABLE test8 ( + s VARCHAR, + x INTEGER GENERATED ALWAYS AS (INSTR(s, ",")) STORED, + y INTEGER GENERATED ALWAYS AS (INSTR(x, ",")) STORED + );""", + ] + + with testing.db.begin() as conn: + for ct in tables: + conn.exec_driver_sql(ct) + + @classmethod + def teardown_test_class(cls): + with testing.db.begin() as conn: + for tn in cls.res: + conn.exec_driver_sql(f"DROP TABLE {tn}") + + res = { + "test1": {"x": {"text": "s || 'x'", "stored": False}}, + "test2": { + "x": {"text": "s || 'x'", "stored": False}, + "y": {"text": "s || 'y'", "stored": False}, + }, + "test3": {"x": {"text": 'INSTR(s, ",")', "stored": False}}, + "test4": { + "x": {"text": 'INSTR(s, ",")', "stored": False}, + "y": {"text": 'INSTR(x, ",")', "stored": False}, + }, + "test5": {"x": {"text": "s || 'x'", "stored": True}}, + "test6": { + "x": {"text": "s || 'x'", "stored": True}, + "y": {"text": "s || 'y'", "stored": True}, + }, + "test7": {"x": {"text": 'INSTR(s, ",")', "stored": True}}, + "test8": { + "x": {"text": 'INSTR(s, ",")', "stored": True}, + "y": {"text": 'INSTR(x, ",")', "stored": True}, + }, + } + + def test_reflection(self, connection): + meta = MetaData() + meta.reflect(connection) + eq_(len(meta.tables), len(self.res)) + for tbl in meta.tables.values(): + data = self.res[tbl.name] + seen = set() + for col in tbl.c: + if col.name not in data: + is_(col.computed, None) + else: + info = data[col.name] + seen.add(col.name) + msg = f"{tbl.name}-{col.name}" + is_true(bool(col.computed)) + eq_(col.computed.sqltext.text, info["text"], msg) + eq_(col.computed.persisted, info["stored"], msg) + eq_(seen, data.keys()) From 5519852de8db28e21892956906e2bbad53e2509a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 10 Jul 2024 10:32:44 -0400 Subject: [PATCH 256/544] include nulls_first, nulls_last in order_by_label_element Fixed bug where the 
:meth:`.Operators.nulls_first()` and :meth:`.Operators.nulls_last()` modifiers would not be treated the same way as :meth:`.Operators.desc()` and :meth:`.Operators.asc()` when determining if an ORDER BY should be against a label name already in the statement. All four modifiers are now treated the same within ORDER BY. Fixes: #11592 Change-Id: I1de1aff679c56af1abfdfd07f9bcbc45ecc5a8cc (cherry picked from commit 96f1172812f858fead45cdc7874abac76f45b339) --- doc/build/changelog/unreleased_20/11592.rst | 9 ++ lib/sqlalchemy/sql/elements.py | 2 +- lib/sqlalchemy/sql/operators.py | 6 ++ test/sql/test_compiler.py | 110 +++++++++++++------- 4 files changed, 87 insertions(+), 40 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11592.rst diff --git a/doc/build/changelog/unreleased_20/11592.rst b/doc/build/changelog/unreleased_20/11592.rst new file mode 100644 index 00000000000..616eb1e2865 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11592.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, sql + :tickets: 11592 + + Fixed bug where the :meth:`.Operators.nulls_first()` and + :meth:`.Operators.nulls_last()` modifiers would not be treated the same way + as :meth:`.Operators.desc()` and :meth:`.Operators.asc()` when determining + if an ORDER BY should be against a label name already in the statement. All + four modifiers are now treated the same within ORDER BY. diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 5277c814927..048a9ea6228 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -3689,7 +3689,7 @@ def _create_bitwise_not( @property def _order_by_label_element(self) -> Optional[Label[Any]]: - if self.modifier in (operators.desc_op, operators.asc_op): + if operators.is_order_by_modifier(self.modifier): return self.element._order_by_label_element else: return None diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index 9fb096e470c..5939f124948 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -2474,6 +2474,12 @@ def is_associative(op: OperatorType) -> bool: return op in _associative +def is_order_by_modifier(op: Optional[OperatorType]) -> bool: + return op in _order_by_modifier + + +_order_by_modifier = {desc_op, asc_op, nulls_first_op, nulls_last_op} + _natural_self_precedent = _associative.union( [getitem, json_getitem_op, json_path_getitem_op] ) diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index 9d9f69bdb9b..3e8fca59a88 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -44,6 +44,10 @@ from sqlalchemy import MetaData from sqlalchemy import not_ from sqlalchemy import null +from sqlalchemy import nulls_first +from sqlalchemy import nulls_last +from sqlalchemy import nullsfirst +from sqlalchemy import nullslast from sqlalchemy import Numeric from sqlalchemy import or_ from sqlalchemy import outerjoin @@ -1668,44 +1672,85 @@ def test_label_comparison_two(self): "foo || :param_1", ) - def test_order_by_labels_enabled(self): + def test_order_by_labels_enabled_negative_cases(self): + """test order_by_labels enabled but the cases where we expect + ORDER BY the expression without the label name""" + lab1 = (table1.c.myid + 12).label("foo") lab2 = func.somefunc(table1.c.name).label("bar") dialect = default.DefaultDialect() + # binary expressions render as the expression without labels self.assert_compile( - select(lab1, lab2).order_by(lab1, desc(lab2)), + select(lab1, lab2).order_by(lab1 + "test"), "SELECT 
mytable.myid + :myid_1 AS foo, " "somefunc(mytable.name) AS bar FROM mytable " - "ORDER BY foo, bar DESC", + "ORDER BY mytable.myid + :myid_1 + :param_1", dialect=dialect, ) - # the function embedded label renders as the function + # labels within functions in the columns clause render + # with the expression self.assert_compile( - select(lab1, lab2).order_by(func.hoho(lab1), desc(lab2)), + select(lab1, func.foo(lab1)).order_by(lab1, func.foo(lab1)), "SELECT mytable.myid + :myid_1 AS foo, " - "somefunc(mytable.name) AS bar FROM mytable " - "ORDER BY hoho(mytable.myid + :myid_1), bar DESC", + "foo(mytable.myid + :myid_1) AS foo_1 FROM mytable " + "ORDER BY foo, foo(mytable.myid + :myid_1)", dialect=dialect, ) - # binary expressions render as the expression without labels + # here, 'name' is implicitly available, but w/ #3882 we don't + # want to render a name that isn't specifically a Label elsewhere + # in the query self.assert_compile( - select(lab1, lab2).order_by(lab1 + "test"), + select(table1.c.myid).order_by(table1.c.name.label("name")), + "SELECT mytable.myid FROM mytable ORDER BY mytable.name", + ) + + # as well as if it doesn't match + self.assert_compile( + select(table1.c.myid).order_by( + func.lower(table1.c.name).label("name") + ), + "SELECT mytable.myid FROM mytable ORDER BY lower(mytable.name)", + ) + + @testing.combinations( + (desc, "DESC"), + (asc, "ASC"), + (nulls_first, "NULLS FIRST"), + (nulls_last, "NULLS LAST"), + (nullsfirst, "NULLS FIRST"), + (nullslast, "NULLS LAST"), + (lambda c: c.desc().nulls_last(), "DESC NULLS LAST"), + (lambda c: c.desc().nullslast(), "DESC NULLS LAST"), + (lambda c: c.nulls_first().asc(), "NULLS FIRST ASC"), + ) + def test_order_by_labels_enabled(self, operator, expected): + """test positive cases with order_by_labels enabled. 
this is + multipled out to all the ORDER BY modifier operators + (see #11592) + + + """ + lab1 = (table1.c.myid + 12).label("foo") + lab2 = func.somefunc(table1.c.name).label("bar") + dialect = default.DefaultDialect() + + self.assert_compile( + select(lab1, lab2).order_by(lab1, operator(lab2)), "SELECT mytable.myid + :myid_1 AS foo, " "somefunc(mytable.name) AS bar FROM mytable " - "ORDER BY mytable.myid + :myid_1 + :param_1", + f"ORDER BY foo, bar {expected}", dialect=dialect, ) - # labels within functions in the columns clause render - # with the expression + # the function embedded label renders as the function self.assert_compile( - select(lab1, func.foo(lab1)).order_by(lab1, func.foo(lab1)), + select(lab1, lab2).order_by(func.hoho(lab1), operator(lab2)), "SELECT mytable.myid + :myid_1 AS foo, " - "foo(mytable.myid + :myid_1) AS foo_1 FROM mytable " - "ORDER BY foo, foo(mytable.myid + :myid_1)", + "somefunc(mytable.name) AS bar FROM mytable " + f"ORDER BY hoho(mytable.myid + :myid_1), bar {expected}", dialect=dialect, ) @@ -1713,62 +1758,49 @@ def test_order_by_labels_enabled(self): ly = (func.lower(table1.c.name) + table1.c.description).label("ly") self.assert_compile( - select(lx, ly).order_by(lx, ly.desc()), + select(lx, ly).order_by(lx, operator(ly)), "SELECT mytable.myid + mytable.myid AS lx, " "lower(mytable.name) || mytable.description AS ly " - "FROM mytable ORDER BY lx, ly DESC", + f"FROM mytable ORDER BY lx, ly {expected}", dialect=dialect, ) # expression isn't actually the same thing (even though label is) self.assert_compile( select(lab1, lab2).order_by( - table1.c.myid.label("foo"), desc(table1.c.name.label("bar")) + table1.c.myid.label("foo"), + operator(table1.c.name.label("bar")), ), "SELECT mytable.myid + :myid_1 AS foo, " "somefunc(mytable.name) AS bar FROM mytable " - "ORDER BY mytable.myid, mytable.name DESC", + f"ORDER BY mytable.myid, mytable.name {expected}", dialect=dialect, ) # it's also an exact match, not aliased etc. self.assert_compile( select(lab1, lab2).order_by( - desc(table1.alias().c.name.label("bar")) + operator(table1.alias().c.name.label("bar")) ), "SELECT mytable.myid + :myid_1 AS foo, " "somefunc(mytable.name) AS bar FROM mytable " - "ORDER BY mytable_1.name DESC", + f"ORDER BY mytable_1.name {expected}", dialect=dialect, ) # but! 
it's based on lineage lab2_lineage = lab2.element._clone() self.assert_compile( - select(lab1, lab2).order_by(desc(lab2_lineage.label("bar"))), + select(lab1, lab2).order_by(operator(lab2_lineage.label("bar"))), "SELECT mytable.myid + :myid_1 AS foo, " "somefunc(mytable.name) AS bar FROM mytable " - "ORDER BY bar DESC", + f"ORDER BY bar {expected}", dialect=dialect, ) - # here, 'name' is implicitly available, but w/ #3882 we don't - # want to render a name that isn't specifically a Label elsewhere - # in the query - self.assert_compile( - select(table1.c.myid).order_by(table1.c.name.label("name")), - "SELECT mytable.myid FROM mytable ORDER BY mytable.name", - ) - - # as well as if it doesn't match - self.assert_compile( - select(table1.c.myid).order_by( - func.lower(table1.c.name).label("name") - ), - "SELECT mytable.myid FROM mytable ORDER BY lower(mytable.name)", - ) - def test_order_by_labels_disabled(self): + """test when the order_by_labels feature is disabled entirely""" + lab1 = (table1.c.myid + 12).label("foo") lab2 = func.somefunc(table1.c.name).label("bar") dialect = default.DefaultDialect() From 4cc32ae1f730af19c4b010537b5bb342f28a951d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 15 Jul 2024 09:15:36 -0400 Subject: [PATCH 257/544] remove redundant(?) Isolation / isolation References: https://github.com/sqlalchemy/sqlalchemy/discussions/11609 Change-Id: I8ada4b8ed64a6d6b9cb923503fda6d7b4888f429 (cherry picked from commit e44e805506fa71318e23a2bfad733fbbf5a9ee59) --- doc/build/glossary.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/build/glossary.rst b/doc/build/glossary.rst index d6aaba83826..a7422bd97ba 100644 --- a/doc/build/glossary.rst +++ b/doc/build/glossary.rst @@ -1051,7 +1051,6 @@ Glossary isolation isolated - Isolation isolation level The isolation property of the :term:`ACID` model ensures that the concurrent execution From cdc5305dd346101e8157f7a47ae1e0de111f089f Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 19 Jul 2024 23:05:43 -0400 Subject: [PATCH 258/544] add missing changelog for #11544 Fixes: #11544 Change-Id: Ibf57f6ee0fee105672b03c2bf6690cad6bb0932d (cherry picked from commit 800932af467109f06c0196c42ae86272a5d7f96a) --- doc/build/changelog/unreleased_14/11544.rst | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 doc/build/changelog/unreleased_14/11544.rst diff --git a/doc/build/changelog/unreleased_14/11544.rst b/doc/build/changelog/unreleased_14/11544.rst new file mode 100644 index 00000000000..82639e54e84 --- /dev/null +++ b/doc/build/changelog/unreleased_14/11544.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, sql + :tickets: 11544 + + Fixed caching issue where the + :paramref:`_sql.Select.with_for_update.key_share` element of + :meth:`_sql.Select.with_for_update` was not considered as part of the cache + key, leading to incorrect caching if different variations of this parameter + were used with an otherwise identical statement. 
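
A minimal sketch, not taken from the commit itself, of the caching problem
described in the changelog above. It assumes only a throwaway table ``t`` and
uses the internal ``_generate_cache_key()`` helper purely to show that two
statements differing only in ``key_share`` no longer collapse to the same
cache key::

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.dialects import postgresql

    t = Table("t", MetaData(), Column("id", Integer, primary_key=True))

    # identical statements except for the key_share flag
    plain = select(t).with_for_update()
    key_share = select(t).with_for_update(key_share=True)

    # PostgreSQL renders these differently: FOR UPDATE vs FOR NO KEY UPDATE
    print(plain.compile(dialect=postgresql.dialect()))
    print(key_share.compile(dialect=postgresql.dialect()))

    # with the fix, the cache keys are also distinct (internal API, used
    # here only for illustration)
    assert plain._generate_cache_key() != key_share._generate_cache_key()
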
From 0345f0392d412fec2764fd51778fc8f42d9911f8 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 26 Jul 2024 23:28:57 -0400 Subject: [PATCH 259/544] fix test for oracledb 2.3.0 Separate from adding support for 2pc for oracledb, get the test suite to pass cleanly for all oracledb versions first Change-Id: I39ba91e81f2afe5e9610a65fdcc88025f207089b (cherry picked from commit a40aeef5c553212fa227b315c7da7d32fbef933c) --- test/orm/test_transaction.py | 8 ++++++-- test/profiles.txt | 14 -------------- test/requirements.py | 6 +++--- tox.ini | 4 ++-- 4 files changed, 11 insertions(+), 21 deletions(-) diff --git a/test/orm/test_transaction.py b/test/orm/test_transaction.py index 47bcf69b571..faa311cc8a2 100644 --- a/test/orm/test_transaction.py +++ b/test/orm/test_transaction.py @@ -5,7 +5,6 @@ from typing import Optional from typing import TYPE_CHECKING -from sqlalchemy import Column from sqlalchemy import event from sqlalchemy import exc as sa_exc from sqlalchemy import func @@ -39,6 +38,7 @@ from sqlalchemy.testing import mock from sqlalchemy.testing.config import Variation from sqlalchemy.testing.fixtures import fixture_session +from sqlalchemy.testing.schema import Column from sqlalchemy.testing.util import gc_collect from test.orm._fixtures import FixtureTest @@ -2492,7 +2492,11 @@ def setup_test(self): self.metadata = MetaData() self.table = Table( - "t1", self.metadata, Column("id", Integer, primary_key=True) + "t1", + self.metadata, + Column( + "id", Integer, primary_key=True, test_needs_autoincrement=True + ), ) with self.connection.begin(): self.table.create(self.connection, checkfirst=True) diff --git a/test/profiles.txt b/test/profiles.txt index 370d895b627..976949e7b73 100644 --- a/test/profiles.txt +++ b/test/profiles.txt @@ -358,8 +358,6 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 110 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 108 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 110 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 108 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 110 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 108 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 110 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 108 @@ -371,8 +369,6 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 9 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 8 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 
x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 9 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 8 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 9 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 8 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 9 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 8 @@ -384,8 +380,6 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_6 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 14671 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 2669 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 14676 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 3815 -test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 15822 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 2649 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 14656 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 2614 @@ -449,8 +443,6 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpy test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 5307 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 279 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 5281 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 1504 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 6506 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 299 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 5301 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 272 @@ -462,8 +454,6 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cp test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 5307 
test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 279 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 5281 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 1504 -test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 6506 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 299 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 5301 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 272 @@ -475,8 +465,6 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 5662 test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 660 test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 5667 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 1806 -test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 6813 test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 640 test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 5647 test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 605 @@ -488,8 +476,6 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpytho test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_mariadb_mysqldb_dbapiunicode_nocextensions 5662 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_cextensions 660 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_mssql_pyodbc_dbapiunicode_nocextensions 5667 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_cextensions 1806 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_oracle_oracledb_dbapiunicode_nocextensions 6813 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_cextensions 640 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_postgresql_psycopg2_dbapiunicode_nocextensions 5647 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.11_sqlite_pysqlite_dbapiunicode_cextensions 605 diff --git a/test/requirements.py b/test/requirements.py index 2e80884bc17..0dde542372a 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -883,7 +883,7 @@ def pg_prepared_transaction(config): ), ] ) - + self.fail_on_oracledb_thin + + 
self.skip_on_oracledb_thin ) @property @@ -1878,14 +1878,14 @@ def oracle5x(self): ) @property - def fail_on_oracledb_thin(self): + def skip_on_oracledb_thin(self): def go(config): if against(config, "oracle+oracledb"): with config.db.connect() as conn: return config.db.dialect.is_thin_mode(conn) return False - return fails_if(go) + return skip_if(go) @property def computed_columns(self): diff --git a/tox.ini b/tox.ini index a5b82c034b0..e80fe4ccec0 100644 --- a/tox.ini +++ b/tox.ini @@ -192,7 +192,7 @@ commands= deps= greenlet != 0.4.17 importlib_metadata; python_version < '3.8' - mypy >= 1.6.0 + mypy >= 1.6.0,<1.11.0 # temporary, REMOVE upper bound commands = mypy {env:MYPY_COLOR} ./lib/sqlalchemy # pyright changes too often with not-exactly-correct errors @@ -208,7 +208,7 @@ deps= pytest-xdist greenlet != 0.4.17 importlib_metadata; python_version < '3.8' - mypy >= 1.2.0 + mypy >= 1.2.0,<1.11.0 patch==1.* extras= {[greenletextras]extras} From 6ed8e6920ad9d29cd32b29ad237caf7b8d52bcf6 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 22 Jul 2024 23:17:45 +0200 Subject: [PATCH 260/544] update typing for mypy 1.11; pin plugin to <1.11 Fixed internal typing issues to establish compatibility with mypy 1.11.0. Note that this does not include issues which have arisen with the deprecated mypy plugin used by SQLAlchemy 1.4-style code; see the addiional change note for this plugin indicating revised compatibility. The legacy mypy plugin is no longer fully functional with the latest series of mypy 1.11.0, as changes in the mypy interpreter are no longer compatible with the approach used by the plugin. If code is dependent on the legacy mypy plugin with sqlalchemy2-stubs, it's recommended to pin mypy to be below the 1.11.0 series. Seek upgrading to the 2.0 series of SQLAlchemy and migrating to the modern type annotations. Change-Id: Ib8fef93ede588430dc0f7ed44ef887649a415821 (cherry picked from commit 156fef61135a55c6ad17765b64155801f1dbea66) --- .../changelog/unreleased_14/mypy1110.rst | 14 ++++ .../changelog/unreleased_20/mypy1110.rst | 7 ++ doc/build/orm/extensions/mypy.rst | 9 ++- lib/sqlalchemy/engine/interfaces.py | 11 +++- lib/sqlalchemy/ext/mypy/util.py | 21 +++++- lib/sqlalchemy/orm/descriptor_props.py | 4 +- lib/sqlalchemy/orm/mapped_collection.py | 27 ++++---- lib/sqlalchemy/orm/query.py | 2 +- lib/sqlalchemy/orm/util.py | 2 +- lib/sqlalchemy/sql/base.py | 2 +- lib/sqlalchemy/sql/coercions.py | 64 ++++++++++++------- lib/sqlalchemy/sql/compiler.py | 8 ++- lib/sqlalchemy/sql/crud.py | 2 +- lib/sqlalchemy/sql/elements.py | 2 +- lib/sqlalchemy/sql/sqltypes.py | 14 ++-- lib/sqlalchemy/util/compat.py | 2 +- lib/sqlalchemy/util/langhelpers.py | 8 +++ test/ext/mypy/test_mypy_plugin_py3k.py | 7 +- tox.ini | 3 +- 19 files changed, 146 insertions(+), 63 deletions(-) create mode 100644 doc/build/changelog/unreleased_14/mypy1110.rst create mode 100644 doc/build/changelog/unreleased_20/mypy1110.rst diff --git a/doc/build/changelog/unreleased_14/mypy1110.rst b/doc/build/changelog/unreleased_14/mypy1110.rst new file mode 100644 index 00000000000..1dc5e0dc3ec --- /dev/null +++ b/doc/build/changelog/unreleased_14/mypy1110.rst @@ -0,0 +1,14 @@ +.. change:: + :tags: bug, mypy + :versions: 2.0 + + The deprecated mypy plugin is no longer fully functional with the latest + series of mypy 1.11.0, as changes in the mypy interpreter are no longer + compatible with the approach used by the plugin. 
If code is dependent on + the mypy plugin with sqlalchemy2-stubs, it's recommended to pin mypy to be + below the 1.11.0 series. Seek upgrading to the 2.0 series of SQLAlchemy + and migrating to the modern type annotations. + + .. seealso:: + + :ref:`mypy_toplevel` diff --git a/doc/build/changelog/unreleased_20/mypy1110.rst b/doc/build/changelog/unreleased_20/mypy1110.rst new file mode 100644 index 00000000000..f722c407f25 --- /dev/null +++ b/doc/build/changelog/unreleased_20/mypy1110.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, mypy + + Fixed internal typing issues to establish compatibility with mypy 1.11.0. + Note that this does not include issues which have arisen with the + deprecated mypy plugin used by SQLAlchemy 1.4-style code; see the addiional + change note for this plugin indicating revised compatibility. diff --git a/doc/build/orm/extensions/mypy.rst b/doc/build/orm/extensions/mypy.rst index afd34929af6..dbca3f35f91 100644 --- a/doc/build/orm/extensions/mypy.rst +++ b/doc/build/orm/extensions/mypy.rst @@ -13,7 +13,8 @@ the :func:`_orm.mapped_column` construct introduced in SQLAlchemy 2.0. **The SQLAlchemy Mypy Plugin is DEPRECATED, and will be removed possibly as early as the SQLAlchemy 2.1 release. We would urge users to please - migrate away from it ASAP.** + migrate away from it ASAP. The mypy plugin also works only up until + mypy version 1.10.1. version 1.11.0 and greater may not work properly.** This plugin cannot be maintained across constantly changing releases of mypy and its stability going forward CANNOT be guaranteed. @@ -24,7 +25,11 @@ the :func:`_orm.mapped_column` construct introduced in SQLAlchemy 2.0. .. topic:: SQLAlchemy Mypy Plugin Status Update - **Updated July 2023** + **Updated July 2024** + + The mypy plugin is supported **only up until mypy 1.10.1, and it will have + issues running with 1.11.0 or greater**. Use with mypy 1.11.0 or greater + may have error conditions which currently cannot be resolved. For SQLAlchemy 2.0, the Mypy plugin continues to work at the level at which it reached in the SQLAlchemy 1.4 release. SQLAlchemy 2.0 however features diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 7a152c3305c..17a133f27a9 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -1253,8 +1253,7 @@ def import_dbapi(cls) -> ModuleType: """ raise NotImplementedError() - @classmethod - def type_descriptor(cls, typeobj: TypeEngine[_T]) -> TypeEngine[_T]: + def type_descriptor(self, typeobj: TypeEngine[_T]) -> TypeEngine[_T]: """Transform a generic type to a dialect-specific type. 
Dialect classes will usually use the @@ -1316,6 +1315,7 @@ def get_columns( def get_multi_columns( self, connection: Connection, + *, schema: Optional[str] = None, filter_names: Optional[Collection[str]] = None, **kw: Any, @@ -1364,6 +1364,7 @@ def get_pk_constraint( def get_multi_pk_constraint( self, connection: Connection, + *, schema: Optional[str] = None, filter_names: Optional[Collection[str]] = None, **kw: Any, @@ -1410,6 +1411,7 @@ def get_foreign_keys( def get_multi_foreign_keys( self, connection: Connection, + *, schema: Optional[str] = None, filter_names: Optional[Collection[str]] = None, **kw: Any, @@ -1569,6 +1571,7 @@ def get_indexes( def get_multi_indexes( self, connection: Connection, + *, schema: Optional[str] = None, filter_names: Optional[Collection[str]] = None, **kw: Any, @@ -1615,6 +1618,7 @@ def get_unique_constraints( def get_multi_unique_constraints( self, connection: Connection, + *, schema: Optional[str] = None, filter_names: Optional[Collection[str]] = None, **kw: Any, @@ -1662,6 +1666,7 @@ def get_check_constraints( def get_multi_check_constraints( self, connection: Connection, + *, schema: Optional[str] = None, filter_names: Optional[Collection[str]] = None, **kw: Any, @@ -1704,6 +1709,7 @@ def get_table_options( def get_multi_table_options( self, connection: Connection, + *, schema: Optional[str] = None, filter_names: Optional[Collection[str]] = None, **kw: Any, @@ -1755,6 +1761,7 @@ def get_table_comment( def get_multi_table_comment( self, connection: Connection, + *, schema: Optional[str] = None, filter_names: Optional[Collection[str]] = None, **kw: Any, diff --git a/lib/sqlalchemy/ext/mypy/util.py b/lib/sqlalchemy/ext/mypy/util.py index 7f04c481d34..af0882bc307 100644 --- a/lib/sqlalchemy/ext/mypy/util.py +++ b/lib/sqlalchemy/ext/mypy/util.py @@ -80,7 +80,7 @@ def serialize(self) -> JsonDict: "name": self.name, "line": self.line, "column": self.column, - "type": self.type.serialize(), + "type": serialize_type(self.type), } def expand_typevar_from_subtype(self, sub_type: TypeInfo) -> None: @@ -336,3 +336,22 @@ def info_for_cls( return sym.node return cls.info + + +def serialize_type(typ: Type) -> Union[str, JsonDict]: + try: + return typ.serialize() + except Exception: + pass + if hasattr(typ, "args"): + typ.args = tuple( + ( + a.resolve_string_annotation() + if hasattr(a, "resolve_string_annotation") + else a + ) + for a in typ.args + ) + elif hasattr(typ, "resolve_string_annotation"): + typ = typ.resolve_string_annotation() + return typ.serialize() diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py index a3650f5f001..faf287cce6c 100644 --- a/lib/sqlalchemy/orm/descriptor_props.py +++ b/lib/sqlalchemy/orm/descriptor_props.py @@ -781,7 +781,9 @@ def _bulk_update_tuples( elif isinstance(self.prop.composite_class, type) and isinstance( value, self.prop.composite_class ): - values = self.prop._composite_values_from_instance(value) + values = self.prop._composite_values_from_instance( + value # type: ignore[arg-type] + ) else: raise sa_exc.ArgumentError( "Can't UPDATE composite attribute %s to %r" diff --git a/lib/sqlalchemy/orm/mapped_collection.py b/lib/sqlalchemy/orm/mapped_collection.py index 13c6b689e1d..0d3079fb5ab 100644 --- a/lib/sqlalchemy/orm/mapped_collection.py +++ b/lib/sqlalchemy/orm/mapped_collection.py @@ -29,6 +29,8 @@ from ..sql import coercions from ..sql import expression from ..sql import roles +from ..util.langhelpers import Missing +from ..util.langhelpers import MissingOr from ..util.typing import 
Literal if TYPE_CHECKING: @@ -40,8 +42,6 @@ _KT = TypeVar("_KT", bound=Any) _VT = TypeVar("_VT", bound=Any) -_F = TypeVar("_F", bound=Callable[[Any], Any]) - class _PlainColumnGetter(Generic[_KT]): """Plain column getter, stores collection of Column objects @@ -70,7 +70,7 @@ def __reduce__( def _cols(self, mapper: Mapper[_KT]) -> Sequence[ColumnElement[_KT]]: return self.cols - def __call__(self, value: _KT) -> Union[_KT, Tuple[_KT, ...]]: + def __call__(self, value: _KT) -> MissingOr[Union[_KT, Tuple[_KT, ...]]]: state = base.instance_state(value) m = base._state_mapper(state) @@ -83,7 +83,7 @@ def __call__(self, value: _KT) -> Union[_KT, Tuple[_KT, ...]]: else: obj = key[0] if obj is None: - return _UNMAPPED_AMBIGUOUS_NONE + return Missing else: return obj @@ -198,9 +198,6 @@ def column_keyed_dict( ) -_UNMAPPED_AMBIGUOUS_NONE = object() - - class _AttrGetter: __slots__ = ("attr_name", "getter") @@ -217,9 +214,9 @@ def __call__(self, mapped_object: Any) -> Any: dict_ = state.dict obj = dict_.get(self.attr_name, base.NO_VALUE) if obj is None: - return _UNMAPPED_AMBIGUOUS_NONE + return Missing else: - return _UNMAPPED_AMBIGUOUS_NONE + return Missing return obj @@ -277,7 +274,7 @@ def attribute_keyed_dict( def keyfunc_mapping( - keyfunc: _F, + keyfunc: Callable[[Any], Any], *, ignore_unpopulated_attribute: bool = False, ) -> Type[KeyFuncDict[_KT, Any]]: @@ -353,7 +350,7 @@ class KeyFuncDict(Dict[_KT, _VT]): def __init__( self, - keyfunc: _F, + keyfunc: Callable[[Any], Any], *dict_args: Any, ignore_unpopulated_attribute: bool = False, ) -> None: @@ -377,7 +374,7 @@ def __init__( @classmethod def _unreduce( cls, - keyfunc: _F, + keyfunc: Callable[[Any], Any], values: Dict[_KT, _KT], adapter: Optional[CollectionAdapter] = None, ) -> "KeyFuncDict[_KT, _KT]": @@ -464,7 +461,7 @@ def set( ) else: return - elif key is _UNMAPPED_AMBIGUOUS_NONE: + elif key is Missing: if not self.ignore_unpopulated_attribute: self._raise_for_unpopulated( value, _sa_initiator, warn_only=True @@ -492,7 +489,7 @@ def remove( value, _sa_initiator, warn_only=False ) return - elif key is _UNMAPPED_AMBIGUOUS_NONE: + elif key is Missing: if not self.ignore_unpopulated_attribute: self._raise_for_unpopulated( value, _sa_initiator, warn_only=True @@ -514,7 +511,7 @@ def remove( def _mapped_collection_cls( - keyfunc: _F, ignore_unpopulated_attribute: bool + keyfunc: Callable[[Any], Any], ignore_unpopulated_attribute: bool ) -> Type[KeyFuncDict[_KT, _KT]]: class _MKeyfuncMapped(KeyFuncDict[_KT, _KT]): def __init__(self, *dict_args: Any) -> None: diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 1dfc9cb3459..4f0b4891fd6 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -731,7 +731,7 @@ def label(self, name: Optional[str]) -> Label[Any]: ) @overload - def as_scalar( + def as_scalar( # type: ignore[overload-overlap] self: Query[Tuple[_MAYBE_ENTITY]], ) -> ScalarSelect[_MAYBE_ENTITY]: ... 
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 9835f824470..0b4ad88ed8b 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -1681,7 +1681,7 @@ def entity_namespace( c: ReadOnlyColumnCollection[str, KeyedColumnElement[Any]] """An alias for :attr:`.Bundle.columns`.""" - def _clone(self): + def _clone(self, **kw): cloned = self.__class__.__new__(self.__class__) cloned.__dict__.update(self.__dict__) return cloned diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 8ad17e2c1a4..e4a7256b5d8 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -2137,7 +2137,7 @@ def __eq__(self, other): l.append(c == local) return elements.and_(*l) - def __hash__(self): + def __hash__(self): # type: ignore[override] return hash(tuple(x for x in self)) diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index 22d60915522..0c998c667f2 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -493,6 +493,7 @@ def _raise_for_expected( element: Any, argname: Optional[str] = None, resolved: Optional[Any] = None, + *, advice: Optional[str] = None, code: Optional[str] = None, err: Optional[Exception] = None, @@ -595,7 +596,7 @@ def _no_text_coercion( class _NoTextCoercion(RoleImpl): __slots__ = () - def _literal_coercion(self, element, argname=None, **kw): + def _literal_coercion(self, element, *, argname=None, **kw): if isinstance(element, str) and issubclass( elements.TextClause, self._role_class ): @@ -613,7 +614,7 @@ class _CoerceLiterals(RoleImpl): def _text_coercion(self, element, argname=None): return _no_text_coercion(element, argname) - def _literal_coercion(self, element, argname=None, **kw): + def _literal_coercion(self, element, *, argname=None, **kw): if isinstance(element, str): if self._coerce_star and element == "*": return elements.ColumnClause("*", is_literal=True) @@ -641,7 +642,8 @@ def _implicit_coercions( self, element, resolved, - argname, + argname=None, + *, type_=None, literal_execute=False, **kw, @@ -659,7 +661,7 @@ def _implicit_coercions( literal_execute=literal_execute, ) - def _literal_coercion(self, element, argname=None, type_=None, **kw): + def _literal_coercion(self, element, **kw): return element @@ -671,6 +673,7 @@ def _raise_for_expected( element: Any, argname: Optional[str] = None, resolved: Optional[Any] = None, + *, advice: Optional[str] = None, code: Optional[str] = None, err: Optional[Exception] = None, @@ -745,7 +748,7 @@ class ExpressionElementImpl(_ColumnCoercions, RoleImpl): __slots__ = () def _literal_coercion( - self, element, name=None, type_=None, argname=None, is_crud=False, **kw + self, element, *, name=None, type_=None, is_crud=False, **kw ): if ( element is None @@ -787,15 +790,22 @@ def _raise_for_expected(self, element, argname=None, resolved=None, **kw): class BinaryElementImpl(ExpressionElementImpl, RoleImpl): __slots__ = () - def _literal_coercion( - self, element, expr, operator, bindparam_type=None, argname=None, **kw + def _literal_coercion( # type: ignore[override] + self, + element, + *, + expr, + operator, + bindparam_type=None, + argname=None, + **kw, ): try: return expr._bind_param(operator, element, type_=bindparam_type) except exc.ArgumentError as err: self._raise_for_expected(element, err=err) - def _post_coercion(self, resolved, expr, bindparam_type=None, **kw): + def _post_coercion(self, resolved, *, expr, bindparam_type=None, **kw): if resolved.type._isnull and not expr.type._isnull: resolved = 
resolved._with_binary_element_type( bindparam_type if bindparam_type is not None else expr.type @@ -833,7 +843,9 @@ def _warn_for_implicit_coercion(self, elem): % (elem.__class__.__name__) ) - def _literal_coercion(self, element, expr, operator, **kw): + def _literal_coercion( # type: ignore[override] + self, element, *, expr, operator, **kw + ): if util.is_non_string_iterable(element): non_literal_expressions: Dict[ Optional[operators.ColumnOperators], @@ -867,7 +879,7 @@ def _literal_coercion(self, element, expr, operator, **kw): else: self._raise_for_expected(element, **kw) - def _post_coercion(self, element, expr, operator, **kw): + def _post_coercion(self, element, *, expr, operator, **kw): if element._is_select_base: # for IN, we are doing scalar_subquery() coercion without # a warning @@ -893,12 +905,10 @@ class OnClauseImpl(_ColumnCoercions, RoleImpl): _coerce_consts = True - def _literal_coercion( - self, element, name=None, type_=None, argname=None, is_crud=False, **kw - ): + def _literal_coercion(self, element, **kw): self._raise_for_expected(element) - def _post_coercion(self, resolved, original_element=None, **kw): + def _post_coercion(self, resolved, *, original_element=None, **kw): # this is a hack right now as we want to use coercion on an # ORM InstrumentedAttribute, but we want to return the object # itself if it is one, not its clause element. @@ -983,7 +993,7 @@ def _implicit_coercions( class DMLColumnImpl(_ReturnsStringKey, RoleImpl): __slots__ = () - def _post_coercion(self, element, as_key=False, **kw): + def _post_coercion(self, element, *, as_key=False, **kw): if as_key: return element.key else: @@ -993,7 +1003,7 @@ def _post_coercion(self, element, as_key=False, **kw): class ConstExprImpl(RoleImpl): __slots__ = () - def _literal_coercion(self, element, argname=None, **kw): + def _literal_coercion(self, element, *, argname=None, **kw): if element is None: return elements.Null() elif element is False: @@ -1019,7 +1029,7 @@ def _implicit_coercions( else: self._raise_for_expected(element, argname, resolved) - def _literal_coercion(self, element, argname=None, **kw): + def _literal_coercion(self, element, **kw): """coerce the given value to :class:`._truncated_label`. 
Existing :class:`._truncated_label` and @@ -1069,7 +1079,9 @@ def _implicit_coercions( else: self._raise_for_expected(element, argname, resolved) - def _literal_coercion(self, element, name, type_, **kw): + def _literal_coercion( # type: ignore[override] + self, element, *, name, type_, **kw + ): if element is None: return None else: @@ -1111,7 +1123,7 @@ class ColumnsClauseImpl(_SelectIsNotFrom, _CoerceLiterals, RoleImpl): _guess_straight_column = re.compile(r"^\w\S*$", re.I) def _raise_for_expected( - self, element, argname=None, resolved=None, advice=None, **kw + self, element, argname=None, resolved=None, *, advice=None, **kw ): if not advice and isinstance(element, list): advice = ( @@ -1149,7 +1161,9 @@ class ReturnsRowsImpl(RoleImpl): class StatementImpl(_CoerceLiterals, RoleImpl): __slots__ = () - def _post_coercion(self, resolved, original_element, argname=None, **kw): + def _post_coercion( + self, resolved, *, original_element, argname=None, **kw + ): if resolved is not original_element and not isinstance( original_element, str ): @@ -1215,7 +1229,7 @@ class JoinTargetImpl(RoleImpl): _skip_clauseelement_for_target_match = True - def _literal_coercion(self, element, argname=None, **kw): + def _literal_coercion(self, element, *, argname=None, **kw): self._raise_for_expected(element, argname) def _implicit_coercions( @@ -1223,6 +1237,7 @@ def _implicit_coercions( element: Any, resolved: Any, argname: Optional[str] = None, + *, legacy: bool = False, **kw: Any, ) -> Any: @@ -1256,6 +1271,7 @@ def _implicit_coercions( element: Any, resolved: Any, argname: Optional[str] = None, + *, explicit_subquery: bool = False, allow_select: bool = True, **kw: Any, @@ -1277,7 +1293,7 @@ def _implicit_coercions( else: self._raise_for_expected(element, argname, resolved) - def _post_coercion(self, element, deannotate=False, **kw): + def _post_coercion(self, element, *, deannotate=False, **kw): if deannotate: return element._deannotate() else: @@ -1292,7 +1308,7 @@ def _implicit_coercions( element: Any, resolved: Any, argname: Optional[str] = None, - explicit_subquery: bool = False, + *, allow_select: bool = False, **kw: Any, ) -> Any: @@ -1312,7 +1328,7 @@ def _implicit_coercions( class AnonymizedFromClauseImpl(StrictFromClauseImpl): __slots__ = () - def _post_coercion(self, element, flat=False, name=None, **kw): + def _post_coercion(self, element, *, flat=False, name=None, **kw): assert name is None return element._anonymous_fromclause(flat=flat) diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 6d6d8278af6..634e5ce118d 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -6489,8 +6489,10 @@ def visit_json_getitem_op_binary(self, binary, operator, **kw): def visit_json_path_getitem_op_binary(self, binary, operator, **kw): return self.visit_getitem_binary(binary, operator, **kw) - def visit_sequence(self, seq, **kw): - return "" % self.preparer.format_sequence(seq) + def visit_sequence(self, sequence, **kw): + return ( + f"" + ) def returning_clause( self, @@ -6524,7 +6526,7 @@ def delete_extra_from_clause( for t in extra_froms ) - def visit_empty_set_expr(self, type_, **kw): + def visit_empty_set_expr(self, element_types, **kw): return "SELECT 1 WHERE 1!=1" def get_from_hint_text(self, table, text): diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py index 499a19d97cc..d1426658239 100644 --- a/lib/sqlalchemy/sql/crud.py +++ b/lib/sqlalchemy/sql/crud.py @@ -1286,7 +1286,7 @@ def __init__(self, original, index): def 
compare(self, other, **kw): raise NotImplementedError() - def _copy_internals(self, other, **kw): + def _copy_internals(self, **kw): raise NotImplementedError() def __eq__(self, other): diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 048a9ea6228..70c9e01da57 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -3999,7 +3999,7 @@ def __init__(self, start, stop, step, _name=None): self.type = type_api.NULLTYPE def self_group(self, against: Optional[OperatorType] = None) -> Self: - assert against is operator.getitem # type: ignore[comparison-overlap] + assert against is operator.getitem return self diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index c20df07a6b8..f29131c933c 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -1011,7 +1011,7 @@ def __init__( if _adapted_from: self.dispatch = self.dispatch._join(_adapted_from.dispatch) - def _set_parent(self, column, **kw): + def _set_parent(self, parent, **kw): # set parent hook is when this type is associated with a column. # Column calls it for all SchemaEventTarget instances, either the # base type and/or variants in _variant_mapping. @@ -1025,7 +1025,7 @@ def _set_parent(self, column, **kw): # on_table/metadata_create/drop in this method, which is used by # "native" types with a separate CREATE/DROP e.g. Postgresql.ENUM - column._on_table_attach(util.portable_instancemethod(self._set_table)) + parent._on_table_attach(util.portable_instancemethod(self._set_table)) def _variant_mapping_for_set_table(self, column): if column.type._variant_mapping: @@ -1669,10 +1669,10 @@ def adapt_to_emulated(self, impltype, **kw): assert "_enums" in kw return impltype(**kw) - def adapt(self, impltype, **kw): + def adapt(self, cls, **kw): kw["_enums"] = self._enums_argument kw["_disable_warnings"] = True - return super().adapt(impltype, **kw) + return super().adapt(cls, **kw) def _should_create_constraint(self, compiler, **kw): if not self._is_impl_for_variant(compiler.dialect, kw): @@ -3065,13 +3065,13 @@ def python_type(self): def compare_values(self, x, y): return x == y - def _set_parent(self, column, outer=False, **kw): + def _set_parent(self, parent, outer=False, **kw): """Support SchemaEventTarget""" if not outer and isinstance(self.item_type, SchemaEventTarget): - self.item_type._set_parent(column, **kw) + self.item_type._set_parent(parent, **kw) - def _set_parent_with_dispatch(self, parent): + def _set_parent_with_dispatch(self, parent, **kw): """Support SchemaEventTarget""" super()._set_parent_with_dispatch(parent, outer=True) diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py index e1b5e661433..fea881e730a 100644 --- a/lib/sqlalchemy/util/compat.py +++ b/lib/sqlalchemy/util/compat.py @@ -58,7 +58,7 @@ class FullArgSpec(typing.NamedTuple): varkw: Optional[str] defaults: Optional[Tuple[Any, ...]] kwonlyargs: List[str] - kwonlydefaults: Dict[str, Any] + kwonlydefaults: Optional[Dict[str, Any]] annotations: Dict[str, Any] diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 72cb28d1122..9312976e71f 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -2208,3 +2208,11 @@ def has_compiled_ext(raise_=False): ) else: return False + + +class _Missing(enum.Enum): + Missing = enum.auto() + + +Missing = _Missing.Missing +MissingOr = Union[_T, Literal[_Missing.Missing]] diff --git a/test/ext/mypy/test_mypy_plugin_py3k.py 
b/test/ext/mypy/test_mypy_plugin_py3k.py index f1b36ac52bb..e1aa1f96551 100644 --- a/test/ext/mypy/test_mypy_plugin_py3k.py +++ b/test/ext/mypy/test_mypy_plugin_py3k.py @@ -1,4 +1,5 @@ import os +import pathlib import shutil from sqlalchemy import testing @@ -25,8 +26,12 @@ def _incremental_dirs(): class MypyPluginTest(fixtures.MypyTest): @testing.combinations( - *[(pathname) for pathname in _incremental_dirs()], + *[ + (pathlib.Path(pathname).name, pathname) + for pathname in _incremental_dirs() + ], argnames="pathname", + id_="ia", ) @testing.requires.patch_library def test_incremental(self, mypy_runner, per_func_cachedir, pathname): diff --git a/tox.ini b/tox.ini index e80fe4ccec0..93b86dd5f75 100644 --- a/tox.ini +++ b/tox.ini @@ -192,7 +192,8 @@ commands= deps= greenlet != 0.4.17 importlib_metadata; python_version < '3.8' - mypy >= 1.6.0,<1.11.0 # temporary, REMOVE upper bound + mypy >= 1.6.0 + types-greenlet commands = mypy {env:MYPY_COLOR} ./lib/sqlalchemy # pyright changes too often with not-exactly-correct errors From d69b2a0a017bcf76ef3fdebf9b5af62714f73c3b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 19 Jul 2024 22:59:35 -0400 Subject: [PATCH 261/544] restore transfer of mapper.local_table to DML for some cases Fixed regression appearing in 2.0.21 caused by :ticket:`10279` where using a :func:`_sql.delete` or :func:`_sql.update` against an ORM class that is the base of an inheritance hierarchy, while also specifying that subclasses should be loaded polymorphically, would leak the polymorphic joins into the UPDATE or DELETE statement as well creating incorrect SQL. This re-introduces logic to set the `.table` of an ORM update or delete back to `mapper.local_table` that was removed in d18ccdc997185b74 by :ticket:`10279`; the logic is qualified to only take place for a statement that's directly against a mapper and not one that's against an aliased object. Fixes: #11625 Change-Id: Ia228c99809370733f111925554167e39bcd6be1d (cherry picked from commit e82660aba0b9ced0b3c65fd8fc4496e4e371fce0) --- doc/build/changelog/unreleased_20/11625.rst | 9 ++++ lib/sqlalchemy/orm/bulk_persistence.py | 6 +++ test/orm/dml/test_update_delete_where.py | 49 +++++++++++++++++++++ 3 files changed, 64 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11625.rst diff --git a/doc/build/changelog/unreleased_20/11625.rst b/doc/build/changelog/unreleased_20/11625.rst new file mode 100644 index 00000000000..c32a90ad822 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11625.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, orm, regression + :tickets: 11625 + + Fixed regression appearing in 2.0.21 caused by :ticket:`10279` where using + a :func:`_sql.delete` or :func:`_sql.update` against an ORM class that is + the base of an inheritance hierarchy, while also specifying that subclasses + should be loaded polymorphically, would leak the polymorphic joins into the + UPDATE or DELETE statement as well creating incorrect SQL. 
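
A self-contained sketch, not part of the patch, of the behavior restored by
this change; it mirrors the ``InheritWPolyTest`` fixture added further below,
and the class and column names are illustrative only::

    from sqlalchemy import Column, ForeignKey, Integer, String, update
    from sqlalchemy.orm import DeclarativeBase


    class Base(DeclarativeBase):
        pass


    class Person(Base):
        __tablename__ = "person"
        id = Column(Integer, primary_key=True)
        type = Column(String(50))
        name = Column(String(50))
        __mapper_args__ = {"with_polymorphic": "*"}


    class Engineer(Person):
        __tablename__ = "engineer"
        id = Column(Integer, ForeignKey("person.id"), primary_key=True)
        engineer_name = Column(String(50))


    # with the fix, the UPDATE targets the base table only, e.g.
    # "UPDATE person SET name=:name", without the polymorphic join
    print(update(Person).values(name="n1"))
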
diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py index 2ed6a4beaac..ff85650436e 100644 --- a/lib/sqlalchemy/orm/bulk_persistence.py +++ b/lib/sqlalchemy/orm/bulk_persistence.py @@ -1446,6 +1446,9 @@ def _setup_for_orm_update(self, statement, compiler, **kw): new_stmt = statement._clone() + if new_stmt.table._annotations["parententity"] is mapper: + new_stmt.table = mapper.local_table + # note if the statement has _multi_values, these # are passed through to the new statement, which will then raise # InvalidRequestError because UPDATE doesn't support multi_values @@ -1865,6 +1868,9 @@ def create_for_statement(cls, statement, compiler, **kw): new_stmt = statement._clone() + if new_stmt.table._annotations["parententity"] is mapper: + new_stmt.table = mapper.local_table + new_crit = cls._adjust_for_extra_criteria( self.global_attributes, mapper ) diff --git a/test/orm/dml/test_update_delete_where.py b/test/orm/dml/test_update_delete_where.py index cbf27d018b7..6e5d29fe97b 100644 --- a/test/orm/dml/test_update_delete_where.py +++ b/test/orm/dml/test_update_delete_where.py @@ -36,6 +36,7 @@ from sqlalchemy.sql.selectable import Select from sqlalchemy.testing import assert_raises from sqlalchemy.testing import assert_raises_message +from sqlalchemy.testing import AssertsCompiledSQL from sqlalchemy.testing import eq_ from sqlalchemy.testing import expect_raises from sqlalchemy.testing import fixtures @@ -2964,6 +2965,54 @@ def test_update_from_multitable(self, synchronize_session): ) +class InheritWPolyTest(fixtures.TestBase, AssertsCompiledSQL): + __dialect__ = "default" + + @testing.fixture + def inherit_fixture(self, decl_base): + def go(poly_type): + + class Person(decl_base): + __tablename__ = "person" + id = Column(Integer, primary_key=True) + type = Column(String(50)) + name = Column(String(50)) + + if poly_type.wpoly: + __mapper_args__ = {"with_polymorphic": "*"} + + class Engineer(Person): + __tablename__ = "engineer" + id = Column(Integer, ForeignKey("person.id"), primary_key=True) + engineer_name = Column(String(50)) + + if poly_type.inline: + __mapper_args__ = {"polymorphic_load": "inline"} + + return Person, Engineer + + return go + + @testing.variation("poly_type", ["wpoly", "inline", "none"]) + def test_update_base_only(self, poly_type, inherit_fixture): + Person, Engineer = inherit_fixture(poly_type) + + self.assert_compile( + update(Person).values(name="n1"), "UPDATE person SET name=:name" + ) + + @testing.variation("poly_type", ["wpoly", "inline", "none"]) + def test_delete_base_only(self, poly_type, inherit_fixture): + Person, Engineer = inherit_fixture(poly_type) + + self.assert_compile(delete(Person), "DELETE FROM person") + + self.assert_compile( + delete(Person).where(Person.id == 7), + "DELETE FROM person WHERE person.id = :id_1", + ) + + class SingleTablePolymorphicTest(fixtures.DeclarativeMappedTest): __backend__ = True From de67550100143e8d885e02fcbdeb871a368c9b49 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 29 Jul 2024 11:33:58 -0400 Subject: [PATCH 262/544] backport 1.4 changelogs we have a few issues that were fixed only in 1.4, not yet released. 
backport the changelog so that release mechanics proceed without issue Change-Id: I376aa5c854314e86134c8f935b80d6c0dd083033 (cherry picked from commit 50fbde72d0e6efe0862f780f14a72eb916ea630c) --- doc/build/changelog/unreleased_14/11417.rst | 11 +++++++++++ doc/build/changelog/unreleased_14/11499.rst | 6 ++++++ 2 files changed, 17 insertions(+) create mode 100644 doc/build/changelog/unreleased_14/11417.rst create mode 100644 doc/build/changelog/unreleased_14/11499.rst diff --git a/doc/build/changelog/unreleased_14/11417.rst b/doc/build/changelog/unreleased_14/11417.rst new file mode 100644 index 00000000000..5182c03ea86 --- /dev/null +++ b/doc/build/changelog/unreleased_14/11417.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, general + :tickets: 11417 + + Set up full Python 3.13 support to the extent currently possible, repairing + issues within internal language helpers as well as the serializer extension + module. + + For version 1.4, this also modernizes the "extras" names in setup.cfg + to use dashes and not underscores for two-word names. Underscore names + are still present to accommodate potential compatibility issues. diff --git a/doc/build/changelog/unreleased_14/11499.rst b/doc/build/changelog/unreleased_14/11499.rst new file mode 100644 index 00000000000..e03062c1911 --- /dev/null +++ b/doc/build/changelog/unreleased_14/11499.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, engine + :tickets: 11499 + + Adjustments to the C extensions, which are specific to the SQLAlchemy 1.x + series, to work under Python 3.13. Pull request courtesy Ben Beasley. From 94a0ab891ce72bf6d8cec244f37b1a5933c8a0ef Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 29 Jul 2024 11:58:38 -0400 Subject: [PATCH 263/544] add missing forwards port versions I totally forgot these meaning they wont show up in 2.0 changelogs, so this is a big mistake i have to stop making Change-Id: I5f998eecbfa8aceab3ee247bb3a00e13820af872 (cherry picked from commit 1ba11863398153760952261adff08d544a508c3a) --- doc/build/changelog/unreleased_14/11417.rst | 1 + doc/build/changelog/unreleased_14/11471.rst | 1 + doc/build/changelog/unreleased_14/11514.rst | 1 + doc/build/changelog/unreleased_14/11544.rst | 1 + doc/build/changelog/unreleased_14/11562.rst | 1 + doc/build/changelog/unreleased_14/11582.rst | 1 + doc/build/changelog/unreleased_14/greenlet_compat.rst | 1 + doc/build/changelog/unreleased_14/mypy1110.rst | 2 +- 8 files changed, 8 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/unreleased_14/11417.rst b/doc/build/changelog/unreleased_14/11417.rst index 5182c03ea86..b37af43e3d3 100644 --- a/doc/build/changelog/unreleased_14/11417.rst +++ b/doc/build/changelog/unreleased_14/11417.rst @@ -1,6 +1,7 @@ .. change:: :tags: bug, general :tickets: 11417 + :versions: 2.0.31 Set up full Python 3.13 support to the extent currently possible, repairing issues within internal language helpers as well as the serializer extension diff --git a/doc/build/changelog/unreleased_14/11471.rst b/doc/build/changelog/unreleased_14/11471.rst index f669eabc789..47fda837575 100644 --- a/doc/build/changelog/unreleased_14/11471.rst +++ b/doc/build/changelog/unreleased_14/11471.rst @@ -1,6 +1,7 @@ .. 
change:: :tags: bug, sql :tickets: 11471 + :versions: 2.0.31 Fixed caching issue where using the :meth:`.TextualSelect.add_cte` method of the :class:`.TextualSelect` construct would not set a correct cache key diff --git a/doc/build/changelog/unreleased_14/11514.rst b/doc/build/changelog/unreleased_14/11514.rst index 81f0ddeddc0..145f87f4384 100644 --- a/doc/build/changelog/unreleased_14/11514.rst +++ b/doc/build/changelog/unreleased_14/11514.rst @@ -1,6 +1,7 @@ .. change:: :tags: bug, mssql :tickets: 11514 + :versions: 2.0.32 Fixed issue where SQL Server drivers don't support bound parameters when rendering the "frame specification" for a window function, e.g. "ROWS diff --git a/doc/build/changelog/unreleased_14/11544.rst b/doc/build/changelog/unreleased_14/11544.rst index 82639e54e84..6bc3b9705f4 100644 --- a/doc/build/changelog/unreleased_14/11544.rst +++ b/doc/build/changelog/unreleased_14/11544.rst @@ -1,6 +1,7 @@ .. change:: :tags: bug, sql :tickets: 11544 + :versions: 2.0 Fixed caching issue where the :paramref:`_sql.Select.with_for_update.key_share` element of diff --git a/doc/build/changelog/unreleased_14/11562.rst b/doc/build/changelog/unreleased_14/11562.rst index 15ccd0df6d2..beaad363351 100644 --- a/doc/build/changelog/unreleased_14/11562.rst +++ b/doc/build/changelog/unreleased_14/11562.rst @@ -1,6 +1,7 @@ .. change:: :tags: bug, orm, regression :tickets: 11562 + :versions: 2.0.32 Fixed regression going back to 1.4 where accessing a collection using the "dynamic" strategy on a transient object and attempting to query would diff --git a/doc/build/changelog/unreleased_14/11582.rst b/doc/build/changelog/unreleased_14/11582.rst index 935af9b2444..6a2009cbae4 100644 --- a/doc/build/changelog/unreleased_14/11582.rst +++ b/doc/build/changelog/unreleased_14/11582.rst @@ -1,6 +1,7 @@ .. change:: :tags: bug, reflection, sqlite :tickets: 11582 + :versions: 2.0.32 Fixed reflection of computed column in SQLite to properly account for complex expressions. diff --git a/doc/build/changelog/unreleased_14/greenlet_compat.rst b/doc/build/changelog/unreleased_14/greenlet_compat.rst index d9eb51cd9c0..95ce98113df 100644 --- a/doc/build/changelog/unreleased_14/greenlet_compat.rst +++ b/doc/build/changelog/unreleased_14/greenlet_compat.rst @@ -1,5 +1,6 @@ .. change:: :tags: usecase, engine + :versions: 2.0.31 Modified the internal representation used for adapting asyncio calls to greenlets to allow for duck-typed compatibility with third party libraries diff --git a/doc/build/changelog/unreleased_14/mypy1110.rst b/doc/build/changelog/unreleased_14/mypy1110.rst index 1dc5e0dc3ec..3f1fe05ce2d 100644 --- a/doc/build/changelog/unreleased_14/mypy1110.rst +++ b/doc/build/changelog/unreleased_14/mypy1110.rst @@ -1,6 +1,6 @@ .. change:: :tags: bug, mypy - :versions: 2.0 + :versions: 2.0.32 The deprecated mypy plugin is no longer fully functional with the latest series of mypy 1.11.0, as changes in the mypy interpreter are no longer From dd3ca86c14f10f8e132e7651938e8b77ae3f05cb Mon Sep 17 00:00:00 2001 From: Takashi Kajinami Date: Mon, 29 Jul 2024 12:01:04 -0400 Subject: [PATCH 264/544] Import all legacy classes by sqlalchemy.orm.collections.* Restored legacy class names removed from ``sqlalalchemy.orm.collections.*``, including :class:`_orm.MappedCollection`, :func:`_orm.mapped_collection`, :func:`_orm.column_mapped_collection`, :func:`_orm.attribute_mapped_collection`. Pull request courtesy Takashi Kajinami. 
Fixes: #11435
Closes: #11432
Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11432
Pull-request-sha: 66f20a8e2069f48665299d1ee220dfe57aedf79e
Change-Id: I05172669fc9a44e737b3714001d1317bbbf0012f
(cherry picked from commit b6e9ca40bddbd6e670d40bc4ae952e1ee67d8816)
---
 doc/build/changelog/unreleased_20/11435.rst | 13 +++++++++++++
 lib/sqlalchemy/orm/collections.py | 8 +++++---
 2 files changed, 18 insertions(+), 3 deletions(-)
 create mode 100644 doc/build/changelog/unreleased_20/11435.rst

diff --git a/doc/build/changelog/unreleased_20/11435.rst b/doc/build/changelog/unreleased_20/11435.rst
new file mode 100644
index 00000000000..8e9ac23396d
--- /dev/null
+++ b/doc/build/changelog/unreleased_20/11435.rst
@@ -0,0 +1,13 @@
+.. change::
+ :tags: bug, general, regression
+ :tickets: 11435
+
+ Restored legacy class names removed from
+ ``sqlalchemy.orm.collections.*``, including
+ :class:`_orm.MappedCollection`, :func:`_orm.mapped_collection`,
+ :func:`_orm.column_mapped_collection`,
+ :func:`_orm.attribute_mapped_collection`. Pull request courtesy Takashi
+ Kajinami.
+
+
+ .
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index 6fefd787a82..d713abb0e9d 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -148,10 +148,12 @@ def shift(self):
 "keyfunc_mapping",
 "column_keyed_dict",
 "attribute_keyed_dict",
- "column_keyed_dict",
- "attribute_keyed_dict",
- "MappedCollection",
 "KeyFuncDict",
+ # old names in < 2.0
+ "mapped_collection",
+ "column_mapped_collection",
+ "attribute_mapped_collection",
+ "MappedCollection",
 ]

 __instrumentation_mutex = threading.Lock()

From 79e3e7b50361bb4d88452e0139c2431a4e031a9a Mon Sep 17 00:00:00 2001
From: Federico Caselli
Date: Sat, 15 Jun 2024 00:06:46 +0200
Subject: [PATCH 265/544] add CTE cache elements for CompoundSelect, more verify tests

Follow up of :ticket:`11471` to fix caching issue where using the
:meth:`.CompoundSelect.add_cte` method of the :class:`.CompoundSelect`
construct would not set a correct cache key which distinguished between
different CTE expressions.

Also added tests that would detect issues similar to the one fixed in
:ticket:`11544`.

Fixes: #11471
Change-Id: Iae6a91077c987d83cd70ea826daff42855491330
(cherry picked from commit 881be0a21633b3fee101cb34cc611904b8cba618)
---
 doc/build/changelog/unreleased_20/11471.rst | 9 +
 lib/sqlalchemy/sql/schema.py | 7 -
 lib/sqlalchemy/sql/selectable.py | 32 ++-
 test/sql/test_compare.py | 294 +++++++++++++++++++-
 4 files changed, 308 insertions(+), 34 deletions(-)
 create mode 100644 doc/build/changelog/unreleased_20/11471.rst

diff --git a/doc/build/changelog/unreleased_20/11471.rst b/doc/build/changelog/unreleased_20/11471.rst
new file mode 100644
index 00000000000..4170de02985
--- /dev/null
+++ b/doc/build/changelog/unreleased_20/11471.rst
@@ -0,0 +1,9 @@
+.. change::
+ :tags: bug, sql
+ :tickets: 11471

+ Follow up of :ticket:`11471` to fix caching issue where using the
+ :meth:`.CompoundSelect.add_cte` method of the
+ :class:`.CompoundSelect` construct would not set a correct cache key
+ which distinguished between different CTE expressions. Also added tests
+ that would detect issues similar to the one fixed in :ticket:`11544`. 
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index a54252b2fbd..97d123007bd 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -77,7 +77,6 @@ from .selectable import TableClause from .type_api import to_instance from .visitors import ExternallyTraversible -from .visitors import InternalTraversal from .. import event from .. import exc from .. import inspection @@ -101,7 +100,6 @@ from .elements import BindParameter from .functions import Function from .type_api import TypeEngine - from .visitors import _TraverseInternalsType from .visitors import anon_map from ..engine import Connection from ..engine import Engine @@ -394,11 +392,6 @@ def foreign_keys(self) -> Set[ForeignKey]: ... """ - _traverse_internals: _TraverseInternalsType = ( - TableClause._traverse_internals - + [("schema", InternalTraversal.dp_string)] - ) - if TYPE_CHECKING: @util.ro_non_memoized_property diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 93b23f15e24..cb0fd6f71cd 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -3681,7 +3681,7 @@ class SelectStatementGrouping(GroupedElement, SelectBase, Generic[_SB]): __visit_name__ = "select_statement_grouping" _traverse_internals: _TraverseInternalsType = [ ("element", InternalTraversal.dp_clauseelement) - ] + ] + SupportsCloneAnnotations._clone_annotations_traverse_internals _is_select_container = True @@ -3761,6 +3761,10 @@ def selected_columns(self) -> ColumnCollection[str, ColumnElement[Any]]: def _from_objects(self) -> List[FromClause]: return self.element._from_objects + def add_cte(self, *ctes: CTE, nest_here: bool = False) -> Self: + # SelectStatementGrouping not generative: has no attribute '_generate' + raise NotImplementedError + class GenerativeSelect(SelectBase, Generative): """Base class for SELECT statements where additional elements can be @@ -4306,17 +4310,21 @@ class CompoundSelect(HasCompileState, GenerativeSelect, ExecutableReturnsRows): __visit_name__ = "compound_select" - _traverse_internals: _TraverseInternalsType = [ - ("selects", InternalTraversal.dp_clauseelement_list), - ("_limit_clause", InternalTraversal.dp_clauseelement), - ("_offset_clause", InternalTraversal.dp_clauseelement), - ("_fetch_clause", InternalTraversal.dp_clauseelement), - ("_fetch_clause_options", InternalTraversal.dp_plain_dict), - ("_order_by_clauses", InternalTraversal.dp_clauseelement_list), - ("_group_by_clauses", InternalTraversal.dp_clauseelement_list), - ("_for_update_arg", InternalTraversal.dp_clauseelement), - ("keyword", InternalTraversal.dp_string), - ] + SupportsCloneAnnotations._clone_annotations_traverse_internals + _traverse_internals: _TraverseInternalsType = ( + [ + ("selects", InternalTraversal.dp_clauseelement_list), + ("_limit_clause", InternalTraversal.dp_clauseelement), + ("_offset_clause", InternalTraversal.dp_clauseelement), + ("_fetch_clause", InternalTraversal.dp_clauseelement), + ("_fetch_clause_options", InternalTraversal.dp_plain_dict), + ("_order_by_clauses", InternalTraversal.dp_clauseelement_list), + ("_group_by_clauses", InternalTraversal.dp_clauseelement_list), + ("_for_update_arg", InternalTraversal.dp_clauseelement), + ("keyword", InternalTraversal.dp_string), + ] + + SupportsCloneAnnotations._clone_annotations_traverse_internals + + HasCTE._has_ctes_traverse_internals + ) selects: List[SelectBase] diff --git a/test/sql/test_compare.py b/test/sql/test_compare.py index 2a7e41387bf..88ac3c315ed 100644 --- 
a/test/sql/test_compare.py +++ b/test/sql/test_compare.py @@ -1,4 +1,5 @@ import importlib +from inspect import signature import itertools import random @@ -35,7 +36,6 @@ from sqlalchemy.sql import bindparam from sqlalchemy.sql import ColumnElement from sqlalchemy.sql import dml -from sqlalchemy.sql import elements from sqlalchemy.sql import False_ from sqlalchemy.sql import func from sqlalchemy.sql import operators @@ -43,10 +43,11 @@ from sqlalchemy.sql import True_ from sqlalchemy.sql import type_coerce from sqlalchemy.sql import visitors +from sqlalchemy.sql.annotation import Annotated from sqlalchemy.sql.base import HasCacheKey +from sqlalchemy.sql.base import SingletonConstant from sqlalchemy.sql.elements import _label_reference from sqlalchemy.sql.elements import _textual_label_reference -from sqlalchemy.sql.elements import Annotated from sqlalchemy.sql.elements import BindParameter from sqlalchemy.sql.elements import ClauseElement from sqlalchemy.sql.elements import ClauseList @@ -62,10 +63,10 @@ from sqlalchemy.sql.lambdas import LambdaElement from sqlalchemy.sql.lambdas import LambdaOptions from sqlalchemy.sql.selectable import _OffsetLimitParam -from sqlalchemy.sql.selectable import AliasedReturnsRows from sqlalchemy.sql.selectable import FromGrouping from sqlalchemy.sql.selectable import LABEL_STYLE_NONE from sqlalchemy.sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL +from sqlalchemy.sql.selectable import NoInit from sqlalchemy.sql.selectable import Select from sqlalchemy.sql.selectable import Selectable from sqlalchemy.sql.selectable import SelectStatementGrouping @@ -214,6 +215,34 @@ class CoreFixtures: .columns(a=Integer()) .add_cte(table_b.select().where(table_b.c.a > 5).cte()), ), + lambda: ( + union( + select(table_a).where(table_a.c.a > 1), + select(table_a).where(table_a.c.a < 1), + ).add_cte(select(table_b).where(table_b.c.a > 1).cte("ttt")), + union( + select(table_a).where(table_a.c.a > 1), + select(table_a).where(table_a.c.a < 1), + ).add_cte(select(table_b).where(table_b.c.a < 1).cte("ttt")), + union( + select(table_a).where(table_a.c.a > 1), + select(table_a).where(table_a.c.a < 1), + ) + .add_cte(select(table_b).where(table_b.c.a > 1).cte("ttt")) + ._annotate({"foo": "bar"}), + ), + lambda: ( + union( + select(table_a).where(table_a.c.a > 1), + select(table_a).where(table_a.c.a < 1), + ).self_group(), + union( + select(table_a).where(table_a.c.a > 1), + select(table_a).where(table_a.c.a < 1), + ) + .self_group() + ._annotate({"foo": "bar"}), + ), lambda: ( literal(1).op("+")(literal(1)), literal(1).op("-")(literal(1)), @@ -1370,6 +1399,246 @@ def test_generative_cache_key_regen_w_del(self): is_not(ck3, None) +def all_hascachekey_subclasses(ignore_subclasses=()): + def find_subclasses(cls: type): + for s in class_hierarchy(cls): + if ( + # class_hierarchy may return values that + # aren't subclasses of cls + not issubclass(s, cls) + or "_traverse_internals" not in s.__dict__ + or any(issubclass(s, ignore) for ignore in ignore_subclasses) + ): + continue + yield s + + return dict.fromkeys(find_subclasses(HasCacheKey)) + + +class HasCacheKeySubclass(fixtures.TestBase): + custom_traverse = { + "AnnotatedFunctionAsBinary": { + "sql_function", + "left_index", + "right_index", + "modifiers", + "_annotations", + }, + "Annotatednext_value": {"sequence", "_annotations"}, + "FunctionAsBinary": { + "sql_function", + "left_index", + "right_index", + "modifiers", + }, + "next_value": {"sequence"}, + } + + ignore_keys = { + "AnnotatedColumn": {"dialect_options"}, + 
"SelectStatementGrouping": { + "_independent_ctes", + "_independent_ctes_opts", + }, + } + + @testing.combinations(*all_hascachekey_subclasses()) + def test_traverse_internals(self, cls: type): + super_traverse = {} + # ignore_super = self.ignore_super.get(cls.__name__, set()) + for s in cls.mro()[1:]: + # if s.__name__ in ignore_super: + # continue + if s.__name__ == "Executable": + continue + for attr in s.__dict__: + if not attr.endswith("_traverse_internals"): + continue + for k, v in s.__dict__[attr]: + if k not in super_traverse: + super_traverse[k] = v + traverse_dict = dict(cls.__dict__["_traverse_internals"]) + eq_(len(cls.__dict__["_traverse_internals"]), len(traverse_dict)) + if cls.__name__ in self.custom_traverse: + eq_(traverse_dict.keys(), self.custom_traverse[cls.__name__]) + else: + ignore = self.ignore_keys.get(cls.__name__, set()) + + left_keys = traverse_dict.keys() | ignore + is_true( + left_keys >= super_traverse.keys(), + f"{left_keys} >= {super_traverse.keys()} - missing: " + f"{super_traverse.keys() - left_keys} - ignored {ignore}", + ) + + subset = { + k: v for k, v in traverse_dict.items() if k in super_traverse + } + eq_( + subset, + {k: v for k, v in super_traverse.items() if k not in ignore}, + ) + + # name -> (traverse names, init args) + custom_init = { + "BinaryExpression": ( + {"right", "operator", "type", "negate", "modifiers", "left"}, + {"right", "operator", "type_", "negate", "modifiers", "left"}, + ), + "BindParameter": ( + {"literal_execute", "type", "callable", "value", "key"}, + {"required", "isoutparam", "literal_execute", "type_", "callable_"} + | {"unique", "expanding", "quote", "value", "key"}, + ), + "Cast": ({"type", "clause"}, {"type_", "expression"}), + "ClauseList": ( + {"clauses", "operator"}, + {"group_contents", "group", "operator", "clauses"}, + ), + "ColumnClause": ( + {"is_literal", "type", "table", "name"}, + {"type_", "is_literal", "text"}, + ), + "ExpressionClauseList": ( + {"clauses", "operator"}, + {"type_", "operator", "clauses"}, + ), + "FromStatement": ( + {"_raw_columns", "_with_options", "element"} + | {"_propagate_attrs", "_with_context_options"}, + {"element", "entities"}, + ), + "FunctionAsBinary": ( + {"modifiers", "sql_function", "right_index", "left_index"}, + {"right_index", "left_index", "fn"}, + ), + "FunctionElement": ( + {"clause_expr", "_table_value_type", "_with_ordinality"}, + {"clauses"}, + ), + "Function": ( + {"_table_value_type", "clause_expr", "_with_ordinality"} + | {"packagenames", "type", "name"}, + {"type_", "packagenames", "name", "clauses"}, + ), + "Label": ({"_element", "type", "name"}, {"type_", "element", "name"}), + "LambdaElement": ( + {"_resolved"}, + {"role", "opts", "apply_propagate_attrs", "fn"}, + ), + "Load": ( + {"propagate_to_loaders", "additional_source_entities"} + | {"path", "context"}, + {"entity"}, + ), + "LoaderCriteriaOption": ( + {"where_criteria", "entity", "propagate_to_loaders"} + | {"root_entity", "include_aliases"}, + {"where_criteria", "include_aliases", "propagate_to_loaders"} + | {"entity_or_base", "loader_only", "track_closure_variables"}, + ), + "NullLambdaStatement": ({"_resolved"}, {"statement"}), + "ScalarFunctionColumn": ( + {"type", "fn", "name"}, + {"type_", "name", "fn"}, + ), + "ScalarValues": ( + {"_data", "_column_args", "literal_binds"}, + {"columns", "data", "literal_binds"}, + ), + "Select": ( + { + "_having_criteria", + "_distinct", + "_group_by_clauses", + "_fetch_clause", + "_limit_clause", + "_label_style", + "_order_by_clauses", + "_raw_columns", + 
"_correlate_except", + "_statement_hints", + "_hints", + "_independent_ctes", + "_distinct_on", + "_with_context_options", + "_setup_joins", + "_suffixes", + "_memoized_select_entities", + "_for_update_arg", + "_prefixes", + "_propagate_attrs", + "_with_options", + "_independent_ctes_opts", + "_offset_clause", + "_correlate", + "_where_criteria", + "_annotations", + "_fetch_clause_options", + "_from_obj", + }, + {"entities"}, + ), + "TableValuedColumn": ( + {"scalar_alias", "type", "name"}, + {"type_", "scalar_alias"}, + ), + "TableValueType": ({"_elements"}, {"elements"}), + "TextualSelect": ( + {"column_args", "_annotations", "_independent_ctes"} + | {"element", "_independent_ctes_opts"}, + {"positional", "columns", "text"}, + ), + "Tuple": ({"clauses", "operator"}, {"clauses", "types"}), + "TypeClause": ({"type"}, {"type_"}), + "TypeCoerce": ({"type", "clause"}, {"type_", "expression"}), + "UnaryExpression": ( + {"modifier", "element", "operator"}, + {"operator", "wraps_column_expression"} + | {"type_", "modifier", "element"}, + ), + "Values": ( + {"_column_args", "literal_binds", "name", "_data"}, + {"columns", "name", "literal_binds"}, + ), + "_FrameClause": ( + {"upper_integer_bind", "upper_type"} + | {"lower_type", "lower_integer_bind"}, + {"range_"}, + ), + "_MemoizedSelectEntities": ( + {"_with_options", "_raw_columns", "_setup_joins"}, + {"args"}, + ), + "next_value": ({"sequence"}, {"seq"}), + } + + @testing.combinations( + *all_hascachekey_subclasses( + ignore_subclasses=[Annotated, NoInit, SingletonConstant] + ) + ) + def test_init_args_in_traversal(self, cls: type): + sig = signature(cls.__init__) + init_args = set() + for p in sig.parameters.values(): + if ( + p.name == "self" + or p.name.startswith("_") + or p.kind in (p.VAR_KEYWORD,) + ): + continue + init_args.add(p.name) + + names = {n for n, _ in cls.__dict__["_traverse_internals"]} + if cls.__name__ in self.custom_init: + traverse, inits = self.custom_init[cls.__name__] + eq_(names, traverse) + eq_(init_args, inits) + else: + is_true(names.issuperset(init_args), f"{names} : {init_args}") + + class CompareAndCopyTest(CoreFixtures, fixtures.TestBase): @classmethod def setup_test_class(cls): @@ -1385,21 +1654,16 @@ def test_all_present(self): also included in the fixtures above. """ - need = { + need = set( cls - for cls in class_hierarchy(ClauseElement) - if issubclass(cls, (ColumnElement, Selectable, LambdaElement)) - and ( - "__init__" in cls.__dict__ - or issubclass(cls, AliasedReturnsRows) + for cls in all_hascachekey_subclasses( + ignore_subclasses=[Annotated, NoInit, SingletonConstant] ) - and not issubclass(cls, (Annotated, elements._OverrideBinds)) - and cls.__module__.startswith("sqlalchemy.") - and "orm" not in cls.__module__ + if "orm" not in cls.__module__ and "compiler" not in cls.__module__ - and "crud" not in cls.__module__ - and "dialects" not in cls.__module__ # TODO: dialects? - }.difference({ColumnElement, UnaryExpression}) + and "dialects" not in cls.__module__ + and issubclass(cls, (ColumnElement, Selectable, LambdaElement)) + ) for fixture in self.fixtures + self.dont_compare_values_fixtures: case_a = fixture() From 72c9889a9cc8019bb95419c62969ebc7897f3ba5 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 29 Jul 2024 23:52:04 +0200 Subject: [PATCH 266/544] Fixed compilation of bitwise operators on oracle and sqlite. Implemented bitwise operators for Oracle which was previously non-functional due to a non-standard syntax used by this database. 
Oracle's support for bitwise "or" and "xor" starts with server version 21. Additionally repaired the implementation of "xor" for SQLite. As part of this change, the dialect compliance test suite has been enhanced to include support for server-side bitwise tests; third party dialect authors should refer to new "supports_bitwise" methods in the requirements.py file to enable these tests. Fixes: #11663 Change-Id: I41040bd67992b6c89ed3592edca8965d5d59be9e (cherry picked from commit ce60f93a68f312c7401802820dd17f5d91f73a2c) --- doc/build/changelog/unreleased_20/11663.rst | 16 ++++++ lib/sqlalchemy/dialects/oracle/base.py | 25 +++++++++ lib/sqlalchemy/dialects/sqlite/base.py | 7 +++ lib/sqlalchemy/testing/requirements.py | 25 +++++++++ lib/sqlalchemy/testing/suite/test_select.py | 60 +++++++++++++++++++++ test/requirements.py | 25 +++++++++ 6 files changed, 158 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11663.rst diff --git a/doc/build/changelog/unreleased_20/11663.rst b/doc/build/changelog/unreleased_20/11663.rst new file mode 100644 index 00000000000..599cd744bf7 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11663.rst @@ -0,0 +1,16 @@ +.. change:: + :tags: bug, oracle, sqlite + :tickets: 11663 + + Implemented bitwise operators for Oracle which was previously + non-functional due to a non-standard syntax used by this database. + Oracle's support for bitwise "or" and "xor" starts with server version 21. + Additionally repaired the implementation of "xor" for SQLite. + + As part of this change, the dialect compliance test suite has been enhanced + to include support for server-side bitwise tests; third party dialect + authors should refer to new "supports_bitwise" methods in the + requirements.py file to enable these tests. 
+ + + diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index a92bb5f844c..94cadd0c2ea 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -1244,6 +1244,31 @@ def visit_regexp_replace_op_binary(self, binary, operator, **kw): def visit_aggregate_strings_func(self, fn, **kw): return "LISTAGG%s" % self.function_argspec(fn, **kw) + def _visit_bitwise(self, binary, fn_name, custom_right=None, **kw): + left = self.process(binary.left, **kw) + right = self.process( + custom_right if custom_right is not None else binary.right, **kw + ) + return f"{fn_name}({left}, {right})" + + def visit_bitwise_xor_op_binary(self, binary, operator, **kw): + return self._visit_bitwise(binary, "BITXOR", **kw) + + def visit_bitwise_or_op_binary(self, binary, operator, **kw): + return self._visit_bitwise(binary, "BITOR", **kw) + + def visit_bitwise_and_op_binary(self, binary, operator, **kw): + return self._visit_bitwise(binary, "BITAND", **kw) + + def visit_bitwise_rshift_op_binary(self, binary, operator, **kw): + raise exc.CompileError("Cannot compile bitwise_rshift in oracle") + + def visit_bitwise_lshift_op_binary(self, binary, operator, **kw): + raise exc.CompileError("Cannot compile bitwise_lshift in oracle") + + def visit_bitwise_not_op_unary_operator(self, element, operator, **kw): + raise exc.CompileError("Cannot compile bitwise_not in oracle") + class OracleDDLCompiler(compiler.DDLCompiler): def define_constraint_cascades(self, constraint): diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 8e3f7a560e0..04e84a68d2e 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -1528,6 +1528,13 @@ def visit_on_conflict_do_update(self, on_conflict, **kw): return "ON CONFLICT %s DO UPDATE SET %s" % (target_text, action_text) + def visit_bitwise_xor_op_binary(self, binary, operator, **kw): + # sqlite has no xor. Use "a XOR b" = "(a | b) - (a & b)". 
+ kw["eager_grouping"] = True + or_ = self._generate_generic_binary(binary, " | ", **kw) + and_ = self._generate_generic_binary(binary, " & ", **kw) + return f"({or_} - {and_})" + class SQLiteDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index 31aac741d48..273e5acab91 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -1781,3 +1781,28 @@ def materialized_views(self): def materialized_views_reflect_pk(self): """Target database reflect MATERIALIZED VIEWs pks.""" return exclusions.closed() + + @property + def supports_bitwise_or(self): + """Target database supports bitwise or""" + return exclusions.closed() + + @property + def supports_bitwise_and(self): + """Target database supports bitwise and""" + return exclusions.closed() + + @property + def supports_bitwise_not(self): + """Target database supports bitwise not""" + return exclusions.closed() + + @property + def supports_bitwise_xor(self): + """Target database supports bitwise xor""" + return exclusions.closed() + + @property + def supports_bitwise_shift(self): + """Target database supports bitwise left or right shift""" + return exclusions.closed() diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py index c31613fcf58..8e1ae79b220 100644 --- a/lib/sqlalchemy/testing/suite/test_select.py +++ b/lib/sqlalchemy/testing/suite/test_select.py @@ -1937,3 +1937,63 @@ def test_window_rows_between(self, connection): ).all() eq_(rows, [(i,) for i in range(5, 250, 5)]) + + +class BitwiseTest(fixtures.TablesTest): + __backend__ = True + run_inserts = run_deletes = "once" + + inserted_data = [{"a": i, "b": i + 1} for i in range(10)] + + @classmethod + def define_tables(cls, metadata): + Table("bitwise", metadata, Column("a", Integer), Column("b", Integer)) + + @classmethod + def insert_data(cls, connection): + connection.execute(cls.tables.bitwise.insert(), cls.inserted_data) + + @testing.combinations( + ( + lambda a: a.bitwise_xor(5), + [i for i in range(10) if i != 5], + testing.requires.supports_bitwise_xor, + ), + ( + lambda a: a.bitwise_or(1), + list(range(10)), + testing.requires.supports_bitwise_or, + ), + ( + lambda a: a.bitwise_and(4), + list(range(4, 8)), + testing.requires.supports_bitwise_and, + ), + ( + lambda a: (a - 2).bitwise_not(), + [0], + testing.requires.supports_bitwise_not, + ), + ( + lambda a: a.bitwise_lshift(1), + list(range(1, 10)), + testing.requires.supports_bitwise_shift, + ), + ( + lambda a: a.bitwise_rshift(2), + list(range(4, 10)), + testing.requires.supports_bitwise_shift, + ), + argnames="case, expected", + ) + def test_bitwise(self, case, expected, connection): + tbl = self.tables.bitwise + + a = tbl.c.a + + op = testing.resolve_lambda(case, a=a) + + stmt = select(tbl).where(op > 0).order_by(a) + + res = connection.execute(stmt).mappings().all() + eq_(res, [self.inserted_data[i] for i in expected]) diff --git a/test/requirements.py b/test/requirements.py index 0dde542372a..1b6b43ed27e 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -2075,3 +2075,28 @@ def rowcount_always_cached_on_insert(self): statement. 
""" return only_on(["mssql"]) + + @property + def supports_bitwise_and(self): + """Target database supports bitwise and""" + return exclusions.open() + + @property + def supports_bitwise_or(self): + """Target database supports bitwise or""" + return fails_on(["oracle<21"]) + + @property + def supports_bitwise_not(self): + """Target database supports bitwise not""" + return fails_on(["oracle", "mysql", "mariadb"]) + + @property + def supports_bitwise_xor(self): + """Target database supports bitwise xor""" + return fails_on(["oracle<21"]) + + @property + def supports_bitwise_shift(self): + """Target database supports bitwise left or right shift""" + return fails_on(["oracle"]) From c698875c7aa419734b4bf8803d0b9149ea6edea4 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 10 Jun 2024 21:20:56 +0200 Subject: [PATCH 267/544] Add support for two-phase commit in oracledb. Implemented two-phase transactions for the oracledb dialect. Historically, this feature never worked with the cx_Oracle dialect, however recent improvements to the oracledb successor now allow this to be possible. The two phase transaction API is available at the Core level via the :meth:`_engine.Connection.begin_twophase` method. As part of this change, added new facility for testing that allows a test to skip if a certain step takes too long, allowing for a separate cleanup step. this is needed as oracle tpc wont allow commit recovery if transaction is older than about 1 second, could not find any docs on how to increase this timeout. Fixed an execute call in the PostgreSQL dialect's provisioning that drops old tpc transactions which was non-working, which indicates that we've apparently never had any PG tpc transactions needing to be cleaned up in CI for some years now, so that's good Fixes: #11480 Change-Id: If3ad19cc29999e70f07f767b88afd330f6e5a4be (cherry picked from commit a9c0487c024410d446b8be3f528e051318dd150e) --- doc/build/changelog/unreleased_20/11480.rst | 9 +++ lib/sqlalchemy/dialects/oracle/base.py | 3 +- lib/sqlalchemy/dialects/oracle/cx_oracle.py | 19 ++--- lib/sqlalchemy/dialects/oracle/oracledb.py | 76 ++++++++++++++++++- .../dialects/postgresql/provision.py | 2 +- lib/sqlalchemy/testing/__init__.py | 1 + lib/sqlalchemy/testing/util.py | 18 +++++ test/engine/test_transaction.py | 37 ++++++--- test/requirements.py | 59 ++++++-------- 9 files changed, 157 insertions(+), 67 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11480.rst diff --git a/doc/build/changelog/unreleased_20/11480.rst b/doc/build/changelog/unreleased_20/11480.rst new file mode 100644 index 00000000000..7a653a6b69f --- /dev/null +++ b/doc/build/changelog/unreleased_20/11480.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: usecase, oracle + :tickets: 11480 + + Implemented two-phase transactions for the oracledb dialect. Historically, + this feature never worked with the cx_Oracle dialect, however recent + improvements to the oracledb successor now allow this to be possible. The + two phase transaction API is available at the Core level via the + :meth:`_engine.Connection.begin_twophase` method. diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index a92bb5f844c..db0811f1cea 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -326,7 +326,6 @@ on parity with other backends. - ON UPDATE CASCADE ----------------- @@ -467,7 +466,7 @@ .. 
_oracle_table_options: Oracle Table Options -------------------------- +-------------------- The CREATE TABLE phrase supports the following options with Oracle in conjunction with the :class:`_schema.Table` construct: diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index 93462246647..873d943371d 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -377,14 +377,12 @@ def _remove_clob(inputsizes, cursor, statement, parameters, context): ``auto_convert_lobs=False`` may be passed to :func:`_sa.create_engine`, which takes place only engine-wide. -Two Phase Transactions Not Supported -------------------------------------- +Two Phase Transactions Not Supported (use oracledb) +--------------------------------------------------- -Two phase transactions are **not supported** under cx_Oracle due to poor -driver support. As of cx_Oracle 6.0b1, the interface for -two phase transactions has been changed to be more of a direct pass-through -to the underlying OCI layer with less automation. The additional logic -to support this system is not implemented in SQLAlchemy. +Two phase transactions are **not supported** under cx_Oracle due to poor driver +support. The newer :ref:`oracledb` dialect however **does** support two phase +transactions and should be preferred. .. _cx_oracle_numeric: @@ -1423,13 +1421,6 @@ def is_disconnect(self, e, connection, cursor): return False def create_xid(self): - """create a two-phase transaction ID. - - this id will be passed to do_begin_twophase(), do_rollback_twophase(), - do_commit_twophase(). its format is unspecified. - - """ - id_ = random.randint(0, 2**128) return (0x1234, "%032x" % id_, "%032x" % 9) diff --git a/lib/sqlalchemy/dialects/oracle/oracledb.py b/lib/sqlalchemy/dialects/oracle/oracledb.py index 9cdec3b55ae..1f5a19b8761 100644 --- a/lib/sqlalchemy/dialects/oracle/oracledb.py +++ b/lib/sqlalchemy/dialects/oracle/oracledb.py @@ -13,6 +13,9 @@ :connectstring: oracle+oracledb://user:pass@hostname:port[/dbname][?service_name=[&key=value&key=value...]] :url: https://oracle.github.io/python-oracledb/ +Description +----------- + python-oracledb is released by Oracle to supersede the cx_Oracle driver. It is fully compatible with cx_Oracle and features both a "thin" client mode that requires no dependencies, as well as a "thick" mode that uses @@ -21,7 +24,7 @@ .. seealso:: :ref:`cx_oracle` - all of cx_Oracle's notes apply to the oracledb driver - as well. + as well, with the exception that oracledb supports two phase transactions. The SQLAlchemy ``oracledb`` dialect provides both a sync and an async implementation under the same dialect name. The proper version is @@ -70,6 +73,16 @@ https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#oracledb.init_oracle_client +Two Phase Transactions Supported +-------------------------------- + +Two phase transactions are fully supported under oracledb. Starting with +oracledb 2.3 two phase transactions are supported also in thin mode. APIs +for two phase transactions are provided at the Core level via +:meth:`_engine.Connection.begin_twophase` and :paramref:`_orm.Session.twophase` +for transparent ORM use. + +.. versionchanged:: 2.0.32 added support for two phase transactions .. versionadded:: 2.0.0 added support for oracledb driver. 
@@ -155,6 +168,49 @@ def _load_version(self, dbapi_module): f"oracledb version {self._min_version} and above are supported" ) + def do_begin_twophase(self, connection, xid): + conn_xis = connection.connection.xid(*xid) + connection.connection.tpc_begin(conn_xis) + connection.connection.info["oracledb_xid"] = conn_xis + + def do_prepare_twophase(self, connection, xid): + should_commit = connection.connection.tpc_prepare() + connection.info["oracledb_should_commit"] = should_commit + + def do_rollback_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + if recover: + conn_xid = connection.connection.xid(*xid) + else: + conn_xid = None + connection.connection.tpc_rollback(conn_xid) + + def do_commit_twophase( + self, connection, xid, is_prepared=True, recover=False + ): + conn_xid = None + if not is_prepared: + should_commit = connection.connection.tpc_prepare() + elif recover: + conn_xid = connection.connection.xid(*xid) + should_commit = True + else: + should_commit = connection.info["oracledb_should_commit"] + if should_commit: + connection.connection.tpc_commit(conn_xid) + + def do_recover_twophase(self, connection): + return [ + # oracledb seems to return bytes + ( + fi, + gti.decode() if isinstance(gti, bytes) else gti, + bq.decode() if isinstance(bq, bytes) else bq, + ) + for fi, gti, bq in connection.connection.tpc_recover() + ] + class AsyncAdapt_oracledb_cursor(AsyncAdapt_dbapi_cursor): _cursor: AsyncCursor @@ -251,6 +307,24 @@ def stmtcachesize(self, value): def cursor(self): return AsyncAdapt_oracledb_cursor(self) + def xid(self, *args: Any, **kwargs: Any) -> Any: + return self._connection.xid(*args, **kwargs) + + def tpc_begin(self, *args: Any, **kwargs: Any) -> Any: + return self.await_(self._connection.tpc_begin(*args, **kwargs)) + + def tpc_commit(self, *args: Any, **kwargs: Any) -> Any: + return self.await_(self._connection.tpc_commit(*args, **kwargs)) + + def tpc_prepare(self, *args: Any, **kwargs: Any) -> Any: + return self.await_(self._connection.tpc_prepare(*args, **kwargs)) + + def tpc_recover(self, *args: Any, **kwargs: Any) -> Any: + return self.await_(self._connection.tpc_recover(*args, **kwargs)) + + def tpc_rollback(self, *args: Any, **kwargs: Any) -> Any: + return self.await_(self._connection.tpc_rollback(*args, **kwargs)) + class AsyncAdaptFallback_oracledb_connection( AsyncAdaptFallback_dbapi_connection, AsyncAdapt_oracledb_connection diff --git a/lib/sqlalchemy/dialects/postgresql/provision.py b/lib/sqlalchemy/dialects/postgresql/provision.py index a87bb932066..38573c77ad6 100644 --- a/lib/sqlalchemy/dialects/postgresql/provision.py +++ b/lib/sqlalchemy/dialects/postgresql/provision.py @@ -97,7 +97,7 @@ def drop_all_schema_objects_pre_tables(cfg, eng): for xid in conn.exec_driver_sql( "select gid from pg_prepared_xacts" ).scalars(): - conn.execute("ROLLBACK PREPARED '%s'" % xid) + conn.exec_driver_sql("ROLLBACK PREPARED '%s'" % xid) @drop_all_schema_objects_post_tables.for_db("postgresql") diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py index d3a6f32c716..7fa361c9b92 100644 --- a/lib/sqlalchemy/testing/__init__.py +++ b/lib/sqlalchemy/testing/__init__.py @@ -83,6 +83,7 @@ from .util import resolve_lambda from .util import rowset from .util import run_as_contextmanager +from .util import skip_if_timeout from .util import teardown_events from .warnings import assert_warnings from .warnings import warn_test_suite diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py index 
a6ce6ca3cc2..f6fad11d0e2 100644 --- a/lib/sqlalchemy/testing/util.py +++ b/lib/sqlalchemy/testing/util.py @@ -10,13 +10,16 @@ from __future__ import annotations from collections import deque +import contextlib import decimal import gc from itertools import chain import random import sys from sys import getsizeof +import time import types +from typing import Any from . import config from . import mock @@ -517,3 +520,18 @@ def count_cache_key_tuples(tup): if elem: stack = list(elem) + [sentinel] + stack return num_elements + + +@contextlib.contextmanager +def skip_if_timeout(seconds: float, cleanup: Any = None): + + now = time.time() + yield + sec = time.time() - now + if sec > seconds: + try: + cleanup() + finally: + config.skip_test( + f"test took too long ({sec:.4f} seconds > {seconds})" + ) diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index 68650d6d2bc..9fe040c3a05 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -473,7 +473,8 @@ def test_two_phase_transaction(self, local_connection): @testing.requires.two_phase_transactions @testing.requires.two_phase_recovery - def test_two_phase_recover(self): + @testing.variation("commit", [True, False]) + def test_two_phase_recover(self, commit): users = self.tables.users # 2020, still can't get this to work w/ modern MySQL or MariaDB. @@ -501,17 +502,29 @@ def test_two_phase_recover(self): [], ) # recover_twophase needs to be run in a new transaction - with testing.db.connect() as connection2: - recoverables = connection2.recover_twophase() - assert transaction.xid in recoverables - connection2.commit_prepared(transaction.xid, recover=True) - - eq_( - connection2.execute( - select(users.c.user_id).order_by(users.c.user_id) - ).fetchall(), - [(1,)], - ) + with testing.db.connect() as connection3: + # oracle transactions can't be recovered for commit after... + # about 1 second? 
OK + with testing.skip_if_timeout( + 0.75, + cleanup=( + lambda: connection3.rollback_prepared( + transaction.xid, recover=True + ) + ), + ): + recoverables = connection3.recover_twophase() + assert transaction.xid in recoverables + + if commit: + connection3.commit_prepared(transaction.xid, recover=True) + res = [(1,)] + else: + connection3.rollback_prepared(transaction.xid, recover=True) + res = [] + + stmt = select(users.c.user_id).order_by(users.c.user_id) + eq_(connection3.execute(stmt).fetchall(), res) @testing.requires.two_phase_transactions def test_multiple_two_phase(self, local_connection): diff --git a/test/requirements.py b/test/requirements.py index 0dde542372a..21068fb787c 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -858,32 +858,27 @@ def pg_prepared_transaction(config): else: return num > 0 - return ( - skip_if( - [ - no_support( - "mssql", "two-phase xact not supported by drivers" - ), - no_support( - "sqlite", "two-phase xact not supported by database" - ), - # in Ia3cbbf56d4882fcc7980f90519412f1711fae74d - # we are evaluating which modern MySQL / MariaDB versions - # can handle two-phase testing without too many problems - # no_support( - # "mysql", - # "recent MySQL community editions have too many " - # "issues (late 2016), disabling for now", - # ), - NotPredicate( - LambdaPredicate( - pg_prepared_transaction, - "max_prepared_transactions not available or zero", - ) - ), - ] - ) - + self.skip_on_oracledb_thin + return skip_if( + [ + no_support("mssql", "two-phase xact not supported by drivers"), + no_support( + "sqlite", "two-phase xact not supported by database" + ), + # in Ia3cbbf56d4882fcc7980f90519412f1711fae74d + # we are evaluating which modern MySQL / MariaDB versions + # can handle two-phase testing without too many problems + # no_support( + # "mysql", + # "recent MySQL community editions have too many " + # "issues (late 2016), disabling for now", + # ), + NotPredicate( + LambdaPredicate( + pg_prepared_transaction, + "max_prepared_transactions not available or zero", + ) + ), + ] ) @property @@ -893,7 +888,7 @@ def two_phase_recovery(self): ["mysql", "mariadb"], "still can't get recover to work w/ MariaDB / MySQL", ) - + skip_if("oracle", "recovery not functional") + + skip_if("oracle+cx_oracle", "recovery not functional") ) @property @@ -1877,16 +1872,6 @@ def oracle5x(self): and config.db.dialect.cx_oracle_ver < (6,) ) - @property - def skip_on_oracledb_thin(self): - def go(config): - if against(config, "oracle+oracledb"): - with config.db.connect() as conn: - return config.db.dialect.is_thin_mode(conn) - return False - - return skip_if(go) - @property def computed_columns(self): return skip_if(["postgresql < 12", "sqlite < 3.31", "mysql < 5.7"]) From 30531c50d0167e36a5b2db91eee90e12e83d0f01 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 1 Aug 2024 09:49:55 -0400 Subject: [PATCH 268/544] mutate lists in place for return_defaults=True Fixed regression from version 1.4 in :meth:`_orm.Session.bulk_insert_mappings` where using the :paramref:`_orm.Session.bulk_insert_mappings.return_defaults` parameter would not populate the passed in dictionaries with newly generated primary key values. 
Fixes: #11661 Change-Id: I331d81a5b04456f107eb868f882d67773b3eec38 (cherry picked from commit 7001429a7561b3c55dd52b96dfa419004e535743) --- doc/build/changelog/unreleased_20/11661.rst | 10 +++ lib/sqlalchemy/orm/bulk_persistence.py | 24 ++++++- test/orm/dml/test_bulk.py | 74 ++++++++++++++++----- 3 files changed, 89 insertions(+), 19 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11661.rst diff --git a/doc/build/changelog/unreleased_20/11661.rst b/doc/build/changelog/unreleased_20/11661.rst new file mode 100644 index 00000000000..35985d8bbaa --- /dev/null +++ b/doc/build/changelog/unreleased_20/11661.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, orm, regression + :tickets: 11661 + + Fixed regression from version 1.4 in + :meth:`_orm.Session.bulk_insert_mappings` where using the + :paramref:`_orm.Session.bulk_insert_mappings.return_defaults` parameter + would not populate the passed in dictionaries with newly generated primary + key values. + diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py index ff85650436e..d07cde85cd1 100644 --- a/lib/sqlalchemy/orm/bulk_persistence.py +++ b/lib/sqlalchemy/orm/bulk_persistence.py @@ -119,13 +119,35 @@ def _bulk_insert( ) if isstates: + if TYPE_CHECKING: + mappings = cast(Iterable[InstanceState[_O]], mappings) + if return_defaults: + # list of states allows us to attach .key for return_defaults case states = [(state, state.dict) for state in mappings] mappings = [dict_ for (state, dict_) in states] else: mappings = [state.dict for state in mappings] else: - mappings = [dict(m) for m in mappings] + if TYPE_CHECKING: + mappings = cast(Iterable[Dict[str, Any]], mappings) + + if return_defaults: + # use dictionaries given, so that newly populated defaults + # can be delivered back to the caller (see #11661). This is **not** + # compatible with other use cases such as a session-executed + # insert() construct, as this will confuse the case of + # insert-per-subclass for joined inheritance cases (see + # test_bulk_statements.py::BulkDMLReturningJoinedInhTest). 
+ # + # So in this conditional, we have **only** called + # session.bulk_insert_mappings() which does not have this + # requirement + mappings = list(mappings) + else: + # for all other cases we need to establish a local dictionary + # so that the incoming dictionaries aren't mutated + mappings = [dict(m) for m in mappings] _expand_composites(mapper, mappings) connection = session_transaction.connection(base_mapper) diff --git a/test/orm/dml/test_bulk.py b/test/orm/dml/test_bulk.py index 4d24a52eceb..3159c139da2 100644 --- a/test/orm/dml/test_bulk.py +++ b/test/orm/dml/test_bulk.py @@ -90,8 +90,14 @@ def setup_mappers(cls): cls.mapper_registry.map_imperatively(Address, a) cls.mapper_registry.map_imperatively(Order, o) - @testing.combinations("save_objects", "insert_mappings", "insert_stmt") - def test_bulk_save_return_defaults(self, statement_type): + @testing.combinations( + "save_objects", + "insert_mappings", + "insert_stmt", + argnames="statement_type", + ) + @testing.variation("return_defaults", [True, False]) + def test_bulk_save_return_defaults(self, statement_type, return_defaults): (User,) = self.classes("User") s = fixture_session() @@ -102,12 +108,14 @@ def test_bulk_save_return_defaults(self, statement_type): returning_users_id = " RETURNING users.id" with self.sql_execution_asserter() as asserter: - s.bulk_save_objects(objects, return_defaults=True) + s.bulk_save_objects(objects, return_defaults=return_defaults) elif statement_type == "insert_mappings": data = [dict(name="u1"), dict(name="u2"), dict(name="u3")] returning_users_id = " RETURNING users.id" with self.sql_execution_asserter() as asserter: - s.bulk_insert_mappings(User, data, return_defaults=True) + s.bulk_insert_mappings( + User, data, return_defaults=return_defaults + ) elif statement_type == "insert_stmt": data = [dict(name="u1"), dict(name="u2"), dict(name="u3")] @@ -120,7 +128,10 @@ def test_bulk_save_return_defaults(self, statement_type): asserter.assert_( Conditional( - testing.db.dialect.insert_executemany_returning + ( + return_defaults + and testing.db.dialect.insert_executemany_returning + ) or statement_type == "insert_stmt", [ CompiledSQL( @@ -130,23 +141,50 @@ def test_bulk_save_return_defaults(self, statement_type): ), ], [ - CompiledSQL( - "INSERT INTO users (name) VALUES (:name)", - [{"name": "u1"}], - ), - CompiledSQL( - "INSERT INTO users (name) VALUES (:name)", - [{"name": "u2"}], - ), - CompiledSQL( - "INSERT INTO users (name) VALUES (:name)", - [{"name": "u3"}], - ), + Conditional( + return_defaults, + [ + CompiledSQL( + "INSERT INTO users (name) VALUES (:name)", + [{"name": "u1"}], + ), + CompiledSQL( + "INSERT INTO users (name) VALUES (:name)", + [{"name": "u2"}], + ), + CompiledSQL( + "INSERT INTO users (name) VALUES (:name)", + [{"name": "u3"}], + ), + ], + [ + CompiledSQL( + "INSERT INTO users (name) VALUES (:name)", + [ + {"name": "u1"}, + {"name": "u2"}, + {"name": "u3"}, + ], + ), + ], + ) ], ) ) + if statement_type == "save_objects": - eq_(objects[0].__dict__["id"], 1) + if return_defaults: + eq_(objects[0].__dict__["id"], 1) + eq_(inspect(objects[0]).key, (User, (1,), None)) + else: + assert "id" not in objects[0].__dict__ + eq_(inspect(objects[0]).key, None) + elif statement_type == "insert_mappings": + # test for #11661 + if return_defaults: + eq_(data[0]["id"], 1) + else: + assert "id" not in data[0] def test_bulk_save_objects_defaults_key(self): User = self.classes.User From 7c9dd6bd3359581f13184e3134c0d285fb466921 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 1 Aug 2024 
15:51:00 -0400 Subject: [PATCH 269/544] escape percents for mysql enum and add suite tests Fixed issue in MySQL dialect where ENUM values that contained percent signs were not properly escaped for the driver. Fixes: #11479 Change-Id: I40d9aba619618603d3abb466f84a793d152b6788 (cherry picked from commit 2afb138d310da41d17f9e3dc9fa9339b52e7a9a4) --- doc/build/changelog/unreleased_20/11479.rst | 7 +++ lib/sqlalchemy/dialects/mysql/base.py | 2 + lib/sqlalchemy/testing/suite/test_types.py | 70 +++++++++++++++++++++ 3 files changed, 79 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11479.rst diff --git a/doc/build/changelog/unreleased_20/11479.rst b/doc/build/changelog/unreleased_20/11479.rst new file mode 100644 index 00000000000..fccaaf80264 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11479.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, mysql + :tickets: 11479 + + Fixed issue in MySQL dialect where ENUM values that contained percent signs + were not properly escaped for the driver. + diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index dacbb7afa27..603b5ff7c8e 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -2380,6 +2380,8 @@ def visit_LONGBLOB(self, type_, **kw): def _visit_enumerated_values(self, name, type_, enumerated_values): quoted_enums = [] for e in enumerated_values: + if self.dialect.identifier_preparer._double_percents: + e = e.replace("%", "%%") quoted_enums.append("'%s'" % e.replace("'", "''")) return self._extend_string( type_, {}, "%s(%s)" % (name, ",".join(quoted_enums)) diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py index 4a7c1f199e1..d4c5a2250dc 100644 --- a/lib/sqlalchemy/testing/suite/test_types.py +++ b/lib/sqlalchemy/testing/suite/test_types.py @@ -32,6 +32,7 @@ from ... import cast from ... import Date from ... import DateTime +from ... import Enum from ... import Float from ... import Integer from ... 
import Interval @@ -1918,6 +1919,74 @@ def test_string_cast_crit_against_string_basic(self): ) +class EnumTest(_LiteralRoundTripFixture, fixtures.TablesTest): + __backend__ = True + + enum_values = "a", "b", "a%", "b%percent", "réveillé" + + datatype = Enum(*enum_values, name="myenum") + + @classmethod + def define_tables(cls, metadata): + Table( + "enum_table", + metadata, + Column("id", Integer, primary_key=True), + Column("enum_data", cls.datatype), + ) + + @testing.combinations(*enum_values, argnames="data") + def test_round_trip(self, data, connection): + connection.execute( + self.tables.enum_table.insert(), {"id": 1, "enum_data": data} + ) + + eq_( + connection.scalar( + select(self.tables.enum_table.c.enum_data).where( + self.tables.enum_table.c.id == 1 + ) + ), + data, + ) + + def test_round_trip_executemany(self, connection): + connection.execute( + self.tables.enum_table.insert(), + [ + {"id": 1, "enum_data": "b%percent"}, + {"id": 2, "enum_data": "réveillé"}, + {"id": 3, "enum_data": "b"}, + {"id": 4, "enum_data": "a%"}, + ], + ) + + eq_( + connection.scalars( + select(self.tables.enum_table.c.enum_data).order_by( + self.tables.enum_table.c.id + ) + ).all(), + ["b%percent", "réveillé", "b", "a%"], + ) + + @testing.requires.insert_executemany_returning + def test_round_trip_executemany_returning(self, connection): + result = connection.execute( + self.tables.enum_table.insert().returning( + self.tables.enum_table.c.enum_data + ), + [ + {"id": 1, "enum_data": "b%percent"}, + {"id": 2, "enum_data": "réveillé"}, + {"id": 3, "enum_data": "b"}, + {"id": 4, "enum_data": "a%"}, + ], + ) + + eq_(result.scalars().all(), ["b%percent", "réveillé", "b", "a%"]) + + class UuidTest(_LiteralRoundTripFixture, fixtures.TablesTest): __backend__ = True @@ -2066,6 +2135,7 @@ class NativeUUIDTest(UuidTest): "DateHistoricTest", "StringTest", "BooleanTest", + "EnumTest", "UuidTest", "NativeUUIDTest", ) From ce13550427914228825442154ccf62b1a51b5fe0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 1 Aug 2024 16:41:45 -0400 Subject: [PATCH 270/544] add check for pre-existing history records Fixed issue in history_meta example where the "version" column in the versioned table needs to default to the most recent version number in the history table on INSERT, to suit the use case of a table where rows are deleted, and can then be replaced by new rows that re-use the same primary key identity. This fix adds an additonal SELECT query per INSERT in the main table, which may be inefficient; for cases where primary keys are not re-used, the default function may be omitted. Patch courtesy Philipp H. v. Loewenfeld. Fixes: #10267 Change-Id: I6b0737a7e871763f95fd636c9ad98b80f3b5808e (cherry picked from commit 6a59eecfa891db84033f5d0c88451b344e5b6f0c) --- doc/build/changelog/unreleased_20/10267.rst | 13 ++++ examples/versioned_history/__init__.py | 6 +- examples/versioned_history/history_meta.py | 36 ++++++++- examples/versioned_history/test_versioning.py | 73 +++++++++++++++++++ 4 files changed, 124 insertions(+), 4 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10267.rst diff --git a/doc/build/changelog/unreleased_20/10267.rst b/doc/build/changelog/unreleased_20/10267.rst new file mode 100644 index 00000000000..cfbf04f6dbd --- /dev/null +++ b/doc/build/changelog/unreleased_20/10267.rst @@ -0,0 +1,13 @@ +.. 
change:: + :tags: bug, examples + :tickets: 10267 + + Fixed issue in history_meta example where the "version" column in the + versioned table needs to default to the most recent version number in the + history table on INSERT, to suit the use case of a table where rows are + deleted, and can then be replaced by new rows that re-use the same primary + key identity. This fix adds an additonal SELECT query per INSERT in the + main table, which may be inefficient; for cases where primary keys are not + re-used, the default function may be omitted. Patch courtesy Philipp H. + v. Loewenfeld. + diff --git a/examples/versioned_history/__init__.py b/examples/versioned_history/__init__.py index 0593881e2de..2fa281b8dd1 100644 --- a/examples/versioned_history/__init__.py +++ b/examples/versioned_history/__init__.py @@ -6,10 +6,10 @@ class which represents historical versions of the target object. Compare to the :ref:`examples_versioned_rows` examples which write updates as new rows in the same table, without using a separate history table. -Usage is illustrated via a unit test module ``test_versioning.py``, which can -be run like any other module, using ``unittest`` internally:: +Usage is illustrated via a unit test module ``test_versioning.py``, which is +run using SQLAlchemy's internal pytest plugin:: - python -m examples.versioned_history.test_versioning + pytest test/base/test_examples.py A fragment of example usage, using declarative:: diff --git a/examples/versioned_history/history_meta.py b/examples/versioned_history/history_meta.py index e4c102c0ad0..88fb16a0049 100644 --- a/examples/versioned_history/history_meta.py +++ b/examples/versioned_history/history_meta.py @@ -2,13 +2,16 @@ import datetime +from sqlalchemy import and_ from sqlalchemy import Column from sqlalchemy import DateTime from sqlalchemy import event from sqlalchemy import ForeignKeyConstraint +from sqlalchemy import func from sqlalchemy import inspect from sqlalchemy import Integer from sqlalchemy import PrimaryKeyConstraint +from sqlalchemy import select from sqlalchemy import util from sqlalchemy.orm import attributes from sqlalchemy.orm import object_mapper @@ -148,8 +151,39 @@ def _history_mapper(local_mapper): super_history_table.append_column(col) if not super_mapper: + + def default_version_from_history(context): + # Set default value of version column to the maximum of the + # version in history columns already present +1 + # Otherwise re-appearance of deleted rows would cause an error + # with the next update + current_parameters = context.get_current_parameters() + return context.connection.scalar( + select( + func.coalesce(func.max(history_table.c.version), 0) + 1 + ).where( + and_( + *[ + history_table.c[c.name] + == current_parameters.get(c.name, None) + for c in inspect( + local_mapper.local_table + ).primary_key + ] + ) + ) + ) + local_mapper.local_table.append_column( - Column("version", Integer, default=1, nullable=False), + Column( + "version", + Integer, + # if rows are not being deleted from the main table with + # subsequent re-use of primary key, this default can be + # "1" instead of running a query per INSERT + default=default_version_from_history, + nullable=False, + ), replace_existing=True, ) local_mapper.add_property( diff --git a/examples/versioned_history/test_versioning.py b/examples/versioned_history/test_versioning.py index ac122581a4f..b3fe2170904 100644 --- a/examples/versioned_history/test_versioning.py +++ b/examples/versioned_history/test_versioning.py @@ -881,6 +881,79 @@ class 
SomeClass(Versioned, self.Base, ComparableEntity): sc2.name = "sc2 modified" sess.commit() + def test_external_id(self): + class ObjectExternal(Versioned, self.Base, ComparableEntity): + __tablename__ = "externalobjects" + + id1 = Column(String(3), primary_key=True) + id2 = Column(String(3), primary_key=True) + name = Column(String(50)) + + self.create_tables() + sess = self.session + sc = ObjectExternal(id1="aaa", id2="bbb", name="sc1") + sess.add(sc) + sess.commit() + + sc.name = "sc1modified" + sess.commit() + + assert sc.version == 2 + + ObjectExternalHistory = ObjectExternal.__history_mapper__.class_ + + eq_( + sess.query(ObjectExternalHistory).all(), + [ + ObjectExternalHistory( + version=1, id1="aaa", id2="bbb", name="sc1" + ), + ], + ) + + sess.delete(sc) + sess.commit() + + assert sess.query(ObjectExternal).count() == 0 + + eq_( + sess.query(ObjectExternalHistory).all(), + [ + ObjectExternalHistory( + version=1, id1="aaa", id2="bbb", name="sc1" + ), + ObjectExternalHistory( + version=2, id1="aaa", id2="bbb", name="sc1modified" + ), + ], + ) + + sc = ObjectExternal(id1="aaa", id2="bbb", name="sc1reappeared") + sess.add(sc) + sess.commit() + + assert sc.version == 3 + + sc.name = "sc1reappearedmodified" + sess.commit() + + assert sc.version == 4 + + eq_( + sess.query(ObjectExternalHistory).all(), + [ + ObjectExternalHistory( + version=1, id1="aaa", id2="bbb", name="sc1" + ), + ObjectExternalHistory( + version=2, id1="aaa", id2="bbb", name="sc1modified" + ), + ObjectExternalHistory( + version=3, id1="aaa", id2="bbb", name="sc1reappeared" + ), + ], + ) + class TestVersioningNewBase(TestVersioning): def make_base(self): From 34a2164c7c4d57073c20cae864bb51b90ff2d3a5 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 1 Aug 2024 19:15:57 -0400 Subject: [PATCH 271/544] bring oracle timeout thing lower still seeing failures we may very well have to revert this and mark oracle as not supporting recovery Change-Id: I4d48607cb8579dc73c650f5232e4414a408735e2 (cherry picked from commit fc7b758b7bb707d7931d909702aa019bbba98fce) --- test/engine/test_transaction.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index 9fe040c3a05..fb67c7434fe 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -506,7 +506,7 @@ def test_two_phase_recover(self, commit): # oracle transactions can't be recovered for commit after... # about 1 second? OK with testing.skip_if_timeout( - 0.75, + 0.50, cleanup=( lambda: connection3.rollback_prepared( transaction.xid, recover=True From 6a457923b80497cf3deaa5a47a9a834498f4b055 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 1 Aug 2024 15:58:57 -0400 Subject: [PATCH 272/544] skip in eager row processors for enable_eagerloads=False Fixed issue where using the :meth:`_orm.Query.enable_eagerloads` and :meth:`_orm.Query.yield_per` methods at the same time, in order to disable eager loading that's configured on the mapper directly, would be silently ignored, leading to errors or unexpected eager population of attributes. 
Fixes: #10834 Change-Id: I6a20bdedf23f6dd4e98ffb49ad784117fe4afdd3 (cherry picked from commit 0a8bf50422a4c5ce1945aee6d6d37d9467ebf1c1) --- doc/build/changelog/unreleased_20/10834.rst | 8 ++++++++ lib/sqlalchemy/orm/strategies.py | 11 +++++++++++ test/orm/test_query.py | 19 +++++++++++++++++++ 3 files changed, 38 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/10834.rst diff --git a/doc/build/changelog/unreleased_20/10834.rst b/doc/build/changelog/unreleased_20/10834.rst new file mode 100644 index 00000000000..7670f57ad17 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10834.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, orm + :tickets: 10834 + + Fixed issue where using the :meth:`_orm.Query.enable_eagerloads` and + :meth:`_orm.Query.yield_per` methods at the same time, in order to disable + eager loading that's configured on the mapper directly, would be silently + ignored, leading to errors or unexpected eager population of attributes. diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index 00c6fcb6c1a..c6b9a4c7fa9 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -1373,12 +1373,16 @@ def create_row_processor( adapter, populators, ): + if not context.compile_state.compile_options._enable_eagerloads: + return + ( effective_path, run_loader, execution_options, recursion_depth, ) = self._setup_for_recursion(context, path, loadopt, self.join_depth) + if not run_loader: # this will not emit SQL and will only emit for a many-to-one # "use get" load. the "_RELATED" part means it may return @@ -2705,6 +2709,10 @@ def create_row_processor( adapter, populators, ): + + if not context.compile_state.compile_options._enable_eagerloads: + return + if not self.parent.class_manager[self.key].impl.supports_population: raise sa_exc.InvalidRequestError( "'%s' does not support object " @@ -2984,6 +2992,9 @@ def create_row_processor( if not run_loader: return + if not context.compile_state.compile_options._enable_eagerloads: + return + if not self.parent.class_manager[self.key].impl.supports_population: raise sa_exc.InvalidRequestError( "'%s' does not support object " diff --git a/test/orm/test_query.py b/test/orm/test_query.py index ea108c345b0..9dc26bc1e27 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -5537,6 +5537,25 @@ def test_eagerload_opt_disable(self): ) eq_(len(q.all()), 4) + @testing.combinations( + "joined", + "subquery", + "selectin", + "select", + "immediate", + argnames="lazy", + ) + def test_eagerload_config_disable(self, lazy): + self._eagerload_mappings(addresses_lazy=lazy) + + User = self.classes.User + sess = fixture_session() + q = sess.query(User).enable_eagerloads(False).yield_per(1) + objs = q.all() + eq_(len(objs), 4) + for obj in objs: + assert "addresses" not in obj.__dict__ + def test_m2o_joinedload_not_others(self): self._eagerload_mappings(addresses_lazy="joined") Address = self.classes.Address From c3b541c977c8cc67c9930f59fbecfe9ce115ed97 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 2 Aug 2024 09:42:59 -0400 Subject: [PATCH 273/544] changelog updates Change-Id: I3c319c15d883b88a4ceae2ea17d3122fcc90fb1f (cherry picked from commit bae75fe92f9636bafb75461ff0bc556432831e30) --- doc/build/changelog/unreleased_20/11435.rst | 3 --- doc/build/changelog/unreleased_20/11530.rst | 2 +- doc/build/changelog/unreleased_20/mypy1110.rst | 2 +- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/doc/build/changelog/unreleased_20/11435.rst 
b/doc/build/changelog/unreleased_20/11435.rst index 8e9ac23396d..a3f96de18c0 100644 --- a/doc/build/changelog/unreleased_20/11435.rst +++ b/doc/build/changelog/unreleased_20/11435.rst @@ -8,6 +8,3 @@ :func:`_orm.column_mapped_collection`, :func:`_orm.attribute_mapped_collection`. Pull request courtesy Takashi Kajinami. - - - . diff --git a/doc/build/changelog/unreleased_20/11530.rst b/doc/build/changelog/unreleased_20/11530.rst index 30c60cd1524..1ffa7c5d265 100644 --- a/doc/build/changelog/unreleased_20/11530.rst +++ b/doc/build/changelog/unreleased_20/11530.rst @@ -1,5 +1,5 @@ .. change:: - :tags: bug, events + :tags: bug, schema :tickets: 11530 Fixed additional issues in the event system triggered by unpickling of a diff --git a/doc/build/changelog/unreleased_20/mypy1110.rst b/doc/build/changelog/unreleased_20/mypy1110.rst index f722c407f25..7804da4c032 100644 --- a/doc/build/changelog/unreleased_20/mypy1110.rst +++ b/doc/build/changelog/unreleased_20/mypy1110.rst @@ -1,5 +1,5 @@ .. change:: - :tags: bug, mypy + :tags: bug, typing Fixed internal typing issues to establish compatibility with mypy 1.11.0. Note that this does not include issues which have arisen with the From de7727d25cf980e9a215ec73603b9fd469b7d357 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 1 Aug 2024 21:16:20 +0200 Subject: [PATCH 274/544] Added support for server-side cursor in oracledb async dialect. Added API support for server-side cursors for the oracledb async dialect, allowing use of the :meth:`_asyncio.AsyncConnection.stream` and similar stream methods. Fixes: #10820 Change-Id: I861670ccc20a81ec5ee45132b8059fc2a0359087 (cherry picked from commit ffb2e2d033f8e227b80ba3c5d06c67a96310e1ec) --- doc/build/changelog/unreleased_20/10820.rst | 7 +++ lib/sqlalchemy/connectors/asyncio.py | 11 +++- lib/sqlalchemy/dialects/oracle/oracledb.py | 50 ++++++++++++++- lib/sqlalchemy/testing/suite/test_results.py | 52 +++++++++++++--- test/engine/test_deprecations.py | 4 -- test/engine/test_execute.py | 5 +- test/ext/asyncio/test_engine_py3k.py | 64 ++++++++++++++------ 7 files changed, 153 insertions(+), 40 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10820.rst diff --git a/doc/build/changelog/unreleased_20/10820.rst b/doc/build/changelog/unreleased_20/10820.rst new file mode 100644 index 00000000000..e2cc717e2e3 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10820.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: oracle, usecase + :tickets: 10820 + + Added API support for server-side cursors for the oracledb async dialect, + allowing use of the :meth:`_asyncio.AsyncConnection.stream` and similar + stream methods. 
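A minimal usage sketch of what this enables from the application side, assuming a reachable Oracle database; the URL, credentials, service name and query below are placeholders only::

    import asyncio

    from sqlalchemy import text
    from sqlalchemy.ext.asyncio import create_async_engine


    async def main():
        # placeholder DSN; any oracle+oracledb_async URL behaves the same way
        engine = create_async_engine(
            "oracle+oracledb_async://scott:tiger@localhost/?service_name=XEPDB1"
        )
        async with engine.connect() as conn:
            # stream() is now backed by a server-side cursor on this dialect,
            # so rows are fetched incrementally rather than buffered up front
            result = await conn.stream(
                text("SELECT owner, object_name FROM all_objects")
            )
            async for row in result:
                print(row.owner, row.object_name)
        await engine.dispose()


    asyncio.run(main())
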
diff --git a/lib/sqlalchemy/connectors/asyncio.py b/lib/sqlalchemy/connectors/asyncio.py index 8dc198cf8e9..9b19bef78f6 100644 --- a/lib/sqlalchemy/connectors/asyncio.py +++ b/lib/sqlalchemy/connectors/asyncio.py @@ -36,7 +36,8 @@ def __init__(self, adapt_connection): cursor = self._connection.cursor() self._cursor = self._aenter_cursor(cursor) - self._rows = collections.deque() + if not self.server_side: + self._rows = collections.deque() def _aenter_cursor(self, cursor): return self.await_(cursor.__aenter__()) @@ -149,6 +150,14 @@ def fetchmany(self, size=None): def fetchall(self): return self.await_(self._cursor.fetchall()) + def __iter__(self): + iterator = self._cursor.__aiter__() + while True: + try: + yield self.await_(iterator.__anext__()) + except StopAsyncIteration: + break + class AsyncAdapt_dbapi_connection(AdaptedConnection): _cursor_cls = AsyncAdapt_dbapi_cursor diff --git a/lib/sqlalchemy/dialects/oracle/oracledb.py b/lib/sqlalchemy/dialects/oracle/oracledb.py index 1f5a19b8761..0667ed768e8 100644 --- a/lib/sqlalchemy/dialects/oracle/oracledb.py +++ b/lib/sqlalchemy/dialects/oracle/oracledb.py @@ -94,12 +94,14 @@ from typing import Any from typing import TYPE_CHECKING -from .cx_oracle import OracleDialect_cx_oracle as _OracleDialect_cx_oracle +from . import cx_oracle as _cx_oracle from ... import exc from ... import pool from ...connectors.asyncio import AsyncAdapt_dbapi_connection from ...connectors.asyncio import AsyncAdapt_dbapi_cursor +from ...connectors.asyncio import AsyncAdapt_dbapi_ss_cursor from ...connectors.asyncio import AsyncAdaptFallback_dbapi_connection +from ...engine import default from ...util import asbool from ...util import await_fallback from ...util import await_only @@ -109,8 +111,16 @@ from oracledb import AsyncCursor -class OracleDialect_oracledb(_OracleDialect_cx_oracle): +class OracleExecutionContext_oracledb( + _cx_oracle.OracleExecutionContext_cx_oracle +): + pass + + +class OracleDialect_oracledb(_cx_oracle.OracleDialect_cx_oracle): supports_statement_cache = True + execution_ctx_cls = OracleExecutionContext_oracledb + driver = "oracledb" _min_version = (1,) @@ -267,6 +277,17 @@ def __exit__(self, type_: Any, value: Any, traceback: Any) -> None: self.close() +class AsyncAdapt_oracledb_ss_cursor( + AsyncAdapt_dbapi_ss_cursor, AsyncAdapt_oracledb_cursor +): + __slots__ = () + + def close(self) -> None: + if self._cursor is not None: + self._cursor.close() + self._cursor = None # type: ignore + + class AsyncAdapt_oracledb_connection(AsyncAdapt_dbapi_connection): _connection: AsyncConnection __slots__ = () @@ -307,6 +328,9 @@ def stmtcachesize(self, value): def cursor(self): return AsyncAdapt_oracledb_cursor(self) + def ss_cursor(self): + return AsyncAdapt_oracledb_ss_cursor(self) + def xid(self, *args: Any, **kwargs: Any) -> Any: return self._connection.xid(*args, **kwargs) @@ -355,9 +379,31 @@ def connect(self, *arg, **kw): ) +class OracleExecutionContextAsync_oracledb(OracleExecutionContext_oracledb): + # restore default create cursor + create_cursor = default.DefaultExecutionContext.create_cursor + + def create_default_cursor(self): + # copy of OracleExecutionContext_cx_oracle.create_cursor + c = self._dbapi_connection.cursor() + if self.dialect.arraysize: + c.arraysize = self.dialect.arraysize + + return c + + def create_server_side_cursor(self): + c = self._dbapi_connection.ss_cursor() + if self.dialect.arraysize: + c.arraysize = self.dialect.arraysize + + return c + + class OracleDialectAsync_oracledb(OracleDialect_oracledb): is_async = True 
+ supports_server_side_cursors = True supports_statement_cache = True + execution_ctx_cls = OracleExecutionContextAsync_oracledb _min_version = (2,) diff --git a/lib/sqlalchemy/testing/suite/test_results.py b/lib/sqlalchemy/testing/suite/test_results.py index b3f432fb76c..2b91a559dbe 100644 --- a/lib/sqlalchemy/testing/suite/test_results.py +++ b/lib/sqlalchemy/testing/suite/test_results.py @@ -7,6 +7,7 @@ # mypy: ignore-errors import datetime +import re from .. import engines from .. import fixtures @@ -273,6 +274,8 @@ def _is_server_side(self, cursor): return getattr(cursor, "server_side", False) elif self.engine.dialect.driver == "psycopg": return bool(getattr(cursor, "name", False)) + elif self.engine.dialect.driver == "oracledb": + return getattr(cursor, "server_side", False) else: return False @@ -293,11 +296,26 @@ def _fixture(self, server_side_cursors): ) return self.engine + def stringify(self, str_): + return re.compile(r"SELECT (\d+)", re.I).sub( + lambda m: str(select(int(m.group(1))).compile(testing.db)), str_ + ) + @testing.combinations( - ("global_string", True, "select 1", True), - ("global_text", True, text("select 1"), True), + ("global_string", True, lambda stringify: stringify("select 1"), True), + ( + "global_text", + True, + lambda stringify: text(stringify("select 1")), + True, + ), ("global_expr", True, select(1), True), - ("global_off_explicit", False, text("select 1"), False), + ( + "global_off_explicit", + False, + lambda stringify: text(stringify("select 1")), + False, + ), ( "stmt_option", False, @@ -315,15 +333,22 @@ def _fixture(self, server_side_cursors): ( "for_update_string", True, - "SELECT 1 FOR UPDATE", + lambda stringify: stringify("SELECT 1 FOR UPDATE"), True, testing.skip_if(["sqlite", "mssql"]), ), - ("text_no_ss", False, text("select 42"), False), + ( + "text_no_ss", + False, + lambda stringify: text(stringify("select 42")), + False, + ), ( "text_ss_option", False, - text("select 42").execution_options(stream_results=True), + lambda stringify: text(stringify("select 42")).execution_options( + stream_results=True + ), True, ), id_="iaaa", @@ -334,6 +359,11 @@ def test_ss_cursor_status( ): engine = self._fixture(engine_ss_arg) with engine.begin() as conn: + if callable(statement): + statement = testing.resolve_lambda( + statement, stringify=self.stringify + ) + if isinstance(statement, str): result = conn.exec_driver_sql(statement) else: @@ -348,7 +378,7 @@ def test_conn_option(self): # should be enabled for this one result = conn.execution_options( stream_results=True - ).exec_driver_sql("select 1") + ).exec_driver_sql(self.stringify("select 1")) assert self._is_server_side(result.cursor) # the connection has autobegun, which means at the end of the @@ -402,7 +432,9 @@ def test_roundtrip_fetchall(self, metadata): test_table = Table( "test_table", md, - Column("id", Integer, primary_key=True), + Column( + "id", Integer, primary_key=True, test_needs_autoincrement=True + ), Column("data", String(50)), ) @@ -442,7 +474,9 @@ def test_roundtrip_fetchmany(self, metadata): test_table = Table( "test_table", md, - Column("id", Integer, primary_key=True), + Column( + "id", Integer, primary_key=True, test_needs_autoincrement=True + ), Column("data", String(50)), ) diff --git a/test/engine/test_deprecations.py b/test/engine/test_deprecations.py index 9041a6af102..30bf9e66f64 100644 --- a/test/engine/test_deprecations.py +++ b/test/engine/test_deprecations.py @@ -300,10 +300,6 @@ def test_connection_fairy_connection(self): is_(fairy.connection, 
fairy.dbapi_connection) -def select1(db): - return str(select(1).compile(dialect=db.dialect)) - - class ResetEventTest(fixtures.TestBase): def _fixture(self, **kw): dbapi = Mock() diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index 122c08461d1..61c422bb56a 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -1940,13 +1940,10 @@ def go2(dbapi_conn, xyz): def test_new_exec_driver_sql_no_events(self): m1 = Mock() - def select1(db): - return str(select(1).compile(dialect=db.dialect)) - with testing.db.connect() as conn: event.listen(conn, "before_execute", m1.before_execute) event.listen(conn, "after_execute", m1.after_execute) - conn.exec_driver_sql(select1(testing.db)) + conn.exec_driver_sql(str(select(1).compile(testing.db))) eq_(m1.mock_calls, []) def test_add_event_after_connect(self, testing_engine): diff --git a/test/ext/asyncio/test_engine_py3k.py b/test/ext/asyncio/test_engine_py3k.py index 9fb12e6936f..227307e086f 100644 --- a/test/ext/asyncio/test_engine_py3k.py +++ b/test/ext/asyncio/test_engine_py3k.py @@ -21,6 +21,7 @@ from sqlalchemy import Table from sqlalchemy import testing from sqlalchemy import text +from sqlalchemy import true from sqlalchemy import union_all from sqlalchemy.engine import cursor as _cursor from sqlalchemy.ext.asyncio import async_engine_from_config @@ -405,8 +406,7 @@ async def go(): @async_test async def test_statement_compile(self, async_engine): - stmt = _select1(async_engine) - eq_(str(select(1).compile(async_engine)), stmt) + stmt = str(select(1).compile(async_engine)) async with async_engine.connect() as conn: eq_(str(select(1).compile(conn)), stmt) @@ -967,11 +967,11 @@ async def test_sync_before_cursor_execute_engine(self, async_engine): event.listen(async_engine.sync_engine, "before_cursor_execute", canary) - s1 = _select1(async_engine) async with async_engine.connect() as conn: sync_conn = conn.sync_connection - await conn.execute(text(s1)) + await conn.execute(select(1)) + s1 = str(select(1).compile(async_engine)) eq_( canary.mock_calls, [mock.call(sync_conn, mock.ANY, s1, mock.ANY, mock.ANY, False)], @@ -981,15 +981,15 @@ async def test_sync_before_cursor_execute_engine(self, async_engine): async def test_sync_before_cursor_execute_connection(self, async_engine): canary = mock.Mock() - s1 = _select1(async_engine) async with async_engine.connect() as conn: sync_conn = conn.sync_connection event.listen( async_engine.sync_engine, "before_cursor_execute", canary ) - await conn.execute(text(s1)) + await conn.execute(select(1)) + s1 = str(select(1).compile(async_engine)) eq_( canary.mock_calls, [mock.call(sync_conn, mock.ANY, s1, mock.ANY, mock.ANY, False)], @@ -1331,20 +1331,51 @@ async def test_one_multi_result(self, async_engine): ): await result.one() - @testing.combinations( - ("scalars",), ("stream_scalars",), argnames="filter_" - ) + @testing.combinations(("scalars",), ("stream_scalars",), argnames="case") @async_test - async def test_scalars(self, async_engine, filter_): + async def test_scalars(self, async_engine, case): users = self.tables.users async with async_engine.connect() as conn: - if filter_ == "scalars": + if case == "scalars": result = (await conn.scalars(select(users))).all() - elif filter_ == "stream_scalars": + elif case == "stream_scalars": result = await (await conn.stream_scalars(select(users))).all() eq_(result, list(range(1, 20))) + @async_test + @testing.combinations(("stream",), ("stream_scalars",), argnames="case") + async def test_stream_fetch_many_not_complete(self, 
async_engine, case): + users = self.tables.users + big_query = select(users).join(users.alias("other"), true()) + async with async_engine.connect() as conn: + if case == "stream": + result = await conn.stream(big_query) + elif case == "stream_scalars": + result = await conn.stream_scalars(big_query) + + f1 = await result.fetchmany(5) + f2 = await result.fetchmany(10) + f3 = await result.fetchmany(7) + eq_(len(f1) + len(f2) + len(f3), 22) + + res = await result.fetchall() + eq_(len(res), 19 * 19 - 22) + + @async_test + @testing.combinations(("stream",), ("execute",), argnames="case") + async def test_cursor_close(self, async_engine, case): + users = self.tables.users + async with async_engine.connect() as conn: + if case == "stream": + result = await conn.stream(select(users)) + cursor = result._real_result.cursor + elif case == "execute": + result = await conn.execute(select(users)) + cursor = result.cursor + + await conn.run_sync(lambda _: cursor.close()) + class TextSyncDBAPI(fixtures.TestBase): __requires__ = ("asyncio",) @@ -1516,17 +1547,10 @@ async def test_gather_after_dispose(self, testing_engine, do_dispose): async def thing(engine): async with engine.connect() as conn: - await conn.exec_driver_sql("select 1") + await conn.exec_driver_sql(str(select(1).compile(engine))) if do_dispose: await engine.dispose() tasks = [thing(engine) for _ in range(10)] await asyncio.gather(*tasks) - - -def _select1(engine): - if engine.dialect.name == "oracle": - return "SELECT 1 FROM DUAL" - else: - return "SELECT 1" From 3a254c31a755a8c052fd0f0ce0ae88baa7b8ba1b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 5 Aug 2024 15:05:16 -0400 Subject: [PATCH 275/544] - 2.0.32 --- doc/build/changelog/changelog_20.rst | 187 +++++++++++++++++- doc/build/changelog/unreleased_20/10267.rst | 13 -- doc/build/changelog/unreleased_20/10820.rst | 7 - doc/build/changelog/unreleased_20/10834.rst | 8 - doc/build/changelog/unreleased_20/11163.rst | 11 -- doc/build/changelog/unreleased_20/11435.rst | 10 - doc/build/changelog/unreleased_20/11471.rst | 9 - doc/build/changelog/unreleased_20/11479.rst | 7 - doc/build/changelog/unreleased_20/11480.rst | 9 - doc/build/changelog/unreleased_20/11522.rst | 7 - doc/build/changelog/unreleased_20/11530.rst | 8 - doc/build/changelog/unreleased_20/11532.rst | 8 - doc/build/changelog/unreleased_20/11575.rst | 8 - doc/build/changelog/unreleased_20/11576.rst | 11 -- doc/build/changelog/unreleased_20/11592.rst | 9 - doc/build/changelog/unreleased_20/11625.rst | 9 - doc/build/changelog/unreleased_20/11661.rst | 10 - doc/build/changelog/unreleased_20/11663.rst | 16 -- .../changelog/unreleased_20/mypy1110.rst | 7 - doc/build/conf.py | 4 +- 20 files changed, 188 insertions(+), 170 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/10267.rst delete mode 100644 doc/build/changelog/unreleased_20/10820.rst delete mode 100644 doc/build/changelog/unreleased_20/10834.rst delete mode 100644 doc/build/changelog/unreleased_20/11163.rst delete mode 100644 doc/build/changelog/unreleased_20/11435.rst delete mode 100644 doc/build/changelog/unreleased_20/11471.rst delete mode 100644 doc/build/changelog/unreleased_20/11479.rst delete mode 100644 doc/build/changelog/unreleased_20/11480.rst delete mode 100644 doc/build/changelog/unreleased_20/11522.rst delete mode 100644 doc/build/changelog/unreleased_20/11530.rst delete mode 100644 doc/build/changelog/unreleased_20/11532.rst delete mode 100644 doc/build/changelog/unreleased_20/11575.rst delete mode 100644 
doc/build/changelog/unreleased_20/11576.rst delete mode 100644 doc/build/changelog/unreleased_20/11592.rst delete mode 100644 doc/build/changelog/unreleased_20/11625.rst delete mode 100644 doc/build/changelog/unreleased_20/11661.rst delete mode 100644 doc/build/changelog/unreleased_20/11663.rst delete mode 100644 doc/build/changelog/unreleased_20/mypy1110.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index ec885b1a488..f6f324bd62b 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,192 @@ .. changelog:: :version: 2.0.32 - :include_notes_from: unreleased_20 + :released: August 5, 2024 + + .. change:: + :tags: bug, examples + :tickets: 10267 + + Fixed issue in history_meta example where the "version" column in the + versioned table needs to default to the most recent version number in the + history table on INSERT, to suit the use case of a table where rows are + deleted, and can then be replaced by new rows that re-use the same primary + key identity. This fix adds an additonal SELECT query per INSERT in the + main table, which may be inefficient; for cases where primary keys are not + re-used, the default function may be omitted. Patch courtesy Philipp H. + v. Loewenfeld. + + + .. change:: + :tags: oracle, usecase + :tickets: 10820 + + Added API support for server-side cursors for the oracledb async dialect, + allowing use of the :meth:`_asyncio.AsyncConnection.stream` and similar + stream methods. + + .. change:: + :tags: bug, orm + :tickets: 10834 + + Fixed issue where using the :meth:`_orm.Query.enable_eagerloads` and + :meth:`_orm.Query.yield_per` methods at the same time, in order to disable + eager loading that's configured on the mapper directly, would be silently + ignored, leading to errors or unexpected eager population of attributes. + + .. change:: + :tags: orm + :tickets: 11163 + + Added a warning noting when an + :meth:`_engine.ConnectionEvents.engine_connect` event may be leaving + a transaction open, which can alter the behavior of a + :class:`_orm.Session` using such an engine as bind. + On SQLAlchemy 2.1 :paramref:`_orm.Session.join_transaction_mode` will + instead be ignored in all cases when the session bind is + an :class:`_engine.Engine`. + + .. change:: + :tags: bug, general, regression + :tickets: 11435 + + Restored legacy class names removed from + ``sqlalalchemy.orm.collections.*``, including + :class:`_orm.MappedCollection`, :func:`_orm.mapped_collection`, + :func:`_orm.column_mapped_collection`, + :func:`_orm.attribute_mapped_collection`. Pull request courtesy Takashi + Kajinami. + + .. change:: + :tags: bug, sql + :tickets: 11471 + + Follow up of :ticket:`11471` to fix caching issue where using the + :meth:`.CompoundSelectState.add_cte` method of the + :class:`.CompoundSelectState` construct would not set a correct cache key + which distinguished between different CTE expressions. Also added tests + that would detect issues similar to the one fixed in :ticket:`11544`. + + .. change:: + :tags: bug, mysql + :tickets: 11479 + + Fixed issue in MySQL dialect where ENUM values that contained percent signs + were not properly escaped for the driver. + + + .. change:: + :tags: usecase, oracle + :tickets: 11480 + + Implemented two-phase transactions for the oracledb dialect. Historically, + this feature never worked with the cx_Oracle dialect, however recent + improvements to the oracledb successor now allow this to be possible. 
The + two phase transaction API is available at the Core level via the + :meth:`_engine.Connection.begin_twophase` method. + + .. change:: + :tags: bug, postgresql + :tickets: 11522 + + It is now considered a pool-invalidating disconnect event when psycopg2 + throws an "SSL SYSCALL error: Success" error message, which can occur when + the SSL connection to Postgres is terminated abnormally. + + .. change:: + :tags: bug, schema + :tickets: 11530 + + Fixed additional issues in the event system triggered by unpickling of a + :class:`.Enum` datatype, continuing from :ticket:`11365` and + :ticket:`11360`, where dynamically generated elements of the event + structure would not be present when unpickling in a new process. + + .. change:: + :tags: bug, engine + :tickets: 11532 + + Fixed issue in "insertmanyvalues" feature where a particular call to + ``cursor.fetchall()`` were not wrapped in SQLAlchemy's exception wrapper, + which apparently can raise a database exception during fetch when using + pyodbc. + + .. change:: + :tags: usecase, orm + :tickets: 11575 + + The :paramref:`_orm.aliased.name` parameter to :func:`_orm.aliased` may now + be combined with the :paramref:`_orm.aliased.flat` parameter, producing + per-table names based on a name-prefixed naming convention. Pull request + courtesy Eric Atkin. + + .. change:: + :tags: bug, postgresql + :tickets: 11576 + + Fixed issue where the :func:`_sql.collate` construct, which explicitly sets + a collation for a given expression, would maintain collation settings for + the underlying type object from the expression, causing SQL expressions to + have both collations stated at once when used in further expressions for + specific dialects that render explicit type casts, such as that of asyncpg. + The :func:`_sql.collate` construct now assigns its own type to explicitly + include the new collation, assuming it's a string type. + + .. change:: + :tags: bug, sql + :tickets: 11592 + + Fixed bug where the :meth:`.Operators.nulls_first()` and + :meth:`.Operators.nulls_last()` modifiers would not be treated the same way + as :meth:`.Operators.desc()` and :meth:`.Operators.asc()` when determining + if an ORDER BY should be against a label name already in the statement. All + four modifiers are now treated the same within ORDER BY. + + .. change:: + :tags: bug, orm, regression + :tickets: 11625 + + Fixed regression appearing in 2.0.21 caused by :ticket:`10279` where using + a :func:`_sql.delete` or :func:`_sql.update` against an ORM class that is + the base of an inheritance hierarchy, while also specifying that subclasses + should be loaded polymorphically, would leak the polymorphic joins into the + UPDATE or DELETE statement as well creating incorrect SQL. + + .. change:: + :tags: bug, orm, regression + :tickets: 11661 + + Fixed regression from version 1.4 in + :meth:`_orm.Session.bulk_insert_mappings` where using the + :paramref:`_orm.Session.bulk_insert_mappings.return_defaults` parameter + would not populate the passed in dictionaries with newly generated primary + key values. + + + .. change:: + :tags: bug, oracle, sqlite + :tickets: 11663 + + Implemented bitwise operators for Oracle which was previously + non-functional due to a non-standard syntax used by this database. + Oracle's support for bitwise "or" and "xor" starts with server version 21. + Additionally repaired the implementation of "xor" for SQLite. 
+ + As part of this change, the dialect compliance test suite has been enhanced + to include support for server-side bitwise tests; third party dialect + authors should refer to new "supports_bitwise" methods in the + requirements.py file to enable these tests. + + + + + .. change:: + :tags: bug, typing + + Fixed internal typing issues to establish compatibility with mypy 1.11.0. + Note that this does not include issues which have arisen with the + deprecated mypy plugin used by SQLAlchemy 1.4-style code; see the addiional + change note for this plugin indicating revised compatibility. .. changelog:: :version: 2.0.31 diff --git a/doc/build/changelog/unreleased_20/10267.rst b/doc/build/changelog/unreleased_20/10267.rst deleted file mode 100644 index cfbf04f6dbd..00000000000 --- a/doc/build/changelog/unreleased_20/10267.rst +++ /dev/null @@ -1,13 +0,0 @@ -.. change:: - :tags: bug, examples - :tickets: 10267 - - Fixed issue in history_meta example where the "version" column in the - versioned table needs to default to the most recent version number in the - history table on INSERT, to suit the use case of a table where rows are - deleted, and can then be replaced by new rows that re-use the same primary - key identity. This fix adds an additonal SELECT query per INSERT in the - main table, which may be inefficient; for cases where primary keys are not - re-used, the default function may be omitted. Patch courtesy Philipp H. - v. Loewenfeld. - diff --git a/doc/build/changelog/unreleased_20/10820.rst b/doc/build/changelog/unreleased_20/10820.rst deleted file mode 100644 index e2cc717e2e3..00000000000 --- a/doc/build/changelog/unreleased_20/10820.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: oracle, usecase - :tickets: 10820 - - Added API support for server-side cursors for the oracledb async dialect, - allowing use of the :meth:`_asyncio.AsyncConnection.stream` and similar - stream methods. diff --git a/doc/build/changelog/unreleased_20/10834.rst b/doc/build/changelog/unreleased_20/10834.rst deleted file mode 100644 index 7670f57ad17..00000000000 --- a/doc/build/changelog/unreleased_20/10834.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 10834 - - Fixed issue where using the :meth:`_orm.Query.enable_eagerloads` and - :meth:`_orm.Query.yield_per` methods at the same time, in order to disable - eager loading that's configured on the mapper directly, would be silently - ignored, leading to errors or unexpected eager population of attributes. diff --git a/doc/build/changelog/unreleased_20/11163.rst b/doc/build/changelog/unreleased_20/11163.rst deleted file mode 100644 index da21b45378a..00000000000 --- a/doc/build/changelog/unreleased_20/11163.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: orm - :tickets: 11163 - - Added a warning noting when an - :meth:`_engine.ConnectionEvents.engine_connect` event may be leaving - a transaction open, which can alter the behavior of a - :class:`_orm.Session` using such an engine as bind. - On SQLAlchemy 2.1 :paramref:`_orm.Session.join_transaction_mode` will - instead be ignored in all cases when the session bind is - an :class:`_engine.Engine`. diff --git a/doc/build/changelog/unreleased_20/11435.rst b/doc/build/changelog/unreleased_20/11435.rst deleted file mode 100644 index a3f96de18c0..00000000000 --- a/doc/build/changelog/unreleased_20/11435.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. 
change:: - :tags: bug, general, regression - :tickets: 11435 - - Restored legacy class names removed from - ``sqlalalchemy.orm.collections.*``, including - :class:`_orm.MappedCollection`, :func:`_orm.mapped_collection`, - :func:`_orm.column_mapped_collection`, - :func:`_orm.attribute_mapped_collection`. Pull request courtesy Takashi - Kajinami. diff --git a/doc/build/changelog/unreleased_20/11471.rst b/doc/build/changelog/unreleased_20/11471.rst deleted file mode 100644 index 4170de02985..00000000000 --- a/doc/build/changelog/unreleased_20/11471.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 11471 - - Follow up of :ticket:`11471` to fix caching issue where using the - :meth:`.CompoundSelectState.add_cte` method of the - :class:`.CompoundSelectState` construct would not set a correct cache key - which distinguished between different CTE expressions. Also added tests - that would detect issues similar to the one fixed in :ticket:`11544`. diff --git a/doc/build/changelog/unreleased_20/11479.rst b/doc/build/changelog/unreleased_20/11479.rst deleted file mode 100644 index fccaaf80264..00000000000 --- a/doc/build/changelog/unreleased_20/11479.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, mysql - :tickets: 11479 - - Fixed issue in MySQL dialect where ENUM values that contained percent signs - were not properly escaped for the driver. - diff --git a/doc/build/changelog/unreleased_20/11480.rst b/doc/build/changelog/unreleased_20/11480.rst deleted file mode 100644 index 7a653a6b69f..00000000000 --- a/doc/build/changelog/unreleased_20/11480.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: usecase, oracle - :tickets: 11480 - - Implemented two-phase transactions for the oracledb dialect. Historically, - this feature never worked with the cx_Oracle dialect, however recent - improvements to the oracledb successor now allow this to be possible. The - two phase transaction API is available at the Core level via the - :meth:`_engine.Connection.begin_twophase` method. diff --git a/doc/build/changelog/unreleased_20/11522.rst b/doc/build/changelog/unreleased_20/11522.rst deleted file mode 100644 index 279197a779b..00000000000 --- a/doc/build/changelog/unreleased_20/11522.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, postgresql - :tickets: 11522 - - It is now considered a pool-invalidating disconnect event when psycopg2 - throws an "SSL SYSCALL error: Success" error message, which can occur when - the SSL connection to Postgres is terminated abnormally. \ No newline at end of file diff --git a/doc/build/changelog/unreleased_20/11530.rst b/doc/build/changelog/unreleased_20/11530.rst deleted file mode 100644 index 1ffa7c5d265..00000000000 --- a/doc/build/changelog/unreleased_20/11530.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, schema - :tickets: 11530 - - Fixed additional issues in the event system triggered by unpickling of a - :class:`.Enum` datatype, continuing from :ticket:`11365` and - :ticket:`11360`, where dynamically generated elements of the event - structure would not be present when unpickling in a new process. diff --git a/doc/build/changelog/unreleased_20/11532.rst b/doc/build/changelog/unreleased_20/11532.rst deleted file mode 100644 index 141463d5835..00000000000 --- a/doc/build/changelog/unreleased_20/11532.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
change:: - :tags: bug, engine - :tickets: 11532 - - Fixed issue in "insertmanyvalues" feature where a particular call to - ``cursor.fetchall()`` were not wrapped in SQLAlchemy's exception wrapper, - which apparently can raise a database exception during fetch when using - pyodbc. diff --git a/doc/build/changelog/unreleased_20/11575.rst b/doc/build/changelog/unreleased_20/11575.rst deleted file mode 100644 index 4eb56655fad..00000000000 --- a/doc/build/changelog/unreleased_20/11575.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: usecase, orm - :tickets: 11575 - - The :paramref:`_orm.aliased.name` parameter to :func:`_orm.aliased` may now - be combined with the :paramref:`_orm.aliased.flat` parameter, producing - per-table names based on a name-prefixed naming convention. Pull request - courtesy Eric Atkin. diff --git a/doc/build/changelog/unreleased_20/11576.rst b/doc/build/changelog/unreleased_20/11576.rst deleted file mode 100644 index 93cfe3bf036..00000000000 --- a/doc/build/changelog/unreleased_20/11576.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: bug, postgresql - :tickets: 11576 - - Fixed issue where the :func:`_sql.collate` construct, which explicitly sets - a collation for a given expression, would maintain collation settings for - the underlying type object from the expression, causing SQL expressions to - have both collations stated at once when used in further expressions for - specific dialects that render explicit type casts, such as that of asyncpg. - The :func:`_sql.collate` construct now assigns its own type to explicitly - include the new collation, assuming it's a string type. diff --git a/doc/build/changelog/unreleased_20/11592.rst b/doc/build/changelog/unreleased_20/11592.rst deleted file mode 100644 index 616eb1e2865..00000000000 --- a/doc/build/changelog/unreleased_20/11592.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 11592 - - Fixed bug where the :meth:`.Operators.nulls_first()` and - :meth:`.Operators.nulls_last()` modifiers would not be treated the same way - as :meth:`.Operators.desc()` and :meth:`.Operators.asc()` when determining - if an ORDER BY should be against a label name already in the statement. All - four modifiers are now treated the same within ORDER BY. diff --git a/doc/build/changelog/unreleased_20/11625.rst b/doc/build/changelog/unreleased_20/11625.rst deleted file mode 100644 index c32a90ad822..00000000000 --- a/doc/build/changelog/unreleased_20/11625.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm, regression - :tickets: 11625 - - Fixed regression appearing in 2.0.21 caused by :ticket:`10279` where using - a :func:`_sql.delete` or :func:`_sql.update` against an ORM class that is - the base of an inheritance hierarchy, while also specifying that subclasses - should be loaded polymorphically, would leak the polymorphic joins into the - UPDATE or DELETE statement as well creating incorrect SQL. diff --git a/doc/build/changelog/unreleased_20/11661.rst b/doc/build/changelog/unreleased_20/11661.rst deleted file mode 100644 index 35985d8bbaa..00000000000 --- a/doc/build/changelog/unreleased_20/11661.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, orm, regression - :tickets: 11661 - - Fixed regression from version 1.4 in - :meth:`_orm.Session.bulk_insert_mappings` where using the - :paramref:`_orm.Session.bulk_insert_mappings.return_defaults` parameter - would not populate the passed in dictionaries with newly generated primary - key values. 
- diff --git a/doc/build/changelog/unreleased_20/11663.rst b/doc/build/changelog/unreleased_20/11663.rst deleted file mode 100644 index 599cd744bf7..00000000000 --- a/doc/build/changelog/unreleased_20/11663.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. change:: - :tags: bug, oracle, sqlite - :tickets: 11663 - - Implemented bitwise operators for Oracle which was previously - non-functional due to a non-standard syntax used by this database. - Oracle's support for bitwise "or" and "xor" starts with server version 21. - Additionally repaired the implementation of "xor" for SQLite. - - As part of this change, the dialect compliance test suite has been enhanced - to include support for server-side bitwise tests; third party dialect - authors should refer to new "supports_bitwise" methods in the - requirements.py file to enable these tests. - - - diff --git a/doc/build/changelog/unreleased_20/mypy1110.rst b/doc/build/changelog/unreleased_20/mypy1110.rst deleted file mode 100644 index 7804da4c032..00000000000 --- a/doc/build/changelog/unreleased_20/mypy1110.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, typing - - Fixed internal typing issues to establish compatibility with mypy 1.11.0. - Note that this does not include issues which have arisen with the - deprecated mypy plugin used by SQLAlchemy 1.4-style code; see the addiional - change note for this plugin indicating revised compatibility. diff --git a/doc/build/conf.py b/doc/build/conf.py index 054fe873d85..d85b42525df 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.31" +release = "2.0.32" -release_date = "June 18, 2024" +release_date = "August 5, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 0b07f6293193e37f3a2897f5d0b61b443d32fcfe Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 5 Aug 2024 15:08:57 -0400 Subject: [PATCH 276/544] Version 2.0.33 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index f6f324bd62b..e4448ec3dff 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.33 + :include_notes_from: unreleased_20 + .. 
changelog:: :version: 2.0.32 :released: August 5, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index b15c99062a5..8eec8fce271 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.32" +__version__ = "2.0.33" def __go(lcls: Any) -> None: From 9dc0c2aaa18601dec18d0f0356a0ab022023b932 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 5 Aug 2024 17:01:08 -0400 Subject: [PATCH 277/544] add changelog for #11557 also classify for oracle Change-Id: I725db9c6ae6a8fabe7faf8631113633b338afea4 (cherry picked from commit 7e2615c8c86ffc0247463ba6aeab86b9c4d4a281) --- doc/build/changelog/changelog_20.rst | 6 ++++++ doc/build/changelog/unreleased_20/11557.txt | 6 ------ 2 files changed, 6 insertions(+), 6 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/11557.txt diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index e4448ec3dff..0fa618f4a21 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -29,6 +29,12 @@ re-used, the default function may be omitted. Patch courtesy Philipp H. v. Loewenfeld. + .. change:: + :tags: bug, oracle + :tickets: 11557 + + Fixed table reflection on Oracle 10.2 and older where compression options + are not supported. .. change:: :tags: oracle, usecase diff --git a/doc/build/changelog/unreleased_20/11557.txt b/doc/build/changelog/unreleased_20/11557.txt deleted file mode 100644 index be270a6f251..00000000000 --- a/doc/build/changelog/unreleased_20/11557.txt +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: bug, reflection, oracle - :tickets: 11557 - - Fixed table reflection on Oracle 10.2 and older where compression options - are not supported. From f7f541bed6b174739477d7d53ec82036c038ab43 Mon Sep 17 00:00:00 2001 From: Anders Bogsnes Date: Mon, 5 Aug 2024 16:28:48 -0400 Subject: [PATCH 278/544] Add array_type to SuiteRequirements Added missing ``array_type`` property to the testing suite ``SuiteRequirements`` class. Closes: #11694 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11694 Pull-request-sha: 24697f6a4346005aa83d8eb06f94bba4cc994862 Change-Id: I192b5b932dfef07043c0c0cfe8ea36b02425a44d (cherry picked from commit f52be645d7f5735bba52786b2f417cc0786f138d) --- doc/build/changelog/unreleased_20/array_type.rst | 5 +++++ lib/sqlalchemy/testing/requirements.py | 5 +++++ 2 files changed, 10 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/array_type.rst diff --git a/doc/build/changelog/unreleased_20/array_type.rst b/doc/build/changelog/unreleased_20/array_type.rst new file mode 100644 index 00000000000..9b0801faf5b --- /dev/null +++ b/doc/build/changelog/unreleased_20/array_type.rst @@ -0,0 +1,5 @@ +.. change:: + :tags: bug, test + + Added missing ``array_type`` property to the testing suite + ``SuiteRequirements`` class. 
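As an illustration only (not part of this patch), a third-party dialect whose database implements a native ARRAY type could opt into the corresponding suite tests by overriding the new property in its own ``requirements.py``::

    from sqlalchemy.testing import exclusions
    from sqlalchemy.testing.requirements import SuiteRequirements


    class Requirements(SuiteRequirements):
        @property
        def array_type(self):
            # enable ARRAY-related compliance tests for this dialect
            return exclusions.open()
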
diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index 273e5acab91..b23230f8b2c 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -1093,6 +1093,11 @@ def go(config): return exclusions.only_if(go) + @property + def array_type(self): + """Target platform implements a native ARRAY type""" + return exclusions.closed() + @property def json_type(self): """target platform implements a native JSON type.""" From efa1c9593494aa04b448ccbe39dc77efb69e0d08 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 23:07:55 +0200 Subject: [PATCH 279/544] Bump pypa/cibuildwheel from 2.19.2 to 2.20.0 (#11690) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.19.2 to 2.20.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.19.2...v2.20.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cherry picked from commit 3ac034057ce621379fb8e0926b851a903d2c7e0b) --- .github/workflows/create-wheels.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index 4a0cd9e19be..7fbc8e9ea21 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -74,7 +74,7 @@ jobs: - name: Build compiled wheels if: ${{ matrix.wheel_mode == 'compiled' }} - uses: pypa/cibuildwheel@v2.19.2 + uses: pypa/cibuildwheel@v2.20.0 env: CIBW_ARCHS_LINUX: ${{ matrix.linux_archs }} CIBW_BUILD: ${{ matrix.python }} From 653c4d5908b79e653a0aab0989b3cf2bc04c24c2 Mon Sep 17 00:00:00 2001 From: Tao Zhou Date: Tue, 6 Aug 2024 16:21:44 -0400 Subject: [PATCH 280/544] Fix a misreference in inheritance.rst (#11700) (cherry picked from commit 10ac7ce5c8527240e8cc4fa65f3f7a0ba10a2f26) --- doc/build/orm/queryguide/inheritance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/orm/queryguide/inheritance.rst b/doc/build/orm/queryguide/inheritance.rst index 136bed55a60..537d51ae59e 100644 --- a/doc/build/orm/queryguide/inheritance.rst +++ b/doc/build/orm/queryguide/inheritance.rst @@ -128,7 +128,7 @@ objects at once. This loader option works in a similar fashion as the SELECT statement against each sub-table for objects loaded in the hierarchy, using ``IN`` to query for additional rows based on primary key. 
-:func:`_orm.selectinload` accepts as its arguments the base entity that is +:func:`_orm.selectin_polymorphic` accepts as its arguments the base entity that is being queried, followed by a sequence of subclasses of that entity for which their specific attributes should be loaded for incoming rows:: From 6b31cd7e1bd0f5d9fb2ced53b59176175eff9843 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 7 Aug 2024 09:10:40 -0400 Subject: [PATCH 281/544] disable recovery tests for oracledb this is not holding up in CI and is not a critical feature for now Change-Id: Ib9547028265403497c176d96c462d76cd86c967a (cherry picked from commit 3a4f8cd8760a6901880310bef7ced7e4b424d375) --- test/requirements.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/requirements.py b/test/requirements.py index a2f8598d632..2e74f81aeb6 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -889,6 +889,7 @@ def two_phase_recovery(self): "still can't get recover to work w/ MariaDB / MySQL", ) + skip_if("oracle+cx_oracle", "recovery not functional") + + skip_if("oracle+oracledb", "recovery can't be reliably tested") ) @property From e77b3b62e4375307315a67a156724b87794fff85 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 7 Aug 2024 12:18:25 -0400 Subject: [PATCH 282/544] restore generative to with_statement_hint Fixed regression in :meth:`_sql.Select.with_statement_hint` and others where the generative behavior of the method stopped producing a copy of the object. Fixes: #11703 Change-Id: Ia4482f91f76fae9982dc6b075bf5cfec7042ffa6 (cherry picked from commit a9747467a8b6e8212f758aaceffdc96f087e15bb) --- doc/build/changelog/unreleased_20/11703.rst | 7 +++++++ lib/sqlalchemy/sql/selectable.py | 1 + test/sql/test_select.py | 15 +++++++++++++++ 3 files changed, 23 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11703.rst diff --git a/doc/build/changelog/unreleased_20/11703.rst b/doc/build/changelog/unreleased_20/11703.rst new file mode 100644 index 00000000000..5c703138a14 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11703.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, sql, regression + :tickets: 11703 + + Fixed regression in :meth:`_sql.Select.with_statement_hint` and others + where the generative behavior of the method stopped producing a copy of the + object. diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index cb0fd6f71cd..c365a41f343 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -473,6 +473,7 @@ class HasHints: ("_hints", InternalTraversal.dp_table_hint_list), ] + @_generative def with_statement_hint(self, text: str, dialect_name: str = "*") -> Self: """Add a statement hint to this :class:`_expression.Select` or other selectable object. 
diff --git a/test/sql/test_select.py b/test/sql/test_select.py index e772c5911d0..2bef71dd1e5 100644 --- a/test/sql/test_select.py +++ b/test/sql/test_select.py @@ -469,6 +469,21 @@ def test_select_multiple_compound_elements(self, methname, joiner): " %(joiner)s SELECT :param_3 AS anon_3" % {"joiner": joiner}, ) + @testing.combinations( + lambda stmt: stmt.with_statement_hint("some hint"), + lambda stmt: stmt.with_hint(table("x"), "some hint"), + lambda stmt: stmt.where(column("q") == 5), + lambda stmt: stmt.having(column("q") == 5), + lambda stmt: stmt.order_by(column("q")), + lambda stmt: stmt.group_by(column("q")), + # TODO: continue + ) + def test_methods_generative(self, testcase): + s1 = select(1) + s2 = testing.resolve_lambda(testcase, stmt=s1) + + assert s1 is not s2 + class ColumnCollectionAsSelectTest(fixtures.TestBase, AssertsCompiledSQL): """tests related to #8285.""" From 5adb9484e9a786128e9a0a7c845ea23eced63850 Mon Sep 17 00:00:00 2001 From: Masterchen09 <13187726+Masterchen09@users.noreply.github.com> Date: Tue, 6 Aug 2024 14:20:51 -0400 Subject: [PATCH 283/544] handle quoted_name instances separately in engine.reflection.cache MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixed issue in internal reflection cache where particular reflection scenarios regarding same-named quoted_name() constructs would not be correctly cached. Pull request courtesy Felix Lüdin. Fixes: #11687 Closes: #11688 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11688 Pull-request-sha: 43d94273a5b13a89226e60de4b958d5b4ac7ff78 Change-Id: I73273dff532a9d14d54065bc33339874e3cb2716 (cherry picked from commit e26e9f123a0afe0a89f7635389764206d15a5f1e) --- doc/build/changelog/unreleased_20/11687.rst | 7 + lib/sqlalchemy/engine/reflection.py | 13 +- test/engine/test_reflection.py | 162 ++++++++++++++++++++ 3 files changed, 180 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11687.rst diff --git a/doc/build/changelog/unreleased_20/11687.rst b/doc/build/changelog/unreleased_20/11687.rst new file mode 100644 index 00000000000..c18d30ffabd --- /dev/null +++ b/doc/build/changelog/unreleased_20/11687.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, reflection + :tickets: 11687 + + Fixed issue in internal reflection cache where particular reflection + scenarios regarding same-named quoted_name() constructs would not be + correctly cached. Pull request courtesy Felix Lüdin. 
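To sketch the failure mode being fixed (the engine URL and table name below are hypothetical): reflection calls that differ only in their quoting flag previously reduced to the same cache key, so a later call could silently return an earlier call's cached result::

    from sqlalchemy import create_engine, inspect
    from sqlalchemy.sql.elements import quoted_name

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
    insp = inspect(engine)

    # distinct cache entries are now kept per (string value, quote flag)
    insp.get_columns(quoted_name("SomeTable", quote=True))
    insp.get_columns(quoted_name("SomeTable", quote=False))
    insp.get_columns("SomeTable")
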
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index ef1e566c29e..09b09880350 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -55,6 +55,7 @@ from ..sql import operators from ..sql import schema as sa_schema from ..sql.cache_key import _ad_hoc_cache_key_from_args +from ..sql.elements import quoted_name from ..sql.elements import TextClause from ..sql.type_api import TypeEngine from ..sql.visitors import InternalTraversal @@ -89,8 +90,16 @@ def cache( exclude = {"info_cache", "unreflectable"} key = ( fn.__name__, - tuple(a for a in args if isinstance(a, str)), - tuple((k, v) for k, v in kw.items() if k not in exclude), + tuple( + (str(a), a.quote) if isinstance(a, quoted_name) else a + for a in args + if isinstance(a, str) + ), + tuple( + (k, (str(v), v.quote) if isinstance(v, quoted_name) else v) + for k, v in kw.items() + if k not in exclude + ), ) ret: _R = info_cache.get(key) if ret is None: diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py index 003b457a51a..adb40370655 100644 --- a/test/engine/test_reflection.py +++ b/test/engine/test_reflection.py @@ -1,3 +1,4 @@ +import itertools import unicodedata import sqlalchemy as sa @@ -19,6 +20,8 @@ from sqlalchemy import testing from sqlalchemy import UniqueConstraint from sqlalchemy.engine import Inspector +from sqlalchemy.engine.reflection import cache +from sqlalchemy.sql.elements import quoted_name from sqlalchemy.testing import assert_raises from sqlalchemy.testing import assert_raises_message from sqlalchemy.testing import AssertsCompiledSQL @@ -2494,3 +2497,162 @@ def test_table_works_minus_fks(self, connection, tab_w_fks): "SELECT b_1.x, b_1.q, b_1.p, b_1.r, b_1.s, b_1.t " "FROM b AS b_1 JOIN a ON a.x = b_1.r", ) + + +class ReflectionCacheTest(fixtures.TestBase): + @testing.fixture(params=["arg", "kwarg"]) + def cache(self, connection, request): + dialect = connection.dialect + info_cache = {} + counter = itertools.count(1) + + @cache + def get_cached_name(self, connection, *args, **kw): + return next(counter) + + def get_cached_name_via_arg(name): + return get_cached_name( + dialect, connection, name, info_cache=info_cache + ) + + def get_cached_name_via_kwarg(name): + return get_cached_name( + dialect, connection, name=name, info_cache=info_cache + ) + + if request.param == "arg": + yield get_cached_name_via_arg + elif request.param == "kwarg": + yield get_cached_name_via_kwarg + else: + assert False + + @testing.fixture(params=[False, True]) + def quote(self, request): + yield request.param + + def test_single_string(self, cache): + # new value + eq_(cache("name1"), 1) + + # same value, counter not incremented + eq_(cache("name1"), 1) + + def test_multiple_string(self, cache): + # new value + eq_(cache("name1"), 1) + eq_(cache("name2"), 2) + + # same values, counter not incremented + eq_(cache("name1"), 1) + eq_(cache("name2"), 2) + + def test_single_quoted_name(self, cache, quote): + # new value + eq_(cache(quoted_name("name1", quote=quote)), 1) + + # same value, counter not incremented + eq_(cache(quoted_name("name1", quote=quote)), 1) + + def test_multiple_quoted_name(self, cache, quote): + # new value + eq_(cache(quoted_name("name1", quote=quote)), 1) + eq_(cache(quoted_name("name2", quote=quote)), 2) + + # same values, counter not incremented + eq_(cache(quoted_name("name1", quote=quote)), 1) + eq_(cache(quoted_name("name2", quote=quote)), 2) + + def test_single_quoted_name_and_string(self, cache, quote): + # new 
values + eq_(cache(quoted_name("n1", quote=quote)), 1) + eq_(cache("n1"), 2) + + # same values, counter not incremented + eq_(cache(quoted_name("n1", quote=quote)), 1) + eq_(cache("n1"), 2) + + def test_multiple_quoted_name_and_string(self, cache, quote): + # new values + eq_(cache(quoted_name("n1", quote=quote)), 1) + eq_(cache(quoted_name("n2", quote=quote)), 2) + eq_(cache("n1"), 3) + eq_(cache("n2"), 4) + + # same values, counter not incremented + eq_(cache(quoted_name("n1", quote=quote)), 1) + eq_(cache(quoted_name("n2", quote=quote)), 2) + eq_(cache("n1"), 3) + eq_(cache("n2"), 4) + + def test_single_quoted_name_false_true_and_string(self, cache, quote): + # new values + eq_(cache(quoted_name("n1", quote=quote)), 1) + eq_(cache(quoted_name("n1", quote=not quote)), 2) + eq_(cache("n1"), 3) + + # same values, counter not incremented + eq_(cache(quoted_name("n1", quote=quote)), 1) + eq_(cache(quoted_name("n1", quote=not quote)), 2) + eq_(cache("n1"), 3) + + def test_multiple_quoted_name_false_true_and_string(self, cache, quote): + # new values + eq_(cache(quoted_name("n1", quote=quote)), 1) + eq_(cache(quoted_name("n2", quote=quote)), 2) + eq_(cache(quoted_name("n1", quote=not quote)), 3) + eq_(cache(quoted_name("n2", quote=not quote)), 4) + eq_(cache("n1"), 5) + eq_(cache("n2"), 6) + + # same values, counter not incremented + eq_(cache(quoted_name("n1", quote=quote)), 1) + eq_(cache(quoted_name("n2", quote=quote)), 2) + eq_(cache(quoted_name("n1", quote=not quote)), 3) + eq_(cache(quoted_name("n2", quote=not quote)), 4) + eq_(cache("n1"), 5) + eq_(cache("n2"), 6) + + def test_multiple_quoted_name_false_true_and_string_arg_and_kwarg( + self, connection, quote + ): + dialect = connection.dialect + info_cache = {} + counter = itertools.count(1) + + @cache + def get_cached_name(self, connection, *args, **kw): + return next(counter) + + def cache_(*args, **kw): + return get_cached_name( + dialect, connection, *args, **kw, info_cache=info_cache + ) + + # new values + eq_(cache_(quoted_name("n1", quote=quote)), 1) + eq_(cache_(name=quoted_name("n1", quote=quote)), 2) + eq_(cache_(quoted_name("n2", quote=quote)), 3) + eq_(cache_(name=quoted_name("n2", quote=quote)), 4) + eq_(cache_(quoted_name("n1", quote=not quote)), 5) + eq_(cache_(name=quoted_name("n1", quote=not quote)), 6) + eq_(cache_(quoted_name("n2", quote=not quote)), 7) + eq_(cache_(name=quoted_name("n2", quote=not quote)), 8) + eq_(cache_("n1"), 9) + eq_(cache_(name="n1"), 10) + eq_(cache_("n2"), 11) + eq_(cache_(name="n2"), 12) + + # same values, counter not incremented + eq_(cache_(quoted_name("n1", quote=quote)), 1) + eq_(cache_(name=quoted_name("n1", quote=quote)), 2) + eq_(cache_(quoted_name("n2", quote=quote)), 3) + eq_(cache_(name=quoted_name("n2", quote=quote)), 4) + eq_(cache_(quoted_name("n1", quote=not quote)), 5) + eq_(cache_(name=quoted_name("n1", quote=not quote)), 6) + eq_(cache_(quoted_name("n2", quote=not quote)), 7) + eq_(cache_(name=quoted_name("n2", quote=not quote)), 8) + eq_(cache_("n1"), 9) + eq_(cache_(name="n1"), 10) + eq_(cache_("n2"), 11) + eq_(cache_(name="n2"), 12) From 4794629f243718c2d72d838785a4ac5494b7ad6b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 7 Aug 2024 14:31:48 -0400 Subject: [PATCH 284/544] note prefix_with as a hint mechanism References: #11702 References: #11704 Change-Id: Ieee0780f6d132a29269430fc26de5c0664b16c2e (cherry picked from commit 6cf5e2a188fc5e337d22a098a5fe9a9fe10cc7e7) --- lib/sqlalchemy/sql/selectable.py | 35 +++++++++++++++++++++++++++----- 1 file changed, 30 
insertions(+), 5 deletions(-) diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index c365a41f343..632e2e792bd 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -478,10 +478,20 @@ def with_statement_hint(self, text: str, dialect_name: str = "*") -> Self: """Add a statement hint to this :class:`_expression.Select` or other selectable object. - This method is similar to :meth:`_expression.Select.with_hint` - except that - it does not require an individual table, and instead applies to the - statement as a whole. + .. tip:: + + :meth:`_expression.Select.with_statement_hint` generally adds hints + **at the trailing end** of a SELECT statement. To place + dialect-specific hints such as optimizer hints at the **front** of + the SELECT statement after the SELECT keyword, use the + :meth:`_expression.Select.prefix_with` method for an open-ended + space, or for table-specific hints the + :meth:`_expression.Select.with_hint` may be used, which places + hints in a dialect-specific location. + + This method is similar to :meth:`_expression.Select.with_hint` except + that it does not require an individual table, and instead applies to + the statement as a whole. Hints here are specific to the backend database and may include directives such as isolation levels, file directives, fetch directives, @@ -493,7 +503,7 @@ def with_statement_hint(self, text: str, dialect_name: str = "*") -> Self: :meth:`_expression.Select.prefix_with` - generic SELECT prefixing which also can suit some database-specific HINT syntaxes such as - MySQL optimizer hints + MySQL or Oracle optimizer hints """ return self._with_hint(None, text, dialect_name) @@ -509,6 +519,17 @@ def with_hint( selectable to this :class:`_expression.Select` or other selectable object. + .. tip:: + + The :meth:`_expression.Select.with_hint` method adds hints that are + **specific to a single table** to a statement, in a location that + is **dialect-specific**. To add generic optimizer hints to the + **beginning** of a statement ahead of the SELECT keyword such as + for MySQL or Oracle, use the :meth:`_expression.Select.prefix_with` + method. To add optimizer hints to the **end** of a statement such + as for PostgreSQL, use the + :meth:`_expression.Select.with_statement_hint` method. + The text of the hint is rendered in the appropriate location for the database backend in use, relative to the given :class:`_schema.Table` or :class:`_expression.Alias` @@ -538,6 +559,10 @@ def with_hint( :meth:`_expression.Select.with_statement_hint` + :meth:`_expression.Select.prefix_with` - generic SELECT prefixing + which also can suit some database-specific HINT syntaxes such as + MySQL or Oracle optimizer hints + """ return self._with_hint(selectable, text, dialect_name) From 39132d8189ae01a6f9ea4ea270015d7615d80d56 Mon Sep 17 00:00:00 2001 From: Gabi Nagy Date: Fri, 9 Aug 2024 21:54:24 +0200 Subject: [PATCH 285/544] Fix typo in docs (#11715) (cherry picked from commit 387be54e60e96455de6ecaadf8ff20df8304c760) --- doc/build/orm/session_basics.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/orm/session_basics.rst b/doc/build/orm/session_basics.rst index 4b47be43bfc..0c04e34b2ed 100644 --- a/doc/build/orm/session_basics.rst +++ b/doc/build/orm/session_basics.rst @@ -154,7 +154,7 @@ The purpose of :class:`_orm.sessionmaker` is to provide a factory for :class:`_orm.Session` objects with a fixed configuration. 
As it is typical that an application will have an :class:`_engine.Engine` object in module scope, the :class:`_orm.sessionmaker` can provide a factory for -:class:`_orm.Session` objects that are against this engine:: +:class:`_orm.Session` objects that are constructed against this engine:: from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker From 7944dec6e20441c9ad327fed6936e296162e1def Mon Sep 17 00:00:00 2001 From: Gregg Lind Date: Fri, 9 Aug 2024 14:56:44 -0500 Subject: [PATCH 286/544] Docs: Dialects/index.rst - Add clickhouse-sqlalchemy pypi link. (#11717) Add the Pypi link for the `clickhouse-sqlalchemy` package to Externally supported dialects. (cherry picked from commit 081a91535f9229c957c0f40e035eb8c399bf6ab7) --- doc/build/dialects/index.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 564656ec513..b0064a09e9a 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -77,6 +77,8 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Apache Solr | sqlalchemy-solr_ | +------------------------------------------------+---------------------------------------+ +| Clickhouse | clickhouse-sqlalchemy_ | ++------------------------------------------------+---------------------------------------+ | CockroachDB | sqlalchemy-cockroachdb_ | +------------------------------------------------+---------------------------------------+ | CrateDB | sqlalchemy-cratedb_ | @@ -166,3 +168,4 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _databend-sqlalchemy: https://github.com/datafuselabs/databend-sqlalchemy .. _sqlalchemy-greenplum: https://github.com/PlaidCloud/sqlalchemy-greenplum .. _databricks: https://docs.databricks.com/en/dev-tools/sqlalchemy.html +.. 
_clickhouse-sqlalchemy: https://pypi.org/project/clickhouse-sqlalchemy/ From 9a6e31cf4efdf842b0025ed7b496803cbeb57fce Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 11 Aug 2024 15:41:36 -0400 Subject: [PATCH 287/544] turn off pyodbc pooling new updates of unixodbc are turning this on in CI revealing that our isolation level tests assume no pooling takes place, so disable this, which is only at global module level for pyodbc Change-Id: I971dfddc90d248281e8ca8677a3a41af6de28b86 (cherry picked from commit 896dbdb5920ffb645a8948c254f73dd0fcb0d3c0) --- lib/sqlalchemy/dialects/mssql/provision.py | 7 +++++++ lib/sqlalchemy/testing/provision.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/dialects/mssql/provision.py b/lib/sqlalchemy/dialects/mssql/provision.py index 143d386c45e..1c684b1dfef 100644 --- a/lib/sqlalchemy/dialects/mssql/provision.py +++ b/lib/sqlalchemy/dialects/mssql/provision.py @@ -22,10 +22,17 @@ from ...testing.provision import get_temp_table_name from ...testing.provision import log from ...testing.provision import normalize_sequence +from ...testing.provision import post_configure_engine from ...testing.provision import run_reap_dbs from ...testing.provision import temp_table_keyword_args +@post_configure_engine.for_db("mssql") +def post_configure_engine(url, engine, follower_ident): + if engine.driver == "pyodbc": + engine.dialect.dbapi.pooling = False + + @generate_driver_url.for_db("mssql") def generate_driver_url(url, driver, query_str): backend = url.get_backend_name() diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py index e50c6eb5d5d..c6dc43e5383 100644 --- a/lib/sqlalchemy/testing/provision.py +++ b/lib/sqlalchemy/testing/provision.py @@ -362,7 +362,7 @@ def update_db_opts(db_url, db_opts, options): def post_configure_engine(url, engine, follower_ident): """Perform extra steps after configuring an engine for testing. 
- (For the internal dialects, currently only used by sqlite, oracle) + (For the internal dialects, currently only used by sqlite, oracle, mssql) """ From 3e7b257542c57a590d607eb18ce6bf6a482d595c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 29 Jul 2024 12:34:34 -0400 Subject: [PATCH 288/544] cherry-pick changelog from 1.4.53 (cherry picked from commit 551b5135df63386d1540b709e37e86a629c1c25f) --- doc/build/changelog/changelog_14.rst | 97 ++++++++++++++++++- doc/build/changelog/unreleased_14/11417.rst | 12 --- doc/build/changelog/unreleased_14/11471.rst | 8 -- doc/build/changelog/unreleased_14/11499.rst | 6 -- doc/build/changelog/unreleased_14/11514.rst | 9 -- doc/build/changelog/unreleased_14/11544.rst | 10 -- doc/build/changelog/unreleased_14/11562.rst | 9 -- doc/build/changelog/unreleased_14/11582.rst | 7 -- .../unreleased_14/greenlet_compat.rst | 11 --- .../changelog/unreleased_14/mypy1110.rst | 14 --- 10 files changed, 96 insertions(+), 87 deletions(-) delete mode 100644 doc/build/changelog/unreleased_14/11417.rst delete mode 100644 doc/build/changelog/unreleased_14/11471.rst delete mode 100644 doc/build/changelog/unreleased_14/11499.rst delete mode 100644 doc/build/changelog/unreleased_14/11514.rst delete mode 100644 doc/build/changelog/unreleased_14/11544.rst delete mode 100644 doc/build/changelog/unreleased_14/11562.rst delete mode 100644 doc/build/changelog/unreleased_14/11582.rst delete mode 100644 doc/build/changelog/unreleased_14/greenlet_compat.rst delete mode 100644 doc/build/changelog/unreleased_14/mypy1110.rst diff --git a/doc/build/changelog/changelog_14.rst b/doc/build/changelog/changelog_14.rst index 47586bfd4f6..81d71ec2b5e 100644 --- a/doc/build/changelog/changelog_14.rst +++ b/doc/build/changelog/changelog_14.rst @@ -15,7 +15,102 @@ This document details individual issue-level changes made throughout .. changelog:: :version: 1.4.53 - :include_notes_from: unreleased_14 + :released: July 29, 2024 + + .. change:: + :tags: bug, general + :tickets: 11417 + :versions: 2.0.31 + + Set up full Python 3.13 support to the extent currently possible, repairing + issues within internal language helpers as well as the serializer extension + module. + + For version 1.4, this also modernizes the "extras" names in setup.cfg + to use dashes and not underscores for two-word names. Underscore names + are still present to accommodate potential compatibility issues. + + .. change:: + :tags: bug, sql + :tickets: 11471 + :versions: 2.0.31 + + Fixed caching issue where using the :meth:`.TextualSelect.add_cte` method + of the :class:`.TextualSelect` construct would not set a correct cache key + which distinguished between different CTE expressions. + + .. change:: + :tags: bug, engine + :tickets: 11499 + + Adjustments to the C extensions, which are specific to the SQLAlchemy 1.x + series, to work under Python 3.13. Pull request courtesy Ben Beasley. + + .. change:: + :tags: bug, mssql + :tickets: 11514 + :versions: 2.0.32 + + Fixed issue where SQL Server drivers don't support bound parameters when + rendering the "frame specification" for a window function, e.g. "ROWS + BETWEEN", etc. + + + .. change:: + :tags: bug, sql + :tickets: 11544 + :versions: 2.0 + + Fixed caching issue where the + :paramref:`_sql.Select.with_for_update.key_share` element of + :meth:`_sql.Select.with_for_update` was not considered as part of the cache + key, leading to incorrect caching if different variations of this parameter + were used with an otherwise identical statement. + + .. 
change:: + :tags: bug, orm, regression + :tickets: 11562 + :versions: 2.0.32 + + Fixed regression going back to 1.4 where accessing a collection using the + "dynamic" strategy on a transient object and attempting to query would + raise an internal error rather than the expected :class:`.NoResultFound` + that occurred in 1.3. + + .. change:: + :tags: bug, reflection, sqlite + :tickets: 11582 + :versions: 2.0.32 + + Fixed reflection of computed column in SQLite to properly account + for complex expressions. + + .. change:: + :tags: usecase, engine + :versions: 2.0.31 + + Modified the internal representation used for adapting asyncio calls to + greenlets to allow for duck-typed compatibility with third party libraries + that implement SQLAlchemy's "greenlet-to-asyncio" pattern directly. + Running code within a greenlet that features the attribute + ``__sqlalchemy_greenlet_provider__ = True`` will allow calls to + :func:`sqlalchemy.util.await_only` directly. + + + .. change:: + :tags: bug, mypy + :versions: 2.0.32 + + The deprecated mypy plugin is no longer fully functional with the latest + series of mypy 1.11.0, as changes in the mypy interpreter are no longer + compatible with the approach used by the plugin. If code is dependent on + the mypy plugin with sqlalchemy2-stubs, it's recommended to pin mypy to be + below the 1.11.0 series. Seek upgrading to the 2.0 series of SQLAlchemy + and migrating to the modern type annotations. + + .. seealso:: + + :ref:`mypy_toplevel` .. changelog:: :version: 1.4.52 diff --git a/doc/build/changelog/unreleased_14/11417.rst b/doc/build/changelog/unreleased_14/11417.rst deleted file mode 100644 index b37af43e3d3..00000000000 --- a/doc/build/changelog/unreleased_14/11417.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, general - :tickets: 11417 - :versions: 2.0.31 - - Set up full Python 3.13 support to the extent currently possible, repairing - issues within internal language helpers as well as the serializer extension - module. - - For version 1.4, this also modernizes the "extras" names in setup.cfg - to use dashes and not underscores for two-word names. Underscore names - are still present to accommodate potential compatibility issues. diff --git a/doc/build/changelog/unreleased_14/11471.rst b/doc/build/changelog/unreleased_14/11471.rst deleted file mode 100644 index 47fda837575..00000000000 --- a/doc/build/changelog/unreleased_14/11471.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 11471 - :versions: 2.0.31 - - Fixed caching issue where using the :meth:`.TextualSelect.add_cte` method - of the :class:`.TextualSelect` construct would not set a correct cache key - which distinguished between different CTE expressions. diff --git a/doc/build/changelog/unreleased_14/11499.rst b/doc/build/changelog/unreleased_14/11499.rst deleted file mode 100644 index e03062c1911..00000000000 --- a/doc/build/changelog/unreleased_14/11499.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: bug, engine - :tickets: 11499 - - Adjustments to the C extensions, which are specific to the SQLAlchemy 1.x - series, to work under Python 3.13. Pull request courtesy Ben Beasley. diff --git a/doc/build/changelog/unreleased_14/11514.rst b/doc/build/changelog/unreleased_14/11514.rst deleted file mode 100644 index 145f87f4384..00000000000 --- a/doc/build/changelog/unreleased_14/11514.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. 
change:: - :tags: bug, mssql - :tickets: 11514 - :versions: 2.0.32 - - Fixed issue where SQL Server drivers don't support bound parameters when - rendering the "frame specification" for a window function, e.g. "ROWS - BETWEEN", etc. - diff --git a/doc/build/changelog/unreleased_14/11544.rst b/doc/build/changelog/unreleased_14/11544.rst deleted file mode 100644 index 6bc3b9705f4..00000000000 --- a/doc/build/changelog/unreleased_14/11544.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 11544 - :versions: 2.0 - - Fixed caching issue where the - :paramref:`_sql.Select.with_for_update.key_share` element of - :meth:`_sql.Select.with_for_update` was not considered as part of the cache - key, leading to incorrect caching if different variations of this parameter - were used with an otherwise identical statement. diff --git a/doc/build/changelog/unreleased_14/11562.rst b/doc/build/changelog/unreleased_14/11562.rst deleted file mode 100644 index beaad363351..00000000000 --- a/doc/build/changelog/unreleased_14/11562.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm, regression - :tickets: 11562 - :versions: 2.0.32 - - Fixed regression going back to 1.4 where accessing a collection using the - "dynamic" strategy on a transient object and attempting to query would - raise an internal error rather than the expected :class:`.NoResultFound` - that occurred in 1.3. diff --git a/doc/build/changelog/unreleased_14/11582.rst b/doc/build/changelog/unreleased_14/11582.rst deleted file mode 100644 index 6a2009cbae4..00000000000 --- a/doc/build/changelog/unreleased_14/11582.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, reflection, sqlite - :tickets: 11582 - :versions: 2.0.32 - - Fixed reflection of computed column in SQLite to properly account - for complex expressions. diff --git a/doc/build/changelog/unreleased_14/greenlet_compat.rst b/doc/build/changelog/unreleased_14/greenlet_compat.rst deleted file mode 100644 index 95ce98113df..00000000000 --- a/doc/build/changelog/unreleased_14/greenlet_compat.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: usecase, engine - :versions: 2.0.31 - - Modified the internal representation used for adapting asyncio calls to - greenlets to allow for duck-typed compatibility with third party libraries - that implement SQLAlchemy's "greenlet-to-asyncio" pattern directly. - Running code within a greenlet that features the attribute - ``__sqlalchemy_greenlet_provider__ = True`` will allow calls to - :func:`sqlalchemy.util.await_only` directly. - diff --git a/doc/build/changelog/unreleased_14/mypy1110.rst b/doc/build/changelog/unreleased_14/mypy1110.rst deleted file mode 100644 index 3f1fe05ce2d..00000000000 --- a/doc/build/changelog/unreleased_14/mypy1110.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. change:: - :tags: bug, mypy - :versions: 2.0.32 - - The deprecated mypy plugin is no longer fully functional with the latest - series of mypy 1.11.0, as changes in the mypy interpreter are no longer - compatible with the approach used by the plugin. If code is dependent on - the mypy plugin with sqlalchemy2-stubs, it's recommended to pin mypy to be - below the 1.11.0 series. Seek upgrading to the 2.0 series of SQLAlchemy - and migrating to the modern type annotations. - - .. 
seealso:: - - :ref:`mypy_toplevel` From 1f58ce14568ec59dc7f8125caa736c2acbf40e89 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 29 Jul 2024 12:34:35 -0400 Subject: [PATCH 289/544] cherry-pick changelog update for 1.4.54 (cherry picked from commit 7ea59f7505a78cd801b48d82a97919a239086f61) --- doc/build/changelog/changelog_14.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/build/changelog/changelog_14.rst b/doc/build/changelog/changelog_14.rst index 81d71ec2b5e..e96d41bcca4 100644 --- a/doc/build/changelog/changelog_14.rst +++ b/doc/build/changelog/changelog_14.rst @@ -13,6 +13,10 @@ This document details individual issue-level changes made throughout :start-line: 5 +.. changelog:: + :version: 1.4.54 + :include_notes_from: unreleased_14 + .. changelog:: :version: 1.4.53 :released: July 29, 2024 From 36198e16fb36cba572af36d0aa09a5149fcf7812 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 13 Aug 2024 09:13:51 -0400 Subject: [PATCH 290/544] omit mysql8 dupe key alias for INSERT..FROM SELECT Fixed issue in MySQL dialect where using INSERT..FROM SELECT in combination with ON DUPLICATE KEY UPDATE would erroneously render on MySQL 8 and above the "AS new" clause, leading to syntax failures. This clause is required on MySQL 8 to follow the VALUES clause if use of the "new" alias is present, however is not permitted to follow a FROM SELECT clause. Fixes: #11731 Change-Id: I254a3db4e9dccd9a76b11fdfe6e38a064ba0b5cf (cherry picked from commit 63b45202848de0cb3cfd41de130000355cbb88ef) --- doc/build/changelog/unreleased_20/11731.rst | 10 +++++++++ lib/sqlalchemy/dialects/mysql/base.py | 2 +- test/dialect/mysql/test_compiler.py | 25 +++++++++++++++++++++ test/dialect/mysql/test_on_duplicate.py | 18 +++++++++++++++ 4 files changed, 54 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/11731.rst diff --git a/doc/build/changelog/unreleased_20/11731.rst b/doc/build/changelog/unreleased_20/11731.rst new file mode 100644 index 00000000000..34ab8b48c58 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11731.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, mysql + :tickets: 11731 + + Fixed issue in MySQL dialect where using INSERT..FROM SELECT in combination + with ON DUPLICATE KEY UPDATE would erroneously render on MySQL 8 and above + the "AS new" clause, leading to syntax failures. This clause is required + on MySQL 8 to follow the VALUES clause if use of the "new" alias is + present, however is not permitted to follow a FROM SELECT clause. 
+ diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 603b5ff7c8e..e512b9c75c2 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1349,7 +1349,7 @@ def visit_on_duplicate_key_update(self, on_duplicate, **kw): clauses = [] - requires_mysql8_alias = ( + requires_mysql8_alias = statement.select is None and ( self.dialect._requires_alias_for_on_duplicate_key ) diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py index 6712300aa40..189390659ad 100644 --- a/test/dialect/mysql/test_compiler.py +++ b/test/dialect/mysql/test_compiler.py @@ -1127,6 +1127,31 @@ def test_from_values(self, version: Variation): self.assert_compile(stmt, expected_sql, dialect=dialect) + @testing.variation("version", ["mysql8", "all_others"]) + def test_from_select(self, version: Variation): + stmt = insert(self.table).from_select( + ["id", "bar"], + select(self.table.c.id, literal("bar2")), + ) + stmt = stmt.on_duplicate_key_update( + bar=stmt.inserted.bar, baz=stmt.inserted.baz + ) + + expected_sql = ( + "INSERT INTO foos (id, bar) SELECT foos.id, %s AS anon_1 " + "FROM foos " + "ON DUPLICATE KEY UPDATE bar = VALUES(bar), baz = VALUES(baz)" + ) + if version.all_others: + dialect = None + elif version.mysql8: + dialect = mysql.dialect() + dialect._requires_alias_for_on_duplicate_key = True + else: + version.fail() + + self.assert_compile(stmt, expected_sql, dialect=dialect) + def test_from_literal(self): stmt = insert(self.table).values( [{"id": 1, "bar": "ab"}, {"id": 2, "bar": "b"}] diff --git a/test/dialect/mysql/test_on_duplicate.py b/test/dialect/mysql/test_on_duplicate.py index 5a4e6ca8d5c..35aebb470c3 100644 --- a/test/dialect/mysql/test_on_duplicate.py +++ b/test/dialect/mysql/test_on_duplicate.py @@ -3,6 +3,8 @@ from sqlalchemy import exc from sqlalchemy import func from sqlalchemy import Integer +from sqlalchemy import literal +from sqlalchemy import select from sqlalchemy import String from sqlalchemy import Table from sqlalchemy.dialects.mysql import insert @@ -63,6 +65,22 @@ def test_on_duplicate_key_update_multirow(self, connection): [(1, "ab", "bz", False)], ) + def test_on_duplicate_key_from_select(self, connection): + foos = self.tables.foos + conn = connection + conn.execute(insert(foos).values(dict(id=1, bar="b", baz="bz"))) + stmt = insert(foos).from_select( + ["id", "bar", "baz"], + select(foos.c.id, literal("bar2"), literal("baz2")), + ) + stmt = stmt.on_duplicate_key_update(bar=stmt.inserted.bar) + + conn.execute(stmt) + eq_( + conn.execute(foos.select().where(foos.c.id == 1)).fetchall(), + [(1, "bar2", "bz", False)], + ) + def test_on_duplicate_key_update_singlerow(self, connection): foos = self.tables.foos conn = connection From 06656edafcb6e8b0cfa784679101c0e5f7aa211c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 12 Aug 2024 19:50:05 -0400 Subject: [PATCH 291/544] de-memoize _proxy_key when new annotations are added Fixed regression from 1.3 where the column key used for a hybrid property might be populated with that of the underlying column that it returns, for a property that returns an ORM mapped column directly, rather than the key used by the hybrid property itself. 
Fixes: #11728 Change-Id: Ifb298e46a20f90f6b6a717674f142a87cbceb468 (cherry picked from commit ffc7e8d73b30ea45fb03e0727b9fe96b6b8d4cfa) --- doc/build/changelog/unreleased_14/11728.rst | 9 ++++ lib/sqlalchemy/sql/elements.py | 9 +++- test/ext/test_hybrid.py | 49 +++++++++++++++++++++ 3 files changed, 66 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_14/11728.rst diff --git a/doc/build/changelog/unreleased_14/11728.rst b/doc/build/changelog/unreleased_14/11728.rst new file mode 100644 index 00000000000..b27aa3333d7 --- /dev/null +++ b/doc/build/changelog/unreleased_14/11728.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, regression, orm + :tickets: 11728 + :versions: 2.0.33 + + Fixed regression from 1.3 where the column key used for a hybrid property + might be populated with that of the underlying column that it returns, for + a property that returns an ORM mapped column directly, rather than the key + used by the hybrid property itself. diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 70c9e01da57..45c1674d9fb 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -5315,7 +5315,14 @@ def __init__(self, element, values): def _with_annotations(self, values): clone = super()._with_annotations(values) - clone.__dict__.pop("comparator", None) + for attr in ( + "comparator", + "_proxy_key", + "_tq_key_label", + "_tq_label", + "_non_anon_label", + ): + clone.__dict__.pop(attr, None) return clone @util.memoized_property diff --git a/test/ext/test_hybrid.py b/test/ext/test_hybrid.py index 69e9c133515..8e3d7e9cd57 100644 --- a/test/ext/test_hybrid.py +++ b/test/ext/test_hybrid.py @@ -7,6 +7,7 @@ from sqlalchemy import insert from sqlalchemy import inspect from sqlalchemy import Integer +from sqlalchemy import LABEL_STYLE_DISAMBIGUATE_ONLY from sqlalchemy import LABEL_STYLE_TABLENAME_PLUS_COL from sqlalchemy import literal_column from sqlalchemy import Numeric @@ -423,6 +424,21 @@ def name(self): return A + @testing.fixture + def _unnamed_expr_matches_col_fixture(self): + Base = declarative_base() + + class A(Base): + __tablename__ = "a" + id = Column(Integer, primary_key=True) + foo = Column(String) + + @hybrid.hybrid_property + def bar(self): + return self.foo + + return A + def test_access_from_unmapped(self): """test #9519""" @@ -497,6 +513,39 @@ def test_labeling_for_unnamed(self, _unnamed_expr_fixture): "a.lastname AS name FROM a) AS anon_1", ) + @testing.variation("pre_populate_col_proxy", [True, False]) + def test_labeling_for_unnamed_matches_col( + self, _unnamed_expr_matches_col_fixture, pre_populate_col_proxy + ): + """test #11728""" + + A = _unnamed_expr_matches_col_fixture + + if pre_populate_col_proxy: + pre_stmt = select(A.id, A.foo) + pre_stmt.subquery().c + + stmt = select(A.id, A.bar) + self.assert_compile( + stmt, + "SELECT a.id, a.foo FROM a", + ) + + compile_state = stmt._compile_state_factory(stmt, None) + eq_( + compile_state._column_naming_convention( + LABEL_STYLE_DISAMBIGUATE_ONLY, legacy=False + )(list(stmt.inner_columns)[1]), + "bar", + ) + eq_(stmt.subquery().c.keys(), ["id", "bar"]) + + self.assert_compile( + select(stmt.subquery()), + "SELECT anon_1.id, anon_1.foo FROM " + "(SELECT a.id AS id, a.foo AS foo FROM a) AS anon_1", + ) + def test_labeling_for_unnamed_tablename_plus_col( self, _unnamed_expr_fixture ): From 13f067024591f459833b7744297d925e421c0fe1 Mon Sep 17 00:00:00 2001 From: Jeff Horemans Date: Tue, 13 Aug 2024 11:22:51 -0400 Subject: [PATCH 292/544] Reflect broader 
range of check constraints for SQLite. Improvements to the regex used by the SQLite dialect to reflect the name and contents of a CHECK constraint. Constraints with newline, tab, or space characters in either or both the constraint text and constraint name are now properly reflected. Pull request courtesy Jeff Horemans. Fixes: #11677 Closes: #11701 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11701 Pull-request-sha: b2d629f18695ecb08cddeb99d7c2573e1f3d04b2 Change-Id: I2dd06c778e7c130848e418a80004032eb6144e6d (cherry picked from commit 4c8469947d79766813bc9326ef16c25ff6882f53) --- doc/build/changelog/unreleased_20/11677.rst | 10 +++ lib/sqlalchemy/dialects/sqlite/base.py | 70 ++++++++++++++++++--- test/dialect/test_sqlite.py | 38 +++++++++++ 3 files changed, 110 insertions(+), 8 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11677.rst diff --git a/doc/build/changelog/unreleased_20/11677.rst b/doc/build/changelog/unreleased_20/11677.rst new file mode 100644 index 00000000000..b1ac39b436f --- /dev/null +++ b/doc/build/changelog/unreleased_20/11677.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, sqlite + :tickets: 11677 + + Improvements to the regex used by the SQLite dialect to reflect the name + and contents of a CHECK constraint. Constraints with newline, tab, or + space characters in either or both the constraint text and constraint name + are now properly reflected. Pull request courtesy Jeff Horemans. + + diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 04e84a68d2e..a678e10940c 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -2624,15 +2624,69 @@ def get_check_constraints(self, connection, table_name, schema=None, **kw): connection, table_name, schema=schema, **kw ) - CHECK_PATTERN = r"(?:CONSTRAINT (.+) +)?" r"CHECK *\( *(.+) *\),? *" + # Notes: + # * The pattern currently matches any character for the name of the + # constraint, including newline characters (re.S flag) as long as + # none of the SQLite's table constraints keywords are encountered + # by a negative lookahead. + # This prevents the pattern from matching subsequent constraints + # as part of the name. + # This is only done for those keywords if seperated by spaces, to + # support constraint names that contains them e.g. "check_value". + # + # * Because check constraint definitions can also contain newline + # or tab characters, the pattern matches any character untill either + # the beginning of the next constraint statement using a + # non-capturing and non-consuming group, allowing the next one + # to match, or the end of the table definition + # e.g. newline and closing ')'. + CHECK_PATTERN = r""" + # Non-capturing group for the name part of named check constraints. + # This group is optional as unnamed check constraints can exist. + (?: + # Match beginning of constraint definition seperated by whitespace. + CONSTRAINT\s + + # First capturing group that matches the actual name of the constraint. + # Any characters is allowed, as long as none of the reserved table + # constraint keywords are encountered using a negative lookahead. + ((?:(?!\sPRIMARY\s|\sFOREIGN\sKEY|\sUNIQUE\s|\sCHECK\s).)+) + + # End of optional non-capturing name group seperated by whitespace. + \s)? + + # Match beginning of the check expression with starting parenthesis + # and optional whitespace. + CHECK\s?\( + + # Match actual expression, which can be any character. + (.+?) 
+ + # End parenthesis of the check expression. + \) + + # Non-capturing group that helps denote the end of the check + # expression part. + # This can either be (1) the beginning of the next constraint, + # or (2) the end of the table definition. + (?: + + # (1) Matches end of check constraint with trailing comma, + # optional whitespace (including newline), and the beginning + # of the next constraint (either named or unnamed). + ,[\s\n]*(?=CONSTRAINT|CHECK) + # OR operator, seperating (1) & (2) + | + # (2) Matches end parenthesis of table definition, seperated by + # newline. + \n\) + # End of non-capturing group. + ) + """ cks = [] - # NOTE: we aren't using re.S here because we actually are - # taking advantage of each CHECK constraint being all on one - # line in the table definition in order to delineate. This - # necessarily makes assumptions as to how the CREATE TABLE - # was emitted. - - for match in re.finditer(CHECK_PATTERN, table_data or "", re.I): + for match in re.finditer( + CHECK_PATTERN, table_data or "", re.I | re.S | re.VERBOSE + ): name = match.group(1) if name: diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index 33be95e6d9e..3ca43baece6 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -1816,6 +1816,27 @@ def setup_test_class(cls): Table("q", meta, Column("id", Integer), PrimaryKeyConstraint("id")) + # intentional new line + Table( + "r", + meta, + Column("id", Integer), + Column("value", Integer), + Column("prefix", String), + PrimaryKeyConstraint("id"), + CheckConstraint("id > 0"), + # Constraint definition with newline and tab characters + CheckConstraint( + """((value > 0) AND \n\t(value < 100) AND \n\t + (value != 50))""", + name="ck_r_value_multiline", + ), + # Constraint name with special chars and 'check' in the name + CheckConstraint("value IS NOT NULL", name="^check-r* #\n\t"), + # Constraint definition with special characters. + CheckConstraint("prefix NOT GLOB '*[^-. /#,]*'"), + ) + meta.create_all(conn) # will contain an "autoindex" @@ -1911,6 +1932,7 @@ def teardown_test_class(cls): "b", "a1", "a2", + "r", ]: conn.exec_driver_sql("drop table %s" % name) @@ -2456,6 +2478,22 @@ def test_check_constraint(self): {"sqltext": "q > 1 AND q < 6", "name": None}, ], ) + print(inspector.get_check_constraints("r")) + eq_( + inspector.get_check_constraints("r"), + [ + {"sqltext": "value IS NOT NULL", "name": "^check-r* #\n\t"}, + # Triple-quote multi-line definition should have added a + # newline and whitespace: + { + "sqltext": "((value > 0) AND \n\t(value < 100) AND \n\t\n" + " (value != 50))", + "name": "ck_r_value_multiline", + }, + {"sqltext": "id > 0", "name": None}, + {"sqltext": "prefix NOT GLOB '*[^-. 
/#,]*'", "name": None}, + ], + ) @testing.combinations( ("plain_name", "plain_name"), From fb0331faf355308fe5ad2e1ca21935f7768c79d3 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 15 Aug 2024 08:46:13 -0400 Subject: [PATCH 293/544] remove print statement Change-Id: I33c9f7daee1034639bb0725b114f6e48803a4fed (cherry picked from commit 98836f07f3219ab49aaabeb3a647d9a9799aacd5) --- test/dialect/test_sqlite.py | 1 - 1 file changed, 1 deletion(-) diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index 3ca43baece6..da59208bea8 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -2478,7 +2478,6 @@ def test_check_constraint(self): {"sqltext": "q > 1 AND q < 6", "name": None}, ], ) - print(inspector.get_check_constraints("r")) eq_( inspector.get_check_constraints("r"), [ From 0736d3b39636492cd374d020c9f41d451e099db2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edgar=20Ram=C3=ADrez=20Mondrag=C3=B3n?= <16805946+edgarrmondragon@users.noreply.github.com> Date: Mon, 19 Aug 2024 12:17:54 -0600 Subject: [PATCH 294/544] Fix docs link to Oracle Cloud Autonomous Databases (#11762) (cherry picked from commit 89355b82f444e49e0613e4ba8423ffab33e77c38) --- lib/sqlalchemy/dialects/oracle/cx_oracle.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index 873d943371d..ed9b02d3fb1 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -62,7 +62,7 @@ Users of Oracle Cloud should use this syntax and also configure the cloud wallet as shown in cx_Oracle documentation `Connecting to Autononmous Databases -`_. +`_. SID Connections ^^^^^^^^^^^^^^^ From a89f1befed749c26a0e1bf028848f373b61e1322 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 21 Aug 2024 10:49:27 -0400 Subject: [PATCH 295/544] add missing slash for sqlite URL with explicit :memory: Fixes: #11772 Change-Id: I3bdb1c81c5a503cb16143c9d1c130ec79769fbca (cherry picked from commit 663e6f1d60c99fa93da479b4f6f3b3c5e49cc2a8) --- lib/sqlalchemy/dialects/sqlite/pysqlite.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index f39baf32171..69a902c32ab 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -52,11 +52,11 @@ e = create_engine('sqlite:///C:\\path\\to\\database.db') To use sqlite ``:memory:`` database specify it as the filename using -``sqlite://:memory:``. It's also the default if no filepath is +``sqlite:///:memory:``. 
It's also the default if no filepath is present, specifying only ``sqlite://`` and nothing else:: - # in-memory database - e = create_engine('sqlite://:memory:') + # in-memory database (note three slashes) + e = create_engine('sqlite:///:memory:') # also in-memory database e2 = create_engine('sqlite://') From 4aaf491f00e07cc179806bfeafa6b013d173e995 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sun, 11 Aug 2024 11:55:15 +0200 Subject: [PATCH 296/544] fix most broken links This was achieved by running Broken Link Checker https://github.com/stevenvachon/broken-link-checker on the docs Change-Id: Ic415f9435b3c817e518fbbad46ac8fb9e8503d15 (cherry picked from commit 21921a8a94452aed37766645f4d7785d33e6e70f) --- doc/build/changelog/changelog_04.rst | 2 +- doc/build/changelog/changelog_05.rst | 2 +- doc/build/changelog/changelog_08.rst | 7 ++++--- doc/build/changelog/migration_06.rst | 11 +++++------ doc/build/changelog/migration_07.rst | 3 +-- doc/build/changelog/migration_08.rst | 2 +- doc/build/changelog/migration_09.rst | 2 +- doc/build/changelog/migration_10.rst | 2 +- doc/build/errors.rst | 2 +- lib/sqlalchemy/dialects/oracle/base.py | 4 ++-- lib/sqlalchemy/ext/automap.py | 2 +- 11 files changed, 19 insertions(+), 20 deletions(-) diff --git a/doc/build/changelog/changelog_04.rst b/doc/build/changelog/changelog_04.rst index b0312b0921a..323aeb46541 100644 --- a/doc/build/changelog/changelog_04.rst +++ b/doc/build/changelog/changelog_04.rst @@ -60,7 +60,7 @@ convert_unicode logic disabled in the sqlite dialect, to adjust for pysqlite 2.5.0's new requirement that only Python unicode objects are accepted; - https://itsystementwicklung.de/pipermail/list-pysqlite/2008-March/000018.html + https://web.archive.org/web/20090614054912/https://itsystementwicklung.de/pipermail/list-pysqlite/2008-March/000018.html .. change:: :tags: oracle diff --git a/doc/build/changelog/changelog_05.rst b/doc/build/changelog/changelog_05.rst index e998cb4443b..c0125f7dee4 100644 --- a/doc/build/changelog/changelog_05.rst +++ b/doc/build/changelog/changelog_05.rst @@ -2873,7 +2873,7 @@ logic disabled in the sqlite dialect, to adjust for pysqlite 2.5.0's new requirement that only Python unicode objects are accepted; - https://itsystementwicklung.de/pipermail/list-pysqlite/2008-March/000018.html + http://web.archive.org/web/20090614054912/https://itsystementwicklung.de/pipermail/list-pysqlite/2008-March/000018.html .. change:: :tags: mysql diff --git a/doc/build/changelog/changelog_08.rst b/doc/build/changelog/changelog_08.rst index 363f5aeb1b8..7bca35df9cb 100644 --- a/doc/build/changelog/changelog_08.rst +++ b/doc/build/changelog/changelog_08.rst @@ -3499,7 +3499,7 @@ ready for general use yet, however it does have *extremely* rudimental functionality now. - https://bitbucket.org/zzzeek/sqlalchemy-access + https://github.com/gordthompson/sqlalchemy-access .. change:: :tags: maxdb, moved @@ -3507,8 +3507,9 @@ The MaxDB dialect, which hasn't been functional for several years, is - moved out to a pending bitbucket project, - https://bitbucket.org/zzzeek/sqlalchemy-maxdb. + moved out to a pending bitbucket project, (deleted; to view + the MaxDB code see the commit before it was removed at + https://github.com/sqlalchemy/sqlalchemy/tree/ba67f7dbc5eb7a1ed2a3e1b56df72a837130f7bb/lib/sqlalchemy/dialects/maxdb) .. 
change:: :tags: sqlite, feature diff --git a/doc/build/changelog/migration_06.rst b/doc/build/changelog/migration_06.rst index 0330ac5d4a4..320f34009af 100644 --- a/doc/build/changelog/migration_06.rst +++ b/doc/build/changelog/migration_06.rst @@ -86,11 +86,10 @@ sign "+": Important Dialect Links: * Documentation on connect arguments: - https://www.sqlalchemy.org/docs/06/dbengine.html#create- - engine-url-arguments. + https://www.sqlalchemy.org/docs/06/dbengine.html#create-engine-url-arguments. -* Reference documentation for individual dialects: https://ww - w.sqlalchemy.org/docs/06/reference/dialects/index.html +* Reference documentation for individual dialects: + https://www.sqlalchemy.org/docs/06/reference/dialects/index.html. * The tips and tricks at DatabaseNotes. @@ -1223,8 +1222,8 @@ SQLSoup SQLSoup has been modernized and updated to reflect common 0.5/0.6 capabilities, including well defined session -integration. Please read the new docs at [https://www.sqlalc -hemy.org/docs/06/reference/ext/sqlsoup.html]. +integration. Please read the new docs at +[https://www.sqlalchemy.org/docs/06/reference/ext/sqlsoup.html]. Declarative ----------- diff --git a/doc/build/changelog/migration_07.rst b/doc/build/changelog/migration_07.rst index 19716ad3c4c..4f1c98be1a8 100644 --- a/doc/build/changelog/migration_07.rst +++ b/doc/build/changelog/migration_07.rst @@ -204,8 +204,7 @@ scenarios. Highlights of this release include: A demonstration of callcount reduction including a sample benchmark script is at -https://techspot.zzzeek.org/2010/12/12/a-tale-of-three- -profiles/ +https://techspot.zzzeek.org/2010/12/12/a-tale-of-three-profiles/ Composites Rewritten -------------------- diff --git a/doc/build/changelog/migration_08.rst b/doc/build/changelog/migration_08.rst index 7b42aae4744..ea9b9170537 100644 --- a/doc/build/changelog/migration_08.rst +++ b/doc/build/changelog/migration_08.rst @@ -1494,7 +1494,7 @@ SQLSoup SQLSoup is a handy package that presents an alternative interface on top of the SQLAlchemy ORM. SQLSoup is now moved into its own project and documented/released -separately; see https://bitbucket.org/zzzeek/sqlsoup. +separately; see https://github.com/zzzeek/sqlsoup. SQLSoup is a very simple tool that could also benefit from contributors who are interested in its style of usage. diff --git a/doc/build/changelog/migration_09.rst b/doc/build/changelog/migration_09.rst index 287fc2c933a..61cd9a3a307 100644 --- a/doc/build/changelog/migration_09.rst +++ b/doc/build/changelog/migration_09.rst @@ -1148,7 +1148,7 @@ can be dropped in using callable functions. It is hoped that the :class:`.AutomapBase` system provides a quick and modernized solution to the problem that the very famous -`SQLSoup `_ +`SQLSoup `_ also tries to solve, that of generating a quick and rudimentary object model from an existing database on the fly. By addressing the issue strictly at the mapper configuration level, and integrating fully with existing diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index 5a016140ae3..1e61b308571 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -2680,7 +2680,7 @@ on MySQL:: Drizzle Dialect is now an External Dialect ------------------------------------------ -The dialect for `Drizzle `_ is now an external +The dialect for `Drizzle `_ is now an external dialect, available at https://bitbucket.org/zzzeek/sqlalchemy-drizzle. 
This dialect was added to SQLAlchemy right before SQLAlchemy was able to accommodate third party dialects well; going forward, all databases that aren't diff --git a/doc/build/errors.rst b/doc/build/errors.rst index 4c12e0fb179..237d5d0ab3b 100644 --- a/doc/build/errors.rst +++ b/doc/build/errors.rst @@ -475,7 +475,7 @@ when a construct is stringified without any dialect-specific information. However, there are many constructs that are specific to some particular kind of database dialect, for which the :class:`.StrSQLCompiler` doesn't know how to turn into a string, such as the PostgreSQL -`"insert on conflict" `_ construct:: +:ref:`postgresql_insert_on_conflict` construct:: >>> from sqlalchemy.dialects.postgresql import insert >>> from sqlalchemy import table, column diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index aa98381a416..9dd3acf7c14 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -331,7 +331,7 @@ Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based solution is available at -https://asktom.oracle.com/tkyte/update_cascade/index.html . +https://web.archive.org/web/20090317041251/https://asktom.oracle.com/tkyte/update_cascade/index.html When using the SQLAlchemy ORM, the ORM has limited ability to manually issue cascading updates - specify ForeignKey objects using the @@ -1277,7 +1277,7 @@ def define_constraint_cascades(self, constraint): # oracle has no ON UPDATE CASCADE - # its only available via triggers - # https://asktom.oracle.com/tkyte/update_cascade/index.html + # https://web.archive.org/web/20090317041251/https://asktom.oracle.com/tkyte/update_cascade/index.html if constraint.onupdate is not None: util.warn( "Oracle does not contain native UPDATE CASCADE " diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py index d7920904bb0..70b0fe62c11 100644 --- a/lib/sqlalchemy/ext/automap.py +++ b/lib/sqlalchemy/ext/automap.py @@ -11,7 +11,7 @@ It is hoped that the :class:`.AutomapBase` system provides a quick and modernized solution to the problem that the very famous -`SQLSoup `_ +`SQLSoup `_ also tries to solve, that of generating a quick and rudimentary object model from an existing database on the fly. 
By addressing the issue strictly at the mapper configuration level, and integrating fully with existing From 48a38511a2354615f1698250133abafb134e51a3 Mon Sep 17 00:00:00 2001 From: Gord Thompson Date: Sun, 25 Aug 2024 15:53:59 -0600 Subject: [PATCH 297/544] Update link for sqlalchemy-pytds dialect (#11786) (cherry picked from commit 5b117f3d4b38d12d61a39fc60582d4348232334f) --- doc/build/dialects/index.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index b0064a09e9a..1a230481961 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -109,7 +109,7 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Microsoft Access (via pyodbc) | sqlalchemy-access_ | +------------------------------------------------+---------------------------------------+ -| Microsoft SQL Server (via python-tds) | sqlalchemy-tds_ | +| Microsoft SQL Server (via python-tds) | sqlalchemy-pytds_ | +------------------------------------------------+---------------------------------------+ | Microsoft SQL Server (via turbodbc) | sqlalchemy-turbodbc_ | +------------------------------------------------+---------------------------------------+ @@ -151,7 +151,7 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _sqlalchemy-sqlany: https://github.com/sqlanywhere/sqlalchemy-sqlany .. _sqlalchemy-monetdb: https://github.com/gijzelaerr/sqlalchemy-monetdb .. _snowflake-sqlalchemy: https://github.com/snowflakedb/snowflake-sqlalchemy -.. _sqlalchemy-tds: https://github.com/m32/sqlalchemy-tds +.. _sqlalchemy-pytds: https://pypi.org/project/sqlalchemy-pytds/ .. _sqlalchemy-cratedb: https://github.com/crate/sqlalchemy-cratedb .. _sqlalchemy-access: https://pypi.org/project/sqlalchemy-access/ .. _elasticsearch-dbapi: https://github.com/preset-io/elasticsearch-dbapi/ From 909a9429fdbab45c9bd91783916cb2358b5406a0 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 27 Aug 2024 19:20:44 +0200 Subject: [PATCH 298/544] Fix memory leak on top-level _ModuleMarker. Correctly cleanup the internal top-level module registry when no inner modules or classes are registered into it. Fixes: #11788 Change-Id: I489dd6394dd3f14458379368b8c8f18d5a0bb109 (cherry picked from commit 4ae9e65e1d69100e585f783dfe8f2150388b49f4) --- doc/build/changelog/unreleased_20/11788.rst | 6 ++++++ lib/sqlalchemy/orm/clsregistry.py | 5 +++-- test/orm/declarative/test_clsregistry.py | 2 +- 3 files changed, 10 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11788.rst diff --git a/doc/build/changelog/unreleased_20/11788.rst b/doc/build/changelog/unreleased_20/11788.rst new file mode 100644 index 00000000000..736cbd3370f --- /dev/null +++ b/doc/build/changelog/unreleased_20/11788.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, orm + :tickets: 11788 + + Correctly cleanup the internal top-level module registry when no + inner modules or classes are registered into it. 
diff --git a/lib/sqlalchemy/orm/clsregistry.py b/lib/sqlalchemy/orm/clsregistry.py index 26113d8b24d..382d6aef9be 100644 --- a/lib/sqlalchemy/orm/clsregistry.py +++ b/lib/sqlalchemy/orm/clsregistry.py @@ -287,8 +287,9 @@ def __getitem__(self, name: str) -> ClsRegistryToken: def _remove_item(self, name: str) -> None: self.contents.pop(name, None) - if not self.contents and self.parent is not None: - self.parent._remove_item(self.name) + if not self.contents: + if self.parent is not None: + self.parent._remove_item(self.name) _registries.discard(self) def resolve_attr(self, key: str) -> Union[_ModNS, Type[Any]]: diff --git a/test/orm/declarative/test_clsregistry.py b/test/orm/declarative/test_clsregistry.py index ffc8528125c..0cf775e4d27 100644 --- a/test/orm/declarative/test_clsregistry.py +++ b/test/orm/declarative/test_clsregistry.py @@ -230,7 +230,7 @@ def test_dupe_classes_cleanout(self): del f2 gc_collect() - eq_(len(clsregistry._registries), 1) + eq_(len(clsregistry._registries), 0) def test_dupe_classes_name_race(self): """test the race condition that the class was garbage " From 4c13e0c651f2dcb14a1a37c6d4105af145cccc0a Mon Sep 17 00:00:00 2001 From: John A Stevenson Date: Mon, 26 Aug 2024 11:05:38 -0400 Subject: [PATCH 299/544] Update SQLite UNIQUE inline constraint parsing to handle tabs Improvements to the regex used by the SQLite dialect to reflect the name and contents of a UNIQUE constraint that is defined inline within a column definition inside of a SQLite CREATE TABLE statement, accommodating for tab characters present within the column / constraint line. Pull request courtesy John A Stevenson. Fixes: #11746 Closes: #11759 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11759 Pull-request-sha: 5752491154ad19e29abec8d69fa4076d996d964e Change-Id: I048a90c992bfaf6857c1150f50bf3c6cc5697095 (cherry picked from commit 8071c21b5cb5009b45e8449dcb37b7c3786445e0) --- doc/build/changelog/unreleased_20/11746.rst | 12 +++++++++ lib/sqlalchemy/dialects/sqlite/base.py | 4 +-- test/dialect/test_sqlite.py | 27 ++++++++++++++++----- 3 files changed, 35 insertions(+), 8 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11746.rst diff --git a/doc/build/changelog/unreleased_20/11746.rst b/doc/build/changelog/unreleased_20/11746.rst new file mode 100644 index 00000000000..36dc1a7393c --- /dev/null +++ b/doc/build/changelog/unreleased_20/11746.rst @@ -0,0 +1,12 @@ +.. change:: + :tags: bug, sqlite + :tickets: 11746 + + Improvements to the regex used by the SQLite dialect to reflect the name + and contents of a UNIQUE constraint that is defined inline within a column + definition inside of a SQLite CREATE TABLE statement, accommodating for tab + characters present within the column / constraint line. Pull request + courtesy John A Stevenson. + + + diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index a678e10940c..cf8f16966ba 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -2588,8 +2588,8 @@ def parse_uqs(): return UNIQUE_PATTERN = r'(?:CONSTRAINT "?(.+?)"? +)?UNIQUE *\((.+?)\)' INLINE_UNIQUE_PATTERN = ( - r'(?:(".+?")|(?:[\[`])?([a-z0-9_]+)(?:[\]`])?) ' - r"+[a-z0-9_ ]+? 
+UNIQUE" + r'(?:(".+?")|(?:[\[`])?([a-z0-9_]+)(?:[\]`])?)[\t ]' + r"+[a-z0-9_ ]+?[\t ]+UNIQUE" ) for match in re.finditer(UNIQUE_PATTERN, table_data, re.I): diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index da59208bea8..bdc3e7d09f0 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -2503,17 +2503,27 @@ def test_check_constraint(self): argnames="colname,expected", ) @testing.combinations( - "uq", "uq_inline", "pk", "ix", argnames="constraint_type" + "uq", + "uq_inline", + "uq_inline_tab_before", # tab before column params + "uq_inline_tab_within", # tab within column params + "pk", + "ix", + argnames="constraint_type", ) def test_constraint_cols( self, colname, expected, constraint_type, connection, metadata ): - if constraint_type == "uq_inline": + if constraint_type.startswith("uq_inline"): + inline_create_sql = { + "uq_inline": "CREATE TABLE t (%s INTEGER UNIQUE)", + "uq_inline_tab_before": "CREATE TABLE t (%s\tINTEGER UNIQUE)", + "uq_inline_tab_within": "CREATE TABLE t (%s INTEGER\tUNIQUE)", + } + t = Table("t", metadata, Column(colname, Integer)) connection.exec_driver_sql( - """ - CREATE TABLE t (%s INTEGER UNIQUE) - """ + inline_create_sql[constraint_type] % connection.dialect.identifier_preparer.quote(colname) ) else: @@ -2531,7 +2541,12 @@ def test_constraint_cols( t.create(connection) - if constraint_type in ("uq", "uq_inline"): + if constraint_type in ( + "uq", + "uq_inline", + "uq_inline_tab_before", + "uq_inline_tab_within", + ): const = inspect(connection).get_unique_constraints("t")[0] eq_(const["column_names"], [expected]) elif constraint_type == "pk": From 5a036f803d221a13e51b71b0e9b08dca499637f2 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 27 Aug 2024 20:04:52 +0200 Subject: [PATCH 300/544] fix select.with_only_columns type hints Fixes: #11782 Change-Id: Idce218a9730986d3ca70547c83aa1c0f8b5ee5b2 (cherry picked from commit 73344fd0d35bd2bf4c4bb8f2a8534a97d7f241af) --- doc/build/changelog/unreleased_20/11782.rst | 5 ++++ lib/sqlalchemy/sql/selectable.py | 31 +++++++++++++++++---- tools/format_docs_code.py | 21 ++++++++------ tools/generate_proxy_methods.py | 29 +++++++++++-------- tools/generate_sql_functions.py | 1 + tools/generate_tuple_map_overloads.py | 31 +++++++++++++-------- tools/normalize_file_headers.py | 2 +- tools/trace_orm_adapter.py | 1 + 8 files changed, 84 insertions(+), 37 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11782.rst diff --git a/doc/build/changelog/unreleased_20/11782.rst b/doc/build/changelog/unreleased_20/11782.rst new file mode 100644 index 00000000000..df8e1f5c3bd --- /dev/null +++ b/doc/build/changelog/unreleased_20/11782.rst @@ -0,0 +1,5 @@ +.. change:: + :tags: bug, typing + :tickets: 11782 + + Fixed typing issue with :meth:`_sql.Select.with_only_columns`. diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 632e2e792bd..2a96a7a1008 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -5819,22 +5819,33 @@ def reduce_columns(self, only_synonyms: bool = True) -> Select[Any]: ) return woc - # START OVERLOADED FUNCTIONS self.with_only_columns Select 8 + # START OVERLOADED FUNCTIONS self.with_only_columns Select 1-8 ", *, maintain_column_froms: bool =..." 
# noqa: E501 # code within this block is **programmatically, - # statically generated** by tools/generate_sel_v1_overloads.py + # statically generated** by tools/generate_tuple_map_overloads.py @overload - def with_only_columns(self, __ent0: _TCCA[_T0]) -> Select[Tuple[_T0]]: ... + def with_only_columns( + self, __ent0: _TCCA[_T0], *, maintain_column_froms: bool = ... + ) -> Select[Tuple[_T0]]: ... @overload def with_only_columns( - self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1] + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + *, + maintain_column_froms: bool = ..., ) -> Select[Tuple[_T0, _T1]]: ... @overload def with_only_columns( - self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2] + self, + __ent0: _TCCA[_T0], + __ent1: _TCCA[_T1], + __ent2: _TCCA[_T2], + *, + maintain_column_froms: bool = ..., ) -> Select[Tuple[_T0, _T1, _T2]]: ... @overload @@ -5844,6 +5855,8 @@ def with_only_columns( __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], + *, + maintain_column_froms: bool = ..., ) -> Select[Tuple[_T0, _T1, _T2, _T3]]: ... @overload @@ -5854,6 +5867,8 @@ def with_only_columns( __ent2: _TCCA[_T2], __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], + *, + maintain_column_froms: bool = ..., ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4]]: ... @overload @@ -5865,6 +5880,8 @@ def with_only_columns( __ent3: _TCCA[_T3], __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], + *, + maintain_column_froms: bool = ..., ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ... @overload @@ -5877,6 +5894,8 @@ def with_only_columns( __ent4: _TCCA[_T4], __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], + *, + maintain_column_froms: bool = ..., ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload @@ -5890,6 +5909,8 @@ def with_only_columns( __ent5: _TCCA[_T5], __ent6: _TCCA[_T6], __ent7: _TCCA[_T7], + *, + maintain_column_froms: bool = ..., ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... # END OVERLOADED FUNCTIONS self.with_only_columns diff --git a/tools/format_docs_code.py b/tools/format_docs_code.py index 7bae0126b02..8d24a9163af 100644 --- a/tools/format_docs_code.py +++ b/tools/format_docs_code.py @@ -6,6 +6,7 @@ .. versionadded:: 2.0 """ + # mypy: ignore-errors from argparse import ArgumentParser @@ -316,11 +317,13 @@ def main( print( f"{to_reformat} file(s) would be reformatted;", ( - f"{sum(formatting_error_counts)} formatting errors " - f"reported in {len(formatting_error_counts)} files" - ) - if formatting_error_counts - else "no formatting errors reported", + ( + f"{sum(formatting_error_counts)} formatting errors " + f"reported in {len(formatting_error_counts)} files" + ) + if formatting_error_counts + else "no formatting errors reported" + ), ) exit(1) @@ -388,9 +391,11 @@ def main( for val in config.get("target_version", []) if val != "py27" }, - line_length=config.get("line_length", DEFAULT_LINE_LENGTH) - if args.project_line_length - else DEFAULT_LINE_LENGTH, + line_length=( + config.get("line_length", DEFAULT_LINE_LENGTH) + if args.project_line_length + else DEFAULT_LINE_LENGTH + ), ) REPORT_ONLY_DOCTEST = args.report_doctest diff --git a/tools/generate_proxy_methods.py b/tools/generate_proxy_methods.py index 9881d26426f..31832ae8bfa 100644 --- a/tools/generate_proxy_methods.py +++ b/tools/generate_proxy_methods.py @@ -40,6 +40,7 @@ .. 
versionadded:: 2.0 """ + # mypy: ignore-errors from __future__ import annotations @@ -85,9 +86,9 @@ def __repr__(self) -> str: return self.sym -classes: collections.defaultdict[ - str, Dict[str, Tuple[Any, ...]] -] = collections.defaultdict(dict) +classes: collections.defaultdict[str, Dict[str, Tuple[Any, ...]]] = ( + collections.defaultdict(dict) +) _T = TypeVar("_T", bound="Any") @@ -214,18 +215,22 @@ def instrument(buf: TextIO, name: str, clslevel: bool = False) -> None: if spec.defaults: new_defaults = tuple( - _repr_sym("util.EMPTY_DICT") - if df is util.EMPTY_DICT - else df + ( + _repr_sym("util.EMPTY_DICT") + if df is util.EMPTY_DICT + else df + ) for df in spec.defaults ) elem[3] = new_defaults if spec.kwonlydefaults: new_kwonlydefaults = { - name: _repr_sym("util.EMPTY_DICT") - if df is util.EMPTY_DICT - else df + name: ( + _repr_sym("util.EMPTY_DICT") + if df is util.EMPTY_DICT + else df + ) for name, df in spec.kwonlydefaults.items() } elem[5] = new_kwonlydefaults @@ -415,9 +420,9 @@ def main(cmd: code_writer_cmd) -> None: from sqlalchemy import util from sqlalchemy.util import langhelpers - util.create_proxy_methods = ( - langhelpers.create_proxy_methods - ) = create_proxy_methods + util.create_proxy_methods = langhelpers.create_proxy_methods = ( + create_proxy_methods + ) for entry in entries: if cmd.args.module in {"all", entry}: diff --git a/tools/generate_sql_functions.py b/tools/generate_sql_functions.py index 51422dc7e6b..fc62486f6c3 100644 --- a/tools/generate_sql_functions.py +++ b/tools/generate_sql_functions.py @@ -1,6 +1,7 @@ """Generate inline stubs for generic functions on func """ + # mypy: ignore-errors from __future__ import annotations diff --git a/tools/generate_tuple_map_overloads.py b/tools/generate_tuple_map_overloads.py index 476636b1d0f..098b356e026 100644 --- a/tools/generate_tuple_map_overloads.py +++ b/tools/generate_tuple_map_overloads.py @@ -16,6 +16,7 @@ .. versionadded:: 2.0 """ + # mypy: ignore-errors from __future__ import annotations @@ -36,10 +37,13 @@ sys.path.append(str(Path(__file__).parent.parent)) -def process_module(modname: str, filename: str, cmd: code_writer_cmd) -> str: +def process_module( + modname: str, filename: str, expected_number: int, cmd: code_writer_cmd +) -> str: # use tempfile in same path as the module, or at least in the # current working directory, so that black / zimports use # local pyproject.toml + found = 0 with NamedTemporaryFile( mode="w", delete=False, @@ -54,6 +58,7 @@ def process_module(modname: str, filename: str, cmd: code_writer_cmd) -> str: line, ) if m: + found += 1 indent = m.group(1) given_fnname = current_fnname = m.group(2) if current_fnname.startswith("self."): @@ -110,16 +115,20 @@ def {current_fnname}( if not in_block: buf.write(line) + if found != expected_number: + raise Exception( + f"{modname} processed {found}. 
expected {expected_number}" + ) return buf.name -def run_module(modname: str, cmd: code_writer_cmd) -> None: +def run_module(modname: str, count: int, cmd: code_writer_cmd) -> None: cmd.write_status(f"importing module {modname}\n") mod = importlib.import_module(modname) destination_path = mod.__file__ assert destination_path is not None - tempfile = process_module(modname, destination_path, cmd) + tempfile = process_module(modname, destination_path, count, cmd) cmd.run_zimports(tempfile) cmd.run_black(tempfile) @@ -127,17 +136,17 @@ def run_module(modname: str, cmd: code_writer_cmd) -> None: def main(cmd: code_writer_cmd) -> None: - for modname in entries: + for modname, count in entries: if cmd.args.module in {"all", modname}: - run_module(modname, cmd) + run_module(modname, count, cmd) entries = [ - "sqlalchemy.sql._selectable_constructors", - "sqlalchemy.orm.session", - "sqlalchemy.orm.query", - "sqlalchemy.sql.selectable", - "sqlalchemy.sql.dml", + ("sqlalchemy.sql._selectable_constructors", 1), + ("sqlalchemy.orm.session", 1), + ("sqlalchemy.orm.query", 1), + ("sqlalchemy.sql.selectable", 1), + ("sqlalchemy.sql.dml", 3), ] if __name__ == "__main__": @@ -146,7 +155,7 @@ def main(cmd: code_writer_cmd) -> None: with cmd.add_arguments() as parser: parser.add_argument( "--module", - choices=entries + ["all"], + choices=[n for n, _ in entries] + ["all"], default="all", help="Which file to generate. Default is to regenerate all files", ) diff --git a/tools/normalize_file_headers.py b/tools/normalize_file_headers.py index 8d82f849558..ba4cd5734f8 100644 --- a/tools/normalize_file_headers.py +++ b/tools/normalize_file_headers.py @@ -49,7 +49,7 @@ def run_file(cmd: code_writer_cmd, file: Path, update_year: bool): def run(cmd: code_writer_cmd, update_year: bool): i = 0 - for ext in ('py', 'pyx', 'pxd'): + for ext in ("py", "pyx", "pxd"): for file in sa_path.glob(f"**/*.{ext}"): run_file(cmd, file, update_year) i += 1 diff --git a/tools/trace_orm_adapter.py b/tools/trace_orm_adapter.py index de8098bcb8f..966705690de 100644 --- a/tools/trace_orm_adapter.py +++ b/tools/trace_orm_adapter.py @@ -23,6 +23,7 @@ """ # noqa: E501 + # mypy: ignore-errors From c5d2e78d5fde6a26207f303933d7d284ebd5227c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 29 Aug 2024 10:04:47 -0400 Subject: [PATCH 301/544] pass to_metadata argument to Enum.copy() Fixed bug where the ``metadata`` element of an ``Enum`` datatype would not be transferred to the new :class:`.MetaData` object when the type had been copied via a :meth:`.Table.to_metadata` operation, leading to inconsistent behaviors within create/drop sequences. Fixes: #11802 Change-Id: Ibbc93aa31bdfde0d67a9530f41a08e826c17d58e (cherry picked from commit 22cbc7dcb48c946dda66704797665289965eb22e) --- doc/build/changelog/unreleased_20/11802.rst | 8 ++++ lib/sqlalchemy/sql/schema.py | 4 +- lib/sqlalchemy/sql/sqltypes.py | 12 ++++++ test/sql/test_metadata.py | 45 ++++++++++++++++++--- 4 files changed, 62 insertions(+), 7 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11802.rst diff --git a/doc/build/changelog/unreleased_20/11802.rst b/doc/build/changelog/unreleased_20/11802.rst new file mode 100644 index 00000000000..f6e7847ee2a --- /dev/null +++ b/doc/build/changelog/unreleased_20/11802.rst @@ -0,0 +1,8 @@ +.. 
change:: + :tags: bug, schema + :tickets: 11802 + + Fixed bug where the ``metadata`` element of an ``Enum`` datatype would not + be transferred to the new :class:`.MetaData` object when the type had been + copied via a :meth:`.Table.to_metadata` operation, leading to inconsistent + behaviors within create/drop sequences. diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 97d123007bd..65c12b308fe 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -1434,7 +1434,7 @@ def referred_schema_fn(table, to_schema, args = [] for col in self.columns: - args.append(col._copy(schema=actual_schema)) + args.append(col._copy(schema=actual_schema, _to_metadata=metadata)) table = Table( name, metadata, @@ -2475,6 +2475,8 @@ def _copy(self, **kw: Any) -> Column[Any]: server_onupdate = self.server_onupdate if isinstance(server_default, (Computed, Identity)): # TODO: likely should be copied in all cases + # TODO: if a Sequence, we would need to transfer the Sequence + # .metadata as well args.append(server_default._copy(**kw)) server_default = server_onupdate = None diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index f29131c933c..ad9a696ee82 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -1085,6 +1085,11 @@ def copy(self, **kw): return self.adapt( cast("Type[TypeEngine[Any]]", self.__class__), _create_events=True, + metadata=( + kw.get("_to_metadata", self.metadata) + if self.metadata is not None + else None + ), ) @overload @@ -1908,6 +1913,13 @@ def __init__( if _adapted_from: self.dispatch = self.dispatch._join(_adapted_from.dispatch) + def copy(self, **kw): + # override SchemaType.copy() to not include to_metadata logic + return self.adapt( + cast("Type[TypeEngine[Any]]", self.__class__), + _create_events=True, + ) + def _should_create_constraint(self, compiler, **kw): if not self._is_impl_for_variant(compiler.dialect, kw): return False diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index 97c2f086458..1b068c02f7f 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -2395,17 +2395,27 @@ def test_inherit_schema_enum(self): t1 = Table("x", m, Column("y", type_), schema="z") eq_(t1.c.y.type.schema, "z") - def test_to_metadata_copy_type(self): + @testing.variation("assign_metadata", [True, False]) + def test_to_metadata_copy_type(self, assign_metadata): m1 = MetaData() - type_ = self.MyType() + if assign_metadata: + type_ = self.MyType(metadata=m1) + else: + type_ = self.MyType() + t1 = Table("x", m1, Column("y", type_)) m2 = MetaData() t2 = t1.to_metadata(m2) - # metadata isn't set - is_(t2.c.y.type.metadata, None) + if assign_metadata: + # metadata was transferred + # issue #11802 + is_(t2.c.y.type.metadata, m2) + else: + # metadata isn't set + is_(t2.c.y.type.metadata, None) # our test type sets table, though is_(t2.c.y.type.table, t2) @@ -2435,11 +2445,34 @@ def test_to_metadata_independent_schema(self): eq_(t2.c.y.type.schema, None) - def test_to_metadata_inherit_schema(self): + @testing.combinations( + ("name", "foobar", "name"), + ("schema", "someschema", "schema"), + ("inherit_schema", True, "inherit_schema"), + ("metadata", MetaData(), "metadata"), + ) + def test_copy_args(self, argname, value, attrname): + kw = {argname: value} + e1 = self.MyType(**kw) + + e1_copy = e1.copy() + + eq_(getattr(e1_copy, attrname), value) + + @testing.variation("already_has_a_schema", [True, False]) + def test_to_metadata_inherit_schema(self, 
already_has_a_schema): m1 = MetaData() - type_ = self.MyType(inherit_schema=True) + if already_has_a_schema: + type_ = self.MyType(schema="foo", inherit_schema=True) + eq_(type_.schema, "foo") + else: + type_ = self.MyType(inherit_schema=True) + t1 = Table("x", m1, Column("y", type_)) + # note that inherit_schema means the schema mutates to be that + # of the table + is_(type_.schema, None) m2 = MetaData() t2 = t1.to_metadata(m2, schema="bar") From 42ec1f70138d51dd7e61578453faa0f4d47f6ec3 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 31 Aug 2024 12:56:00 -0400 Subject: [PATCH 302/544] re-process args for builtin generic types Improvements to the ORM annotated declarative type map lookup dealing with composed types such as ``dict[str, Any]`` linking to JSON (or others) with or without "future annotations" mode. There's apparently a big incompatiblity in types from typing vs. Python builtins in the way they genericize. The typing library makes it very difficult to distinguish between the two sets of types. This patch is a bit slash and burn to work around all this. These should likely be reported as bugs in the Python standard library if they aren't already. Fixes: #11814 Change-Id: I56a62701d5e883be04df7f45fd9429bb9c1c9a6f (cherry picked from commit f746fd78e303352d426a15c1f76ee835ce399d44) --- doc/build/changelog/unreleased_20/11814.rst | 9 ++++ lib/sqlalchemy/orm/decl_base.py | 2 +- lib/sqlalchemy/orm/util.py | 9 +++- lib/sqlalchemy/util/typing.py | 41 +++++++++++++++++++ .../test_tm_future_annotations_sync.py | 36 +++++++++++++--- test/orm/declarative/test_typed_mapping.py | 36 +++++++++++++--- 6 files changed, 120 insertions(+), 13 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11814.rst diff --git a/doc/build/changelog/unreleased_20/11814.rst b/doc/build/changelog/unreleased_20/11814.rst new file mode 100644 index 00000000000..a9feecb28c6 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11814.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, orm + :tickets: 11814 + + Improvements to the ORM annotated declarative type map lookup dealing with + composed types such as ``dict[str, Any]`` linking to JSON (or others) with + or without "future annotations" mode. 
+ + diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index 1203c9cb36a..d43fbffc576 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -431,7 +431,7 @@ def _setup_inheritance(self, mapper_kw: _MapperKwArgs) -> None: class _CollectedAnnotation(NamedTuple): raw_annotation: _AnnotationScanType mapped_container: Optional[Type[Mapped[Any]]] - extracted_mapped_annotation: Union[Type[Any], str] + extracted_mapped_annotation: Union[_AnnotationScanType, str] is_dataclass: bool attr_value: Any originating_module: str diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 0b4ad88ed8b..2b4ac3c9d7c 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -89,6 +89,7 @@ de_stringify_union_elements as _de_stringify_union_elements, ) from ..util.typing import eval_name_only as _eval_name_only +from ..util.typing import fixup_container_fwd_refs from ..util.typing import is_origin_of_cls from ..util.typing import Literal from ..util.typing import Protocol @@ -2321,7 +2322,7 @@ def _extract_mapped_subtype( is_dataclass_field: bool, expect_mapped: bool = True, raiseerr: bool = True, -) -> Optional[Tuple[Union[type, str], Optional[type]]]: +) -> Optional[Tuple[Union[_AnnotationScanType, str], Optional[type]]]: """given an annotation, figure out if it's ``Mapped[something]`` and if so, return the ``something`` part. @@ -2407,7 +2408,11 @@ def _extract_mapped_subtype( "Expected sub-type for Mapped[] annotation" ) - return annotated.__args__[0], annotated.__origin__ + return ( + # fix dict/list/set args to be ForwardRef, see #11814 + fixup_container_fwd_refs(annotated.__args__[0]), + annotated.__origin__, + ) def _mapper_property_as_plain_name(prop: Type[Any]) -> str: diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index 64619957a6b..81e77f629f7 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -188,9 +188,50 @@ def de_stringify_annotation( ) return _copy_generic_annotation_with(annotation, elements) + return annotation # type: ignore +def fixup_container_fwd_refs( + type_: _AnnotationScanType, +) -> _AnnotationScanType: + """Correct dict['x', 'y'] into dict[ForwardRef('x'), ForwardRef('y')] + and similar for list, set + + """ + if ( + is_generic(type_) + and type_.__origin__ + in ( + dict, + set, + list, + collections_abc.MutableSet, + collections_abc.MutableMapping, + collections_abc.MutableSequence, + collections_abc.Mapping, + collections_abc.Sequence, + ) + # fight, kick and scream to struggle to tell the difference between + # dict[] and typing.Dict[] which DO NOT compare the same and DO NOT + # behave the same yet there is NO WAY to distinguish between which type + # it is using public attributes + and not re.match( + "typing.(?:Dict|List|Set|.*Mapping|.*Sequence|.*Set)", repr(type_) + ) + ): + # compat with py3.10 and earlier + return type_.__origin__.__class_getitem__( # type: ignore + tuple( + [ + ForwardRef(elem) if isinstance(elem, str) else elem + for elem in type_.__args__ + ] + ) + ) + return type_ + + def _copy_generic_annotation_with( annotation: GenericProtocol[_T], elements: Tuple[_AnnotationScanType, ...] 
) -> Type[_T]: diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index 2bdf340d4c0..765318cfa28 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -1420,21 +1420,47 @@ class RefElementTwo(decl_base): (str, str), ), id_="sa", + argnames="container_typ,args", ) - def test_extract_generic_from_pep593(self, container_typ, args): - """test #9099""" + @testing.variation("style", ["pep593", "alias", "direct"]) + def test_extract_composed(self, container_typ, args, style): + """test #9099 (pep593) + + test #11814 + + """ global TestType - TestType = Annotated[container_typ[args], 0] + + if style.pep593: + TestType = Annotated[container_typ[args], 0] + elif style.alias: + TestType = container_typ[args] + elif style.direct: + TestType = container_typ + double_strings = args == (str, str) class Base(DeclarativeBase): - type_annotation_map = {TestType: JSON()} + if style.direct: + if double_strings: + type_annotation_map = {TestType[str, str]: JSON()} + else: + type_annotation_map = {TestType[str]: JSON()} + else: + type_annotation_map = {TestType: JSON()} class MyClass(Base): __tablename__ = "my_table" id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[TestType] = mapped_column() + + if style.direct: + if double_strings: + data: Mapped[TestType[str, str]] = mapped_column() + else: + data: Mapped[TestType[str]] = mapped_column() + else: + data: Mapped[TestType] = mapped_column() is_(MyClass.__table__.c.data.type._type_affinity, JSON) diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 6fb792b0ba0..8b10118f4c9 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -1411,21 +1411,47 @@ class RefElementTwo(decl_base): (str, str), ), id_="sa", + argnames="container_typ,args", ) - def test_extract_generic_from_pep593(self, container_typ, args): - """test #9099""" + @testing.variation("style", ["pep593", "alias", "direct"]) + def test_extract_composed(self, container_typ, args, style): + """test #9099 (pep593) + + test #11814 + + """ global TestType - TestType = Annotated[container_typ[args], 0] + + if style.pep593: + TestType = Annotated[container_typ[args], 0] + elif style.alias: + TestType = container_typ[args] + elif style.direct: + TestType = container_typ + double_strings = args == (str, str) class Base(DeclarativeBase): - type_annotation_map = {TestType: JSON()} + if style.direct: + if double_strings: + type_annotation_map = {TestType[str, str]: JSON()} + else: + type_annotation_map = {TestType[str]: JSON()} + else: + type_annotation_map = {TestType: JSON()} class MyClass(Base): __tablename__ = "my_table" id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[TestType] = mapped_column() + + if style.direct: + if double_strings: + data: Mapped[TestType[str, str]] = mapped_column() + else: + data: Mapped[TestType[str]] = mapped_column() + else: + data: Mapped[TestType] = mapped_column() is_(MyClass.__table__.c.data.type._type_affinity, JSON) From 4107f63e5f961706c751a9e5fb5a0131cf6d1dd9 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 2 Sep 2024 11:18:35 -0400 Subject: [PATCH 303/544] add asyncio.CancelledError to terminate conditions Revising the asyncpg ``terminate()`` fix first made in :ticket:`10717` which improved the resiliency of this call under all circumstances, adding ``asyncio.CancelledError`` to the 
list of exceptions that are intercepted as failing for a graceful ``.close()`` which will then proceed to call ``.terminate()``. Fixes: #11821 Change-Id: Ic5e21cd18cc5517aae372560c52b2b2396c65483 (cherry picked from commit dc7aec467487f729c3a6d1e9e352626593cf5d67) --- doc/build/changelog/unreleased_20/11821.rst | 9 +++++++++ lib/sqlalchemy/dialects/postgresql/asyncpg.py | 1 + 2 files changed, 10 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11821.rst diff --git a/doc/build/changelog/unreleased_20/11821.rst b/doc/build/changelog/unreleased_20/11821.rst new file mode 100644 index 00000000000..b72412f489f --- /dev/null +++ b/doc/build/changelog/unreleased_20/11821.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, postgresql + :tickets: 11821 + + Revising the asyncpg ``terminate()`` fix first made in :ticket:`10717` + which improved the resiliency of this call under all circumstances, adding + ``asyncio.CancelledError`` to the list of exceptions that are intercepted + as failing for a graceful ``.close()`` which will then proceed to call + ``.terminate()``. diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index b00ce5a02da..117062c4c63 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -879,6 +879,7 @@ def terminate(self): self.await_(self._connection.close(timeout=2)) except ( asyncio.TimeoutError, + asyncio.CancelledError, OSError, self.dbapi.asyncpg.PostgresError, ): From cf82be60f811ed6bdc1a8c4221148d97dd6b704d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 2 Sep 2024 09:13:27 -0400 Subject: [PATCH 304/544] unpin setuptools This removes the pin that was placed in 2607262110bdc5c5dc96fc19ddca895a15a58e4e The pin for ``setuptools<69.3`` in ``pyproject.toml`` has been removed. This pin was to prevent a sudden change to :pep:`625` in setuptools from taking place which changes the file name of SQLAlchemy's source distribution on pypi to be an all lower case name, which is very likely to cause problems with various build environments that expected a particular naming style. However, the presence of this pin is now holding back environments that otherwise want to use a newer setuptools, so we've decided to move forward with this change, with the assumption that build environments will have largely accommodated the setuptools change by now. References: #11818 Change-Id: I0cd9ab0512004669a8f0aa0cb7f560d89a2da2bd (cherry picked from commit 2b1149e335cb838b31356b6aa33eeb41bfc9a0d1) --- doc/build/changelog/unreleased_20/11818.rst | 16 ++++++++++++++++ pyproject.toml | 4 +--- 2 files changed, 17 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11818.rst diff --git a/doc/build/changelog/unreleased_20/11818.rst b/doc/build/changelog/unreleased_20/11818.rst new file mode 100644 index 00000000000..c75a6c64b6b --- /dev/null +++ b/doc/build/changelog/unreleased_20/11818.rst @@ -0,0 +1,16 @@ +.. change:: + :tags: change, general + :tickets: 11818 + + The pin for ``setuptools<69.3`` in ``pyproject.toml`` has been + removed. This pin was to prevent a sudden change to :pep:`625` in + setuptools from taking place which changes the file name of SQLAlchemy's + source distribution on pypi to be an all lower case name, which is very + likely to cause problems with various build environments that expected a + particular naming style. 
However, the presence of this pin is now holding + back environments that otherwise want to use a newer setuptools, so we've + decided to move forward with this change, with the assumption that build + environments will have largely accommodated the setuptools change by + now. + + diff --git a/pyproject.toml b/pyproject.toml index 0f53594dfaf..f75bf319243 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,7 @@ [build-system] build-backend = "setuptools.build_meta" requires = [ - # avoid moving to https://github.com/pypa/setuptools/issues/3593 - # until we're ready - "setuptools>=61.0,<69.3", + "setuptools>=61.0", "cython>=0.29.24; platform_python_implementation == 'CPython'", # Skip cython when using pypy ] From ca69db7e1ff6dabbbd57b1bca3387d0321da19a5 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 2 Sep 2024 10:37:29 -0400 Subject: [PATCH 305/544] dont erase transaction if rollback/commit failed outside of asyncpg Fixed critical issue in the asyncpg driver where a rollback or commit that fails specifically for the ``MissingGreenlet`` condition or any other error that is not raised by asyncpg itself would discard the asyncpg transaction in any case, even though the transaction were still idle, leaving to a server side condition with an idle transaction that then goes back into the connection pool. The flags for "transaction closed" are now not reset for errors that are raised outside of asyncpg itself. When asyncpg itself raises an error for ``.commit()`` or ``.rollback()``, asyncpg does then discard of this transaction. Fixes: #11819 Change-Id: I12f0532788b03ea63fb47a7af21e07c37effb070 (cherry picked from commit a1f220cb4d1a04412a53200f454fbfc706e136b3) --- doc/build/changelog/unreleased_14/11819.rst | 14 ++++++ lib/sqlalchemy/dialects/postgresql/asyncpg.py | 36 ++++++++++--- test/dialect/postgresql/test_async_pg_py3k.py | 50 +++++++++++++++++++ 3 files changed, 92 insertions(+), 8 deletions(-) create mode 100644 doc/build/changelog/unreleased_14/11819.rst diff --git a/doc/build/changelog/unreleased_14/11819.rst b/doc/build/changelog/unreleased_14/11819.rst new file mode 100644 index 00000000000..6211eb487ee --- /dev/null +++ b/doc/build/changelog/unreleased_14/11819.rst @@ -0,0 +1,14 @@ +.. change:: + :tags: bug, postgresql + :tickets: 11819 + :versions: 2.0.33, 1.4.54 + + Fixed critical issue in the asyncpg driver where a rollback or commit that + fails specifically for the ``MissingGreenlet`` condition or any other error + that is not raised by asyncpg itself would discard the asyncpg transaction + in any case, even though the transaction were still idle, leaving to a + server side condition with an idle transaction that then goes back into the + connection pool. The flags for "transaction closed" are now not reset for + errors that are raised outside of asyncpg itself. When asyncpg itself + raises an error for ``.commit()`` or ``.rollback()``, asyncpg does then + discard of this transaction. 
diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index b00ce5a02da..7b02d93d627 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -844,25 +844,45 @@ def cursor(self, server_side=False): else: return AsyncAdapt_asyncpg_cursor(self) + async def _rollback_and_discard(self): + try: + await self._transaction.rollback() + finally: + # if asyncpg .rollback() was actually called, then whether or + # not it raised or succeeded, the transation is done, discard it + self._transaction = None + self._started = False + + async def _commit_and_discard(self): + try: + await self._transaction.commit() + finally: + # if asyncpg .commit() was actually called, then whether or + # not it raised or succeeded, the transation is done, discard it + self._transaction = None + self._started = False + def rollback(self): if self._started: try: - self.await_(self._transaction.rollback()) - except Exception as error: - self._handle_exception(error) - finally: + self.await_(self._rollback_and_discard()) self._transaction = None self._started = False + except Exception as error: + # don't dereference asyncpg transaction if we didn't + # actually try to call rollback() on it + self._handle_exception(error) def commit(self): if self._started: try: - self.await_(self._transaction.commit()) - except Exception as error: - self._handle_exception(error) - finally: + self.await_(self._commit_and_discard()) self._transaction = None self._started = False + except Exception as error: + # don't dereference asyncpg transaction if we didn't + # actually try to call commit() on it + self._handle_exception(error) def close(self): self.rollback() diff --git a/test/dialect/postgresql/test_async_pg_py3k.py b/test/dialect/postgresql/test_async_pg_py3k.py index ed3d63d8336..1a85c8f89f9 100644 --- a/test/dialect/postgresql/test_async_pg_py3k.py +++ b/test/dialect/postgresql/test_async_pg_py3k.py @@ -13,6 +13,7 @@ from sqlalchemy.dialects.postgresql import ENUM from sqlalchemy.testing import async_test from sqlalchemy.testing import eq_ +from sqlalchemy.testing import expect_raises from sqlalchemy.testing import fixtures from sqlalchemy.testing import mock @@ -165,6 +166,55 @@ async def async_setup(engine, enums): ], ) + @testing.variation("trans", ["commit", "rollback"]) + @async_test + async def test_dont_reset_open_transaction( + self, trans, async_testing_engine + ): + """test for #11819""" + + engine = async_testing_engine() + + control_conn = await engine.connect() + await control_conn.execution_options(isolation_level="AUTOCOMMIT") + + conn = await engine.connect() + txid_current = ( + await conn.exec_driver_sql("select txid_current()") + ).scalar() + + with expect_raises(exc.MissingGreenlet): + if trans.commit: + conn.sync_connection.connection.dbapi_connection.commit() + elif trans.rollback: + conn.sync_connection.connection.dbapi_connection.rollback() + else: + trans.fail() + + trans_exists = ( + await control_conn.exec_driver_sql( + f"SELECT count(*) FROM pg_stat_activity " + f"where backend_xid={txid_current}" + ) + ).scalar() + eq_(trans_exists, 1) + + if trans.commit: + await conn.commit() + elif trans.rollback: + await conn.rollback() + else: + trans.fail() + + trans_exists = ( + await control_conn.exec_driver_sql( + f"SELECT count(*) FROM pg_stat_activity " + f"where backend_xid={txid_current}" + ) + ).scalar() + eq_(trans_exists, 0) + await engine.dispose() + @async_test async def 
test_failed_commit_recover(self, metadata, async_testing_engine): Table("t1", metadata, Column("id", Integer, primary_key=True)) From e787c1c5a94d419984e55ad3f1a6f30ff059be05 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 2 Sep 2024 11:22:36 -0400 Subject: [PATCH 306/544] add "The server failed to resume the transaction" to pymssql Added error "The server failed to resume the transaction" to the list of error strings for the pymssql driver in determining a disconnect scenario, as observed by one user using pymssql under otherwise unknown conditions as leaving an unusable connection in the connection pool which fails to ping cleanly. Fixes: #11822 Change-Id: I1cf98046978b10775f19531878b597d32b056f13 (cherry picked from commit ac5a27db854fe9f6fbad5b93130e4a9c19405f3a) --- doc/build/changelog/unreleased_20/11822.rst | 9 +++++++++ lib/sqlalchemy/dialects/mssql/pymssql.py | 1 + test/dialect/mssql/test_engine.py | 1 + 3 files changed, 11 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11822.rst diff --git a/doc/build/changelog/unreleased_20/11822.rst b/doc/build/changelog/unreleased_20/11822.rst new file mode 100644 index 00000000000..f6c91918f39 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11822.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, mssql + :tickets: 11822 + + Added error "The server failed to resume the transaction" to the list of + error strings for the pymssql driver in determining a disconnect scenario, + as observed by one user using pymssql under otherwise unknown conditions as + leaving an unusable connection in the connection pool which fails to ping + cleanly. diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py index ea1f9bd3a7e..c4207987bcd 100644 --- a/lib/sqlalchemy/dialects/mssql/pymssql.py +++ b/lib/sqlalchemy/dialects/mssql/pymssql.py @@ -103,6 +103,7 @@ def is_disconnect(self, e, connection, cursor): "message 20006", # Write to the server failed "message 20017", # Unexpected EOF from the server "message 20047", # DBPROCESS is dead or not enabled + "The server failed to resume the transaction", ): if msg in str(e): return True diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py index e87b9825f1b..26b7208ec8a 100644 --- a/test/dialect/mssql/test_engine.py +++ b/test/dialect/mssql/test_engine.py @@ -326,6 +326,7 @@ def test_pymssql_disconnect(self): "message 20006", # Write to the server failed "message 20017", # Unexpected EOF from the server "message 20047", # DBPROCESS is dead or not enabled + "The server failed to resume the transaction", ]: eq_(dialect.is_disconnect(error, None, None), True) From f54b99701fe995f9f30a093df46653ac13db0906 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 3 Sep 2024 12:54:22 -0400 Subject: [PATCH 307/544] changelog categorization fixes, typos Change-Id: I1292ad4c8a671a98d23861281a8d36e2ab8a6f55 (cherry picked from commit dd835a2a63b0d2fad67ea8ed719e48d6d48229d3) --- doc/build/changelog/unreleased_20/11687.rst | 2 +- doc/build/changelog/unreleased_20/11818.rst | 19 +++++++++---------- .../changelog/unreleased_20/array_type.rst | 2 +- 3 files changed, 11 insertions(+), 12 deletions(-) diff --git a/doc/build/changelog/unreleased_20/11687.rst b/doc/build/changelog/unreleased_20/11687.rst index c18d30ffabd..734ed222cb9 100644 --- a/doc/build/changelog/unreleased_20/11687.rst +++ b/doc/build/changelog/unreleased_20/11687.rst @@ -1,5 +1,5 @@ .. 
change:: - :tags: bug, reflection + :tags: bug, engine :tickets: 11687 Fixed issue in internal reflection cache where particular reflection diff --git a/doc/build/changelog/unreleased_20/11818.rst b/doc/build/changelog/unreleased_20/11818.rst index c75a6c64b6b..c218f27dcc1 100644 --- a/doc/build/changelog/unreleased_20/11818.rst +++ b/doc/build/changelog/unreleased_20/11818.rst @@ -2,15 +2,14 @@ :tags: change, general :tickets: 11818 - The pin for ``setuptools<69.3`` in ``pyproject.toml`` has been - removed. This pin was to prevent a sudden change to :pep:`625` in - setuptools from taking place which changes the file name of SQLAlchemy's - source distribution on pypi to be an all lower case name, which is very - likely to cause problems with various build environments that expected a - particular naming style. However, the presence of this pin is now holding - back environments that otherwise want to use a newer setuptools, so we've - decided to move forward with this change, with the assumption that build - environments will have largely accommodated the setuptools change by - now. + The pin for ``setuptools<69.3`` in ``pyproject.toml`` has been removed. + This pin was to prevent a sudden change in setuptools to use :pep:`625` + from taking place, which would change the file name of SQLAlchemy's source + distribution on pypi to be an all lower case name, which is likely to cause + problems with various build environments that expected the previous naming + style. However, the presence of this pin is holding back environments that + otherwise want to use a newer setuptools, so we've decided to move forward + with this change, with the assumption that build environments will have + largely accommodated the setuptools change by now. diff --git a/doc/build/changelog/unreleased_20/array_type.rst b/doc/build/changelog/unreleased_20/array_type.rst index 9b0801faf5b..23e0727fa26 100644 --- a/doc/build/changelog/unreleased_20/array_type.rst +++ b/doc/build/changelog/unreleased_20/array_type.rst @@ -1,5 +1,5 @@ .. change:: - :tags: bug, test + :tags: bug, tests Added missing ``array_type`` property to the testing suite ``SuiteRequirements`` class. 
From 4f41476377a95d0c8d50cbb79bca073951d33b8e Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 3 Sep 2024 12:56:56 -0400 Subject: [PATCH 308/544] - 2.0.33 --- doc/build/changelog/changelog_20.rst | 127 +++++++++++++++++- doc/build/changelog/unreleased_20/11677.rst | 10 -- doc/build/changelog/unreleased_20/11687.rst | 7 - doc/build/changelog/unreleased_20/11703.rst | 7 - doc/build/changelog/unreleased_20/11731.rst | 10 -- doc/build/changelog/unreleased_20/11746.rst | 12 -- doc/build/changelog/unreleased_20/11782.rst | 5 - doc/build/changelog/unreleased_20/11788.rst | 6 - doc/build/changelog/unreleased_20/11802.rst | 8 -- doc/build/changelog/unreleased_20/11814.rst | 9 -- doc/build/changelog/unreleased_20/11818.rst | 15 --- doc/build/changelog/unreleased_20/11821.rst | 9 -- doc/build/changelog/unreleased_20/11822.rst | 9 -- .../changelog/unreleased_20/array_type.rst | 5 - doc/build/conf.py | 4 +- 15 files changed, 128 insertions(+), 115 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/11677.rst delete mode 100644 doc/build/changelog/unreleased_20/11687.rst delete mode 100644 doc/build/changelog/unreleased_20/11703.rst delete mode 100644 doc/build/changelog/unreleased_20/11731.rst delete mode 100644 doc/build/changelog/unreleased_20/11746.rst delete mode 100644 doc/build/changelog/unreleased_20/11782.rst delete mode 100644 doc/build/changelog/unreleased_20/11788.rst delete mode 100644 doc/build/changelog/unreleased_20/11802.rst delete mode 100644 doc/build/changelog/unreleased_20/11814.rst delete mode 100644 doc/build/changelog/unreleased_20/11818.rst delete mode 100644 doc/build/changelog/unreleased_20/11821.rst delete mode 100644 doc/build/changelog/unreleased_20/11822.rst delete mode 100644 doc/build/changelog/unreleased_20/array_type.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 0fa618f4a21..40cd86afe93 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,132 @@ .. changelog:: :version: 2.0.33 - :include_notes_from: unreleased_20 + :released: September 3, 2024 + + .. change:: + :tags: bug, sqlite + :tickets: 11677 + + Improvements to the regex used by the SQLite dialect to reflect the name + and contents of a CHECK constraint. Constraints with newline, tab, or + space characters in either or both the constraint text and constraint name + are now properly reflected. Pull request courtesy Jeff Horemans. + + + + .. change:: + :tags: bug, engine + :tickets: 11687 + + Fixed issue in internal reflection cache where particular reflection + scenarios regarding same-named quoted_name() constructs would not be + correctly cached. Pull request courtesy Felix Lüdin. + + .. change:: + :tags: bug, sql, regression + :tickets: 11703 + + Fixed regression in :meth:`_sql.Select.with_statement_hint` and others + where the generative behavior of the method stopped producing a copy of the + object. + + .. change:: + :tags: bug, mysql + :tickets: 11731 + + Fixed issue in MySQL dialect where using INSERT..FROM SELECT in combination + with ON DUPLICATE KEY UPDATE would erroneously render on MySQL 8 and above + the "AS new" clause, leading to syntax failures. This clause is required + on MySQL 8 to follow the VALUES clause if use of the "new" alias is + present, however is not permitted to follow a FROM SELECT clause. + + + .. 
change:: + :tags: bug, sqlite + :tickets: 11746 + + Improvements to the regex used by the SQLite dialect to reflect the name + and contents of a UNIQUE constraint that is defined inline within a column + definition inside of a SQLite CREATE TABLE statement, accommodating for tab + characters present within the column / constraint line. Pull request + courtesy John A Stevenson. + + + + + .. change:: + :tags: bug, typing + :tickets: 11782 + + Fixed typing issue with :meth:`_sql.Select.with_only_columns`. + + .. change:: + :tags: bug, orm + :tickets: 11788 + + Correctly cleanup the internal top-level module registry when no + inner modules or classes are registered into it. + + .. change:: + :tags: bug, schema + :tickets: 11802 + + Fixed bug where the ``metadata`` element of an ``Enum`` datatype would not + be transferred to the new :class:`.MetaData` object when the type had been + copied via a :meth:`.Table.to_metadata` operation, leading to inconsistent + behaviors within create/drop sequences. + + .. change:: + :tags: bug, orm + :tickets: 11814 + + Improvements to the ORM annotated declarative type map lookup dealing with + composed types such as ``dict[str, Any]`` linking to JSON (or others) with + or without "future annotations" mode. + + + + .. change:: + :tags: change, general + :tickets: 11818 + + The pin for ``setuptools<69.3`` in ``pyproject.toml`` has been removed. + This pin was to prevent a sudden change in setuptools to use :pep:`625` + from taking place, which would change the file name of SQLAlchemy's source + distribution on pypi to be an all lower case name, which is likely to cause + problems with various build environments that expected the previous naming + style. However, the presence of this pin is holding back environments that + otherwise want to use a newer setuptools, so we've decided to move forward + with this change, with the assumption that build environments will have + largely accommodated the setuptools change by now. + + + + .. change:: + :tags: bug, postgresql + :tickets: 11821 + + Revising the asyncpg ``terminate()`` fix first made in :ticket:`10717` + which improved the resiliency of this call under all circumstances, adding + ``asyncio.CancelledError`` to the list of exceptions that are intercepted + as failing for a graceful ``.close()`` which will then proceed to call + ``.terminate()``. + + .. change:: + :tags: bug, mssql + :tickets: 11822 + + Added error "The server failed to resume the transaction" to the list of + error strings for the pymssql driver in determining a disconnect scenario, + as observed by one user using pymssql under otherwise unknown conditions as + leaving an unusable connection in the connection pool which fails to ping + cleanly. + + .. change:: + :tags: bug, tests + + Added missing ``array_type`` property to the testing suite + ``SuiteRequirements`` class. .. changelog:: :version: 2.0.32 diff --git a/doc/build/changelog/unreleased_20/11677.rst b/doc/build/changelog/unreleased_20/11677.rst deleted file mode 100644 index b1ac39b436f..00000000000 --- a/doc/build/changelog/unreleased_20/11677.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, sqlite - :tickets: 11677 - - Improvements to the regex used by the SQLite dialect to reflect the name - and contents of a CHECK constraint. Constraints with newline, tab, or - space characters in either or both the constraint text and constraint name - are now properly reflected. Pull request courtesy Jeff Horemans. 
- - diff --git a/doc/build/changelog/unreleased_20/11687.rst b/doc/build/changelog/unreleased_20/11687.rst deleted file mode 100644 index 734ed222cb9..00000000000 --- a/doc/build/changelog/unreleased_20/11687.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, engine - :tickets: 11687 - - Fixed issue in internal reflection cache where particular reflection - scenarios regarding same-named quoted_name() constructs would not be - correctly cached. Pull request courtesy Felix Lüdin. diff --git a/doc/build/changelog/unreleased_20/11703.rst b/doc/build/changelog/unreleased_20/11703.rst deleted file mode 100644 index 5c703138a14..00000000000 --- a/doc/build/changelog/unreleased_20/11703.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, sql, regression - :tickets: 11703 - - Fixed regression in :meth:`_sql.Select.with_statement_hint` and others - where the generative behavior of the method stopped producing a copy of the - object. diff --git a/doc/build/changelog/unreleased_20/11731.rst b/doc/build/changelog/unreleased_20/11731.rst deleted file mode 100644 index 34ab8b48c58..00000000000 --- a/doc/build/changelog/unreleased_20/11731.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, mysql - :tickets: 11731 - - Fixed issue in MySQL dialect where using INSERT..FROM SELECT in combination - with ON DUPLICATE KEY UPDATE would erroneously render on MySQL 8 and above - the "AS new" clause, leading to syntax failures. This clause is required - on MySQL 8 to follow the VALUES clause if use of the "new" alias is - present, however is not permitted to follow a FROM SELECT clause. - diff --git a/doc/build/changelog/unreleased_20/11746.rst b/doc/build/changelog/unreleased_20/11746.rst deleted file mode 100644 index 36dc1a7393c..00000000000 --- a/doc/build/changelog/unreleased_20/11746.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, sqlite - :tickets: 11746 - - Improvements to the regex used by the SQLite dialect to reflect the name - and contents of a UNIQUE constraint that is defined inline within a column - definition inside of a SQLite CREATE TABLE statement, accommodating for tab - characters present within the column / constraint line. Pull request - courtesy John A Stevenson. - - - diff --git a/doc/build/changelog/unreleased_20/11782.rst b/doc/build/changelog/unreleased_20/11782.rst deleted file mode 100644 index df8e1f5c3bd..00000000000 --- a/doc/build/changelog/unreleased_20/11782.rst +++ /dev/null @@ -1,5 +0,0 @@ -.. change:: - :tags: bug, typing - :tickets: 11782 - - Fixed typing issue with :meth:`_sql.Select.with_only_columns`. diff --git a/doc/build/changelog/unreleased_20/11788.rst b/doc/build/changelog/unreleased_20/11788.rst deleted file mode 100644 index 736cbd3370f..00000000000 --- a/doc/build/changelog/unreleased_20/11788.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11788 - - Correctly cleanup the internal top-level module registry when no - inner modules or classes are registered into it. diff --git a/doc/build/changelog/unreleased_20/11802.rst b/doc/build/changelog/unreleased_20/11802.rst deleted file mode 100644 index f6e7847ee2a..00000000000 --- a/doc/build/changelog/unreleased_20/11802.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
change:: - :tags: bug, schema - :tickets: 11802 - - Fixed bug where the ``metadata`` element of an ``Enum`` datatype would not - be transferred to the new :class:`.MetaData` object when the type had been - copied via a :meth:`.Table.to_metadata` operation, leading to inconsistent - behaviors within create/drop sequences. diff --git a/doc/build/changelog/unreleased_20/11814.rst b/doc/build/changelog/unreleased_20/11814.rst deleted file mode 100644 index a9feecb28c6..00000000000 --- a/doc/build/changelog/unreleased_20/11814.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11814 - - Improvements to the ORM annotated declarative type map lookup dealing with - composed types such as ``dict[str, Any]`` linking to JSON (or others) with - or without "future annotations" mode. - - diff --git a/doc/build/changelog/unreleased_20/11818.rst b/doc/build/changelog/unreleased_20/11818.rst deleted file mode 100644 index c218f27dcc1..00000000000 --- a/doc/build/changelog/unreleased_20/11818.rst +++ /dev/null @@ -1,15 +0,0 @@ -.. change:: - :tags: change, general - :tickets: 11818 - - The pin for ``setuptools<69.3`` in ``pyproject.toml`` has been removed. - This pin was to prevent a sudden change in setuptools to use :pep:`625` - from taking place, which would change the file name of SQLAlchemy's source - distribution on pypi to be an all lower case name, which is likely to cause - problems with various build environments that expected the previous naming - style. However, the presence of this pin is holding back environments that - otherwise want to use a newer setuptools, so we've decided to move forward - with this change, with the assumption that build environments will have - largely accommodated the setuptools change by now. - - diff --git a/doc/build/changelog/unreleased_20/11821.rst b/doc/build/changelog/unreleased_20/11821.rst deleted file mode 100644 index b72412f489f..00000000000 --- a/doc/build/changelog/unreleased_20/11821.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, postgresql - :tickets: 11821 - - Revising the asyncpg ``terminate()`` fix first made in :ticket:`10717` - which improved the resiliency of this call under all circumstances, adding - ``asyncio.CancelledError`` to the list of exceptions that are intercepted - as failing for a graceful ``.close()`` which will then proceed to call - ``.terminate()``. diff --git a/doc/build/changelog/unreleased_20/11822.rst b/doc/build/changelog/unreleased_20/11822.rst deleted file mode 100644 index f6c91918f39..00000000000 --- a/doc/build/changelog/unreleased_20/11822.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, mssql - :tickets: 11822 - - Added error "The server failed to resume the transaction" to the list of - error strings for the pymssql driver in determining a disconnect scenario, - as observed by one user using pymssql under otherwise unknown conditions as - leaving an unusable connection in the connection pool which fails to ping - cleanly. diff --git a/doc/build/changelog/unreleased_20/array_type.rst b/doc/build/changelog/unreleased_20/array_type.rst deleted file mode 100644 index 23e0727fa26..00000000000 --- a/doc/build/changelog/unreleased_20/array_type.rst +++ /dev/null @@ -1,5 +0,0 @@ -.. change:: - :tags: bug, tests - - Added missing ``array_type`` property to the testing suite - ``SuiteRequirements`` class. 
diff --git a/doc/build/conf.py b/doc/build/conf.py index d85b42525df..41c2773bcf2 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.32" +release = "2.0.33" -release_date = "August 5, 2024" +release_date = "September 3, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From c2218b2633a36bf7d8a23d5c10df04d08f408501 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 3 Sep 2024 13:22:38 -0400 Subject: [PATCH 309/544] Version 2.0.34 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 40cd86afe93..2a432cd0dba 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.34 + :include_notes_from: unreleased_20 + .. changelog:: :version: 2.0.33 :released: September 3, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 8eec8fce271..61d7c672b5d 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.33" +__version__ = "2.0.34" def __go(lcls: Any) -> None: From 3d879a4413ae8c004b5613fde4003ce186ec51db Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 3 Sep 2024 23:08:21 -0400 Subject: [PATCH 310/544] use proper functions to get typing origin, args Fixed regression caused by issue :ticket:`11814` which broke support for certain flavors of :pep:`593` ``Annotated`` in the type_annotation_map when builtin types such as ``list``, ``dict`` were used without an element type. While this is an incomplete style of typing, these types nonetheless previously would be located in the type_annotation_map correctly. Fixes: #11831 Change-Id: I6ea7fc1bce462d44ffcf67ef18b60050dfc2c91e (cherry picked from commit e51ff826b9374cadb8eded370a808bc4dcbe56ba) --- doc/build/changelog/unreleased_20/11831.rst | 9 ++++++ lib/sqlalchemy/util/typing.py | 7 +++-- .../test_tm_future_annotations_sync.py | 30 +++++++++++++++---- test/orm/declarative/test_typed_mapping.py | 30 +++++++++++++++---- 4 files changed, 63 insertions(+), 13 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11831.rst diff --git a/doc/build/changelog/unreleased_20/11831.rst b/doc/build/changelog/unreleased_20/11831.rst new file mode 100644 index 00000000000..65699bf5d80 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11831.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, orm + :tickets: 11831 + + Fixed regression caused by issue :ticket:`11814` which broke support for + certain flavors of :pep:`593` ``Annotated`` in the type_annotation_map when + builtin types such as ``list``, ``dict`` were used without an element type. + While this is an incomplete style of typing, these types nonetheless + previously would be located in the type_annotation_map correctly. 
diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index 81e77f629f7..53e3c1732c7 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -199,9 +199,10 @@ def fixup_container_fwd_refs( and similar for list, set """ + if ( is_generic(type_) - and type_.__origin__ + and typing_get_origin(type_) in ( dict, set, @@ -221,11 +222,11 @@ def fixup_container_fwd_refs( ) ): # compat with py3.10 and earlier - return type_.__origin__.__class_getitem__( # type: ignore + return typing_get_origin(type_).__class_getitem__( # type: ignore tuple( [ ForwardRef(elem) if isinstance(elem, str) else elem - for elem in type_.__args__ + for elem in typing_get_args(type_) ] ) ) diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index 765318cfa28..f8c6a380a53 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -1419,6 +1419,16 @@ class RefElementTwo(decl_base): Dict, (str, str), ), + (list, None, testing.requires.python310), + ( + List, + None, + ), + (dict, None, testing.requires.python310), + ( + Dict, + None, + ), id_="sa", argnames="container_typ,args", ) @@ -1428,22 +1438,30 @@ def test_extract_composed(self, container_typ, args, style): test #11814 + test #11831, regression from #11814 """ global TestType if style.pep593: - TestType = Annotated[container_typ[args], 0] + if args is None: + TestType = Annotated[container_typ, 0] + else: + TestType = Annotated[container_typ[args], 0] elif style.alias: - TestType = container_typ[args] + if args is None: + TestType = container_typ + else: + TestType = container_typ[args] elif style.direct: TestType = container_typ - double_strings = args == (str, str) class Base(DeclarativeBase): if style.direct: - if double_strings: + if args == (str, str): type_annotation_map = {TestType[str, str]: JSON()} + elif args is None: + type_annotation_map = {TestType: JSON()} else: type_annotation_map = {TestType[str]: JSON()} else: @@ -1455,8 +1473,10 @@ class MyClass(Base): id: Mapped[int] = mapped_column(primary_key=True) if style.direct: - if double_strings: + if args == (str, str): data: Mapped[TestType[str, str]] = mapped_column() + elif args is None: + data: Mapped[TestType] = mapped_column() else: data: Mapped[TestType[str]] = mapped_column() else: diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 8b10118f4c9..0213a0db3b0 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -1410,6 +1410,16 @@ class RefElementTwo(decl_base): Dict, (str, str), ), + (list, None, testing.requires.python310), + ( + List, + None, + ), + (dict, None, testing.requires.python310), + ( + Dict, + None, + ), id_="sa", argnames="container_typ,args", ) @@ -1419,22 +1429,30 @@ def test_extract_composed(self, container_typ, args, style): test #11814 + test #11831, regression from #11814 """ global TestType if style.pep593: - TestType = Annotated[container_typ[args], 0] + if args is None: + TestType = Annotated[container_typ, 0] + else: + TestType = Annotated[container_typ[args], 0] elif style.alias: - TestType = container_typ[args] + if args is None: + TestType = container_typ + else: + TestType = container_typ[args] elif style.direct: TestType = container_typ - double_strings = args == (str, str) class Base(DeclarativeBase): if style.direct: - if double_strings: + if args == (str, str): 
type_annotation_map = {TestType[str, str]: JSON()} + elif args is None: + type_annotation_map = {TestType: JSON()} else: type_annotation_map = {TestType[str]: JSON()} else: @@ -1446,8 +1464,10 @@ class MyClass(Base): id: Mapped[int] = mapped_column(primary_key=True) if style.direct: - if double_strings: + if args == (str, str): data: Mapped[TestType[str, str]] = mapped_column() + elif args is None: + data: Mapped[TestType] = mapped_column() else: data: Mapped[TestType[str]] = mapped_column() else: From c4282ecd253bf509caf9ad0a1535b5612a369f30 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 4 Sep 2024 08:45:52 -0400 Subject: [PATCH 311/544] Fix regular expression for `SQLiteDialect.get_check_constraints` Fixed regression in SQLite reflection caused by :ticket:`11677` which interfered with reflection for CHECK constraints that were followed by other kinds of constraints within the same table definition. Pull request courtesy Harutaka Kawamura. Fixes: #11832 Closes: #11834 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11834 Pull-request-sha: a10fcef45ccfad6ebf17ec94fc48d0965f0d4123 Change-Id: I50bcc7aa727f73be235895d154cd859f19adda09 (cherry picked from commit 44be2ef4484345298825f547e21d2881cc4921a9) --- doc/build/changelog/unreleased_20/11832.rst | 9 ++++++++ lib/sqlalchemy/dialects/sqlite/base.py | 2 +- test/dialect/test_sqlite.py | 25 ++++++++++++++++++++- 3 files changed, 34 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11832.rst diff --git a/doc/build/changelog/unreleased_20/11832.rst b/doc/build/changelog/unreleased_20/11832.rst new file mode 100644 index 00000000000..9c1a79df180 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11832.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, sqlite + :tickets: 11832 + + Fixed regression in SQLite reflection caused by :ticket:`11677` which + interfered with reflection for CHECK constraints that were followed + by other kinds of constraints within the same table definition. Pull + request courtesy Harutaka Kawamura. + diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index cf8f16966ba..5e32e2fbb06 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -2674,7 +2674,7 @@ def get_check_constraints(self, connection, table_name, schema=None, **kw): # (1) Matches end of check constraint with trailing comma, # optional whitespace (including newline), and the beginning # of the next constraint (either named or unnamed). 
- ,[\s\n]*(?=CONSTRAINT|CHECK) + ,[\s\n]*(?=CONSTRAINT|CHECK|UNIQUE|FOREIGN|PRIMARY) # OR operator, seperating (1) & (2) | # (2) Matches end parenthesis of table definition, seperated by diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index bdc3e7d09f0..d2fb752fa7a 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -1823,16 +1823,18 @@ def setup_test_class(cls): Column("id", Integer), Column("value", Integer), Column("prefix", String), - PrimaryKeyConstraint("id"), CheckConstraint("id > 0"), + UniqueConstraint("prefix", name="prefix_named"), # Constraint definition with newline and tab characters CheckConstraint( """((value > 0) AND \n\t(value < 100) AND \n\t (value != 50))""", name="ck_r_value_multiline", ), + UniqueConstraint("value"), # Constraint name with special chars and 'check' in the name CheckConstraint("value IS NOT NULL", name="^check-r* #\n\t"), + PrimaryKeyConstraint("id", name="pk_name"), # Constraint definition with special characters. CheckConstraint("prefix NOT GLOB '*[^-. /#,]*'"), ) @@ -2448,6 +2450,27 @@ def test_unique_constraint_unnamed_normal_temporary( [{"column_names": ["x"], "name": None}], ) + def test_unique_constraint_mixed_into_ck(self, connection): + """test #11832""" + + inspector = inspect(connection) + eq_( + inspector.get_unique_constraints("r"), + [ + {"name": "prefix_named", "column_names": ["prefix"]}, + {"name": None, "column_names": ["value"]}, + ], + ) + + def test_primary_key_constraint_mixed_into_ck(self, connection): + """test #11832""" + + inspector = inspect(connection) + eq_( + inspector.get_pk_constraint("r"), + {"constrained_columns": ["id"], "name": "pk_name"}, + ) + def test_primary_key_constraint_named(self): inspector = inspect(testing.db) eq_( From feea68a575d33375872a8b5ea95fbf0bfe15fdb7 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 4 Sep 2024 11:16:31 -0400 Subject: [PATCH 312/544] - 2.0.34 --- doc/build/changelog/changelog_20.rst | 22 ++++++++++++++++++++- doc/build/changelog/unreleased_20/11831.rst | 9 --------- doc/build/changelog/unreleased_20/11832.rst | 9 --------- doc/build/conf.py | 4 ++-- 4 files changed, 23 insertions(+), 21 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/11831.rst delete mode 100644 doc/build/changelog/unreleased_20/11832.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 2a432cd0dba..1117f0abea3 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,27 @@ .. changelog:: :version: 2.0.34 - :include_notes_from: unreleased_20 + :released: September 4, 2024 + + .. change:: + :tags: bug, orm + :tickets: 11831 + + Fixed regression caused by issue :ticket:`11814` which broke support for + certain flavors of :pep:`593` ``Annotated`` in the type_annotation_map when + builtin types such as ``list``, ``dict`` were used without an element type. + While this is an incomplete style of typing, these types nonetheless + previously would be located in the type_annotation_map correctly. + + .. change:: + :tags: bug, sqlite + :tickets: 11832 + + Fixed regression in SQLite reflection caused by :ticket:`11677` which + interfered with reflection for CHECK constraints that were followed + by other kinds of constraints within the same table definition. Pull + request courtesy Harutaka Kawamura. + .. 
changelog:: :version: 2.0.33 diff --git a/doc/build/changelog/unreleased_20/11831.rst b/doc/build/changelog/unreleased_20/11831.rst deleted file mode 100644 index 65699bf5d80..00000000000 --- a/doc/build/changelog/unreleased_20/11831.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11831 - - Fixed regression caused by issue :ticket:`11814` which broke support for - certain flavors of :pep:`593` ``Annotated`` in the type_annotation_map when - builtin types such as ``list``, ``dict`` were used without an element type. - While this is an incomplete style of typing, these types nonetheless - previously would be located in the type_annotation_map correctly. diff --git a/doc/build/changelog/unreleased_20/11832.rst b/doc/build/changelog/unreleased_20/11832.rst deleted file mode 100644 index 9c1a79df180..00000000000 --- a/doc/build/changelog/unreleased_20/11832.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, sqlite - :tickets: 11832 - - Fixed regression in SQLite reflection caused by :ticket:`11677` which - interfered with reflection for CHECK constraints that were followed - by other kinds of constraints within the same table definition. Pull - request courtesy Harutaka Kawamura. - diff --git a/doc/build/conf.py b/doc/build/conf.py index 41c2773bcf2..a21225dded7 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.33" +release = "2.0.34" -release_date = "September 3, 2024" +release_date = "September 4, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From a154c7312541d6b61ef9a373f153005e98793079 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 4 Sep 2024 11:19:59 -0400 Subject: [PATCH 313/544] Version 2.0.35 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 1117f0abea3..cca32ca1faf 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.35 + :include_notes_from: unreleased_20 + .. 
changelog:: :version: 2.0.34 :released: September 4, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 61d7c672b5d..d28421a9514 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.34" +__version__ = "2.0.35" def __go(lcls: Any) -> None: From b46db2d5fcf22cb861363533ac1e221c027a0829 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 4 Sep 2024 19:10:31 +0200 Subject: [PATCH 314/544] fix typo in `elect.slice` docs Change-Id: I859b48e320a04cedc6084d067cb20b89ac5d76bb (cherry picked from commit 6d0379f0565db1b6bf3aa7bead44d759407abadc) --- lib/sqlalchemy/sql/selectable.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 2a96a7a1008..c6aa8a18815 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -4160,7 +4160,7 @@ def slice( For example, :: - stmt = select(User).order_by(User).id.slice(1, 3) + stmt = select(User).order_by(User.id).slice(1, 3) renders as From a48c04e94f79169b73a331f3ac2e172d05be40af Mon Sep 17 00:00:00 2001 From: Jimmy AUDEBERT <109511155+jaudebert@users.noreply.github.com> Date: Wed, 4 Sep 2024 19:23:53 +0200 Subject: [PATCH 315/544] Include operators in postgres JSONB documentation (#11828) (cherry picked from commit 06ca61066ee312a5198cf1db869f388255212559) --- lib/sqlalchemy/dialects/postgresql/json.py | 25 ++++++++++++++-------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index 3790fa359b1..1cdafbd03d9 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -256,22 +256,27 @@ class Comparator(JSON.Comparator): """Define comparison operations for :class:`_types.JSON`.""" def has_key(self, other): - """Boolean expression. Test for presence of a key. Note that the - key may be a SQLA expression. + """Boolean expression. Test for presence of a key (equivalent of + the ``?`` operator). Note that the key may be a SQLA expression. """ return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean) def has_all(self, other): - """Boolean expression. Test for presence of all keys in jsonb""" + """Boolean expression. Test for presence of all keys in jsonb + (equivalent of the ``?&`` operator) + """ return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean) def has_any(self, other): - """Boolean expression. Test for presence of any key in jsonb""" + """Boolean expression. Test for presence of any key in jsonb + (equivalent of the ``?|`` operator) + """ return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean) def contains(self, other, **kwargs): """Boolean expression. Test if keys (or array) are a superset - of/contained the keys of the argument jsonb expression. + of/contained the keys of the argument jsonb expression + (equivalent of the ``@>`` operator). kwargs may be ignored by this operator but are required for API conformance. @@ -280,7 +285,8 @@ def contains(self, other, **kwargs): def contained_by(self, other): """Boolean expression. Test if keys are a proper subset of the - keys of the argument jsonb expression. + keys of the argument jsonb expression + (equivalent of the ``<@`` operator). 
""" return self.operate( CONTAINED_BY, other, result_type=sqltypes.Boolean @@ -288,7 +294,7 @@ def contained_by(self, other): def delete_path(self, array): """JSONB expression. Deletes field or array element specified in - the argument array. + the argument array (equivalent of the ``#-`` operator). The input may be a list of strings that will be coerced to an ``ARRAY`` or an instance of :meth:`_postgres.array`. @@ -302,7 +308,7 @@ def delete_path(self, array): def path_exists(self, other): """Boolean expression. Test for presence of item given by the - argument JSONPath expression. + argument JSONPath expression (equivalent of the ``@?`` operator). .. versionadded:: 2.0 """ @@ -312,7 +318,8 @@ def path_exists(self, other): def path_match(self, other): """Boolean expression. Test if JSONPath predicate given by the - argument JSONPath expression matches. + argument JSONPath expression matches + (equivalent of the ``@@`` operator). Only the first item of the result is taken into account. From a538996c9baaf9cb4c6c1b7519f08419a86c9fe7 Mon Sep 17 00:00:00 2001 From: BOBOTANG Date: Thu, 5 Sep 2024 02:26:52 +0800 Subject: [PATCH 316/544] Fix the runtime issue related to the join operation in the association example (#11721) (cherry picked from commit e8dfefb97cd43bc03d202872b99931f61324fe80) --- examples/association/basic_association.py | 2 +- examples/association/proxied_association.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/examples/association/basic_association.py b/examples/association/basic_association.py index d2271ad430e..7a5b46097e3 100644 --- a/examples/association/basic_association.py +++ b/examples/association/basic_association.py @@ -105,7 +105,7 @@ def __init__(self, item, price=None): ) # print customers who bought 'MySQL Crowbar' on sale - q = session.query(Order).join("order_items", "item") + q = session.query(Order).join(OrderItem).join(Item) q = q.filter( and_(Item.description == "MySQL Crowbar", Item.price > OrderItem.price) ) diff --git a/examples/association/proxied_association.py b/examples/association/proxied_association.py index 0ec8fa899ac..65dcd6c0b66 100644 --- a/examples/association/proxied_association.py +++ b/examples/association/proxied_association.py @@ -112,7 +112,8 @@ def __init__(self, item, price=None): # print customers who bought 'MySQL Crowbar' on sale orders = ( session.query(Order) - .join("order_items", "item") + .join(OrderItem) + .join(Item) .filter(Item.description == "MySQL Crowbar") .filter(Item.price > OrderItem.price) ) From 4a72df72cd2bb890034a9843b27a51e662ceccf7 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 5 Sep 2024 10:06:36 -0400 Subject: [PATCH 317/544] move py313 tests to greenlet main vstinner's branch merged and was immediately deleted from that repo. greenlet still not released. 
so keep on chasing it :/ Change-Id: I79927061566db75b4e26b3dbc39b817786531db6 (cherry picked from commit 88dd18cd89598d0569d761db206d4559e8cd57be) --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 93b86dd5f75..37dd8f282f8 100644 --- a/tox.ini +++ b/tox.ini @@ -54,7 +54,7 @@ deps= # tracked by https://github.com/pytest-dev/pytest-xdist/issues/907 pytest-xdist!=3.3.0 - py313: git+https://github.com/vstinner/greenlet@py313\#egg=greenlet + py313: git+https://github.com/python-greenlet/greenlet.git\#egg=greenlet dbapimain-sqlite: git+https://github.com/omnilib/aiosqlite.git\#egg=aiosqlite dbapimain-sqlite: git+https://github.com/coleifer/sqlcipher3.git\#egg=sqlcipher3 From 7c422e3a3a02ec702b8cfa51b97d4ff6c299e2d0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 5 Sep 2024 11:54:32 -0400 Subject: [PATCH 318/544] cherry-pick changelog from 1.4.54 (cherry picked from commit 9a40da5b7785e15f4c4d2f45477154f578b7bddd) --- doc/build/changelog/changelog_14.rst | 54 ++++++++++++++++++++- doc/build/changelog/unreleased_14/11728.rst | 9 ---- doc/build/changelog/unreleased_14/11819.rst | 14 ------ 3 files changed, 53 insertions(+), 24 deletions(-) delete mode 100644 doc/build/changelog/unreleased_14/11728.rst delete mode 100644 doc/build/changelog/unreleased_14/11819.rst diff --git a/doc/build/changelog/changelog_14.rst b/doc/build/changelog/changelog_14.rst index e96d41bcca4..fde57fe3860 100644 --- a/doc/build/changelog/changelog_14.rst +++ b/doc/build/changelog/changelog_14.rst @@ -15,7 +15,59 @@ This document details individual issue-level changes made throughout .. changelog:: :version: 1.4.54 - :include_notes_from: unreleased_14 + :released: September 5, 2024 + + .. change:: + :tags: bug, regression, orm + :tickets: 11728 + :versions: 2.0.33 + + Fixed regression from 1.3 where the column key used for a hybrid property + might be populated with that of the underlying column that it returns, for + a property that returns an ORM mapped column directly, rather than the key + used by the hybrid property itself. + + .. change:: + :tags: change, general + :tickets: 11818 + :versions: 2.0.33 1.4.54 + + The pin for ``setuptools<69.3`` in ``pyproject.toml`` has been removed. + This pin was to prevent a sudden change in setuptools to use :pep:`625` + from taking place, which would change the file name of SQLAlchemy's source + distribution on pypi to be an all lower case name, which is likely to cause + problems with various build environments that expected the previous naming + style. However, the presence of this pin is holding back environments that + otherwise want to use a newer setuptools, so we've decided to move forward + with this change, with the assumption that build environments will have + largely accommodated the setuptools change by now. + + This change was first released in version 2.0.33 however is being + backported to 1.4.54 to support ongoing releases. + + + .. change:: + :tags: bug, postgresql + :tickets: 11819 + :versions: 2.0.33, 1.4.54 + + Fixed critical issue in the asyncpg driver where a rollback or commit that + fails specifically for the ``MissingGreenlet`` condition or any other error + that is not raised by asyncpg itself would discard the asyncpg transaction + in any case, even though the transaction were still idle, leaving to a + server side condition with an idle transaction that then goes back into the + connection pool. 
The flags for "transaction closed" are now not reset for + errors that are raised outside of asyncpg itself. When asyncpg itself + raises an error for ``.commit()`` or ``.rollback()``, asyncpg does then + discard of this transaction. + + .. change:: + :tags: change, general + + The setuptools "test" command is removed from the 1.4 series as modern + versions of setuptools actively refuse to accommodate this extension being + present. This change was already part of the 2.0 series. To run the + test suite use the ``tox`` command. .. changelog:: :version: 1.4.53 diff --git a/doc/build/changelog/unreleased_14/11728.rst b/doc/build/changelog/unreleased_14/11728.rst deleted file mode 100644 index b27aa3333d7..00000000000 --- a/doc/build/changelog/unreleased_14/11728.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, regression, orm - :tickets: 11728 - :versions: 2.0.33 - - Fixed regression from 1.3 where the column key used for a hybrid property - might be populated with that of the underlying column that it returns, for - a property that returns an ORM mapped column directly, rather than the key - used by the hybrid property itself. diff --git a/doc/build/changelog/unreleased_14/11819.rst b/doc/build/changelog/unreleased_14/11819.rst deleted file mode 100644 index 6211eb487ee..00000000000 --- a/doc/build/changelog/unreleased_14/11819.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. change:: - :tags: bug, postgresql - :tickets: 11819 - :versions: 2.0.33, 1.4.54 - - Fixed critical issue in the asyncpg driver where a rollback or commit that - fails specifically for the ``MissingGreenlet`` condition or any other error - that is not raised by asyncpg itself would discard the asyncpg transaction - in any case, even though the transaction were still idle, leaving to a - server side condition with an idle transaction that then goes back into the - connection pool. The flags for "transaction closed" are now not reset for - errors that are raised outside of asyncpg itself. When asyncpg itself - raises an error for ``.commit()`` or ``.rollback()``, asyncpg does then - discard of this transaction. From 86f7ea1f02f1ea71b228302a210d648cb81bb480 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 5 Sep 2024 11:54:33 -0400 Subject: [PATCH 319/544] cherry-pick changelog update for 1.4.55 (cherry picked from commit 7949426428f1ec19381116e025b95f86417a85fc) --- doc/build/changelog/changelog_14.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/build/changelog/changelog_14.rst b/doc/build/changelog/changelog_14.rst index fde57fe3860..f5a69b2fb68 100644 --- a/doc/build/changelog/changelog_14.rst +++ b/doc/build/changelog/changelog_14.rst @@ -13,6 +13,10 @@ This document details individual issue-level changes made throughout :start-line: 5 +.. changelog:: + :version: 1.4.55 + :include_notes_from: unreleased_14 + .. changelog:: :version: 1.4.54 :released: September 5, 2024 From 878de30dd934072f9f397086c3dcce845c8a298c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 5 Sep 2024 13:21:17 -0400 Subject: [PATCH 320/544] revert SQLite CHECK constraint changes and add new tests The changes made for SQLite CHECK constraint reflection in versions 2.0.33 and 2.0.34 , :ticket:`11832` and :ticket:`11677`, have now been fully reverted, as users continued to identify existing use cases that stopped working after this change. 
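To give a concrete sense of what remains reflectable (the table and engine below are hypothetical, not taken from the reported regressions), a CHECK constraint written on its own single line of the CREATE TABLE text, in the style emitted by SQLAlchemy's own DDL, reflects as expected:

    from sqlalchemy import create_engine, inspect

    engine = create_engine("sqlite://")
    with engine.begin() as conn:
        conn.exec_driver_sql(
            "CREATE TABLE t (\n"
            "q INTEGER, \n"
            "CONSTRAINT cq CHECK (q > 1 AND q < 5)\n"
            ")"
        )
    inspect(engine).get_check_constraints("t")
    # approximately: [{'sqltext': 'q > 1 AND q < 5', 'name': 'cq'}]
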
For the moment, because SQLite does not provide any consistent way of delivering information about CHECK constraints, SQLAlchemy is limited in what CHECK constraint syntaxes can be reflected, including that a CHECK constraint must be stated all on a single, independent line (or inline on a column definition) without newlines, tabs in the constraint definition or unusual characters in the constraint name. Overall, reflection for SQLite is tailored towards being able to reflect CREATE TABLE statements that were originally created by SQLAlchemy DDL constructs. Long term work on a DDL parser that does not rely upon regular expressions may eventually improve upon this situation. A wide range of additional cross-dialect CHECK constraint reflection tests have been added as it was also a bug that these changes did not trip any existing tests. Fixes: #11840 Change-Id: Iaa4f9651d0c3dd5dbb530ccaa6688169eb7f3bb8 (cherry picked from commit 75ab6b370034e69bc798f9a77751afb200e24f1d) --- doc/build/changelog/unreleased_20/11840.rst | 20 ++ lib/sqlalchemy/dialects/sqlite/base.py | 76 ++------ lib/sqlalchemy/testing/requirements.py | 5 + .../testing/suite/test_reflection.py | 177 ++++++++++++++---- test/dialect/test_sqlite.py | 35 +++- test/requirements.py | 10 + 6 files changed, 217 insertions(+), 106 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11840.rst diff --git a/doc/build/changelog/unreleased_20/11840.rst b/doc/build/changelog/unreleased_20/11840.rst new file mode 100644 index 00000000000..42074e3d2b3 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11840.rst @@ -0,0 +1,20 @@ +.. change:: + :tags: bug, sqlite, regression + :tickets: 11840 + + The changes made for SQLite CHECK constraint reflection in versions 2.0.33 + and 2.0.34 , :ticket:`11832` and :ticket:`11677`, have now been fully + reverted, as users continued to identify existing use cases that stopped + working after this change. For the moment, because SQLite does not + provide any consistent way of delivering information about CHECK + constraints, SQLAlchemy is limited in what CHECK constraint syntaxes can be + reflected, including that a CHECK constraint must be stated all on a + single, independent line (or inline on a column definition) without + newlines, tabs in the constraint definition or unusual characters in the + constraint name. Overall, reflection for SQLite is tailored towards being + able to reflect CREATE TABLE statements that were originally created by + SQLAlchemy DDL constructs. Long term work on a DDL parser that does not + rely upon regular expressions may eventually improve upon this situation. + A wide range of additional cross-dialect CHECK constraint reflection tests + have been added as it was also a bug that these changes did not trip any + existing tests. diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 5e32e2fbb06..0e2dc3b6394 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -2624,69 +2624,21 @@ def get_check_constraints(self, connection, table_name, schema=None, **kw): connection, table_name, schema=schema, **kw ) - # Notes: - # * The pattern currently matches any character for the name of the - # constraint, including newline characters (re.S flag) as long as - # none of the SQLite's table constraints keywords are encountered - # by a negative lookahead. - # This prevents the pattern from matching subsequent constraints - # as part of the name. 
- # This is only done for those keywords if seperated by spaces, to - # support constraint names that contains them e.g. "check_value". - # - # * Because check constraint definitions can also contain newline - # or tab characters, the pattern matches any character untill either - # the beginning of the next constraint statement using a - # non-capturing and non-consuming group, allowing the next one - # to match, or the end of the table definition - # e.g. newline and closing ')'. - CHECK_PATTERN = r""" - # Non-capturing group for the name part of named check constraints. - # This group is optional as unnamed check constraints can exist. - (?: - # Match beginning of constraint definition seperated by whitespace. - CONSTRAINT\s - - # First capturing group that matches the actual name of the constraint. - # Any characters is allowed, as long as none of the reserved table - # constraint keywords are encountered using a negative lookahead. - ((?:(?!\sPRIMARY\s|\sFOREIGN\sKEY|\sUNIQUE\s|\sCHECK\s).)+) - - # End of optional non-capturing name group seperated by whitespace. - \s)? - - # Match beginning of the check expression with starting parenthesis - # and optional whitespace. - CHECK\s?\( - - # Match actual expression, which can be any character. - (.+?) - - # End parenthesis of the check expression. - \) - - # Non-capturing group that helps denote the end of the check - # expression part. - # This can either be (1) the beginning of the next constraint, - # or (2) the end of the table definition. - (?: - - # (1) Matches end of check constraint with trailing comma, - # optional whitespace (including newline), and the beginning - # of the next constraint (either named or unnamed). - ,[\s\n]*(?=CONSTRAINT|CHECK|UNIQUE|FOREIGN|PRIMARY) - # OR operator, seperating (1) & (2) - | - # (2) Matches end parenthesis of table definition, seperated by - # newline. - \n\) - # End of non-capturing group. - ) - """ + # NOTE NOTE NOTE + # DO NOT CHANGE THIS REGULAR EXPRESSION. There is no known way + # to parse CHECK constraints that contain newlines themselves using + # regular expressions, and the approach here relies upon each + # individual + # CHECK constraint being on a single line by itself. This + # necessarily makes assumptions as to how the CREATE TABLE + # was emitted. A more comprehensive DDL parsing solution would be + # needed to improve upon the current situation. See #11840 for + # background + CHECK_PATTERN = r"(?:CONSTRAINT (.+) +)?CHECK *\( *(.+) *\),? 
*" cks = [] - for match in re.finditer( - CHECK_PATTERN, table_data or "", re.I | re.S | re.VERBOSE - ): + + for match in re.finditer(CHECK_PATTERN, table_data or "", re.I): + name = match.group(1) if name: diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index b23230f8b2c..a11e23631b3 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -796,6 +796,11 @@ def unique_constraint_reflection(self): """target dialect supports reflection of unique constraints""" return exclusions.open() + @property + def inline_check_constraint_reflection(self): + """target dialect supports reflection of inline check constraints""" + return exclusions.closed() + @property def check_constraint_reflection(self): """target dialect supports reflection of check constraints""" diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index f257d2fcbc8..91113be9b49 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -6,6 +6,7 @@ # the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors +import contextlib import operator import re @@ -2454,62 +2455,158 @@ def test_get_columns_view_no_columns(self, connection, view_no_columns): class ComponentReflectionTestExtra(ComparesIndexes, fixtures.TestBase): __backend__ = True - @testing.combinations( - (True, testing.requires.schemas), (False,), argnames="use_schema" - ) - @testing.requires.check_constraint_reflection - def test_get_check_constraints(self, metadata, connection, use_schema): - if use_schema: - schema = config.test_schema + @testing.fixture(params=[True, False]) + def use_schema_fixture(self, request): + if request.param: + return config.test_schema else: - schema = None + return None - Table( - "sa_cc", - metadata, - Column("a", Integer()), - sa.CheckConstraint("a > 1 AND a < 5", name="cc1"), - sa.CheckConstraint( - "a = 1 OR (a > 2 AND a < 5)", name="UsesCasing" - ), - schema=schema, - ) - Table( - "no_constraints", - metadata, - Column("data", sa.String(20)), - schema=schema, - ) + @testing.fixture() + def inspect_for_table(self, metadata, connection, use_schema_fixture): + @contextlib.contextmanager + def go(tablename): + yield use_schema_fixture, inspect(connection) - metadata.create_all(connection) + metadata.create_all(connection) - insp = inspect(connection) - reflected = sorted( - insp.get_check_constraints("sa_cc", schema=schema), - key=operator.itemgetter("name"), - ) + return go + def ck_eq(self, reflected, expected): # trying to minimize effect of quoting, parenthesis, etc. 
# may need to add more to this as new dialects get CHECK # constraint reflection support def normalize(sqltext): return " ".join( - re.findall(r"and|\d|=|a|or|<|>", sqltext.lower(), re.I) + re.findall(r"and|\d|=|a|b|c|or|<|>", sqltext.lower(), re.I) ) - reflected = [ - {"name": item["name"], "sqltext": normalize(item["sqltext"])} - for item in reflected - ] - eq_( + reflected = sorted( + [ + {"name": item["name"], "sqltext": normalize(item["sqltext"])} + for item in reflected + ], + key=lambda item: (item["sqltext"]), + ) + + expected = sorted( + expected, + key=lambda item: (item["sqltext"]), + ) + eq_(reflected, expected) + + @testing.requires.check_constraint_reflection + def test_check_constraint_no_constraint(self, metadata, inspect_for_table): + with inspect_for_table("no_constraints") as (schema, inspector): + Table( + "no_constraints", + metadata, + Column("data", sa.String(20)), + schema=schema, + ) + + self.ck_eq( + inspector.get_check_constraints("no_constraints", schema=schema), + [], + ) + + @testing.requires.inline_check_constraint_reflection + @testing.combinations( + "my_inline", "MyInline", None, argnames="constraint_name" + ) + def test_check_constraint_inline( + self, metadata, inspect_for_table, constraint_name + ): + + with inspect_for_table("sa_cc") as (schema, inspector): + Table( + "sa_cc", + metadata, + Column("id", Integer(), primary_key=True), + Column( + "a", + Integer(), + sa.CheckConstraint( + "a > 1 AND a < 5", name=constraint_name + ), + ), + Column("data", String(50)), + schema=schema, + ) + + reflected = inspector.get_check_constraints("sa_cc", schema=schema) + + self.ck_eq( + reflected, + [ + { + "name": constraint_name or mock.ANY, + "sqltext": "a > 1 and a < 5", + }, + ], + ) + + @testing.requires.check_constraint_reflection + @testing.combinations( + "my_ck_const", "MyCkConst", None, argnames="constraint_name" + ) + def test_check_constraint_standalone( + self, metadata, inspect_for_table, constraint_name + ): + with inspect_for_table("sa_cc") as (schema, inspector): + Table( + "sa_cc", + metadata, + Column("a", Integer()), + sa.CheckConstraint( + "a = 1 OR (a > 2 AND a < 5)", name=constraint_name + ), + schema=schema, + ) + + reflected = inspector.get_check_constraints("sa_cc", schema=schema) + + self.ck_eq( + reflected, + [ + { + "name": constraint_name or mock.ANY, + "sqltext": "a = 1 or a > 2 and a < 5", + }, + ], + ) + + @testing.requires.inline_check_constraint_reflection + def test_check_constraint_mixed(self, metadata, inspect_for_table): + with inspect_for_table("sa_cc") as (schema, inspector): + Table( + "sa_cc", + metadata, + Column("id", Integer(), primary_key=True), + Column("a", Integer(), sa.CheckConstraint("a > 1 AND a < 5")), + Column( + "b", + Integer(), + sa.CheckConstraint("b > 1 AND b < 5", name="my_inline"), + ), + Column("c", Integer()), + Column("data", String(50)), + sa.UniqueConstraint("data", name="some_uq"), + sa.CheckConstraint("c > 1 AND c < 5", name="cc1"), + sa.UniqueConstraint("c", name="some_c_uq"), + schema=schema, + ) + + reflected = inspector.get_check_constraints("sa_cc", schema=schema) + + self.ck_eq( reflected, [ - {"name": "UsesCasing", "sqltext": "a = 1 or a > 2 and a < 5"}, - {"name": "cc1", "sqltext": "a > 1 and a < 5"}, + {"name": "cc1", "sqltext": "c > 1 and c < 5"}, + {"name": "my_inline", "sqltext": "b > 1 and b < 5"}, + {"name": mock.ANY, "sqltext": "a > 1 and a < 5"}, ], ) - no_cst = "no_constraints" - eq_(insp.get_check_constraints(no_cst, schema=schema), []) @testing.requires.indexes_with_expressions 
def test_reflect_expression_based_indexes(self, metadata, connection): diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index d2fb752fa7a..d1ddcc55037 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -1874,8 +1874,20 @@ def setup_test_class(cls): conn.exec_driver_sql( "CREATE TABLE cp (" - "q INTEGER check (q > 1 AND q < 6),\n" - "CONSTRAINT cq CHECK (q == 1 OR (q > 2 AND q < 5))\n" + "id INTEGER NOT NULL,\n" + "q INTEGER, \n" + "p INTEGER, \n" + "CONSTRAINT cq CHECK (p = 1 OR (p > 2 AND p < 5)),\n" + "PRIMARY KEY (id)\n" + ")" + ) + + conn.exec_driver_sql( + "CREATE TABLE cp_inline (\n" + "id INTEGER NOT NULL,\n" + "q INTEGER CHECK (q > 1 AND q < 6), \n" + "p INTEGER CONSTRAINT cq CHECK (p = 1 OR (p > 2 AND p < 5)),\n" + "PRIMARY KEY (id)\n" ")" ) @@ -2492,15 +2504,30 @@ def test_primary_key_constraint_no_pk(self): {"constrained_columns": [], "name": None}, ) - def test_check_constraint(self): + def test_check_constraint_plain(self): inspector = inspect(testing.db) eq_( inspector.get_check_constraints("cp"), [ - {"sqltext": "q == 1 OR (q > 2 AND q < 5)", "name": "cq"}, + {"sqltext": "p = 1 OR (p > 2 AND p < 5)", "name": "cq"}, + ], + ) + + def test_check_constraint_inline_plain(self): + inspector = inspect(testing.db) + eq_( + inspector.get_check_constraints("cp_inline"), + [ + {"sqltext": "p = 1 OR (p > 2 AND p < 5)", "name": "cq"}, {"sqltext": "q > 1 AND q < 6", "name": None}, ], ) + + @testing.fails("need to come up with new regex and/or DDL parsing") + def test_check_constraint_multiline(self): + """test for #11677""" + + inspector = inspect(testing.db) eq_( inspector.get_check_constraints("r"), [ diff --git a/test/requirements.py b/test/requirements.py index 2e74f81aeb6..ec65436e0d0 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -613,6 +613,16 @@ def unique_constraint_reflection_no_index_overlap(self): + skip_if("oracle") ) + @property + def inline_check_constraint_reflection(self): + return only_on( + [ + "postgresql", + "sqlite", + "oracle", + ] + ) + @property def check_constraint_reflection(self): return only_on( From 0fc87b845d853b4ac375711e6f5d8c87aa95f539 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 7 Sep 2024 17:41:16 -0400 Subject: [PATCH 321/544] test for Concatenable in ORM evaluator for concat_op Fixed issue in ORM evaluator where two datatypes being evaluated with the SQL concatenator operator would not be checked for :class:`.UnevaluatableError` based on their datatype; this missed the case of :class:`_postgresql.JSONB` values being used in a concatenate operation which is supported by PostgreSQL as well as how SQLAlchemy renders the SQL for this operation, but does not work at the Python level. By implementing :class:`.UnevaluatableError` for this combination, ORM update statements will now fall back to "expire" when a concatenated JSON value used in a SET clause is to be synchronized to a Python object. 
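For illustration, the kind of statement affected is sketched below; the ``TestTbl`` mapping with its JSONB ``test_field`` column is hypothetical and mirrors the new test added in this patch:

    from sqlalchemy import update

    stmt = (
        update(TestTbl)
        .where(TestTbl.test_id == 1)
        .values(test_field=TestTbl.test_field + {"extra": "value"})
    )
    session.execute(stmt)
    # the concatenated JSONB value cannot be computed in Python, so the
    # attribute on any in-session TestTbl object is expired instead and
    # re-loaded from the database on next access
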
Fixes: #11849 Change-Id: Iccd97edf57b99b9a606ab3a47d2e3e5b63f0db07 (cherry picked from commit 8b08e9ba2420e856c5073129b351cfd5cf95422b) --- doc/build/changelog/unreleased_20/11849.rst | 13 +++++++ lib/sqlalchemy/orm/evaluator.py | 11 ++++++ test/orm/dml/test_evaluator.py | 8 ++++ test/orm/dml/test_update_delete_where.py | 41 +++++++++++++++++++++ 4 files changed, 73 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11849.rst diff --git a/doc/build/changelog/unreleased_20/11849.rst b/doc/build/changelog/unreleased_20/11849.rst new file mode 100644 index 00000000000..4a274702ecb --- /dev/null +++ b/doc/build/changelog/unreleased_20/11849.rst @@ -0,0 +1,13 @@ +.. change:: + :tags: orm, bug + :tickets: 11849 + + Fixed issue in ORM evaluator where two datatypes being evaluated with the + SQL concatenator operator would not be checked for + :class:`.UnevaluatableError` based on their datatype; this missed the case + of :class:`_postgresql.JSONB` values being used in a concatenate operation + which is supported by PostgreSQL as well as how SQLAlchemy renders the SQL + for this operation, but does not work at the Python level. By implementing + :class:`.UnevaluatableError` for this combination, ORM update statements + will now fall back to "expire" when a concatenated JSON value used in a SET + clause is to be synchronized to a Python object. diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py index f2644548c11..2c10ec55afa 100644 --- a/lib/sqlalchemy/orm/evaluator.py +++ b/lib/sqlalchemy/orm/evaluator.py @@ -28,6 +28,7 @@ from .. import inspect from ..sql import and_ from ..sql import operators +from ..sql.sqltypes import Concatenable from ..sql.sqltypes import Integer from ..sql.sqltypes import Numeric from ..util import warn_deprecated @@ -311,6 +312,16 @@ def visit_not_in_op_binary_op( def visit_concat_op_binary_op( self, operator, eval_left, eval_right, clause ): + + if not issubclass( + clause.left.type._type_affinity, Concatenable + ) or not issubclass(clause.right.type._type_affinity, Concatenable): + raise UnevaluatableError( + f"Cannot evaluate concatenate operator " + f'"{operator.__name__}" for ' + f"datatypes {clause.left.type}, {clause.right.type}" + ) + return self._straight_evaluate( lambda a, b: a + b, eval_left, eval_right, clause ) diff --git a/test/orm/dml/test_evaluator.py b/test/orm/dml/test_evaluator.py index 81da16914b7..3fc82db6944 100644 --- a/test/orm/dml/test_evaluator.py +++ b/test/orm/dml/test_evaluator.py @@ -370,6 +370,14 @@ def test_custom_op(self): r"Cannot evaluate math operator \"add\" for " r"datatypes JSON, INTEGER", ), + ( + lambda User: User.json + {"bar": "bat"}, + "json", + {"foo": "bar"}, + evaluator.UnevaluatableError, + r"Cannot evaluate concatenate operator \"concat_op\" for " + r"datatypes JSON, JSON", + ), ( lambda User: User.json - 12, "json", diff --git a/test/orm/dml/test_update_delete_where.py b/test/orm/dml/test_update_delete_where.py index 6e5d29fe97b..3f7b08b470c 100644 --- a/test/orm/dml/test_update_delete_where.py +++ b/test/orm/dml/test_update_delete_where.py @@ -3294,3 +3294,44 @@ def test_load_from_delete(self, connection, use_from_statement): ) # TODO: state of above objects should be "deleted" + + +class PGIssue11849Test(fixtures.DeclarativeMappedTest): + __backend__ = True + __only_on__ = ("postgresql",) + + @classmethod + def setup_classes(cls): + + from sqlalchemy.dialects.postgresql import JSONB + + Base = cls.DeclarativeBasic + + class TestTbl(Base): + __tablename__ = "testtbl" + + 
test_id = Column(Integer, primary_key=True) + test_field = Column(JSONB) + + def test_issue_11849(self): + TestTbl = self.classes.TestTbl + + session = fixture_session() + + obj = TestTbl( + test_id=1, test_field={"test1": 1, "test2": "2", "test3": [3, "3"]} + ) + session.add(obj) + + query = ( + update(TestTbl) + .where(TestTbl.test_id == 1) + .values(test_field=TestTbl.test_field + {"test3": {"test4": 4}}) + ) + session.execute(query) + + # not loaded + assert "test_field" not in obj.__dict__ + + # synchronizes on load + eq_(obj.test_field, {"test1": 1, "test2": "2", "test3": {"test4": 4}}) From fb02aa96c24204cb317f6369d74e85e903a6d978 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 9 Sep 2024 09:21:20 -0400 Subject: [PATCH 322/544] deprecate joinedload, subqueryload with DML; use correct statement An ORM exception is raised if :func:`_orm.joinedload` or :func:`_orm.subqueryload` are used as a top level option against a statement that is not a SELECT statement, such as with an ``insert().returning()``. There are no JOINs in INSERT statements nor is there a "subquery" that can be repurposed for subquery eager loading, and for UPDATE/DELETE joinedload does not support these either, so it is never appropriate for this use to pass silently. Fixed issue where using eager loaders such as :func:`_orm.selectinload` with additional criteria in combination with ORM DML such as :func:`_sql.insert` with RETURNING would not correctly set up internal contexts required for caching to work correctly, leading to incorrect results. Fixes: #11853 Fixes: #11855 Change-Id: Ibbf46ba4f83e472441074c3257e23388e0fcec37 (cherry picked from commit 9ea449bf41006e94273186a974d3a1b091a0552a) --- doc/build/changelog/unreleased_20/11853.rst | 11 + doc/build/changelog/unreleased_20/11855.rst | 9 + lib/sqlalchemy/orm/bulk_persistence.py | 1 + lib/sqlalchemy/orm/context.py | 15 +- lib/sqlalchemy/orm/query.py | 2 + lib/sqlalchemy/orm/strategies.py | 29 ++- lib/sqlalchemy/orm/strategy_options.py | 2 +- test/orm/dml/test_bulk_statements.py | 211 ++++++++++++++++++++ 8 files changed, 275 insertions(+), 5 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11853.rst create mode 100644 doc/build/changelog/unreleased_20/11855.rst diff --git a/doc/build/changelog/unreleased_20/11853.rst b/doc/build/changelog/unreleased_20/11853.rst new file mode 100644 index 00000000000..92e6abdb680 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11853.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: bug, orm + :tickets: 11853 + + An warning is emitted if :func:`_orm.joinedload` or + :func:`_orm.subqueryload` are used as a top level option against a + statement that is not a SELECT statement, such as with an + ``insert().returning()``. There are no JOINs in INSERT statements nor is + there a "subquery" that can be repurposed for subquery eager loading, and + for UPDATE/DELETE joinedload does not support these either, so it is never + appropriate for this use to pass silently. diff --git a/doc/build/changelog/unreleased_20/11855.rst b/doc/build/changelog/unreleased_20/11855.rst new file mode 100644 index 00000000000..cee30cf8b3a --- /dev/null +++ b/doc/build/changelog/unreleased_20/11855.rst @@ -0,0 +1,9 @@ +.. 
change:: + :tags: bug, orm + :tickets: 11855 + + Fixed issue where using loader options such as :func:`_orm.selectinload` + with additional criteria in combination with ORM DML such as + :func:`_sql.insert` with RETURNING would not correctly set up internal + contexts required for caching to work correctly, leading to incorrect + results. diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py index d07cde85cd1..2a23caad53f 100644 --- a/lib/sqlalchemy/orm/bulk_persistence.py +++ b/lib/sqlalchemy/orm/bulk_persistence.py @@ -619,6 +619,7 @@ def _return_orm_returning( querycontext = QueryContext( compile_state.from_statement_ctx, compile_state.select_statement, + statement, params, session, load_options, diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index c2cb54e191c..6bacd77ebac 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -104,6 +104,7 @@ class QueryContext: "top_level_context", "compile_state", "query", + "user_passed_query", "params", "load_options", "bind_arguments", @@ -148,6 +149,10 @@ def __init__( self, compile_state: CompileState, statement: Union[Select[Any], FromStatement[Any]], + user_passed_query: Union[ + Select[Any], + FromStatement[Any], + ], params: _CoreSingleExecuteParams, session: Session, load_options: Union[ @@ -162,6 +167,13 @@ def __init__( self.bind_arguments = bind_arguments or _EMPTY_DICT self.compile_state = compile_state self.query = statement + + # the query that the end user passed to Session.execute() or similar. + # this is usually the same as .query, except in the bulk_persistence + # routines where a separate FromStatement is manufactured in the + # compile stage; this allows differentiation in that case. + self.user_passed_query = user_passed_query + self.session = session self.loaders_require_buffering = False self.loaders_require_uniquing = False @@ -169,7 +181,7 @@ def __init__( self.top_level_context = load_options._sa_top_level_orm_context cached_options = compile_state.select_statement._with_options - uncached_options = statement._with_options + uncached_options = user_passed_query._with_options # see issue #7447 , #8399 for some background # propagated loader options will be present on loaded InstanceState @@ -578,6 +590,7 @@ def orm_setup_cursor_result( querycontext = QueryContext( compile_state, statement, + statement, params, session, load_options, diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 4f0b4891fd6..5c1a45b5ffa 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -2939,6 +2939,7 @@ def instances( context = QueryContext( compile_state, compile_state.statement, + compile_state.statement, self._params, self.session, self.load_options, @@ -3298,6 +3299,7 @@ def _compile_context(self, for_statement: bool = False) -> QueryContext: context = QueryContext( compile_state, compile_state.statement, + compile_state.statement, self._params, self.session, self.load_options, diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index c6b9a4c7fa9..790ce28e56f 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -1961,6 +1961,18 @@ def create_row_processor( adapter, populators, ): + if ( + loadopt + and context.compile_state.statement is not None + and context.compile_state.statement.is_dml + ): + util.warn_deprecated( + "The subqueryload loader option is not compatible with DML " + "statements such as INSERT, UPDATE. 
Only SELECT may be used." + "This warning will become an exception in a future release.", + "2.0", + ) + if context.refresh_state: return self._immediateload_create_row_processor( context, @@ -2126,6 +2138,17 @@ def setup_query( if not compile_state.compile_options._enable_eagerloads: return + elif ( + loadopt + and compile_state.statement is not None + and compile_state.statement.is_dml + ): + util.warn_deprecated( + "The joinedload loader option is not compatible with DML " + "statements such as INSERT, UPDATE. Only SELECT may be used." + "This warning will become an exception in a future release.", + "2.0", + ) elif self.uselist: compile_state.multi_row_eager_loaders = True @@ -3152,7 +3175,7 @@ def _load_for_path( orig_query = context.compile_state.select_statement # the actual statement that was requested is this one: - # context_query = context.query + # context_query = context.user_passed_query # # that's not the cached one, however. So while it is of the identical # structure, if it has entities like AliasedInsp, which we get from @@ -3176,11 +3199,11 @@ def _load_for_path( effective_path = path[self.parent_property] - if orig_query is context.query: + if orig_query is context.user_passed_query: new_options = orig_query._with_options else: cached_options = orig_query._with_options - uncached_options = context.query._with_options + uncached_options = context.user_passed_query._with_options # propagate compile state options from the original query, # updating their "extra_criteria" as necessary. diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index 974a1ff45f6..b4bfea14726 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -1081,7 +1081,7 @@ def _adjust_for_extra_criteria(self, context: QueryContext) -> Load: else: return self - replacement_cache_key = context.query._generate_cache_key() + replacement_cache_key = context.user_passed_query._generate_cache_key() if replacement_cache_key is None: return self diff --git a/test/orm/dml/test_bulk_statements.py b/test/orm/dml/test_bulk_statements.py index 1e5c17c9de4..8c6acf4dec6 100644 --- a/test/orm/dml/test_bulk_statements.py +++ b/test/orm/dml/test_bulk_statements.py @@ -25,13 +25,21 @@ from sqlalchemy.orm import aliased from sqlalchemy.orm import Bundle from sqlalchemy.orm import column_property +from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import immediateload +from sqlalchemy.orm import joinedload +from sqlalchemy.orm import lazyload from sqlalchemy.orm import load_only from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.orm import orm_insert_sentinel +from sqlalchemy.orm import relationship +from sqlalchemy.orm import selectinload from sqlalchemy.orm import Session +from sqlalchemy.orm import subqueryload from sqlalchemy.testing import config from sqlalchemy.testing import eq_ +from sqlalchemy.testing import expect_deprecated from sqlalchemy.testing import expect_raises_message from sqlalchemy.testing import expect_warnings from sqlalchemy.testing import fixtures @@ -2298,3 +2306,206 @@ def test_select_from_insert_cte( asserter.assert_( CompiledSQL(expected, [{"param_1": id_, "param_2": "some user"}]) ) + + +class EagerLoadTest( + fixtures.DeclarativeMappedTest, testing.AssertsExecutionResults +): + run_inserts = "each" + + @classmethod + def setup_classes(cls): + Base = cls.DeclarativeBasic + + class A(Base): + __tablename__ = "a" + id: Mapped[int] = mapped_column(Integer, 
primary_key=True) + cs = relationship("C") + + class B(Base): + __tablename__ = "b" + id: Mapped[int] = mapped_column(Integer, primary_key=True) + a_id: Mapped[int] = mapped_column(ForeignKey("a.id")) + a = relationship("A") + + class C(Base): + __tablename__ = "c" + id: Mapped[int] = mapped_column(Integer, primary_key=True) + a_id: Mapped[int] = mapped_column(ForeignKey("a.id")) + + @classmethod + def insert_data(cls, connection): + A = cls.classes.A + C = cls.classes.C + with Session(connection) as sess: + sess.add_all( + [ + A(id=1, cs=[C(id=1), C(id=2)]), + A(id=2), + A(id=3, cs=[C(id=3), C(id=4)]), + ] + ) + sess.commit() + + @testing.fixture + def fixture_with_loader_opt(self): + def go(lazy): + class Base(DeclarativeBase): + pass + + class A(Base): + __tablename__ = "a" + id: Mapped[int] = mapped_column(Integer, primary_key=True) + + class B(Base): + __tablename__ = "b" + id: Mapped[int] = mapped_column(Integer, primary_key=True) + a_id: Mapped[int] = mapped_column(ForeignKey("a.id")) + a = relationship("A", lazy=lazy) + + return A, B + + return go + + @testing.combinations( + (selectinload,), + (immediateload,), + ) + def test_insert_supported(self, loader): + A, B = self.classes("A", "B") + + sess = fixture_session() + + result = sess.execute( + insert(B).returning(B).options(loader(B.a)), + [ + {"id": 1, "a_id": 1}, + {"id": 2, "a_id": 1}, + {"id": 3, "a_id": 2}, + {"id": 4, "a_id": 3}, + {"id": 5, "a_id": 3}, + ], + ).scalars() + + for b in result: + assert "a" in b.__dict__ + + @testing.combinations( + (joinedload,), + (subqueryload,), + ) + def test_insert_not_supported(self, loader): + """test #11853""" + + A, B = self.classes("A", "B") + + sess = fixture_session() + + stmt = insert(B).returning(B).options(loader(B.a)) + + with expect_deprecated( + f"The {loader.__name__} loader option is not compatible " + "with DML statements", + ): + sess.execute(stmt, [{"id": 1, "a_id": 1}]) + + @testing.combinations( + (joinedload,), + (subqueryload,), + (selectinload,), + (immediateload,), + ) + def test_secondary_opt_ok(self, loader): + A, B = self.classes("A", "B") + + sess = fixture_session() + + opt = selectinload(B.a) + opt = getattr(opt, loader.__name__)(A.cs) + + result = sess.execute( + insert(B).returning(B).options(opt), + [ + {"id": 1, "a_id": 1}, + {"id": 2, "a_id": 1}, + {"id": 3, "a_id": 2}, + {"id": 4, "a_id": 3}, + {"id": 5, "a_id": 3}, + ], + ).scalars() + + for b in result: + assert "a" in b.__dict__ + assert "cs" in b.a.__dict__ + + @testing.combinations( + ("joined",), + ("select",), + ("subquery",), + ("selectin",), + ("immediate",), + argnames="lazy_opt", + ) + def test_insert_handles_implicit(self, fixture_with_loader_opt, lazy_opt): + """test #11853""" + + A, B = fixture_with_loader_opt(lazy_opt) + + sess = fixture_session() + + for b_obj in sess.execute( + insert(B).returning(B), + [ + {"id": 1, "a_id": 1}, + {"id": 2, "a_id": 1}, + {"id": 3, "a_id": 2}, + {"id": 4, "a_id": 3}, + {"id": 5, "a_id": 3}, + ], + ).scalars(): + + if lazy_opt in ("select", "joined", "subquery"): + # these aren't supported by DML + assert "a" not in b_obj.__dict__ + else: + # the other three are + assert "a" in b_obj.__dict__ + + @testing.combinations( + (lazyload,), (selectinload,), (immediateload,), argnames="loader_opt" + ) + @testing.combinations( + (joinedload,), + (subqueryload,), + (selectinload,), + (immediateload,), + (lazyload,), + argnames="secondary_opt", + ) + def test_secondary_w_criteria_caching(self, loader_opt, secondary_opt): + """test #11855""" + A, B, C = 
self.classes("A", "B", "C") + + for i in range(3): + with fixture_session() as sess: + + opt = loader_opt(B.a) + opt = getattr(opt, secondary_opt.__name__)( + A.cs.and_(C.a_id == 1) + ) + stmt = insert(B).returning(B).options(opt) + + b1 = sess.scalar(stmt, [{"a_id": 1}]) + + eq_({c.id for c in b1.a.cs}, {1, 2}) + + opt = loader_opt(B.a) + opt = getattr(opt, secondary_opt.__name__)( + A.cs.and_(C.a_id == 3) + ) + + stmt = insert(B).returning(B).options(opt) + + b3 = sess.scalar(stmt, [{"a_id": 3}]) + + eq_({c.id for c in b3.a.cs}, {3, 4}) From a6e59c6373468f373742e547f1fb87f60665cdc9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Brigitta=20Sip=C5=91cz?= Date: Fri, 6 Sep 2024 02:44:53 -0400 Subject: [PATCH 323/544] MAINT: cleanup the lasts of datetime.utcnow() ### Description I'm chasing some loose datetime.datetime.utcnow() deprecation warning in some test suites, and one of these was seemingly coming from sqlalchemy. It wasn't, but nevertheless these minor cleanup changes may still be found useful. ### Checklist This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed **Have a nice day!** Closes: #11736 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11736 Pull-request-sha: 9bee8af8d1082c3cde5f64c78f1e565ef4ab14cd Change-Id: Ib1b85fa3d66b665165d908e7c8394482b714c57f (cherry picked from commit 6fefae897a576bce9ec74101e3a5ebcda0557c00) --- examples/extending_query/temporal_range.py | 5 ++++- lib/sqlalchemy/orm/events.py | 4 +++- test/orm/test_relationship_criteria.py | 5 ++++- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/examples/extending_query/temporal_range.py b/examples/extending_query/temporal_range.py index 50cbb664591..29ea1193623 100644 --- a/examples/extending_query/temporal_range.py +++ b/examples/extending_query/temporal_range.py @@ -5,6 +5,7 @@ """ import datetime +from functools import partial from sqlalchemy import Column from sqlalchemy import create_engine @@ -23,7 +24,9 @@ class HasTemporal: """Mixin that identifies a class as having a timestamp column""" timestamp = Column( - DateTime, default=datetime.datetime.utcnow, nullable=False + DateTime, + default=partial(datetime.datetime.now, datetime.timezone.utc), + nullable=False, ) diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index 1cd51bfd017..f2eae852b3c 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -3155,7 +3155,9 @@ def no_deleted(query, update_context): entity = desc['entity'] query = query.filter(entity.deleted == False) - update_context.values['timestamp'] = datetime.utcnow() + update_context.values['timestamp'] = ( + datetime.datetime.now(datetime.UTC) + ) return query The ``.values`` dictionary of the "update context" object can also diff --git a/test/orm/test_relationship_criteria.py b/test/orm/test_relationship_criteria.py index 96c178e5e22..29720f7dc86 100644 --- a/test/orm/test_relationship_criteria.py +++ b/test/orm/test_relationship_criteria.py @@ -1,6 +1,7 @@ from __future__ import annotations import datetime +from functools import partial import random from typing import List @@ -1661,7 +1662,9 @@ class HasTemporal: """Mixin that identifies a class as having a timestamp column""" timestamp = Column( - DateTime, default=datetime.datetime.utcnow, nullable=False + DateTime, + default=partial(datetime.datetime.now, datetime.timezone.utc), + nullable=False, ) cls.HasTemporal = HasTemporal From 9dc96cef939fdf44918641bd1575ba2ccb09c8b1 Mon Sep 17 00:00:00 2001 
From: Frazer McLean Date: Thu, 5 Sep 2024 07:29:47 -0400 Subject: [PATCH 324/544] Fix use of typing.Literal on Python 3.8 and 3.9 Fixed issue where it was not possible to use ``typing.Literal`` with ``Mapped[]`` on Python 3.8 and 3.9. Pull request courtesy Frazer McLean. Closes: #11825 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11825 Pull-request-sha: e1e50a97d2a6e0e9ef7ba8dc1a5f07d252e79fa4 Change-Id: Idf04326abcba45813ad555127e81d581a0353587 (cherry picked from commit 509c6d58501679a5844631faf9d7cb751218d7a0) --- doc/build/changelog/unreleased_20/11820.rst | 6 ++++++ lib/sqlalchemy/util/typing.py | 9 +++++++- .../test_tm_future_annotations_sync.py | 21 +++++++++++++++++++ test/orm/declarative/test_typed_mapping.py | 21 +++++++++++++++++++ 4 files changed, 56 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/11820.rst diff --git a/doc/build/changelog/unreleased_20/11820.rst b/doc/build/changelog/unreleased_20/11820.rst new file mode 100644 index 00000000000..ae03040a65f --- /dev/null +++ b/doc/build/changelog/unreleased_20/11820.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, orm, typing + :tickets: 11814 + + Fixed issue where it was not possible to use ``typing.Literal`` with + ``Mapped[]`` on Python 3.8 and 3.9. Pull request courtesy Frazer McLean. diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index 53e3c1732c7..a3df9777054 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -62,6 +62,13 @@ _VT = TypeVar("_VT") _VT_co = TypeVar("_VT_co", covariant=True) +if compat.py38: + # typing_extensions.Literal is different from typing.Literal until + # Python 3.10.1 + _LITERAL_TYPES = frozenset([typing.Literal, Literal]) +else: + _LITERAL_TYPES = frozenset([Literal]) + if compat.py310: # why they took until py310 to put this in stdlib is beyond me, @@ -356,7 +363,7 @@ def is_non_string_iterable(obj: Any) -> TypeGuard[Iterable[Any]]: def is_literal(type_: _AnnotationScanType) -> bool: - return get_origin(type_) is Literal + return get_origin(type_) in _LITERAL_TYPES def is_newtype(type_: Optional[_AnnotationScanType]) -> TypeGuard[NewType]: diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index f8c6a380a53..a58da96c151 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -29,6 +29,7 @@ from typing import Union import uuid +import typing_extensions from typing_extensions import get_args as get_args from typing_extensions import Literal as Literal from typing_extensions import TypeAlias as TypeAlias @@ -119,6 +120,10 @@ class _SomeDict2(TypedDict): _Recursive695_1: TypeAlias = _Recursive695_0 _Recursive695_2: TypeAlias = _Recursive695_1 +if compat.py38: + _TypingLiteral = typing.Literal["a", "b"] +_TypingExtensionsLiteral = typing_extensions.Literal["a", "b"] + if compat.py312: exec( """ @@ -897,6 +902,22 @@ class Foo(decl_base): eq_(col.type.enums, ["to-do", "in-progress", "done"]) is_(col.type.native_enum, False) + @testing.requires.python38 + def test_typing_literal_identity(self, decl_base): + """See issue #11820""" + + class Foo(decl_base): + __tablename__ = "footable" + + id: Mapped[int] = mapped_column(primary_key=True) + t: Mapped[_TypingLiteral] + te: Mapped[_TypingExtensionsLiteral] + + for col in (Foo.__table__.c.t, Foo.__table__.c.te): + is_true(isinstance(col.type, Enum)) + eq_(col.type.enums, ["a", "b"]) + 
is_(col.type.native_enum, False) + @testing.requires.python310 def test_we_got_all_attrs_test_annotated(self): argnames = _py_inspect.getfullargspec(mapped_column) diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 0213a0db3b0..ffa83aec25d 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -20,6 +20,7 @@ from typing import Union import uuid +import typing_extensions from typing_extensions import get_args as get_args from typing_extensions import Literal as Literal from typing_extensions import TypeAlias as TypeAlias @@ -110,6 +111,10 @@ class _SomeDict2(TypedDict): _Recursive695_1: TypeAlias = _Recursive695_0 _Recursive695_2: TypeAlias = _Recursive695_1 +if compat.py38: + _TypingLiteral = typing.Literal["a", "b"] +_TypingExtensionsLiteral = typing_extensions.Literal["a", "b"] + if compat.py312: exec( """ @@ -888,6 +893,22 @@ class Foo(decl_base): eq_(col.type.enums, ["to-do", "in-progress", "done"]) is_(col.type.native_enum, False) + @testing.requires.python38 + def test_typing_literal_identity(self, decl_base): + """See issue #11820""" + + class Foo(decl_base): + __tablename__ = "footable" + + id: Mapped[int] = mapped_column(primary_key=True) + t: Mapped[_TypingLiteral] + te: Mapped[_TypingExtensionsLiteral] + + for col in (Foo.__table__.c.t, Foo.__table__.c.te): + is_true(isinstance(col.type, Enum)) + eq_(col.type.enums, ["a", "b"]) + is_(col.type.native_enum, False) + @testing.requires.python310 def test_we_got_all_attrs_test_annotated(self): argnames = _py_inspect.getfullargspec(mapped_column) From afea6af62e100ddfaa10ad9bcdbb7c5a3659c870 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Fri, 13 Sep 2024 21:57:47 +0200 Subject: [PATCH 325/544] Fix wrong ticket reference in changelog for #11820 (#11867) (cherry picked from commit 5959648abc0ff71e38cc12da0261833ea565c154) --- doc/build/changelog/unreleased_20/11820.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/changelog/unreleased_20/11820.rst b/doc/build/changelog/unreleased_20/11820.rst index ae03040a65f..3f76d30bee0 100644 --- a/doc/build/changelog/unreleased_20/11820.rst +++ b/doc/build/changelog/unreleased_20/11820.rst @@ -1,6 +1,6 @@ .. change:: :tags: bug, orm, typing - :tickets: 11814 + :tickets: 11820 Fixed issue where it was not possible to use ``typing.Literal`` with ``Mapped[]`` on Python 3.8 and 3.9. Pull request courtesy Frazer McLean. From 895f249503a267f795b6013ae4e17edd18b9dd4e Mon Sep 17 00:00:00 2001 From: Studnikov Dmitry Date: Fri, 13 Sep 2024 23:01:26 +0300 Subject: [PATCH 326/544] Fix subquery typos in documentation and changelog (#11807) * fix: subquery typo * fix: subquery typo in changelog --------- Co-authored-by: Dmitry Studnikov (cherry picked from commit 08c916df4ec21b6929d90b90eb3cfe50343f2260) --- doc/build/changelog/changelog_14.rst | 4 ++-- doc/build/orm/queryguide/select.rst | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/build/changelog/changelog_14.rst b/doc/build/changelog/changelog_14.rst index f5a69b2fb68..1c41c586c47 100644 --- a/doc/build/changelog/changelog_14.rst +++ b/doc/build/changelog/changelog_14.rst @@ -2473,7 +2473,7 @@ This document details individual issue-level changes made throughout it were only against a table that's now being replaced. 
It also allows for correct behavior when constructing a :func:`_orm.aliased` without a selectable argument against a :func:`_orm.aliased` that's against a - subuquery, to create an alias of that subquery (i.e. to change its name). + subquery, to create an alias of that subquery (i.e. to change its name). The nesting behavior of :func:`_orm.aliased` remains in place for the case where the outer :func:`_orm.aliased` object is against a subquery which in @@ -5057,7 +5057,7 @@ This document details individual issue-level changes made throughout columns clause of a :class:`_sql.Select` construct, which is better handled by using a :func:`_sql.literal_column` construct, would nonetheless prevent constructs like :func:`_sql.union` from working correctly. Other use cases, - such as constructing subuqeries, continue to work the same as in prior + such as constructing subqueries, continue to work the same as in prior versions where the :func:`_sql.text` construct is silently omitted from the collection of exported columns. Also repairs similar use within the ORM. diff --git a/doc/build/orm/queryguide/select.rst b/doc/build/orm/queryguide/select.rst index 678565932dd..a8b273a62dc 100644 --- a/doc/build/orm/queryguide/select.rst +++ b/doc/build/orm/queryguide/select.rst @@ -360,7 +360,7 @@ Selecting Entities from Subqueries ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The :func:`_orm.aliased` construct discussed in the previous section -can be used with any :class:`_sql.Subuqery` construct that comes from a +can be used with any :class:`_sql.Subquery` construct that comes from a method such as :meth:`_sql.Select.subquery` to link ORM entities to the columns returned by that subquery; there must be a **column correspondence** relationship between the columns delivered by the subquery and the columns @@ -721,7 +721,7 @@ Joining to Subqueries ^^^^^^^^^^^^^^^^^^^^^ The target of a join may be any "selectable" entity which includes -subuqeries. When using the ORM, it is typical +subqueries. When using the ORM, it is typical that these targets are stated in terms of an :func:`_orm.aliased` construct, but this is not strictly required, particularly if the joined entity is not being returned in the results. 
For example, to join from the From de98dbf932424fe47230e4ec447848b56072a3b1 Mon Sep 17 00:00:00 2001 From: Vitalii Fuglaev Date: Fri, 13 Sep 2024 23:03:04 +0300 Subject: [PATCH 327/544] Update index.rst (#11799) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 08/28/2024 была обновлена версия sqlalchemy-greenplum в PyPI и сейчас она поддерживает SQLAlchemy 2.0 и выше (cherry picked from commit 3fbef3b6755503a8369670e4d9439379a52eba8e) --- doc/build/dialects/index.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 1a230481961..676886f259e 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -99,7 +99,7 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Google Sheets | gsheets_ | +------------------------------------------------+---------------------------------------+ -| Greenplum [2]_ | sqlalchemy-greenplum_ | +| Greenplum | sqlalchemy-greenplum_ | +------------------------------------------------+---------------------------------------+ | IBM DB2 and Informix | ibm-db-sa_ | +------------------------------------------------+---------------------------------------+ @@ -133,7 +133,6 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ .. [1] Supports version 1.3.x only at the moment. -.. [2] Supports version 1.4.x only at the moment. .. _openGauss-sqlalchemy: https://gitee.com/opengauss/openGauss-sqlalchemy .. _rockset-sqlalchemy: https://pypi.org/project/rockset-sqlalchemy From 2552801d2c9f6b906cb8f13f2f5061de4383476b Mon Sep 17 00:00:00 2001 From: Tobias Petersen Date: Fri, 13 Sep 2024 14:34:33 -0400 Subject: [PATCH 328/544] Merge url query args to opts in mariadbconnector like mysqldb Fixed issue in mariadbconnector dialect where query string arguments that weren't checked integer or boolean arguments would be ignored, such as string arguments like ``unix_socket``, etc. As part of this change, the argument parsing for particular elements such as ``client_flags``, ``compress``, ``local_infile`` has been made more consistent across all MySQL / MariaDB dialect which accept each argument. Pull request courtesy Tobias Alex-Petersen. Fixes: #11870 Closes: #11869 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11869 Pull-request-sha: 8fdcabc83b548e3fc19aa1625035d43ebc0e1875 Change-Id: I3a11a0e65e118c94928027478409488b0d5e94f8 (cherry picked from commit 5e16d25cc7c32e6cfaea44ceec5a2730d766952c) --- doc/build/changelog/unreleased_20/11870.rst | 12 ++++++ .../dialects/mysql/mariadbconnector.py | 2 + .../dialects/mysql/mysqlconnector.py | 1 + lib/sqlalchemy/dialects/mysql/mysqldb.py | 2 +- lib/sqlalchemy/dialects/mysql/provision.py | 3 ++ test/dialect/mysql/test_dialect.py | 39 ++++++++++++++----- 6 files changed, 49 insertions(+), 10 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11870.rst diff --git a/doc/build/changelog/unreleased_20/11870.rst b/doc/build/changelog/unreleased_20/11870.rst new file mode 100644 index 00000000000..9625a20f8c8 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11870.rst @@ -0,0 +1,12 @@ +.. 
change:: + :tags: bug, mysql + :tickets: 11870 + + Fixed issue in mariadbconnector dialect where query string arguments that + weren't checked integer or boolean arguments would be ignored, such as + string arguments like ``unix_socket``, etc. As part of this change, the + argument parsing for particular elements such as ``client_flags``, + ``compress``, ``local_infile`` has been made more consistent across all + MySQL / MariaDB dialect which accept each argument. Pull request courtesy + Tobias Alex-Petersen. + diff --git a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py index 9bb3fa4d75c..1730c1a6f29 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py @@ -165,6 +165,7 @@ def is_disconnect(self, e, connection, cursor): def create_connect_args(self, url): opts = url.translate_connect_args() + opts.update(url.query) int_params = [ "connect_timeout", @@ -179,6 +180,7 @@ def create_connect_args(self, url): "ssl_verify_cert", "ssl", "pool_reset_connection", + "compress", ] for key in int_params: diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index b1523392d8c..8f4b4174184 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -96,6 +96,7 @@ def create_connect_args(self, url): util.coerce_kw_type(opts, "allow_local_infile", bool) util.coerce_kw_type(opts, "autocommit", bool) util.coerce_kw_type(opts, "buffered", bool) + util.coerce_kw_type(opts, "client_flag", int) util.coerce_kw_type(opts, "compress", bool) util.coerce_kw_type(opts, "connection_timeout", int) util.coerce_kw_type(opts, "connect_timeout", int) diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py index 0c632b66f3e..0baf10f7056 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py @@ -212,7 +212,7 @@ def create_connect_args(self, url, _translate_args=None): util.coerce_kw_type(opts, "read_timeout", int) util.coerce_kw_type(opts, "write_timeout", int) util.coerce_kw_type(opts, "client_flag", int) - util.coerce_kw_type(opts, "local_infile", int) + util.coerce_kw_type(opts, "local_infile", bool) # Note: using either of the below will cause all strings to be # returned as Unicode, both in raw SQL operations and with column # types like String and MSString. 
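As a minimal sketch of the intended effect (illustrative only, not part of the patch; the DSN, socket path and option values are hypothetical), the coercion can be exercised directly against a URL:

    from sqlalchemy.engine import make_url

    # hypothetical DSN; string options such as unix_socket were previously
    # dropped by the mariadbconnector dialect rather than passed through
    url = make_url(
        "mariadb+mariadbconnector://scott:tiger@localhost:3306/test"
        "?unix_socket=/tmp/mariadb.sock&compress=1"
    )
    dialect = url.get_dialect()()
    connect_args = dialect.create_connect_args(url)

    # with the change, unix_socket survives as a string and compress is
    # coerced to a non-string (bool/int) value, consistent with other drivers
    print(connect_args[1]["unix_socket"], connect_args[1]["compress"])
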
diff --git a/lib/sqlalchemy/dialects/mysql/provision.py b/lib/sqlalchemy/dialects/mysql/provision.py index 3f05bcee74d..836ffa1df43 100644 --- a/lib/sqlalchemy/dialects/mysql/provision.py +++ b/lib/sqlalchemy/dialects/mysql/provision.py @@ -40,6 +40,9 @@ def generate_driver_url(url, driver, query_str): drivername="%s+%s" % (backend, driver) ).update_query_string(query_str) + if driver == "mariadbconnector": + new_url = new_url.difference_update_query(["charset"]) + try: new_url.get_dialect() except exc.NoSuchModuleError: diff --git a/test/dialect/mysql/test_dialect.py b/test/dialect/mysql/test_dialect.py index c50755df414..cf74f17ad66 100644 --- a/test/dialect/mysql/test_dialect.py +++ b/test/dialect/mysql/test_dialect.py @@ -257,21 +257,40 @@ def test_ssl_arguments(self, driver_name): ("read_timeout", 30), ("write_timeout", 30), ("client_flag", 1234), - ("local_infile", 1234), + ("local_infile", 1), + ("local_infile", True), + ("local_infile", False), ("use_unicode", False), ("charset", "hello"), + ("unix_socket", "somesocket"), + argnames="kwarg, value", ) - def test_normal_arguments_mysqldb(self, kwarg, value): - from sqlalchemy.dialects.mysql import mysqldb + @testing.combinations( + ("mysql+mysqldb", ()), + ("mysql+mariadbconnector", {"use_unicode", "charset"}), + ("mariadb+mariadbconnector", {"use_unicode", "charset"}), + ("mysql+pymysql", ()), + ( + "mysql+mysqlconnector", + {"read_timeout", "write_timeout", "local_infile"}, + ), + argnames="dialect_name,skip", + ) + def test_query_arguments(self, kwarg, value, dialect_name, skip): - dialect = mysqldb.dialect() - connect_args = dialect.create_connect_args( - make_url( - "mysql+mysqldb://scott:tiger@localhost:3306/test" - "?%s=%s" % (kwarg, value) - ) + if kwarg in skip: + return + + url_value = {True: "true", False: "false"}.get(value, value) + + url = make_url( + f"{dialect_name}://scott:tiger@" + f"localhost:3306/test?{kwarg}={url_value}" ) + dialect = url.get_dialect()() + + connect_args = dialect.create_connect_args(url) eq_(connect_args[1][kwarg], value) def test_mysqlconnector_buffered_arg(self): @@ -320,8 +339,10 @@ def test_mysqlconnector_raise_on_warnings_arg(self): [ "mysql+mysqldb", "mysql+pymysql", + "mysql+mariadbconnector", "mariadb+mysqldb", "mariadb+pymysql", + "mariadb+mariadbconnector", ] ) def test_random_arg(self): From 5cf0f06480c9074405a0de72264dc5b59617ad08 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 16 Sep 2024 13:43:30 -0400 Subject: [PATCH 329/544] require insert_returning for new EagerLoadTest DML suite Change-Id: I354e3ba68ba6efaab6618e514d11355d72652bb5 (cherry picked from commit 52385c44d2bcdc9986d4ec8691cf72342b170dcd) --- test/orm/dml/test_bulk_statements.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/orm/dml/test_bulk_statements.py b/test/orm/dml/test_bulk_statements.py index 8c6acf4dec6..431eb3076fc 100644 --- a/test/orm/dml/test_bulk_statements.py +++ b/test/orm/dml/test_bulk_statements.py @@ -2312,6 +2312,7 @@ class EagerLoadTest( fixtures.DeclarativeMappedTest, testing.AssertsExecutionResults ): run_inserts = "each" + __requires__ = ("insert_returning",) @classmethod def setup_classes(cls): From ca501d9d38d50dc4eda52c82ebc9c51766bc141b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 16 Sep 2024 16:01:26 -0400 Subject: [PATCH 330/544] - 2.0.35 --- doc/build/changelog/changelog_20.rst | 79 ++++++++++++++++++++- doc/build/changelog/unreleased_20/11820.rst | 6 -- doc/build/changelog/unreleased_20/11840.rst | 20 ------ doc/build/changelog/unreleased_20/11849.rst | 13 ---- 
doc/build/changelog/unreleased_20/11853.rst | 11 --- doc/build/changelog/unreleased_20/11855.rst | 9 --- doc/build/changelog/unreleased_20/11870.rst | 12 ---- doc/build/conf.py | 4 +- 8 files changed, 80 insertions(+), 74 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/11820.rst delete mode 100644 doc/build/changelog/unreleased_20/11840.rst delete mode 100644 doc/build/changelog/unreleased_20/11849.rst delete mode 100644 doc/build/changelog/unreleased_20/11853.rst delete mode 100644 doc/build/changelog/unreleased_20/11855.rst delete mode 100644 doc/build/changelog/unreleased_20/11870.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index cca32ca1faf..e282c022173 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,84 @@ .. changelog:: :version: 2.0.35 - :include_notes_from: unreleased_20 + :released: September 16, 2024 + + .. change:: + :tags: bug, orm, typing + :tickets: 11820 + + Fixed issue where it was not possible to use ``typing.Literal`` with + ``Mapped[]`` on Python 3.8 and 3.9. Pull request courtesy Frazer McLean. + + .. change:: + :tags: bug, sqlite, regression + :tickets: 11840 + + The changes made for SQLite CHECK constraint reflection in versions 2.0.33 + and 2.0.34 , :ticket:`11832` and :ticket:`11677`, have now been fully + reverted, as users continued to identify existing use cases that stopped + working after this change. For the moment, because SQLite does not + provide any consistent way of delivering information about CHECK + constraints, SQLAlchemy is limited in what CHECK constraint syntaxes can be + reflected, including that a CHECK constraint must be stated all on a + single, independent line (or inline on a column definition) without + newlines, tabs in the constraint definition or unusual characters in the + constraint name. Overall, reflection for SQLite is tailored towards being + able to reflect CREATE TABLE statements that were originally created by + SQLAlchemy DDL constructs. Long term work on a DDL parser that does not + rely upon regular expressions may eventually improve upon this situation. + A wide range of additional cross-dialect CHECK constraint reflection tests + have been added as it was also a bug that these changes did not trip any + existing tests. + + .. change:: + :tags: orm, bug + :tickets: 11849 + + Fixed issue in ORM evaluator where two datatypes being evaluated with the + SQL concatenator operator would not be checked for + :class:`.UnevaluatableError` based on their datatype; this missed the case + of :class:`_postgresql.JSONB` values being used in a concatenate operation + which is supported by PostgreSQL as well as how SQLAlchemy renders the SQL + for this operation, but does not work at the Python level. By implementing + :class:`.UnevaluatableError` for this combination, ORM update statements + will now fall back to "expire" when a concatenated JSON value used in a SET + clause is to be synchronized to a Python object. + + .. change:: + :tags: bug, orm + :tickets: 11853 + + An warning is emitted if :func:`_orm.joinedload` or + :func:`_orm.subqueryload` are used as a top level option against a + statement that is not a SELECT statement, such as with an + ``insert().returning()``. There are no JOINs in INSERT statements nor is + there a "subquery" that can be repurposed for subquery eager loading, and + for UPDATE/DELETE joinedload does not support these either, so it is never + appropriate for this use to pass silently. 
+ + .. change:: + :tags: bug, orm + :tickets: 11855 + + Fixed issue where using loader options such as :func:`_orm.selectinload` + with additional criteria in combination with ORM DML such as + :func:`_sql.insert` with RETURNING would not correctly set up internal + contexts required for caching to work correctly, leading to incorrect + results. + + .. change:: + :tags: bug, mysql + :tickets: 11870 + + Fixed issue in mariadbconnector dialect where query string arguments that + weren't checked integer or boolean arguments would be ignored, such as + string arguments like ``unix_socket``, etc. As part of this change, the + argument parsing for particular elements such as ``client_flags``, + ``compress``, ``local_infile`` has been made more consistent across all + MySQL / MariaDB dialect which accept each argument. Pull request courtesy + Tobias Alex-Petersen. + .. changelog:: :version: 2.0.34 diff --git a/doc/build/changelog/unreleased_20/11820.rst b/doc/build/changelog/unreleased_20/11820.rst deleted file mode 100644 index 3f76d30bee0..00000000000 --- a/doc/build/changelog/unreleased_20/11820.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: bug, orm, typing - :tickets: 11820 - - Fixed issue where it was not possible to use ``typing.Literal`` with - ``Mapped[]`` on Python 3.8 and 3.9. Pull request courtesy Frazer McLean. diff --git a/doc/build/changelog/unreleased_20/11840.rst b/doc/build/changelog/unreleased_20/11840.rst deleted file mode 100644 index 42074e3d2b3..00000000000 --- a/doc/build/changelog/unreleased_20/11840.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. change:: - :tags: bug, sqlite, regression - :tickets: 11840 - - The changes made for SQLite CHECK constraint reflection in versions 2.0.33 - and 2.0.34 , :ticket:`11832` and :ticket:`11677`, have now been fully - reverted, as users continued to identify existing use cases that stopped - working after this change. For the moment, because SQLite does not - provide any consistent way of delivering information about CHECK - constraints, SQLAlchemy is limited in what CHECK constraint syntaxes can be - reflected, including that a CHECK constraint must be stated all on a - single, independent line (or inline on a column definition) without - newlines, tabs in the constraint definition or unusual characters in the - constraint name. Overall, reflection for SQLite is tailored towards being - able to reflect CREATE TABLE statements that were originally created by - SQLAlchemy DDL constructs. Long term work on a DDL parser that does not - rely upon regular expressions may eventually improve upon this situation. - A wide range of additional cross-dialect CHECK constraint reflection tests - have been added as it was also a bug that these changes did not trip any - existing tests. diff --git a/doc/build/changelog/unreleased_20/11849.rst b/doc/build/changelog/unreleased_20/11849.rst deleted file mode 100644 index 4a274702ecb..00000000000 --- a/doc/build/changelog/unreleased_20/11849.rst +++ /dev/null @@ -1,13 +0,0 @@ -.. change:: - :tags: orm, bug - :tickets: 11849 - - Fixed issue in ORM evaluator where two datatypes being evaluated with the - SQL concatenator operator would not be checked for - :class:`.UnevaluatableError` based on their datatype; this missed the case - of :class:`_postgresql.JSONB` values being used in a concatenate operation - which is supported by PostgreSQL as well as how SQLAlchemy renders the SQL - for this operation, but does not work at the Python level. 
By implementing - :class:`.UnevaluatableError` for this combination, ORM update statements - will now fall back to "expire" when a concatenated JSON value used in a SET - clause is to be synchronized to a Python object. diff --git a/doc/build/changelog/unreleased_20/11853.rst b/doc/build/changelog/unreleased_20/11853.rst deleted file mode 100644 index 92e6abdb680..00000000000 --- a/doc/build/changelog/unreleased_20/11853.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11853 - - An warning is emitted if :func:`_orm.joinedload` or - :func:`_orm.subqueryload` are used as a top level option against a - statement that is not a SELECT statement, such as with an - ``insert().returning()``. There are no JOINs in INSERT statements nor is - there a "subquery" that can be repurposed for subquery eager loading, and - for UPDATE/DELETE joinedload does not support these either, so it is never - appropriate for this use to pass silently. diff --git a/doc/build/changelog/unreleased_20/11855.rst b/doc/build/changelog/unreleased_20/11855.rst deleted file mode 100644 index cee30cf8b3a..00000000000 --- a/doc/build/changelog/unreleased_20/11855.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11855 - - Fixed issue where using loader options such as :func:`_orm.selectinload` - with additional criteria in combination with ORM DML such as - :func:`_sql.insert` with RETURNING would not correctly set up internal - contexts required for caching to work correctly, leading to incorrect - results. diff --git a/doc/build/changelog/unreleased_20/11870.rst b/doc/build/changelog/unreleased_20/11870.rst deleted file mode 100644 index 9625a20f8c8..00000000000 --- a/doc/build/changelog/unreleased_20/11870.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, mysql - :tickets: 11870 - - Fixed issue in mariadbconnector dialect where query string arguments that - weren't checked integer or boolean arguments would be ignored, such as - string arguments like ``unix_socket``, etc. As part of this change, the - argument parsing for particular elements such as ``client_flags``, - ``compress``, ``local_infile`` has been made more consistent across all - MySQL / MariaDB dialect which accept each argument. Pull request courtesy - Tobias Alex-Petersen. - diff --git a/doc/build/conf.py b/doc/build/conf.py index a21225dded7..6e8a9ee9432 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.34" +release = "2.0.35" -release_date = "September 4, 2024" +release_date = "September 16, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 82d6f250ddaea55a58ee327a9464b9aecb653f3e Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 16 Sep 2024 16:30:25 -0400 Subject: [PATCH 331/544] Version 2.0.36 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index e282c022173..0270cee9998 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.36 + :include_notes_from: unreleased_20 + .. 
changelog:: :version: 2.0.35 :released: September 16, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index d28421a9514..f94278fe927 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.35" +__version__ = "2.0.36" def __go(lcls: Any) -> None: From d0a982ca59c8693f9dfbc2ebf6d999e760440f5b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 20:02:26 +0200 Subject: [PATCH 332/544] Bump pypa/cibuildwheel from 2.20.0 to 2.21.1 (#11885) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.20.0 to 2.21.1. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.20.0...v2.21.1) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cherry picked from commit ea4dcdd3e7a42b16ddeabc78b4f455e37ecdbe7c) --- .github/workflows/create-wheels.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index 7fbc8e9ea21..5f31d230d05 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -74,7 +74,7 @@ jobs: - name: Build compiled wheels if: ${{ matrix.wheel_mode == 'compiled' }} - uses: pypa/cibuildwheel@v2.20.0 + uses: pypa/cibuildwheel@v2.21.1 env: CIBW_ARCHS_LINUX: ${{ matrix.linux_archs }} CIBW_BUILD: ${{ matrix.python }} From 819d2438b3e90d3e758000079d0f98e049586d98 Mon Sep 17 00:00:00 2001 From: Yunus Koning Date: Tue, 17 Sep 2024 20:05:09 +0200 Subject: [PATCH 333/544] update MonetDB dialect information (#11884) (cherry picked from commit 8da20140fe2d57584211d85de589cbce7172a2da) --- doc/build/dialects/index.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 676886f259e..449004154fc 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -113,7 +113,7 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Microsoft SQL Server (via turbodbc) | sqlalchemy-turbodbc_ | +------------------------------------------------+---------------------------------------+ -| MonetDB [1]_ | sqlalchemy-monetdb_ | +| MonetDB | sqlalchemy-monetdb_ | +------------------------------------------------+---------------------------------------+ | OpenGauss | openGauss-sqlalchemy_ | +------------------------------------------------+---------------------------------------+ @@ -148,7 +148,7 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _sqlalchemy-solr: https://github.com/aadel/sqlalchemy-solr .. _sqlalchemy_exasol: https://github.com/blue-yonder/sqlalchemy_exasol .. _sqlalchemy-sqlany: https://github.com/sqlanywhere/sqlalchemy-sqlany -.. _sqlalchemy-monetdb: https://github.com/gijzelaerr/sqlalchemy-monetdb +.. _sqlalchemy-monetdb: https://github.com/MonetDB/sqlalchemy-monetdb .. _snowflake-sqlalchemy: https://github.com/snowflakedb/snowflake-sqlalchemy .. 
_sqlalchemy-pytds: https://pypi.org/project/sqlalchemy-pytds/ .. _sqlalchemy-cratedb: https://github.com/crate/sqlalchemy-cratedb From 888f1bb163dcf2ade162a3468a372448702e82eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Brigitta=20Sip=C5=91cz?= Date: Fri, 13 Sep 2024 16:01:04 -0400 Subject: [PATCH 334/544] MAINT: pytest doesn't have any more python 3.12 deprecations ### Description I believe this workaround is not needed any more, pytest doesn't have any more python deprecations. (Arguably neither 3.13) ### Checklist This pull request is: - [x] Tweek to the (testing) infrastructure - New category as I didn't think this PR fit in any of the 3 options offered. - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #11838 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11838 Pull-request-sha: 7d500de8707cb6d89ba59922d0671afc4062de82 Change-Id: I5fef8ee799fa2dbed1d00f5a779d6b25ce3e5ae8 (cherry picked from commit ffb470cf54c7593e02569102665642be4b85bec2) --- tox.ini | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/tox.ini b/tox.ini index 37dd8f282f8..26254801d72 100644 --- a/tox.ini +++ b/tox.ini @@ -101,11 +101,7 @@ setenv= PYTEST_COLOR={tty:--color=yes} MYPY_COLOR={tty:--color-output} - # pytest 'rewrite' is hitting lots of deprecation warnings under py312 and - # i can't find any way to ignore those warnings, so this turns it off - py312: PYTEST_ARGS=--assert plain - - BASECOMMAND=python -m pytest {env:PYTEST_ARGS} {env:PYTEST_COLOR} --rootdir {toxinidir} --log-info=sqlalchemy.testing + BASECOMMAND=python -m pytest {env:PYTEST_COLOR} --rootdir {toxinidir} --log-info=sqlalchemy.testing WORKERS={env:TOX_WORKERS:-n4 --max-worker-restart=5} From 3919acdffd90c6513dbe07e69896e6491f28b56d Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 17 Sep 2024 20:22:11 +0200 Subject: [PATCH 335/544] Remove test warning in python 3.13 Change-Id: Ib098754ef6d157e8dd1eac32b3cb114a9ca66e4a (cherry picked from commit 0cca754f2101cf0e63f0c67b0220e7a4eb3a0f9c) --- lib/sqlalchemy/util/__init__.py | 1 + lib/sqlalchemy/util/compat.py | 1 + test/ext/test_extendedattr.py | 3 ++- 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py index 69424e7ccb6..33b7addb668 100644 --- a/lib/sqlalchemy/util/__init__.py +++ b/lib/sqlalchemy/util/__init__.py @@ -65,6 +65,7 @@ from .compat import py310 as py310 from .compat import py311 as py311 from .compat import py312 as py312 +from .compat import py313 as py313 from .compat import py38 as py38 from .compat import py39 as py39 from .compat import pypy as pypy diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py index fea881e730a..7620a364591 100644 --- a/lib/sqlalchemy/util/compat.py +++ b/lib/sqlalchemy/util/compat.py @@ -32,6 +32,7 @@ from typing import TypeVar +py313 = 
sys.version_info >= (3, 13) py312 = sys.version_info >= (3, 12) py311 = sys.version_info >= (3, 11) py310 = sys.version_info >= (3, 10) diff --git a/test/ext/test_extendedattr.py b/test/ext/test_extendedattr.py index 41637c358e5..47756c94958 100644 --- a/test/ext/test_extendedattr.py +++ b/test/ext/test_extendedattr.py @@ -169,7 +169,8 @@ def __sa_instrumentation_manager__(cls): ) # This proves SA can handle a class with non-string dict keys - if util.cpython: + # Since python 3.13 non-string key raise a runtime warning. + if util.cpython and not util.py313: locals()[42] = 99 # Don't remove this line! def __init__(self, **kwargs): From 1efc34bd3993851b29eede08e5d40b02dd9321e5 Mon Sep 17 00:00:00 2001 From: am-kinetica <85610855+am-kinetica@users.noreply.github.com> Date: Fri, 20 Sep 2024 02:16:11 +0530 Subject: [PATCH 336/544] Updated link for the Kinetica dialect (#11895) (cherry picked from commit a32b917a8836fe0670385c2d11aff58589aefd9c) --- doc/build/dialects/index.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 449004154fc..f35d0b026dd 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -107,6 +107,8 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Impala | impyla_ | +------------------------------------------------+---------------------------------------+ +| Kinetica | sqlalchemy-kinetica_ | ++------------------------------------------------+---------------------------------------+ | Microsoft Access (via pyodbc) | sqlalchemy-access_ | +------------------------------------------------+---------------------------------------+ | Microsoft SQL Server (via python-tds) | sqlalchemy-pytds_ | @@ -168,3 +170,4 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _sqlalchemy-greenplum: https://github.com/PlaidCloud/sqlalchemy-greenplum .. _databricks: https://docs.databricks.com/en/dev-tools/sqlalchemy.html .. _clickhouse-sqlalchemy: https://pypi.org/project/clickhouse-sqlalchemy/ +.. 
_sqlalchemy-kinetica: https://github.com/kineticadb/sqlalchemy-kinetica/ From e526b53bb0d1c77572d01e485b4a3bf3b56c4681 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 5 Aug 2024 23:26:13 +0200 Subject: [PATCH 337/544] add python 3.13 to pipeline Change-Id: Id223cfa08b187c2225ea7a6c29817d79474acfc1 (cherry picked from commit 19f145d7f614372b740e7de8dd3ac54e279eb061) (cherry picked from commit 767e5bf1570684e76e58f5d1b30c9ce158faf88b) --- .github/workflows/create-wheels.yaml | 8 ++++---- .github/workflows/run-test.yaml | 28 +++++++++++++++------------- 2 files changed, 19 insertions(+), 17 deletions(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index 5f31d230d05..4f7830e3512 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -20,9 +20,9 @@ jobs: matrix: # emulated wheels on linux take too much time, split wheels into multiple runs python: - - "cp37-* cp38-*" - - "cp39-* cp310-*" - - "cp311-* cp312-*" + - "cp37-* cp38-* cp39-*" + - "cp310-* cp311-*" + - "cp312-* cp313-*" wheel_mode: - compiled os: @@ -85,7 +85,7 @@ jobs: - name: Set up Python for twine and pure-python wheel uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3.12" - name: Build pure-python wheel if: ${{ matrix.wheel_mode == 'pure-python' && runner.os == 'Linux' }} diff --git a/.github/workflows/run-test.yaml b/.github/workflows/run-test.yaml index 1d384294a0a..85272a21a6f 100644 --- a/.github/workflows/run-test.yaml +++ b/.github/workflows/run-test.yaml @@ -37,7 +37,8 @@ jobs: - "3.10" - "3.11" - "3.12" - - "pypy-3.9" + - "3.13.0-alpha - 3.13" + - "pypy-3.10" build-type: - "cext" - "nocext" @@ -48,7 +49,7 @@ jobs: include: # autocommit tests fail on the ci for some reason - - python-version: "pypy-3.9" + - python-version: "pypy-3.10" pytest-args: "-k 'not test_autocommit_on and not test_turn_autocommit_off_via_default_iso_level and not test_autocommit_isolation_level'" - os: "ubuntu-latest" pytest-args: "--dbdriver pysqlite --dbdriver aiosqlite" @@ -80,10 +81,10 @@ jobs: - os: "macos-13" architecture: x86 # pypy does not have cext or x86 - - python-version: "pypy-3.9" + - python-version: "pypy-3.10" build-type: "cext" - os: "windows-latest" - python-version: "pypy-3.9" + python-version: "pypy-3.10" architecture: x86 fail-fast: false @@ -113,7 +114,7 @@ jobs: - name: Run tests run: tox -e github-${{ matrix.build-type }} -- -q --nomemory --notimingintensive ${{ matrix.pytest-args }} - continue-on-error: ${{ matrix.python-version == 'pypy-3.9' }} + continue-on-error: ${{ matrix.python-version == 'pypy-3.10' }} run-test-arm64: # Hopefully something native can be used at some point https://github.blog/changelog/2023-10-30-accelerate-your-ci-cd-with-arm-based-hosted-runners-in-github-actions/ @@ -127,6 +128,8 @@ jobs: - cp39-cp39 - cp310-cp310 - cp311-cp311 + - cp312-cp312 + - cp313-cp313 build-type: - "cext" - "nocext" @@ -167,19 +170,18 @@ jobs: - "3.9" - "3.10" - "3.11" + - "3.12" + - "3.13.0-alpha - 3.13" tox-env: - mypy - - lint - pep484 - exclude: - # run lint only on 3.11 - - tox-env: lint - python-version: "3.8" - - tox-env: lint - python-version: "3.9" + include: + # run lint only on 3.12 - tox-env: lint - python-version: "3.10" + python-version: "3.12" + os: "ubuntu-latest" + exclude: # run pep484 only on 3.10+ - tox-env: pep484 python-version: "3.8" From adaeda9c8a23be5dc0b4d79e31d87e5feb72d5f1 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 22 Sep 2024 11:34:48 -0400 Subject: [PATCH 
338/544] propagate populate_existing for ORM bulk update Similar to #9742 Fixed bug in ORM bulk update/delete where using RETURNING with bulk update/delete in combination with populate existing would fail to accommodate the populate_existing option. Fixes: #11912 Change-Id: Ib9ef659512a1d1ae438eab67332a691941c06f43 (cherry picked from commit 64c1299180c2d944142d54bea741355d474bcbde) --- doc/build/changelog/unreleased_20/11912.rst | 7 ++ lib/sqlalchemy/orm/bulk_persistence.py | 12 ++++ test/orm/dml/test_bulk_statements.py | 73 +++++++++++++++++++++ 3 files changed, 92 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11912.rst diff --git a/doc/build/changelog/unreleased_20/11912.rst b/doc/build/changelog/unreleased_20/11912.rst new file mode 100644 index 00000000000..c0814b6cba1 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11912.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, orm + :tickets: 11912 + + Fixed bug in ORM bulk update/delete where using RETURNING with bulk + update/delete in combination with populate existing would fail to + accommodate the populate_existing option. diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py index 2a23caad53f..155de56dbe1 100644 --- a/lib/sqlalchemy/orm/bulk_persistence.py +++ b/lib/sqlalchemy/orm/bulk_persistence.py @@ -644,6 +644,7 @@ class default_update_options(Options): _eval_condition = None _matched_rows = None _identity_token = None + _populate_existing: bool = False @classmethod def can_use_returning( @@ -676,6 +677,7 @@ def orm_pre_session_exec( { "synchronize_session", "autoflush", + "populate_existing", "identity_token", "is_delete_using", "is_update_from", @@ -1590,10 +1592,20 @@ def orm_execute_statement( bind_arguments: _BindArguments, conn: Connection, ) -> _result.Result: + update_options = execution_options.get( "_sa_orm_update_options", cls.default_update_options ) + if update_options._populate_existing: + load_options = execution_options.get( + "_sa_orm_load_options", QueryContext.default_load_options + ) + load_options += {"_populate_existing": True} + execution_options = execution_options.union( + {"_sa_orm_load_options": load_options} + ) + if update_options._dml_strategy not in ( "orm", "auto", diff --git a/test/orm/dml/test_bulk_statements.py b/test/orm/dml/test_bulk_statements.py index 431eb3076fc..3943a9ab6cc 100644 --- a/test/orm/dml/test_bulk_statements.py +++ b/test/orm/dml/test_bulk_statements.py @@ -602,6 +602,79 @@ class Employee(ComparableEntity, decl_base): class UpdateStmtTest(testing.AssertsExecutionResults, fixtures.TestBase): __backend__ = True + @testing.variation("populate_existing", [True, False]) + @testing.requires.update_returning + def test_update_populate_existing(self, decl_base, populate_existing): + """test #11912""" + + class Employee(ComparableEntity, decl_base): + __tablename__ = "employee" + + uuid: Mapped[uuid.UUID] = mapped_column(primary_key=True) + user_name: Mapped[str] = mapped_column(nullable=False) + some_server_value: Mapped[str] + + decl_base.metadata.create_all(testing.db) + s = fixture_session() + + uuid1 = uuid.uuid4() + e1 = Employee( + uuid=uuid1, user_name="e1 old name", some_server_value="value 1" + ) + s.add(e1) + s.flush() + + stmt = ( + update(Employee) + .values(user_name="e1 new name") + .where(Employee.uuid == uuid1) + .returning(Employee) + ) + # perform out of band UPDATE on server value to simulate + # a computed col + s.connection().execute( + update(Employee.__table__).values(some_server_value="value 2") + ) 
+ if populate_existing: + rows = s.scalars( + stmt, execution_options={"populate_existing": True} + ) + # SPECIAL: before we actually receive the returning rows, + # the existing objects have not been updated yet + eq_(e1.some_server_value, "value 1") + + eq_( + set(rows), + { + Employee( + uuid=uuid1, + user_name="e1 new name", + some_server_value="value 2", + ), + }, + ) + + # now they are updated + eq_(e1.some_server_value, "value 2") + else: + # no populate existing + rows = s.scalars(stmt) + eq_(e1.some_server_value, "value 1") + eq_( + set(rows), + { + Employee( + uuid=uuid1, + user_name="e1 new name", + some_server_value="value 1", + ), + }, + ) + eq_(e1.some_server_value, "value 1") + s.commit() + s.expire_all() + eq_(e1.some_server_value, "value 2") + @testing.variation( "returning_executemany", [ From 35d5622551f28cd193a074b08dbe09115aeacd9e Mon Sep 17 00:00:00 2001 From: sh-at-cs <112704226+sh-at-cs@users.noreply.github.com> Date: Mon, 23 Sep 2024 19:21:40 +0200 Subject: [PATCH 339/544] Add type annotations to CreateSchema & DropSchema (#11914) (cherry picked from commit 40ccf772d377ec5f0b07691d3505292ddbbd2435) --- lib/sqlalchemy/sql/ddl.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index d9e3f673a21..ab717e2b37e 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -470,8 +470,8 @@ class CreateSchema(_CreateBase): def __init__( self, - name, - if_not_exists=False, + name: str, + if_not_exists: bool = False, ): """Create a new :class:`.CreateSchema` construct.""" @@ -491,9 +491,9 @@ class DropSchema(_DropBase): def __init__( self, - name, - cascade=False, - if_exists=False, + name: str, + cascade: bool = False, + if_exists: bool = False, ): """Create a new :class:`.DropSchema` construct.""" From d34901ddac1466851c09d1f6f7ec32966e6fe44f Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 23 Sep 2024 23:11:06 +0200 Subject: [PATCH 340/544] Link scalar result method in scalar_one / scalar_one_or_none References: #11919 Change-Id: Iccbcd3fc3a6143be902683837b36260e5dd31c60 (cherry picked from commit 74e8e777f9aab33830d0625ef03d4a349cab24f4) --- lib/sqlalchemy/engine/result.py | 16 ++++++++-------- lib/sqlalchemy/ext/asyncio/result.py | 16 ++++++++-------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 56b3a68bc65..5ff41f1cbda 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -1452,11 +1452,11 @@ def scalar_one(self) -> Any: """Return exactly one scalar result or raise an exception. This is equivalent to calling :meth:`_engine.Result.scalars` and - then :meth:`_engine.Result.one`. + then :meth:`_engine.ScalarResult.one`. .. seealso:: - :meth:`_engine.Result.one` + :meth:`_engine.ScalarResult.one` :meth:`_engine.Result.scalars` @@ -1475,11 +1475,11 @@ def scalar_one_or_none(self) -> Optional[Any]: """Return exactly one scalar result or ``None``. This is equivalent to calling :meth:`_engine.Result.scalars` and - then :meth:`_engine.Result.one_or_none`. + then :meth:`_engine.ScalarResult.one_or_none`. .. seealso:: - :meth:`_engine.Result.one_or_none` + :meth:`_engine.ScalarResult.one_or_none` :meth:`_engine.Result.scalars` @@ -1917,11 +1917,11 @@ def scalar_one(self) -> Any: """Return exactly one scalar result or raise an exception. This is equivalent to calling :meth:`_engine.Result.scalars` - and then :meth:`_engine.Result.one`. 
+ and then :meth:`_engine.ScalarResult.one`. .. seealso:: - :meth:`_engine.Result.one` + :meth:`_engine.ScalarResult.one` :meth:`_engine.Result.scalars` @@ -1940,11 +1940,11 @@ def scalar_one_or_none(self) -> Optional[Any]: """Return exactly one or no scalar result. This is equivalent to calling :meth:`_engine.Result.scalars` - and then :meth:`_engine.Result.one_or_none`. + and then :meth:`_engine.ScalarResult.one_or_none`. .. seealso:: - :meth:`_engine.Result.one_or_none` + :meth:`_engine.ScalarResult.one_or_none` :meth:`_engine.Result.scalars` diff --git a/lib/sqlalchemy/ext/asyncio/result.py b/lib/sqlalchemy/ext/asyncio/result.py index 7dcbe3280e7..745a51b81b7 100644 --- a/lib/sqlalchemy/ext/asyncio/result.py +++ b/lib/sqlalchemy/ext/asyncio/result.py @@ -333,11 +333,11 @@ async def scalar_one(self) -> Any: """Return exactly one scalar result or raise an exception. This is equivalent to calling :meth:`_asyncio.AsyncResult.scalars` and - then :meth:`_asyncio.AsyncResult.one`. + then :meth:`_asyncio.AsyncScalarResult.one`. .. seealso:: - :meth:`_asyncio.AsyncResult.one` + :meth:`_asyncio.AsyncScalarResult.one` :meth:`_asyncio.AsyncResult.scalars` @@ -356,11 +356,11 @@ async def scalar_one_or_none(self) -> Optional[Any]: """Return exactly one scalar result or ``None``. This is equivalent to calling :meth:`_asyncio.AsyncResult.scalars` and - then :meth:`_asyncio.AsyncResult.one_or_none`. + then :meth:`_asyncio.AsyncScalarResult.one_or_none`. .. seealso:: - :meth:`_asyncio.AsyncResult.one_or_none` + :meth:`_asyncio.AsyncScalarResult.one_or_none` :meth:`_asyncio.AsyncResult.scalars` @@ -869,11 +869,11 @@ async def scalar_one(self) -> Any: """Return exactly one scalar result or raise an exception. This is equivalent to calling :meth:`_engine.Result.scalars` - and then :meth:`_engine.Result.one`. + and then :meth:`_engine.AsyncScalarResult.one`. .. seealso:: - :meth:`_engine.Result.one` + :meth:`_engine.AsyncScalarResult.one` :meth:`_engine.Result.scalars` @@ -892,11 +892,11 @@ async def scalar_one_or_none(self) -> Optional[Any]: """Return exactly one or no scalar result. This is equivalent to calling :meth:`_engine.Result.scalars` - and then :meth:`_engine.Result.one_or_none`. + and then :meth:`_engine.AsyncScalarResult.one_or_none`. .. seealso:: - :meth:`_engine.Result.one_or_none` + :meth:`_engine.AsyncScalarResult.one_or_none` :meth:`_engine.Result.scalars` From 063b96e0c992cd5a9f2fa5163d40dbe14dd4c3ff Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 24 Sep 2024 09:35:48 -0400 Subject: [PATCH 341/544] block mariadb 1.1.10 does not build for any python version see https://jira.mariadb.org/browse/CONPY-293 Change-Id: I1bf53d79eda7ded017b233f1639aae4bf9578ae6 (cherry picked from commit 833775adc225a358e194092a3cd50be2f52ca8c9) --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index f9dcc52667f..b80da1ee377 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,7 +54,7 @@ mysql = mysql_connector = mysql-connector-python mariadb_connector = - mariadb>=1.0.1,!=1.1.2,!=1.1.5 + mariadb>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10 oracle = cx_oracle>=8 oracle_oracledb = From 28e08dc007dee811596bb3b9d7be1cf611ee3bc8 Mon Sep 17 00:00:00 2001 From: huuyafwww Date: Sat, 5 Oct 2024 02:04:13 -0400 Subject: [PATCH 342/544] Fixed syntax error in mysql function defaults Fixed a bug that caused a syntax error when a function was specified to server_default when creating a column in MySQL or MariaDB. 
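A minimal sketch of the corrected DDL rendering, assuming a hypothetical table and an emulated server version (illustrative only, not part of the patch):

    from sqlalchemy import Column, MetaData, String, Table, func
    from sqlalchemy.dialects import mysql
    from sqlalchemy.schema import CreateTable

    # hypothetical table, used only to show the rendered column DDL
    tbl = Table(
        "some_table",
        MetaData(),
        Column("description", String(255), server_default=func.lower("hi")),
    )

    dialect = mysql.dialect()
    # emulate a server new enough for parenthesized expression defaults
    # (MySQL 8.0.13+ / MariaDB 10.2.1+)
    dialect.server_version_info = (8, 0, 13)

    # expected column clause: description VARCHAR(255) DEFAULT (lower('hi'))
    print(CreateTable(tbl).compile(dialect=dialect))
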
Fixes #11317 Closes: #11953 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11953 Pull-request-sha: d93ac419a9201134e9c4845dd2e4dc48db4b6f78 Change-Id: I67fc83867df2b7dcf591c8f53b7a97afb90ebba9 (cherry picked from commit 40e990aab3f92051f3c693a81de938ab3b4eb5e4) --- doc/build/changelog/unreleased_20/11317.rst | 7 +++ lib/sqlalchemy/dialects/mysql/base.py | 21 ++++++++- test/dialect/mysql/test_compiler.py | 51 +++++++++++++++++++++ 3 files changed, 78 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/11317.rst diff --git a/doc/build/changelog/unreleased_20/11317.rst b/doc/build/changelog/unreleased_20/11317.rst new file mode 100644 index 00000000000..e41a0733d2c --- /dev/null +++ b/doc/build/changelog/unreleased_20/11317.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, schema + :tickets: 11317 + + Fixed a bug that caused a syntax error when a function was specified + to server_default when creating a column in MySQL or MariaDB. + Pull request courtesy of huuya. diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index e512b9c75c2..89f78b6a7f0 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1850,7 +1850,15 @@ def get_column_specification(self, column, **kw): else: default = self.get_column_default_string(column) if default is not None: - colspec.append("DEFAULT " + default) + if ( + isinstance( + column.server_default.arg, functions.FunctionElement + ) + and self.dialect._support_default_function + ): + colspec.append(f"DEFAULT ({default})") + else: + colspec.append("DEFAULT " + default) return " ".join(colspec) def post_create_table(self, table): @@ -2895,6 +2903,17 @@ def _support_float_cast(self): # ref https://dev.mysql.com/doc/relnotes/mysql/8.0/en/news-8-0-17.html#mysqld-8-0-17-feature # noqa return self.server_version_info >= (8, 0, 17) + @property + def _support_default_function(self): + if not self.server_version_info: + return False + elif self.is_mariadb: + # ref https://mariadb.com/kb/en/mariadb-1021-release-notes/ + return self.server_version_info >= (10, 2, 1) + else: + # ref https://dev.mysql.com/doc/refman/8.0/en/data-type-defaults.html # noqa + return self.server_version_info >= (8, 0, 13) + @property def _is_mariadb(self): return self.is_mariadb diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py index 189390659ad..f0dcb583884 100644 --- a/test/dialect/mysql/test_compiler.py +++ b/test/dialect/mysql/test_compiler.py @@ -25,6 +25,7 @@ from sqlalchemy import INT from sqlalchemy import Integer from sqlalchemy import Interval +from sqlalchemy import JSON from sqlalchemy import LargeBinary from sqlalchemy import literal from sqlalchemy import MetaData @@ -406,6 +407,56 @@ def test_create_pk_with_using(self): "PRIMARY KEY (data) USING btree)", ) + @testing.combinations( + (True, True, (10, 2, 2)), + (True, True, (10, 2, 1)), + (False, True, (10, 2, 0)), + (True, False, (8, 0, 14)), + (True, False, (8, 0, 13)), + (False, False, (8, 0, 12)), + argnames="has_brackets,is_mariadb,version", + ) + def test_create_server_default_with_function_using( + self, has_brackets, is_mariadb, version + ): + dialect = mysql.dialect(is_mariadb=is_mariadb) + dialect.server_version_info = version + + m = MetaData() + tbl = Table( + "testtbl", + m, + Column("time", DateTime, server_default=func.current_timestamp()), + Column("name", String(255), server_default="some str"), + Column( + "description", String(255), 
server_default=func.lower("hi") + ), + Column("data", JSON, server_default=func.json_object()), + ) + + eq_(dialect._support_default_function, has_brackets) + + if has_brackets: + self.assert_compile( + schema.CreateTable(tbl), + "CREATE TABLE testtbl (" + "time DATETIME DEFAULT (CURRENT_TIMESTAMP), " + "name VARCHAR(255) DEFAULT 'some str', " + "description VARCHAR(255) DEFAULT (lower('hi')), " + "data JSON DEFAULT (json_object()))", + dialect=dialect, + ) + else: + self.assert_compile( + schema.CreateTable(tbl), + "CREATE TABLE testtbl (" + "time DATETIME DEFAULT CURRENT_TIMESTAMP, " + "name VARCHAR(255) DEFAULT 'some str', " + "description VARCHAR(255) DEFAULT lower('hi'), " + "data JSON DEFAULT json_object())", + dialect=dialect, + ) + def test_create_index_expr(self): m = MetaData() t1 = Table("foo", m, Column("x", Integer)) From 89bb895a910da331ad83bb38a1e29ee5fb880182 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 5 Oct 2024 09:43:30 +0200 Subject: [PATCH 343/544] Bump pypa/cibuildwheel from 2.21.1 to 2.21.2 (#11947) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.21.1 to 2.21.2. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.21.1...v2.21.2) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cherry picked from commit a22545381d72bdebcd506476d07c84913ed37f2c) --- .github/workflows/create-wheels.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index 4f7830e3512..cf9145e857f 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -74,7 +74,7 @@ jobs: - name: Build compiled wheels if: ${{ matrix.wheel_mode == 'compiled' }} - uses: pypa/cibuildwheel@v2.21.1 + uses: pypa/cibuildwheel@v2.21.2 env: CIBW_ARCHS_LINUX: ${{ matrix.linux_archs }} CIBW_BUILD: ${{ matrix.python }} From 9ec263afaaad5b0a4006e0b3d74b69fede2c6915 Mon Sep 17 00:00:00 2001 From: Kevin Kirsche Date: Wed, 2 Oct 2024 13:06:59 -0400 Subject: [PATCH 344/544] Add type hints to `sqlalchemy.ext.compiler` References: #6810 Closes: #11902 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11902 Pull-request-sha: 3a7719ff96c754af2575c5385c8d4fa4d5492113 Change-Id: I29c92ade40d36d186eb37534dc0318f9b2b25840 (cherry picked from commit 0883ee5bf8779edb2ab0ba78f4668ebeb5164781) --- lib/sqlalchemy/ext/compiler.py | 31 +++++++++++++++++++++++-------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py index 01462ad0b48..b870adce92c 100644 --- a/lib/sqlalchemy/ext/compiler.py +++ b/lib/sqlalchemy/ext/compiler.py @@ -4,7 +4,6 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors r"""Provides an API for creation of custom ClauseElements and compilers. 
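For reference, the decorator whose signature is annotated below is typically applied as in the module's own documented examples; a minimal sketch using a hypothetical MyColumn construct:

    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.sql.expression import ColumnClause

    class MyColumn(ColumnClause):
        inherit_cache = True  # avoid cache warnings for the custom element

    @compiles(MyColumn)
    def compile_mycolumn(element, compiler, **kw):
        # str(MyColumn("x")) should now render as "[x]"
        return "[%s]" % element.name
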
@@ -452,15 +451,29 @@ def int_false(element, compiler, **kw): ) """ +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import Dict +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar + from .. import exc from ..sql import sqltypes +if TYPE_CHECKING: + from ..sql.compiler import SQLCompiler + +_F = TypeVar("_F", bound=Callable[..., Any]) + -def compiles(class_, *specs): +def compiles(class_: Type[Any], *specs: str) -> Callable[[_F], _F]: """Register a function as a compiler for a given :class:`_expression.ClauseElement` type.""" - def decorate(fn): + def decorate(fn: _F) -> _F: # get an existing @compiles handler existing = class_.__dict__.get("_compiler_dispatcher", None) @@ -473,7 +486,9 @@ def decorate(fn): if existing_dispatch: - def _wrap_existing_dispatch(element, compiler, **kw): + def _wrap_existing_dispatch( + element: Any, compiler: SQLCompiler, **kw: Any + ) -> Any: try: return existing_dispatch(element, compiler, **kw) except exc.UnsupportedCompilationError as uce: @@ -505,7 +520,7 @@ def _wrap_existing_dispatch(element, compiler, **kw): return decorate -def deregister(class_): +def deregister(class_: Type[Any]) -> None: """Remove all custom compilers associated with a given :class:`_expression.ClauseElement` type. @@ -517,10 +532,10 @@ def deregister(class_): class _dispatcher: - def __init__(self): - self.specs = {} + def __init__(self) -> None: + self.specs: Dict[str, Callable[..., Any]] = {} - def __call__(self, element, compiler, **kw): + def __call__(self, element: Any, compiler: SQLCompiler, **kw: Any) -> Any: # TODO: yes, this could also switch off of DBAPI in use. fn = self.specs.get(compiler.dialect.name, None) if not fn: From 92f9ffd07d59c5987e9c267be70f45c435ec7e82 Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> Date: Mon, 7 Oct 2024 19:26:18 +0300 Subject: [PATCH 345/544] Add classifier declaring support for Python 3.13 (#11960) (cherry picked from commit afe08a915556f2b1beb5e15aaec770c330ea84a2) Change-Id: I7c2b6b0ea8a830b63de9dd893bad957fcaf60b17 --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index b80da1ee377..4767e674539 100644 --- a/setup.cfg +++ b/setup.cfg @@ -22,6 +22,7 @@ classifiers = Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: Implementation :: PyPy Topic :: Database :: Front-Ends From d4d9fd0212de42c259e5badd397ce55fd5822af8 Mon Sep 17 00:00:00 2001 From: Gord Thompson Date: Mon, 7 Oct 2024 06:31:22 -0600 Subject: [PATCH 346/544] Apply fix to reflection of table comments Fixes: #11961 Change-Id: Ia3e704973a17cdf5c45bb5b8127435ee562c7d15 (cherry picked from commit 5c48094cbde3cbfaaed0b137ced4887bef14dc29) --- doc/build/changelog/unreleased_20/11961.rst | 7 +++++++ lib/sqlalchemy/dialects/postgresql/base.py | 2 ++ 2 files changed, 9 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11961.rst diff --git a/doc/build/changelog/unreleased_20/11961.rst b/doc/build/changelog/unreleased_20/11961.rst new file mode 100644 index 00000000000..c6ffceb0364 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11961.rst @@ -0,0 +1,7 @@ +.. 
change:: + :tags: bug, postgresql, reflection + :tickets: 11961 + + Fixed bug in reflection of table comments where unrelated text would be + returned if an entry in the pg_description table happened to share the + same oid (objoid) as the table being reflected. \ No newline at end of file diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 4ab3ca24d16..19af90961c9 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -4689,6 +4689,8 @@ def _comment_query(self, schema, has_filter_names, scope, kind): pg_catalog.pg_class.c.oid == pg_catalog.pg_description.c.objoid, pg_catalog.pg_description.c.objsubid == 0, + pg_catalog.pg_description.c.classoid + == sql.func.cast("pg_catalog.pg_class", REGCLASS), ), ) .where(self._pg_class_relkind_condition(relkinds)) From 701b23803e858cd5dcecf1c661bbba38fc16c9fc Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 8 Oct 2024 14:01:40 -0400 Subject: [PATCH 347/544] correct mis-cherry-picked commit for #11449 in 2.0.31 The main review for #11449 at Ie8f0e8d9bb7958baac33c7c2231e4afae15cf5b1 had three revisions but the cherry pick to 2.0 somehow missed the second two cherry picks. it's not clear if this was intentional. however, as we need to continue on with correcting this behavior it seems like we really should have the full "main" behavior in the release. Fixes: #11449 Change-Id: I1e79c4e0c4843268b2fce7fc2046900bd7b48f00 --- lib/sqlalchemy/orm/strategies.py | 231 ++++++++++++++++++++----------- 1 file changed, 147 insertions(+), 84 deletions(-) diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index 790ce28e56f..996bdbc1d97 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -16,8 +16,10 @@ import itertools from typing import Any from typing import Dict +from typing import Optional from typing import Tuple from typing import TYPE_CHECKING +from typing import Union from . import attributes from . import exc as orm_exc @@ -57,8 +59,10 @@ from ..sql import visitors from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL from ..sql.selectable import Select +from ..util.typing import Literal if TYPE_CHECKING: + from .mapper import Mapper from .relationships import RelationshipProperty from ..sql.elements import ColumnElement @@ -2539,7 +2543,7 @@ def _create_eager_join( else: # all other cases are innerjoin=='nested' approach eagerjoin = self._splice_nested_inner_join( - path, towrap, clauses, onclause, extra_join_criteria + path, path[-2], towrap, clauses, onclause, extra_join_criteria ) compile_state.eager_joins[query_entity_key] = eagerjoin @@ -2575,115 +2579,174 @@ def _create_eager_join( def _splice_nested_inner_join( self, path, + entity_we_want_to_splice_onto, join_obj, clauses, onclause, extra_criteria, - splicing=False, - detected_existing_path=None, + entity_inside_join_structure: Union[ + Mapper, None, Literal[False] + ] = False, + detected_existing_path: Optional[path_registry.PathRegistry] = None, ): # recursive fn to splice a nested join into an existing one. - # splicing=False means this is the outermost call, and it - # should return a value. splicing= is the recursive - # form, where it can return None to indicate the end of the recursion + # entity_inside_join_structure=False means this is the outermost call, + # and it should return a value. 
entity_inside_join_structure= + # indicates we've descended into a join and are looking at a FROM + # clause representing this mapper; if this is not + # entity_we_want_to_splice_onto then return None to end the recursive + # branch - if splicing is False: - # first call is always handed a join object - # from the outside + assert entity_we_want_to_splice_onto is path[-2] + + if entity_inside_join_structure is False: assert isinstance(join_obj, orm_util._ORMJoin) - elif isinstance(join_obj, sql.selectable.FromGrouping): + + if isinstance(join_obj, sql.selectable.FromGrouping): + # FromGrouping - continue descending into the structure return self._splice_nested_inner_join( path, + entity_we_want_to_splice_onto, join_obj.element, clauses, onclause, extra_criteria, - splicing, - ) - elif not isinstance(join_obj, orm_util._ORMJoin): - if path[-2].isa(splicing): - - if detected_existing_path: - # TODO: refine this into a more efficient method - if not detected_existing_path.contains_mapper(splicing): - return None - elif path_registry.PathRegistry.coerce( - detected_existing_path[len(path) :] - ).contains_mapper(splicing): - return None - - return orm_util._ORMJoin( - join_obj, - clauses.aliased_insp, - onclause, - isouter=False, - _left_memo=splicing, - _right_memo=path[path[-1].mapper], - _extra_criteria=extra_criteria, - ) - else: - return None + entity_inside_join_structure, + ) + elif isinstance(join_obj, orm_util._ORMJoin): + # _ORMJoin - continue descending into the structure - target_join = self._splice_nested_inner_join( - path, - join_obj.right, - clauses, - onclause, - extra_criteria, - # NOTE: this is the one place _right_memo is consumed - splicing=( - join_obj._right_memo[-1].mapper - if join_obj._right_memo is not None - else None - ), - ) - if target_join is None: - right_splice = False + join_right_path = join_obj._right_memo + + # see if right side of join is viable target_join = self._splice_nested_inner_join( path, - join_obj.left, + entity_we_want_to_splice_onto, + join_obj.right, clauses, onclause, extra_criteria, - join_obj._left_memo, - detected_existing_path=join_obj._right_memo, + entity_inside_join_structure=( + join_right_path[-1].mapper + if join_right_path is not None + else None + ), ) - if target_join is None: - # should only return None when recursively called, - # e.g. splicing refers to a from obj - assert ( - splicing is not False - ), "assertion failed attempting to produce joined eager loads" - return None - else: - right_splice = True - - if right_splice: - # for a right splice, attempt to flatten out - # a JOIN b JOIN c JOIN .. to avoid needless - # parenthesis nesting - if not join_obj.isouter and not target_join.isouter: - eagerjoin = join_obj._splice_into_center(target_join) + if target_join is not None: + # for a right splice, attempt to flatten out + # a JOIN b JOIN c JOIN .. 
to avoid needless + # parenthesis nesting + if not join_obj.isouter and not target_join.isouter: + eagerjoin = join_obj._splice_into_center(target_join) + else: + eagerjoin = orm_util._ORMJoin( + join_obj.left, + target_join, + join_obj.onclause, + isouter=join_obj.isouter, + _left_memo=join_obj._left_memo, + ) + + eagerjoin._target_adapter = target_join._target_adapter + return eagerjoin + else: - eagerjoin = orm_util._ORMJoin( + # see if left side of join is viable + target_join = self._splice_nested_inner_join( + path, + entity_we_want_to_splice_onto, join_obj.left, - target_join, - join_obj.onclause, - isouter=join_obj.isouter, - _left_memo=join_obj._left_memo, + clauses, + onclause, + extra_criteria, + entity_inside_join_structure=join_obj._left_memo, + detected_existing_path=join_right_path, ) - else: - eagerjoin = orm_util._ORMJoin( - target_join, - join_obj.right, - join_obj.onclause, - isouter=join_obj.isouter, - _right_memo=join_obj._right_memo, - ) - eagerjoin._target_adapter = target_join._target_adapter - return eagerjoin + if target_join is not None: + eagerjoin = orm_util._ORMJoin( + target_join, + join_obj.right, + join_obj.onclause, + isouter=join_obj.isouter, + _right_memo=join_obj._right_memo, + ) + eagerjoin._target_adapter = target_join._target_adapter + return eagerjoin + + # neither side viable, return None, or fail if this was the top + # most call + if entity_inside_join_structure is False: + assert ( + False + ), "assertion failed attempting to produce joined eager loads" + return None + + # reached an endpoint (e.g. a table that's mapped, or an alias of that + # table). determine if we can use this endpoint to splice onto + + # is this the entity we want to splice onto in the first place? + if not entity_we_want_to_splice_onto.isa(entity_inside_join_structure): + return None + + # path check. if we know the path how this join endpoint got here, + # lets look at our path we are satisfying and see if we're in the + # wrong place. This is specifically for when our entity may + # appear more than once in the path, issue #11449 + if detected_existing_path: + # this assertion is currently based on how this call is made, + # where given a join_obj, the call will have these parameters as + # entity_inside_join_structure=join_obj._left_memo + # and entity_inside_join_structure=join_obj._right_memo.mapper + assert detected_existing_path[-3] is entity_inside_join_structure + + # from that, see if the path we are targeting matches the + # "existing" path of this join all the way up to the midpoint + # of this join object (e.g. the relationship). + # if not, then this is not our target + # + # a test condition where this test is false looks like: + # + # desired splice: Node->kind->Kind + # path of desired splice: NodeGroup->nodes->Node->kind + # path we've located: NodeGroup->nodes->Node->common_node->Node + # + # above, because we want to splice kind->Kind onto + # NodeGroup->nodes->Node, this is not our path because it actually + # goes more steps than we want into self-referential + # ->common_node->Node + # + # a test condition where this test is true looks like: + # + # desired splice: B->c2s->C2 + # path of desired splice: A->bs->B->c2s + # path we've located: A->bs->B->c1s->C1 + # + # above, we want to splice c2s->C2 onto B, and the located path + # shows that the join ends with B->c1s->C1. so we will + # add another join onto that, which would create a "branch" that + # we might represent in a pseudopath as: + # + # B->c1s->C1 + # ->c2s->C2 + # + # i.e. 
A JOIN B ON JOIN C1 ON + # JOIN C2 ON + # + + if detected_existing_path[0:-2] != path.path[0:-1]: + return None + + return orm_util._ORMJoin( + join_obj, + clauses.aliased_insp, + onclause, + isouter=False, + _left_memo=entity_inside_join_structure, + _right_memo=path[path[-1].mapper], + _extra_criteria=extra_criteria, + ) def _create_eager_adapter(self, context, result, adapter, path, loadopt): compile_state = context.compile_state From 9ad928b61b17dcec459e051814b0618288604da0 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 8 Oct 2024 23:22:20 +0200 Subject: [PATCH 348/544] fix typo in mapper doc string Change-Id: I10fd7bdb0f0564a5beadfe3fa9fbb7e5ea88362c (cherry picked from commit 74a8e2ced922183d6ad072eced904cb989113fa2) --- lib/sqlalchemy/orm/mapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index 06e3884be63..5bb203b3a92 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -442,7 +442,7 @@ class User(Base): mapping of the class to an alternate selectable, for loading only. - .. seealso:: + .. seealso:: :ref:`relationship_aliased_class` - the new pattern that removes the need for the :paramref:`_orm.Mapper.non_primary` flag. From cbd43c847d534cd610a704a86037aa88ed3c71ce Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 25 Sep 2024 14:19:02 -0400 Subject: [PATCH 349/544] honor prefetch_cols and postfetch_cols in ORM update w/ WHERE criteria Continuing from :ticket:`11912`, columns marked with :paramref:`.mapped_column.onupdate`, :paramref:`.mapped_column.server_onupdate`, or :class:`.Computed` are now refreshed in ORM instances when running an ORM enabled UPDATE with WHERE criteria, even if the statement does not use RETURNING or populate_existing. this moves the test we added in #11912 to be in test_update_delete_where, since this behavior is not related to bulk statements. For bulk statements, we're building onto the "many rows fast" use case and we as yet intentionally don't do any "bookkeeping", which means none of the expiration or any of that. would need to rethink "bulk update" a bit to get onupdates to refresh. Fixes: #11917 Change-Id: I9601be7afed523b356ce47a6daf98cc6584f4ad3 (cherry picked from commit bd1c17f11318d0b581f59c8c6521979246abc9b8) --- doc/build/changelog/unreleased_20/11917.rst | 10 + lib/sqlalchemy/orm/bulk_persistence.py | 44 +++- test/orm/dml/test_bulk_statements.py | 142 ++++++++----- test/orm/dml/test_update_delete_where.py | 223 ++++++++++++++++++++ 4 files changed, 363 insertions(+), 56 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11917.rst diff --git a/doc/build/changelog/unreleased_20/11917.rst b/doc/build/changelog/unreleased_20/11917.rst new file mode 100644 index 00000000000..951b191605f --- /dev/null +++ b/doc/build/changelog/unreleased_20/11917.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, orm + :tickets: 11917 + + Continuing from :ticket:`11912`, columns marked with + :paramref:`.mapped_column.onupdate`, + :paramref:`.mapped_column.server_onupdate`, or :class:`.Computed` are now + refreshed in ORM instances when running an ORM enabled UPDATE with WHERE + criteria, even if the statement does not use RETURNING or + populate_existing. 
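A minimal sketch of the refreshed-onupdate behavior described above, assuming the ``Employee`` mapping and session fixture used in the tests added further down (where ``some_server_value`` carries an ``onupdate`` default of ``"value 2"``):

    from sqlalchemy import update

    stmt = (
        update(Employee)
        .values(user_name="e1 new name")
        .where(Employee.uuid == uuid1)
    )
    session.execute(stmt)

    # the already-loaded instance now reflects the onupdate-generated value,
    # even though the statement used neither RETURNING nor populate_existing
    assert e1.some_server_value == "value 2"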
diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py index 155de56dbe1..01a39049b07 100644 --- a/lib/sqlalchemy/orm/bulk_persistence.py +++ b/lib/sqlalchemy/orm/bulk_persistence.py @@ -1761,7 +1761,10 @@ def _do_post_synchronize_evaluate( session, update_options, statement, + result.context.compiled_parameters[0], [(obj, state, dict_) for obj, state, dict_, _ in matched_objects], + result.prefetch_cols(), + result.postfetch_cols(), ) @classmethod @@ -1806,6 +1809,7 @@ def _do_post_synchronize_fetch( session, update_options, statement, + result.context.compiled_parameters[0], [ ( obj, @@ -1814,16 +1818,26 @@ def _do_post_synchronize_fetch( ) for obj in objs ], + result.prefetch_cols(), + result.postfetch_cols(), ) @classmethod def _apply_update_set_values_to_objects( - cls, session, update_options, statement, matched_objects + cls, + session, + update_options, + statement, + effective_params, + matched_objects, + prefetch_cols, + postfetch_cols, ): """apply values to objects derived from an update statement, e.g. UPDATE..SET """ + mapper = update_options._subject_mapper target_cls = mapper.class_ evaluator_compiler = evaluator._EvaluatorCompiler(target_cls) @@ -1846,7 +1860,35 @@ def _apply_update_set_values_to_objects( attrib = {k for k, v in resolved_keys_as_propnames} states = set() + + to_prefetch = { + c + for c in prefetch_cols + if c.key in effective_params + and c in mapper._columntoproperty + and c.key not in evaluated_keys + } + to_expire = { + mapper._columntoproperty[c].key + for c in postfetch_cols + if c in mapper._columntoproperty + }.difference(evaluated_keys) + + prefetch_transfer = [ + (mapper._columntoproperty[c].key, c.key) for c in to_prefetch + ] + for obj, state, dict_ in matched_objects: + + dict_.update( + { + col_to_prop: effective_params[c_key] + for col_to_prop, c_key in prefetch_transfer + } + ) + + state._expire_attributes(state.dict, to_expire) + to_evaluate = state.unmodified.intersection(evaluated_keys) for key in to_evaluate: diff --git a/test/orm/dml/test_bulk_statements.py b/test/orm/dml/test_bulk_statements.py index 3943a9ab6cc..992a18947b7 100644 --- a/test/orm/dml/test_bulk_statements.py +++ b/test/orm/dml/test_bulk_statements.py @@ -8,8 +8,10 @@ import uuid from sqlalchemy import bindparam +from sqlalchemy import Computed from sqlalchemy import event from sqlalchemy import exc +from sqlalchemy import FetchedValue from sqlalchemy import ForeignKey from sqlalchemy import func from sqlalchemy import Identity @@ -602,78 +604,102 @@ class Employee(ComparableEntity, decl_base): class UpdateStmtTest(testing.AssertsExecutionResults, fixtures.TestBase): __backend__ = True - @testing.variation("populate_existing", [True, False]) - @testing.requires.update_returning - def test_update_populate_existing(self, decl_base, populate_existing): - """test #11912""" + @testing.variation( + "use_onupdate", + [ + "none", + "server", + "callable", + "clientsql", + ("computed", testing.requires.computed_columns), + ], + ) + def test_bulk_update_onupdates( + self, + decl_base, + use_onupdate, + ): + """assert that for now, bulk ORM update by primary key does not + expire or refresh onupdates.""" class Employee(ComparableEntity, decl_base): __tablename__ = "employee" uuid: Mapped[uuid.UUID] = mapped_column(primary_key=True) - user_name: Mapped[str] = mapped_column(nullable=False) - some_server_value: Mapped[str] + user_name: Mapped[str] = mapped_column(String(200), nullable=False) + + if use_onupdate.server: + some_server_value: 
Mapped[str] = mapped_column( + server_onupdate=FetchedValue() + ) + elif use_onupdate.callable: + some_server_value: Mapped[str] = mapped_column( + onupdate=lambda: "value 2" + ) + elif use_onupdate.clientsql: + some_server_value: Mapped[str] = mapped_column( + onupdate=literal("value 2") + ) + elif use_onupdate.computed: + some_server_value: Mapped[str] = mapped_column( + String(255), + Computed(user_name + " computed value"), + nullable=True, + ) + else: + some_server_value: Mapped[str] decl_base.metadata.create_all(testing.db) s = fixture_session() uuid1 = uuid.uuid4() - e1 = Employee( - uuid=uuid1, user_name="e1 old name", some_server_value="value 1" - ) + + if use_onupdate.computed: + server_old_value, server_new_value = ( + "e1 old name computed value", + "e1 new name computed value", + ) + e1 = Employee(uuid=uuid1, user_name="e1 old name") + else: + server_old_value, server_new_value = ("value 1", "value 2") + e1 = Employee( + uuid=uuid1, + user_name="e1 old name", + some_server_value="value 1", + ) s.add(e1) s.flush() - stmt = ( - update(Employee) - .values(user_name="e1 new name") - .where(Employee.uuid == uuid1) - .returning(Employee) - ) + # for computed col, make sure e1.some_server_value is loaded. + # this will already be the case for all RETURNING backends, so this + # suits just MySQL. + if use_onupdate.computed: + e1.some_server_value + + stmt = update(Employee) + # perform out of band UPDATE on server value to simulate # a computed col - s.connection().execute( - update(Employee.__table__).values(some_server_value="value 2") - ) - if populate_existing: - rows = s.scalars( - stmt, execution_options={"populate_existing": True} + if use_onupdate.none or use_onupdate.server: + s.connection().execute( + update(Employee.__table__).values(some_server_value="value 2") ) - # SPECIAL: before we actually receive the returning rows, - # the existing objects have not been updated yet - eq_(e1.some_server_value, "value 1") - eq_( - set(rows), - { - Employee( - uuid=uuid1, - user_name="e1 new name", - some_server_value="value 2", - ), - }, - ) + execution_options = {} - # now they are updated - eq_(e1.some_server_value, "value 2") - else: - # no populate existing - rows = s.scalars(stmt) - eq_(e1.some_server_value, "value 1") - eq_( - set(rows), - { - Employee( - uuid=uuid1, - user_name="e1 new name", - some_server_value="value 1", - ), - }, - ) - eq_(e1.some_server_value, "value 1") + s.execute( + stmt, + execution_options=execution_options, + params=[{"uuid": uuid1, "user_name": "e1 new name"}], + ) + + assert "some_server_value" in e1.__dict__ + eq_(e1.some_server_value, server_old_value) + + # do a full expire, now the new value is definitely there s.commit() s.expire_all() - eq_(e1.some_server_value, "value 2") + eq_(e1.some_server_value, server_new_value) @testing.variation( "returning_executemany", @@ -2393,18 +2419,24 @@ def setup_classes(cls): class A(Base): __tablename__ = "a" - id: Mapped[int] = mapped_column(Integer, primary_key=True) + id: Mapped[int] = mapped_column( + Integer, Identity(), primary_key=True + ) cs = relationship("C") class B(Base): __tablename__ = "b" - id: Mapped[int] = mapped_column(Integer, primary_key=True) + id: Mapped[int] = mapped_column( + Integer, Identity(), primary_key=True + ) a_id: Mapped[int] = mapped_column(ForeignKey("a.id")) a = relationship("A") class C(Base): __tablename__ = "c" - id: Mapped[int] = mapped_column(Integer, primary_key=True) + id: Mapped[int] = mapped_column( + Integer, Identity(), primary_key=True + ) a_id: Mapped[int] = 
mapped_column(ForeignKey("a.id")) @classmethod diff --git a/test/orm/dml/test_update_delete_where.py b/test/orm/dml/test_update_delete_where.py index 3f7b08b470c..8d9feaf63c2 100644 --- a/test/orm/dml/test_update_delete_where.py +++ b/test/orm/dml/test_update_delete_where.py @@ -1,15 +1,22 @@ +from __future__ import annotations + +import uuid + from sqlalchemy import Boolean from sqlalchemy import case from sqlalchemy import column +from sqlalchemy import Computed from sqlalchemy import delete from sqlalchemy import event from sqlalchemy import exc +from sqlalchemy import FetchedValue from sqlalchemy import ForeignKey from sqlalchemy import func from sqlalchemy import insert from sqlalchemy import inspect from sqlalchemy import Integer from sqlalchemy import lambda_stmt +from sqlalchemy import literal from sqlalchemy import literal_column from sqlalchemy import MetaData from sqlalchemy import or_ @@ -25,6 +32,8 @@ from sqlalchemy.orm import exc as orm_exc from sqlalchemy.orm import immediateload from sqlalchemy.orm import joinedload +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column from sqlalchemy.orm import relationship from sqlalchemy.orm import selectinload from sqlalchemy.orm import Session @@ -44,6 +53,7 @@ from sqlalchemy.testing import not_in from sqlalchemy.testing.assertions import expect_raises_message from sqlalchemy.testing.assertsql import CompiledSQL +from sqlalchemy.testing.entities import ComparableEntity from sqlalchemy.testing.fixtures import fixture_session from sqlalchemy.testing.schema import Column from sqlalchemy.testing.schema import Table @@ -3296,6 +3306,219 @@ def test_load_from_delete(self, connection, use_from_statement): # TODO: state of above objects should be "deleted" +class OnUpdatePopulationTest(fixtures.TestBase): + __backend__ = True + + @testing.variation("populate_existing", [True, False]) + @testing.variation( + "use_onupdate", + [ + "none", + "server", + "callable", + "clientsql", + ("computed", testing.requires.computed_columns), + ], + ) + @testing.variation( + "use_returning", + [ + ("returning", testing.requires.update_returning), + ("defaults", testing.requires.update_returning), + "none", + ], + ) + @testing.variation("synchronize", ["auto", "fetch", "evaluate"]) + def test_update_populate_existing( + self, + decl_base, + populate_existing, + use_onupdate, + use_returning, + synchronize, + ): + """test #11912 and #11917""" + + class Employee(ComparableEntity, decl_base): + __tablename__ = "employee" + + uuid: Mapped[uuid.UUID] = mapped_column(primary_key=True) + user_name: Mapped[str] = mapped_column(String(200), nullable=False) + + if use_onupdate.server: + some_server_value: Mapped[str] = mapped_column( + server_onupdate=FetchedValue() + ) + elif use_onupdate.callable: + some_server_value: Mapped[str] = mapped_column( + onupdate=lambda: "value 2" + ) + elif use_onupdate.clientsql: + some_server_value: Mapped[str] = mapped_column( + onupdate=literal("value 2") + ) + elif use_onupdate.computed: + some_server_value: Mapped[str] = mapped_column( + String(255), + Computed(user_name + " computed value"), + nullable=True, + ) + else: + some_server_value: Mapped[str] + + decl_base.metadata.create_all(testing.db) + s = fixture_session() + + uuid1 = uuid.uuid4() + + if use_onupdate.computed: + server_old_value, server_new_value = ( + "e1 old name computed value", + "e1 new name computed value", + ) + e1 = Employee(uuid=uuid1, user_name="e1 old name") + else: + server_old_value, server_new_value = ("value 1", "value 2") + e1 
= Employee( + uuid=uuid1, + user_name="e1 old name", + some_server_value="value 1", + ) + s.add(e1) + s.flush() + + stmt = ( + update(Employee) + .values(user_name="e1 new name") + .where(Employee.uuid == uuid1) + ) + + if use_returning.returning: + stmt = stmt.returning(Employee) + elif use_returning.defaults: + # NOTE: the return_defaults case here has not been analyzed for + # #11912 or #11917. future enhancements may change its behavior + stmt = stmt.return_defaults() + + # perform out of band UPDATE on server value to simulate + # a computed col + if use_onupdate.none or use_onupdate.server: + s.connection().execute( + update(Employee.__table__).values(some_server_value="value 2") + ) + + execution_options = {} + + if populate_existing: + execution_options["populate_existing"] = True + + if synchronize.evaluate: + execution_options["synchronize_session"] = "evaluate" + if synchronize.fetch: + execution_options["synchronize_session"] = "fetch" + + if use_returning.returning: + rows = s.scalars(stmt, execution_options=execution_options) + else: + s.execute(stmt, execution_options=execution_options) + + if ( + use_onupdate.clientsql + or use_onupdate.server + or use_onupdate.computed + ): + if not use_returning.defaults: + # if server-side onupdate was generated, the col should have + # been expired + assert "some_server_value" not in e1.__dict__ + + # and refreshes when called. this is even if we have RETURNING + # rows we didn't fetch yet. + eq_(e1.some_server_value, server_new_value) + else: + # using return defaults here is not expiring. have not + # researched why, it may be because the explicit + # return_defaults interferes with the ORMs call + assert "some_server_value" in e1.__dict__ + eq_(e1.some_server_value, server_old_value) + + elif use_onupdate.callable: + if not use_returning.defaults or not synchronize.fetch: + # for python-side onupdate, col is populated with local value + assert "some_server_value" in e1.__dict__ + + # and is refreshed + eq_(e1.some_server_value, server_new_value) + else: + assert "some_server_value" in e1.__dict__ + + # and is not refreshed + eq_(e1.some_server_value, server_old_value) + + else: + # no onupdate, then the value was not touched yet, + # even if we used RETURNING with populate_existing, because + # we did not fetch the rows yet + assert "some_server_value" in e1.__dict__ + eq_(e1.some_server_value, server_old_value) + + # now see if we can fetch rows + if use_returning.returning: + + if populate_existing or not use_onupdate.none: + eq_( + set(rows), + { + Employee( + uuid=uuid1, + user_name="e1 new name", + some_server_value=server_new_value, + ), + }, + ) + + else: + # if no populate existing and no server default, that column + # is not touched at all + eq_( + set(rows), + { + Employee( + uuid=uuid1, + user_name="e1 new name", + some_server_value=server_old_value, + ), + }, + ) + + if use_returning.defaults: + # as mentioned above, the return_defaults() case here remains + # unanalyzed. 
+ if synchronize.fetch or ( + use_onupdate.clientsql + or use_onupdate.server + or use_onupdate.computed + or use_onupdate.none + ): + eq_(e1.some_server_value, server_old_value) + else: + eq_(e1.some_server_value, server_new_value) + + elif ( + populate_existing and use_returning.returning + ) or not use_onupdate.none: + eq_(e1.some_server_value, server_new_value) + else: + # no onupdate specified, and no populate existing with returning, + # the attribute is not refreshed + eq_(e1.some_server_value, server_old_value) + + # do a full expire, now the new value is definitely there + s.commit() + s.expire_all() + eq_(e1.some_server_value, server_new_value) + + class PGIssue11849Test(fixtures.DeclarativeMappedTest): __backend__ = True __only_on__ = ("postgresql",) From 54b43998ca3d9fcc1d516607c29c07dcdaa20f91 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 8 Oct 2024 10:29:34 -0400 Subject: [PATCH 350/544] re-apply right memo for nested ORMJoin when splicing Fixed regression caused by fixes to joined eager loading in :ticket:`11449`, where a particular joinedload case could not be asserted correctly. We now have an example of that case so the assertion has been repaired to allow for it. Fixes: #11965 Change-Id: I2e0a594981534f4aaeff361a2f8cf1a0fba8de8f (cherry picked from commit 43b974a34957f22963e7faf44f0798c8179adcfc) --- doc/build/changelog/unreleased_20/11965.rst | 9 ++ lib/sqlalchemy/orm/strategies.py | 3 +- lib/sqlalchemy/orm/util.py | 2 +- test/orm/test_eager_relations.py | 93 +++++++++++++++++++++ 4 files changed, 105 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11965.rst diff --git a/doc/build/changelog/unreleased_20/11965.rst b/doc/build/changelog/unreleased_20/11965.rst new file mode 100644 index 00000000000..1f9294c0d90 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11965.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, orm + :tickets: 11965 + + Fixed regression caused by fixes to joined eager loading in + :ticket:`11449`, where a particular joinedload case could not be asserted + correctly. We now have an example of that case so the assertion has been + repaired to allow for it. + diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index 996bdbc1d97..3f947a8d743 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -2694,7 +2694,8 @@ def _splice_nested_inner_join( # lets look at our path we are satisfying and see if we're in the # wrong place. This is specifically for when our entity may # appear more than once in the path, issue #11449 - if detected_existing_path: + # updated in issue #11965. 
+ if detected_existing_path and len(detected_existing_path) > 2: # this assertion is currently based on how this call is made, # where given a join_obj, the call will have these parameters as # entity_inside_join_structure=join_obj._left_memo diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 2b4ac3c9d7c..c9fa79c804b 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -1944,7 +1944,7 @@ def _splice_into_center(self, other): self.onclause, isouter=self.isouter, _left_memo=self._left_memo, - _right_memo=None, + _right_memo=other._left_memo._path_registry, ) return _ORMJoin( diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py index bc3d8f10c2c..7e0eca62c65 100644 --- a/test/orm/test_eager_relations.py +++ b/test/orm/test_eager_relations.py @@ -26,6 +26,8 @@ from sqlalchemy.orm import lazyload from sqlalchemy.orm import Load from sqlalchemy.orm import load_only +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column from sqlalchemy.orm import relationship from sqlalchemy.orm import Session from sqlalchemy.orm import undefer @@ -7110,3 +7112,94 @@ def go(): ) self.assert_sql_count(testing.db, go, 1) + + +class NestedInnerjoinTestIssue11965( + fixtures.DeclarativeMappedTest, testing.AssertsCompiledSQL +): + """test for issue #11965, regression from #11449""" + + __dialect__ = "default" + + @classmethod + def setup_classes(cls): + Base = cls.DeclarativeBasic + + class Source(Base): + __tablename__ = "source" + id: Mapped[int] = mapped_column(primary_key=True) + + class Day(Base): + __tablename__ = "day" + id: Mapped[int] = mapped_column(primary_key=True) + + class Run(Base): + __tablename__ = "run" + id: Mapped[int] = mapped_column(primary_key=True) + + source_id: Mapped[int] = mapped_column( + ForeignKey(Source.id), nullable=False + ) + source = relationship(Source, lazy="joined", innerjoin=True) + + day = relationship( + Day, + lazy="joined", + innerjoin=True, + ) + day_id: Mapped[int] = mapped_column( + ForeignKey(Day.id), nullable=False + ) + + class Event(Base): + __tablename__ = "event" + + id: Mapped[int] = mapped_column(primary_key=True) + run_id: Mapped[int] = mapped_column( + ForeignKey(Run.id), nullable=False + ) + run = relationship(Run, lazy="joined", innerjoin=True) + + class Room(Base): + __tablename__ = "room" + + id: Mapped[int] = mapped_column(primary_key=True) + event_id: Mapped[int] = mapped_column( + ForeignKey(Event.id), nullable=False + ) + event = relationship(Event, foreign_keys=event_id, lazy="joined") + + @classmethod + def insert_data(cls, connection): + Room, Run, Source, Event, Day = cls.classes( + "Room", "Run", "Source", "Event", "Day" + ) + run = Run(source=Source(), day=Day()) + event = Event(run=run) + room = Room(event=event) + with Session(connection) as session: + session.add(room) + session.commit() + + def test_compile(self): + Room = self.classes.Room + self.assert_compile( + select(Room), + "SELECT room.id, room.event_id, source_1.id AS id_1, " + "day_1.id AS id_2, run_1.id AS id_3, run_1.source_id, " + "run_1.day_id, event_1.id AS id_4, event_1.run_id " + "FROM room LEFT OUTER JOIN " + "(event AS event_1 " + "JOIN run AS run_1 ON run_1.id = event_1.run_id " + "JOIN day AS day_1 ON day_1.id = run_1.day_id " + "JOIN source AS source_1 ON source_1.id = run_1.source_id) " + "ON event_1.id = room.event_id", + ) + + def test_roundtrip(self): + Room = self.classes.Room + session = fixture_session() + rooms = session.scalars(select(Room)).unique().all() + 
session.close() + # verify eager-loaded correctly + assert rooms[0].event.run.day From 4dbc3ebd395e6bfda1c85297916b419bd8e358d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 18:42:21 +0200 Subject: [PATCH 351/544] Bump pypa/cibuildwheel from 2.21.2 to 2.21.3 (#11976) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.21.2 to 2.21.3. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.21.2...v2.21.3) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cherry picked from commit 2892aaa108c3fb9d703083579abea938a1ea75a3) --- .github/workflows/create-wheels.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index cf9145e857f..677ac32881a 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -74,7 +74,7 @@ jobs: - name: Build compiled wheels if: ${{ matrix.wheel_mode == 'compiled' }} - uses: pypa/cibuildwheel@v2.21.2 + uses: pypa/cibuildwheel@v2.21.3 env: CIBW_ARCHS_LINUX: ${{ matrix.linux_archs }} CIBW_BUILD: ${{ matrix.python }} From a1cf687380f99d20ffa13dd41771937e084e54b6 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 9 Oct 2024 22:05:05 -0400 Subject: [PATCH 352/544] _Binary as generic to LargeBinary Datatypes that are binary based such as :class:`.VARBINARY` will resolve to :class:`.LargeBinary` when the :meth:`.TypeEngine.as_generic()` method is called. Fixes: #11978 Change-Id: I2e0586324fb0f1c367da61f0074b35c96fbe2fd0 (cherry picked from commit 858eba6156f210e24d39cc066069a3dac700e33a) --- doc/build/changelog/unreleased_20/11978.rst | 7 +++++++ lib/sqlalchemy/sql/sqltypes.py | 6 ++++++ test/sql/test_types.py | 9 +++++++++ 3 files changed, 22 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11978.rst diff --git a/doc/build/changelog/unreleased_20/11978.rst b/doc/build/changelog/unreleased_20/11978.rst new file mode 100644 index 00000000000..a8a9cdaf579 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11978.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: usecase, sql + :tickets: 11978 + + Datatypes that are binary based such as :class:`.VARBINARY` will resolve to + :class:`.LargeBinary` when the :meth:`.TypeEngine.as_generic()` method is + called. 
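A short sketch of the new resolution, mirroring the type combinations added to the tests below; binary-based types now report ``LargeBinary`` from ``as_generic()``:

    from sqlalchemy import LargeBinary, VARBINARY
    from sqlalchemy.dialects import mysql, postgresql

    # binary-based types resolve to the generic LargeBinary
    assert isinstance(VARBINARY(100).as_generic(), LargeBinary)
    assert isinstance(mysql.MEDIUMBLOB().as_generic(), LargeBinary)
    assert isinstance(postgresql.BYTEA().as_generic(), LargeBinary)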
diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index ad9a696ee82..dd7110e8801 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -870,6 +870,12 @@ class _Binary(TypeEngine[bytes]): def __init__(self, length: Optional[int] = None): self.length = length + @util.ro_memoized_property + def _generic_type_affinity( + self, + ) -> Type[TypeEngine[bytes]]: + return LargeBinary + def literal_processor(self, dialect): def process(value): # TODO: this is useless for real world scenarios; implement diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 999919c5f51..702f70edc78 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -62,6 +62,7 @@ from sqlalchemy import types from sqlalchemy import Unicode from sqlalchemy import util +from sqlalchemy import VARBINARY from sqlalchemy import VARCHAR import sqlalchemy.dialects.mysql as mysql import sqlalchemy.dialects.oracle as oracle @@ -450,6 +451,11 @@ def load_dialect_impl(self, dialect): class AsGenericTest(fixtures.TestBase): @testing.combinations( (String(), String()), + (VARBINARY(), LargeBinary()), + (mysql.BINARY(), LargeBinary()), + (mysql.MEDIUMBLOB(), LargeBinary()), + (oracle.RAW(), LargeBinary()), + (pg.BYTEA(), LargeBinary()), (VARCHAR(length=100), String(length=100)), (NVARCHAR(length=100), Unicode(length=100)), (DATE(), Date()), @@ -472,6 +478,9 @@ def test_as_generic(self, t1, t2): (t,) for t in _all_types(omit_special_types=True) if not util.method_is_overridden(t, TypeEngine.as_generic) + and not util.method_is_overridden( + t, TypeEngine._generic_type_affinity + ) ] ) def test_as_generic_all_types_heuristic(self, type_): From be66fec79fd3c4ca4cd8b8b5c4f9fe26df17ca6b Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 10 Sep 2024 18:42:58 +0200 Subject: [PATCH 353/544] Mention that extract.field is used as sql string Change-Id: Ieb32e298e8a1df3a31bf3a6e26b1aca381ef7a4f (cherry picked from commit e79517d571ab6ab8a2e4e1a9bbd026bbb682df29) --- lib/sqlalchemy/sql/_elements_constructors.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/sqlalchemy/sql/_elements_constructors.py b/lib/sqlalchemy/sql/_elements_constructors.py index 51d8ac39995..bdc0534abe2 100644 --- a/lib/sqlalchemy/sql/_elements_constructors.py +++ b/lib/sqlalchemy/sql/_elements_constructors.py @@ -1159,6 +1159,9 @@ def extract(field: str, expr: _ColumnExpressionArgument[Any]) -> Extract: :param field: The field to extract. + .. warning:: This field is used as a literal SQL string. + **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**. + :param expr: A column or Python scalar expression serving as the right side of the ``EXTRACT`` expression. From 270b46cef3043d0e675ccb72b1e3a590f835dd4b Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 9 Oct 2024 20:35:16 +0200 Subject: [PATCH 354/544] Improve error in dataclasses with table Added a better error when trying to map as dataclass a class while also manually providing the ``__table__`` attribute. This usage is currently not supported. 
Fixes: #11973 Change-Id: I54c721b3f7447b2f062fa0cfb53b6a88c381df42 (cherry picked from commit 6ae7f2378971b16f024eb5ab851cc4533bc4e61a) --- doc/build/changelog/unreleased_20/11973.rst | 7 +++++++ lib/sqlalchemy/orm/decl_base.py | 10 ++++++++++ test/orm/declarative/test_dc_transforms.py | 16 ++++++++++++++++ 3 files changed, 33 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/11973.rst diff --git a/doc/build/changelog/unreleased_20/11973.rst b/doc/build/changelog/unreleased_20/11973.rst new file mode 100644 index 00000000000..bad0f220885 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11973.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: orm, bug + :tickets: 11973 + + Improved the error message emitted when trying to map as dataclass a class + while also manually providing the ``__table__`` attribute. + This usage is currently not supported. diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index d43fbffc576..b069d23c0f5 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -1067,6 +1067,16 @@ def _setup_dataclasses_transforms(self) -> None: "'@registry.mapped_as_dataclass'" ) + # can't create a dataclass if __table__ is already there. This would + # fail an assertion when calling _get_arguments_for_make_dataclass: + # assert False, "Mapped[] received without a mapping declaration" + if "__table__" in self.cls.__dict__: + raise exc.InvalidRequestError( + f"Class {self.cls} already defines a '__table__'. " + "ORM Annotated Dataclasses do not support a pre-existing " + "'__table__' element" + ) + warn_for_non_dc_attrs = collections.defaultdict(list) def _allow_dataclass_field( diff --git a/test/orm/declarative/test_dc_transforms.py b/test/orm/declarative/test_dc_transforms.py index 8408f696176..4c4f158513f 100644 --- a/test/orm/declarative/test_dc_transforms.py +++ b/test/orm/declarative/test_dc_transforms.py @@ -27,6 +27,7 @@ from sqlalchemy import JSON from sqlalchemy import select from sqlalchemy import String +from sqlalchemy import Table from sqlalchemy import testing from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.orm import column_property @@ -742,6 +743,21 @@ class Mixin(MappedAsDataclass): class Foo(Mixin): bar_value: Mapped[float] = mapped_column(default=78) + def test_MappedAsDataclass_table_provided(self, registry): + """test #11973""" + + with expect_raises_message( + exc.InvalidRequestError, + "Class .*Foo.* already defines a '__table__'. 
" + "ORM Annotated Dataclasses do not support a pre-existing " + "'__table__' element", + ): + + @registry.mapped_as_dataclass + class Foo: + __table__ = Table("foo", registry.metadata) + foo: Mapped[float] + def test_dataclass_exception_wrapped(self, dc_decl_base): with expect_raises_message( exc.InvalidRequestError, From 1f7c6a7ab63ce644f409c8b0d6c84b9c41978030 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 10 Oct 2024 22:25:39 +0200 Subject: [PATCH 355/544] remove fully tested in ci reference since it's confusing Change-Id: I5d1c14b2c2b3bcbb55861e1c4a90ffafe8ee00fa (cherry picked from commit 8684c8dda6cde2f470ad16827b09eb6d4bb1c6d8) --- doc/build/dialects/index.rst | 25 +++++++++++----------- lib/sqlalchemy/dialects/mssql/base.py | 1 - lib/sqlalchemy/dialects/mysql/base.py | 1 - lib/sqlalchemy/dialects/oracle/base.py | 1 - lib/sqlalchemy/dialects/postgresql/base.py | 1 - lib/sqlalchemy/dialects/sqlite/base.py | 1 - 6 files changed, 12 insertions(+), 18 deletions(-) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index f35d0b026dd..eff7d91de80 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -24,8 +24,8 @@ Included Dialects oracle mssql -Support Levels for Included Dialects -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Supported versions for Included Dialects +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The following table summarizes the support level for each included dialect. @@ -35,21 +35,20 @@ The following table summarizes the support level for each included dialect. Support Definitions ^^^^^^^^^^^^^^^^^^^ -.. glossary:: + .. Fully tested in CI + .. **Fully tested in CI** indicates a version that is tested in the sqlalchemy + .. CI system and passes all the tests in the test suite. - Fully tested in CI - **Fully tested in CI** indicates a version that is tested in the sqlalchemy - CI system and passes all the tests in the test suite. +.. glossary:: - Normal support - **Normal support** indicates that most features should work, - but not all versions are tested in the ci configuration so there may - be some not supported edge cases. We will try to fix issues that affect - these versions. + Supported version + **Supported version** indicates that most SQLAlchemy features should work + for the mentioned database version. Since not all database versions may be + tested in the ci there may be some not working edge cases. Best effort - **Best effort** indicates that we try to support basic features on them, - but most likely there will be unsupported features or errors in some use cases. + **Best effort** indicates that SQLAlchemy tries to support basic features on these + versions, but most likely there will be unsupported features or errors in some use cases. Pull requests with associated issues may be accepted to continue supporting older versions, which are reviewed on a case-by-case basis. diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index ddee9a5a739..07dbe401d43 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -9,7 +9,6 @@ """ .. dialect:: mssql :name: Microsoft SQL Server - :full_support: 2017 :normal_support: 2012+ :best_effort: 2005+ diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 89f78b6a7f0..69279b1009f 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -11,7 +11,6 @@ .. 
dialect:: mysql :name: MySQL / MariaDB - :full_support: 5.6, 5.7, 8.0 / 10.8, 10.9 :normal_support: 5.6+ / 10+ :best_effort: 5.0.2+ / 5.0.2+ diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 9dd3acf7c14..abf3645c768 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -10,7 +10,6 @@ r""" .. dialect:: oracle :name: Oracle - :full_support: 18c :normal_support: 11+ :best_effort: 9+ diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 19af90961c9..e5e7fceb188 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -9,7 +9,6 @@ r""" .. dialect:: postgresql :name: PostgreSQL - :full_support: 12, 13, 14, 15 :normal_support: 9.6+ :best_effort: 9+ diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 0e2dc3b6394..84bb8937e16 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -10,7 +10,6 @@ r""" .. dialect:: sqlite :name: SQLite - :full_support: 3.36.0 :normal_support: 3.12+ :best_effort: 3.7.16+ From 7f65d89c8f0bdc87a9a5f5943246d4fdb8315d05 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Fri, 11 Oct 2024 21:20:15 +0200 Subject: [PATCH 356/544] Add hash to field-like methods Added the dataclass field ``hash`` parameter to the orm field-like methods, like :meth:`_orn.mapped_column`, :meth:`_orm.relationship`, etc. Fixes: #11923 Change-Id: I80220f6dcd9c42f465d8a4c4ae2e4efa45279ecc (cherry picked from commit c14111b5bb2c624dd0bcb677fc3c9d811b46a2e7) --- doc/build/changelog/unreleased_20/11923.rst | 6 ++ lib/sqlalchemy/ext/associationproxy.py | 10 ++- lib/sqlalchemy/orm/_orm_constructors.py | 79 ++++++++++++++++--- lib/sqlalchemy/orm/interfaces.py | 5 ++ test/orm/declarative/test_dc_transforms.py | 34 +++++++- .../test_tm_future_annotations_sync.py | 8 ++ test/orm/declarative/test_typed_mapping.py | 8 ++ 7 files changed, 137 insertions(+), 13 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11923.rst diff --git a/doc/build/changelog/unreleased_20/11923.rst b/doc/build/changelog/unreleased_20/11923.rst new file mode 100644 index 00000000000..5b5fbceee31 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11923.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: usecase, orm + :tickets: 11923 + + Added the dataclass field ``hash`` parameter to the orm field-like methods, + like :meth:`_orn.mapped_column`, :meth:`_orm.relationship`, etc. diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index 80e6fdac987..99cb266e324 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -98,6 +98,7 @@ def association_proxy( default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, compare: Union[_NoArg, bool] = _NoArg.NO_ARG, kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG, # noqa: A002 ) -> AssociationProxy[Any]: r"""Return a Python property implementing a view of a target attribute which references an attribute on members of the @@ -198,6 +199,13 @@ def association_proxy( .. versionadded:: 2.0.0b4 + :param hash: Specific to + :ref:`orm_declarative_native_dataclasses`, controls if this field + is included when generating the ``__hash__()`` method for the mapped + class. + + .. 
versionadded:: 2.0.36 + :param info: optional, will be assigned to :attr:`.AssociationProxy.info` if present. @@ -237,7 +245,7 @@ def association_proxy( cascade_scalar_deletes=cascade_scalar_deletes, create_on_none_assignment=create_on_none_assignment, attribute_options=_AttributeOptions( - init, repr, default, default_factory, compare, kw_only + init, repr, default, default_factory, compare, kw_only, hash ), ) diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index d9379da4a64..4c777b3b3ac 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -108,6 +108,7 @@ def mapped_column( default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, compare: Union[_NoArg, bool] = _NoArg.NO_ARG, kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG, # noqa: A002 nullable: Optional[ Union[bool, Literal[SchemaConst.NULL_UNSPECIFIED]] ] = SchemaConst.NULL_UNSPECIFIED, @@ -331,6 +332,13 @@ def mapped_column( :ref:`orm_declarative_native_dataclasses`, indicates if this field should be marked as keyword-only when generating the ``__init__()``. + :param hash: Specific to + :ref:`orm_declarative_native_dataclasses`, controls if this field + is included when generating the ``__hash__()`` method for the mapped + class. + + .. versionadded:: 2.0.36 + :param \**kw: All remaining keyword arguments are passed through to the constructor for the :class:`_schema.Column`. @@ -345,7 +353,7 @@ def mapped_column( autoincrement=autoincrement, insert_default=insert_default, attribute_options=_AttributeOptions( - init, repr, default, default_factory, compare, kw_only + init, repr, default, default_factory, compare, kw_only, hash ), doc=doc, key=key, @@ -440,12 +448,13 @@ def column_property( deferred: bool = False, raiseload: bool = False, comparator_factory: Optional[Type[PropComparator[_T]]] = None, - init: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 + init: Union[_NoArg, bool] = _NoArg.NO_ARG, repr: Union[_NoArg, bool] = _NoArg.NO_ARG, # noqa: A002 default: Optional[Any] = _NoArg.NO_ARG, default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, compare: Union[_NoArg, bool] = _NoArg.NO_ARG, kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG, # noqa: A002 active_history: bool = False, expire_on_flush: bool = True, info: Optional[_InfoType] = None, @@ -534,13 +543,43 @@ def column_property( :ref:`orm_queryguide_deferred_raiseload` - :param init: + :param init: Specific to :ref:`orm_declarative_native_dataclasses`, + specifies if the mapped attribute should be part of the ``__init__()`` + method as generated by the dataclass process. + :param repr: Specific to :ref:`orm_declarative_native_dataclasses`, + specifies if the mapped attribute should be part of the ``__repr__()`` + method as generated by the dataclass process. + :param default_factory: Specific to + :ref:`orm_declarative_native_dataclasses`, + specifies a default-value generation function that will take place + as part of the ``__init__()`` + method as generated by the dataclass process. + + .. 
seealso:: + + :ref:`defaults_default_factory_insert_default` - :param default: + :paramref:`_orm.mapped_column.default` - :param default_factory: + :paramref:`_orm.mapped_column.insert_default` - :param kw_only: + :param compare: Specific to + :ref:`orm_declarative_native_dataclasses`, indicates if this field + should be included in comparison operations when generating the + ``__eq__()`` and ``__ne__()`` methods for the mapped class. + + .. versionadded:: 2.0.0b4 + + :param kw_only: Specific to + :ref:`orm_declarative_native_dataclasses`, indicates if this field + should be marked as keyword-only when generating the ``__init__()``. + + :param hash: Specific to + :ref:`orm_declarative_native_dataclasses`, controls if this field + is included when generating the ``__hash__()`` method for the mapped + class. + + .. versionadded:: 2.0.36 """ return MappedSQLExpression( @@ -553,6 +592,7 @@ def column_property( default_factory, compare, kw_only, + hash, ), group=group, deferred=deferred, @@ -581,6 +621,7 @@ def composite( default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, compare: Union[_NoArg, bool] = _NoArg.NO_ARG, kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG, # noqa: A002 info: Optional[_InfoType] = None, doc: Optional[str] = None, **__kw: Any, @@ -602,6 +643,7 @@ def composite( default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, compare: Union[_NoArg, bool] = _NoArg.NO_ARG, kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG, # noqa: A002 info: Optional[_InfoType] = None, doc: Optional[str] = None, **__kw: Any, @@ -623,6 +665,7 @@ def composite( default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, compare: Union[_NoArg, bool] = _NoArg.NO_ARG, kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG, # noqa: A002 info: Optional[_InfoType] = None, doc: Optional[str] = None, **__kw: Any, @@ -645,6 +688,7 @@ def composite( default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, compare: Union[_NoArg, bool] = _NoArg.NO_ARG, kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG, # noqa: A002 info: Optional[_InfoType] = None, doc: Optional[str] = None, **__kw: Any, @@ -719,6 +763,12 @@ def composite( :ref:`orm_declarative_native_dataclasses`, indicates if this field should be marked as keyword-only when generating the ``__init__()``. + :param hash: Specific to + :ref:`orm_declarative_native_dataclasses`, controls if this field + is included when generating the ``__hash__()`` method for the mapped + class. + + .. versionadded:: 2.0.36 """ if __kw: raise _no_kw() @@ -727,7 +777,7 @@ def composite( _class_or_attr, *attrs, attribute_options=_AttributeOptions( - init, repr, default, default_factory, compare, kw_only + init, repr, default, default_factory, compare, kw_only, hash ), group=group, deferred=deferred, @@ -955,6 +1005,7 @@ def relationship( default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, compare: Union[_NoArg, bool] = _NoArg.NO_ARG, kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG, # noqa: A002 lazy: _LazyLoadArgumentType = "select", passive_deletes: Union[Literal["all"], bool] = False, passive_updates: bool = True, @@ -1778,7 +1829,12 @@ class that will be synchronized with this one. 
It is usually :ref:`orm_declarative_native_dataclasses`, indicates if this field should be marked as keyword-only when generating the ``__init__()``. + :param hash: Specific to + :ref:`orm_declarative_native_dataclasses`, controls if this field + is included when generating the ``__hash__()`` method for the mapped + class. + .. versionadded:: 2.0.36 """ return _RelationshipDeclared( @@ -1796,7 +1852,7 @@ class that will be synchronized with this one. It is usually cascade=cascade, viewonly=viewonly, attribute_options=_AttributeOptions( - init, repr, default, default_factory, compare, kw_only + init, repr, default, default_factory, compare, kw_only, hash ), lazy=lazy, passive_deletes=passive_deletes, @@ -1831,6 +1887,7 @@ def synonym( default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, compare: Union[_NoArg, bool] = _NoArg.NO_ARG, kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG, # noqa: A002 info: Optional[_InfoType] = None, doc: Optional[str] = None, ) -> Synonym[Any]: @@ -1941,7 +1998,7 @@ def _job_status_descriptor(self): descriptor=descriptor, comparator_factory=comparator_factory, attribute_options=_AttributeOptions( - init, repr, default, default_factory, compare, kw_only + init, repr, default, default_factory, compare, kw_only, hash ), doc=doc, info=info, @@ -2072,6 +2129,7 @@ def deferred( default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG, compare: Union[_NoArg, bool] = _NoArg.NO_ARG, kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG, + hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG, # noqa: A002 active_history: bool = False, expire_on_flush: bool = True, info: Optional[_InfoType] = None, @@ -2106,7 +2164,7 @@ def deferred( column, *additional_columns, attribute_options=_AttributeOptions( - init, repr, default, default_factory, compare, kw_only + init, repr, default, default_factory, compare, kw_only, hash ), group=group, deferred=True, @@ -2149,6 +2207,7 @@ def query_expression( _NoArg.NO_ARG, compare, _NoArg.NO_ARG, + _NoArg.NO_ARG, ), expire_on_flush=expire_on_flush, info=info, diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index 36336e7a2c2..094053fa040 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -206,6 +206,7 @@ class _AttributeOptions(NamedTuple): dataclasses_default_factory: Union[_NoArg, Callable[[], Any]] dataclasses_compare: Union[_NoArg, bool] dataclasses_kw_only: Union[_NoArg, bool] + dataclasses_hash: Union[_NoArg, bool, None] def _as_dataclass_field(self, key: str) -> Any: """Return a ``dataclasses.Field`` object given these arguments.""" @@ -223,6 +224,8 @@ def _as_dataclass_field(self, key: str) -> Any: kw["compare"] = self.dataclasses_compare if self.dataclasses_kw_only is not _NoArg.NO_ARG: kw["kw_only"] = self.dataclasses_kw_only + if self.dataclasses_hash is not _NoArg.NO_ARG: + kw["hash"] = self.dataclasses_hash if "default" in kw and callable(kw["default"]): # callable defaults are ambiguous. 
deprecate them in favour of @@ -302,6 +305,7 @@ def _get_arguments_for_make_dataclass( _NoArg.NO_ARG, _NoArg.NO_ARG, _NoArg.NO_ARG, + _NoArg.NO_ARG, ) _DEFAULT_READONLY_ATTRIBUTE_OPTIONS = _AttributeOptions( @@ -311,6 +315,7 @@ def _get_arguments_for_make_dataclass( _NoArg.NO_ARG, _NoArg.NO_ARG, _NoArg.NO_ARG, + _NoArg.NO_ARG, ) diff --git a/test/orm/declarative/test_dc_transforms.py b/test/orm/declarative/test_dc_transforms.py index 4c4f158513f..52c4dae51a5 100644 --- a/test/orm/declarative/test_dc_transforms.py +++ b/test/orm/declarative/test_dc_transforms.py @@ -77,6 +77,7 @@ def dc_decl_base(self, request, metadata): if request.param == "(MAD, DB)": class Base(MappedAsDataclass, DeclarativeBase): + _mad_before = True metadata = _md type_annotation_map = { str: String().with_variant(String(50), "mysql", "mariadb") @@ -85,6 +86,7 @@ class Base(MappedAsDataclass, DeclarativeBase): else: # test #8665 by reversing the order of the classes class Base(DeclarativeBase, MappedAsDataclass): + _mad_before = False metadata = _md type_annotation_map = { str: String().with_variant(String(50), "mysql", "mariadb") @@ -684,6 +686,27 @@ class A(dc_decl_base): eq_(fas.args, ["self", "id"]) eq_(fas.kwonlyargs, ["data"]) + @testing.combinations(True, False, argnames="unsafe_hash") + def test_hash_attribute( + self, dc_decl_base: Type[MappedAsDataclass], unsafe_hash + ): + class A(dc_decl_base, unsafe_hash=unsafe_hash): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, hash=False) + data: Mapped[str] = mapped_column(hash=True) + + a = A(id=1, data="x") + if not unsafe_hash or not dc_decl_base._mad_before: + with expect_raises(TypeError): + a_hash1 = hash(a) + else: + a_hash1 = hash(a) + a.id = 41 + eq_(hash(a), a_hash1) + a.data = "y" + ne_(hash(a), a_hash1) + @testing.requires.python310 def test_kw_only_dataclass_constant( self, dc_decl_base: Type[MappedAsDataclass] @@ -1814,9 +1837,10 @@ def test_attribute_options(self, use_arguments, construct): "default_factory": list, "compare": True, "kw_only": False, + "hash": False, } exp = interfaces._AttributeOptions( - False, False, False, list, True, False + False, False, False, list, True, False, False ) else: kw = {} @@ -1838,7 +1862,13 @@ def test_ro_attribute_options(self, use_arguments, construct): "compare": True, } exp = interfaces._AttributeOptions( - False, False, _NoArg.NO_ARG, _NoArg.NO_ARG, True, _NoArg.NO_ARG + False, + False, + _NoArg.NO_ARG, + _NoArg.NO_ARG, + True, + _NoArg.NO_ARG, + _NoArg.NO_ARG, ) else: kw = {} diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index a58da96c151..2aa8f0f0b0f 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -1060,6 +1060,13 @@ def test_we_got_all_attrs_test_annotated(self): "Argument 'init' is a dataclass argument" ), ), + ( + "hash", + True, + exc.SADeprecationWarning( + "Argument 'hash' is a dataclass argument" + ), + ), argnames="argname, argument, assertion", ) @testing.variation("use_annotated", [True, False, "control"]) @@ -1083,6 +1090,7 @@ def test_names_encountered_for_annotated( "repr", "compare", "default_factory", + "hash", ) if is_dataclass: diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index ffa83aec25d..b50573fa12f 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -1051,6 +1051,13 @@ def 
test_we_got_all_attrs_test_annotated(self): "Argument 'init' is a dataclass argument" ), ), + ( + "hash", + True, + exc.SADeprecationWarning( + "Argument 'hash' is a dataclass argument" + ), + ), argnames="argname, argument, assertion", ) @testing.variation("use_annotated", [True, False, "control"]) @@ -1074,6 +1081,7 @@ def test_names_encountered_for_annotated( "repr", "compare", "default_factory", + "hash", ) if is_dataclass: From 0dda26af500941b9ec916e1be5b6091f7072578f Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 12 Oct 2024 14:58:26 +0200 Subject: [PATCH 357/544] Optimize MySQL foreign key reflection Improved foreign keys reflection logic in MySQL 8+ to use a better optimized query. The previous query could be quite slow in databases with a large number of columns. Fixes: #11975 Change-Id: Ie8bcd810d4b37abf7fd5e497596e0ade52c3f82e (cherry picked from commit a98d31621d58d45e2a4d74351282cedddcfe85fa) --- doc/build/changelog/unreleased_20/11975.rst | 7 +++ lib/sqlalchemy/dialects/mysql/base.py | 67 +++++++++++++++------ test/dialect/mysql/test_reflection.py | 2 +- 3 files changed, 55 insertions(+), 21 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11975.rst diff --git a/doc/build/changelog/unreleased_20/11975.rst b/doc/build/changelog/unreleased_20/11975.rst new file mode 100644 index 00000000000..708a23aa0b3 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11975.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: mysql, performance + :tickets: 11975 + + Improved foreign keys reflection logic in MySQL 8+ to use a better + optimized query. The previous query could be quite slow in databases + with a large number of columns. diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 69279b1009f..b41ec15ba98 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -3070,29 +3070,47 @@ def lower(s): return s default_schema_name = connection.dialect.default_schema_name - col_tuples = [ - ( - lower(rec["referred_schema"] or default_schema_name), - lower(rec["referred_table"]), - col_name, - ) - for rec in fkeys - for col_name in rec["referred_columns"] - ] - if col_tuples: - correct_for_wrong_fk_case = connection.execute( - sql.text( - """ - select table_schema, table_name, column_name - from information_schema.columns - where (table_schema, table_name, lower(column_name)) in - :table_data; - """ - ).bindparams(sql.bindparam("table_data", expanding=True)), - dict(table_data=col_tuples), + # NOTE: using (table_schema, table_name, lower(column_name)) in (...) + # is very slow since mysql does not seem able to properly use indexse. + # Unpack the where condition instead. 
+ schema_by_table_by_column = defaultdict(lambda: defaultdict(list)) + for rec in fkeys: + sch = lower(rec["referred_schema"] or default_schema_name) + tbl = lower(rec["referred_table"]) + for col_name in rec["referred_columns"]: + schema_by_table_by_column[sch][tbl].append(col_name) + + if schema_by_table_by_column: + + condition = sql.or_( + *( + sql.and_( + _info_columns.c.table_schema == schema, + sql.or_( + *( + sql.and_( + _info_columns.c.table_name == table, + sql.func.lower( + _info_columns.c.column_name + ).in_(columns), + ) + for table, columns in tables.items() + ) + ), + ) + for schema, tables in schema_by_table_by_column.items() + ) ) + select = sql.select( + _info_columns.c.table_schema, + _info_columns.c.table_name, + _info_columns.c.column_name, + ).where(condition) + + correct_for_wrong_fk_case = connection.execute(select) + # in casing=0, table name and schema name come back in their # exact case. # in casing=1, table name and schema name come back in lower @@ -3465,3 +3483,12 @@ def __getattr__(self, attr): return item.decode(self.charset) else: return item + + +_info_columns = sql.table( + "columns", + sql.column("table_schema", VARCHAR(64)), + sql.column("table_name", VARCHAR(64)), + sql.column("column_name", VARCHAR(64)), + schema="information_schema", +) diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py index 4fa472ce1ae..92cf3818e24 100644 --- a/test/dialect/mysql/test_reflection.py +++ b/test/dialect/mysql/test_reflection.py @@ -1197,7 +1197,7 @@ def test_correct_for_mysql_bugs_88718_96365(self): dialect._casing = casing dialect.default_schema_name = "Test" connection = mock.Mock( - dialect=dialect, execute=lambda stmt, params: ischema + dialect=dialect, execute=lambda stmt: ischema ) dialect._correct_for_mysql_bugs_88718_96365(fkeys, connection) eq_( From 24323f744aa8e2ccc635e58b5f58e11d9b531f93 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 13 Oct 2024 10:04:23 -0400 Subject: [PATCH 358/544] consult allow_partial_pks for NULL check in lazyload Refined the check which the ORM lazy loader uses to detect "this would be loading by primary key and the primary key is NULL, skip loading" to take into account the current setting for the :paramref:`.orm.Mapper.allow_partial_pks` parameter. If this parameter is False, then a composite PK value that has partial NULL elements should also be skipped. This can apply to some composite overlapping foreign key configurations. Fixes: #11995 Change-Id: Icf9a52b7405d7400d46bfa944edcbff1a89225a3 (cherry picked from commit 830debc30896203bfd21fea18d323c5d849068d1) --- doc/build/changelog/unreleased_20/11995.rst | 12 +++++ lib/sqlalchemy/orm/base.py | 2 + lib/sqlalchemy/orm/mapper.py | 11 +++++ lib/sqlalchemy/orm/strategies.py | 13 ++++-- lib/sqlalchemy/orm/util.py | 1 + test/orm/test_lazy_relations.py | 50 +++++++++++++++++++++ 6 files changed, 86 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11995.rst diff --git a/doc/build/changelog/unreleased_20/11995.rst b/doc/build/changelog/unreleased_20/11995.rst new file mode 100644 index 00000000000..a748a1c5dfa --- /dev/null +++ b/doc/build/changelog/unreleased_20/11995.rst @@ -0,0 +1,12 @@ +.. change:: + :tags: bug, orm + :tickets: 11995 + + Refined the check which the ORM lazy loader uses to detect "this would be + loading by primary key and the primary key is NULL, skip loading" to take + into account the current setting for the + :paramref:`.orm.Mapper.allow_partial_pks` parameter. 
If this parameter is + False, then a composite PK value that has partial NULL elements should also + be skipped. This can apply to some composite overlapping foreign key + configurations. + diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py index c9005298d82..b5f7dbbafb0 100644 --- a/lib/sqlalchemy/orm/base.py +++ b/lib/sqlalchemy/orm/base.py @@ -283,6 +283,8 @@ class NotExtension(InspectionAttrExtensionType): _none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT]) +_none_only_set = frozenset([None]) + _SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED") _DEFER_FOR_STATE = util.symbol("DEFER_FOR_STATE") diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index 5bb203b3a92..29659e6e8e2 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -296,6 +296,17 @@ class will overwrite all data within object instances that already particular primary key value. A "partial primary key" can occur if one has mapped to an OUTER JOIN, for example. + The :paramref:`.orm.Mapper.allow_partial_pks` parameter also + indicates to the ORM relationship lazy loader, when loading a + many-to-one related object, if a composite primary key that has + partial NULL values should result in an attempt to load from the + database, or if a load attempt is not necessary. + + .. versionadded:: 2.0.36 :paramref:`.orm.Mapper.allow_partial_pks` + is consulted by the relationship lazy loader strategy, such that + when set to False, a SELECT for a composite primary key that + has partial NULL values will not be emitted. + :param batch: Defaults to ``True``, indicating that save operations of multiple entities can be batched together for efficiency. Setting to False indicates diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index 3f947a8d743..c89a12efd66 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -47,7 +47,7 @@ from .session import _state_session from .state import InstanceState from .strategy_options import Load -from .util import _none_set +from .util import _none_only_set from .util import AliasedClass from .. import event from .. import exc as sa_exc @@ -936,8 +936,15 @@ def _load_for_state( elif LoaderCallableStatus.NEVER_SET in primary_key_identity: return LoaderCallableStatus.NEVER_SET - if _none_set.issuperset(primary_key_identity): - return None + # test for None alone in primary_key_identity based on + # allow_partial_pks preference. 
PASSIVE_NO_RESULT and NEVER_SET + # have already been tested above + if not self.mapper.allow_partial_pks: + if _none_only_set.intersection(primary_key_identity): + return None + else: + if _none_only_set.issuperset(primary_key_identity): + return None if ( self.key in state.dict diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index c9fa79c804b..69556751a76 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -42,6 +42,7 @@ from .base import _class_to_mapper as _class_to_mapper from .base import _MappedAnnotationBase from .base import _never_set as _never_set # noqa: F401 +from .base import _none_only_set as _none_only_set # noqa: F401 from .base import _none_set as _none_set # noqa: F401 from .base import attribute_str as attribute_str # noqa: F401 from .base import class_mapper as class_mapper diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py index 64c86853d27..9bb8071984d 100644 --- a/test/orm/test_lazy_relations.py +++ b/test/orm/test_lazy_relations.py @@ -21,7 +21,9 @@ from sqlalchemy.orm import attributes from sqlalchemy.orm import configure_mappers from sqlalchemy.orm import exc as orm_exc +from sqlalchemy.orm import foreign from sqlalchemy.orm import relationship +from sqlalchemy.orm import remote from sqlalchemy.orm import Session from sqlalchemy.orm import with_parent from sqlalchemy.testing import assert_raises @@ -1270,6 +1272,54 @@ def go(): self.assert_sql_count(testing.db, go, 1) + @testing.fixture() + def composite_overlapping_fixture(self, decl_base, connection): + def go(allow_partial_pks): + + class Section(decl_base): + __tablename__ = "sections" + year = Column(Integer, primary_key=True) + idx = Column(Integer, primary_key=True) + parent_idx = Column(Integer) + + if not allow_partial_pks: + __mapper_args__ = {"allow_partial_pks": False} + + ForeignKeyConstraint((year, parent_idx), (year, idx)) + + parent = relationship( + "Section", + primaryjoin=and_( + year == remote(year), + foreign(parent_idx) == remote(idx), + ), + ) + + decl_base.metadata.create_all(connection) + connection.commit() + + with Session(connection) as sess: + sess.add(Section(year=5, idx=1, parent_idx=None)) + sess.commit() + + return Section + + return go + + @testing.variation("allow_partial_pks", [True, False]) + def test_composite_m2o_load_partial_pks( + self, allow_partial_pks, composite_overlapping_fixture + ): + Section = composite_overlapping_fixture(allow_partial_pks) + + session = fixture_session() + section = session.get(Section, (5, 1)) + + with self.assert_statement_count( + testing.db, 1 if allow_partial_pks else 0 + ): + testing.is_none(section.parent) + class CorrelatedTest(fixtures.MappedTest): @classmethod From a0251d4e6136d37d4cdf2281ad2b308d2ebe5a65 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sun, 13 Oct 2024 18:32:46 +0200 Subject: [PATCH 359/544] Render bind cast in json and jsonb in PG Render bind cast for ``JSON`` and ``JSONB`` datatype on every dialect. Previously this was only enabled in a subset of dialects. 
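As a rough illustration of the behavior described above (the table and column
names here are made up for the example and are not part of the patch),
compiling a JSONB comparison against the PostgreSQL dialect is expected to
carry the bind cast, matching the updated test expectations further below:

    from sqlalchemy import Column, MetaData, Table
    from sqlalchemy.dialects import postgresql
    from sqlalchemy.dialects.postgresql import JSONB

    t = Table("test_table", MetaData(), Column("test_column", JSONB))
    expr = t.c.test_column.contains({"k1": "r1v1"})

    # the compiled string should now include the bind cast, e.g.
    # test_table.test_column @> %(test_column_1)s::JSONB
    print(expr.compile(dialect=postgresql.dialect()))
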
Fixes: #11994 Change-Id: Ib085deb3e84034dac9e4f4057d32f055d5533e52 (cherry picked from commit b2648e69f2375f7257cbe04b16f663d97795db19) --- doc/build/changelog/unreleased_20/11994.rst | 6 ++ lib/sqlalchemy/dialects/postgresql/asyncpg.py | 4 -- lib/sqlalchemy/dialects/postgresql/json.py | 1 + lib/sqlalchemy/dialects/postgresql/psycopg.py | 4 -- test/dialect/postgresql/test_query.py | 58 +++++++++++++++++++ test/dialect/postgresql/test_types.py | 6 +- 6 files changed, 68 insertions(+), 11 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11994.rst diff --git a/doc/build/changelog/unreleased_20/11994.rst b/doc/build/changelog/unreleased_20/11994.rst new file mode 100644 index 00000000000..efcb8e97b66 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11994.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: postgresql, usecase + :tickets: 11994 + + Render bind cast for ``JSON`` and ``JSONB`` datatype on every dialect. + Previously this was only enabled in a subset of dialects. diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 53ebe60398d..27bd07ab077 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -263,15 +263,11 @@ class AsyncpgBigInteger(sqltypes.BigInteger): class AsyncpgJSON(json.JSON): - render_bind_cast = True - def result_processor(self, dialect, coltype): return None class AsyncpgJSONB(json.JSONB): - render_bind_cast = True - def result_processor(self, dialect, coltype): return None diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index 1cdafbd03d9..914d8423d4b 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -144,6 +144,7 @@ class JSON(sqltypes.JSON): """ # noqa + render_bind_cast = True astext_type = sqltypes.Text() def __init__(self, none_as_null=False, astext_type=None): diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg.py b/lib/sqlalchemy/dialects/postgresql/psycopg.py index b8c0087dd49..66a2c774623 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg.py @@ -126,8 +126,6 @@ class _PGREGCONFIG(REGCONFIG): class _PGJSON(JSON): - render_bind_cast = True - def bind_processor(self, dialect): return self._make_bind_processor(None, dialect._psycopg_Json) @@ -136,8 +134,6 @@ def result_processor(self, dialect, coltype): class _PGJSONB(JSONB): - render_bind_cast = True - def bind_processor(self, dialect): return self._make_bind_processor(None, dialect._psycopg_Jsonb) diff --git a/test/dialect/postgresql/test_query.py b/test/dialect/postgresql/test_query.py index a737381760e..9198fb96aea 100644 --- a/test/dialect/postgresql/test_query.py +++ b/test/dialect/postgresql/test_query.py @@ -26,6 +26,8 @@ from sqlalchemy import Time from sqlalchemy import true from sqlalchemy import tuple_ +from sqlalchemy import Uuid +from sqlalchemy import values from sqlalchemy.dialects import postgresql from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.dialects.postgresql import REGCONFIG @@ -1791,3 +1793,59 @@ def test_render_derived_quoting_straight_json(self, connection, cast_fn): stmt = select(fn.c.CaseSensitive, fn.c["the % value"]) eq_(connection.execute(stmt).all(), [(1, "foo"), (2, "bar")]) + + +class RequiresCastTest(fixtures.TablesTest): + __only_on__ = "postgresql" + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table( + "t", + metadata, + Column("id", 
Integer, primary_key=True), + Column("uuid", Uuid), + Column("j", JSON), + Column("jb", JSONB), + ) + + @classmethod + def insert_data(cls, connection): + connection.execute( + cls.tables["t"].insert(), + [ + {"id": 1, "uuid": "d24587a1-06d9-41df-b1c3-3f423b97a755"}, + {"id": 2, "uuid": "4b07e1c8-d60c-4ea8-9d01-d7cd01362224"}, + ], + ) + + def test_update_values(self, connection): + value = values( + Column("id", Integer), + Column("uuid", Uuid), + Column("j", JSON), + Column("jb", JSONB), + name="update_data", + ).data( + [ + ( + 1, + "8b6ec1ec-b979-4d0b-b2ce-9acc6e4c2943", + {"foo": 1}, + {"foo_jb": 1}, + ), + ( + 2, + "a2123bcb-7ea3-420a-8284-1db4b2759d79", + {"bar": 2}, + {"bar_jb": 2}, + ), + ] + ) + connection.execute( + self.tables["t"] + .update() + .values(uuid=value.c.uuid, j=value.c.j, jb=value.c.jb) + .where(self.tables["t"].c.id == value.c.id) + ) diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index 65c5fdbf7f6..25237656735 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -6195,7 +6195,7 @@ def setup_test(self): lambda self: self.jsoncol.has_all( {"name": "r1", "data": {"k1": "r1v1", "k2": "r1v2"}} ), - "test_table.test_column ?& %(test_column_1)s", + "test_table.test_column ?& %(test_column_1)s::JSONB", ), ( lambda self: self.jsoncol.has_all(self.any_), @@ -6213,7 +6213,7 @@ def setup_test(self): ), ( lambda self: self.jsoncol.contains({"k1": "r1v1"}), - "test_table.test_column @> %(test_column_1)s", + "test_table.test_column @> %(test_column_1)s::JSONB", ), ( lambda self: self.jsoncol.contains(self.any_), @@ -6221,7 +6221,7 @@ def setup_test(self): ), ( lambda self: self.jsoncol.contained_by({"foo": "1", "bar": None}), - "test_table.test_column <@ %(test_column_1)s", + "test_table.test_column <@ %(test_column_1)s::JSONB", ), ( lambda self: self.jsoncol.contained_by(self.any_), From c1bffa312ba973aaac639e04950457e81d291f9b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 14 Oct 2024 20:21:40 -0400 Subject: [PATCH 360/544] update for mypy 1.12.0 Change-Id: I8ab16e439a27b3072402beb2c09f715047362c94 (cherry picked from commit d8dd28c42eaffca1cd964a4ab8378c592332e41e) --- lib/sqlalchemy/ext/asyncio/session.py | 2 +- lib/sqlalchemy/sql/coercions.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/sqlalchemy/ext/asyncio/session.py b/lib/sqlalchemy/ext/asyncio/session.py index 473a8c1717c..c2a47d78025 100644 --- a/lib/sqlalchemy/ext/asyncio/session.py +++ b/lib/sqlalchemy/ext/asyncio/session.py @@ -1863,7 +1863,7 @@ async def start( ) -> AsyncSessionTransaction: self.sync_transaction = self._assign_proxied( await greenlet_spawn( - self.session.sync_session.begin_nested # type: ignore + self.session.sync_session.begin_nested if self.nested else self.session.sync_session.begin ) diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index 0c998c667f2..1d11cbbd3d2 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -29,7 +29,6 @@ from typing import TypeVar from typing import Union -from . import operators from . import roles from . 
import visitors from ._typing import is_from_clause @@ -843,18 +842,19 @@ def _warn_for_implicit_coercion(self, elem): % (elem.__class__.__name__) ) - def _literal_coercion( # type: ignore[override] - self, element, *, expr, operator, **kw - ): + @util.preload_module("sqlalchemy.sql.elements") + def _literal_coercion(self, element, *, expr, operator, **kw): if util.is_non_string_iterable(element): non_literal_expressions: Dict[ - Optional[operators.ColumnOperators], - operators.ColumnOperators, + Optional[ColumnElement[Any]], + ColumnElement[Any], ] = {} element = list(element) for o in element: if not _is_literal(o): - if not isinstance(o, operators.ColumnOperators): + if not isinstance( + o, util.preloaded.sql_elements.ColumnElement + ): self._raise_for_expected(element, **kw) else: From 601e85b8d9b3cb6d85803b3989c79a8c10e4d7a0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 14 Oct 2024 11:15:21 -0400 Subject: [PATCH 361/544] match ORM mapped cols to PK in interpret_returning_rows Fixed bug in ORM "update with WHERE clause" feature where an explicit ``.returning()`` would interfere with the "fetch" synchronize strategy due to an assumption that the ORM mapped class featured the primary key columns in a specific position within the RETURNING. This has been fixed to use appropriate ORM column targeting. the _interpret_returning_rows method looked to be mostly not used as far as its joined inheritance features, which appear to have never been used as joined inheritance mappers are skipped. Fixes: #11997 Change-Id: I38fe3a84cdeb2eef38fe00d8b9a6a2b56f434bc6 (cherry picked from commit 553d02b30eba13f3db4595d7a15e51222f864e13) --- doc/build/changelog/unreleased_20/11997.rst | 9 +++ lib/sqlalchemy/orm/bulk_persistence.py | 73 ++++++++------------- test/orm/dml/test_update_delete_where.py | 8 ++- 3 files changed, 45 insertions(+), 45 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11997.rst diff --git a/doc/build/changelog/unreleased_20/11997.rst b/doc/build/changelog/unreleased_20/11997.rst new file mode 100644 index 00000000000..b2390977e16 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11997.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, orm + :tickets: 11997 + + Fixed bug in ORM "update with WHERE clause" feature where an explicit + ``.returning()`` would interfere with the "fetch" synchronize strategy due + to an assumption that the ORM mapped class featured the primary key columns + in a specific position within the RETURNING. This has been fixed to use + appropriate ORM column targeting. diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py index 01a39049b07..755192384ac 100644 --- a/lib/sqlalchemy/orm/bulk_persistence.py +++ b/lib/sqlalchemy/orm/bulk_persistence.py @@ -862,53 +862,39 @@ def _adjust_for_extra_criteria(cls, global_attributes, ext_info): return return_crit @classmethod - def _interpret_returning_rows(cls, mapper, rows): - """translate from local inherited table columns to base mapper - primary key columns. + def _interpret_returning_rows(cls, result, mapper, rows): + """return rows that indicate PK cols in mapper.primary_key position + for RETURNING rows. - Joined inheritance mappers always establish the primary key in terms of - the base table. When we UPDATE a sub-table, we can only get - RETURNING for the sub-table's columns. 
+ Prior to 2.0.36, this method seemed to be written for some kind of + inheritance scenario but the scenario was unused for actual joined + inheritance, and the function instead seemed to perform some kind of + partial translation that would remove non-PK cols if the PK cols + happened to be first in the row, but not otherwise. The joined + inheritance walk feature here seems to have never been used as it was + always skipped by the "local_table" check. - Here, we create a lookup from the local sub table's primary key - columns to the base table PK columns so that we can get identity - key values from RETURNING that's against the joined inheritance - sub-table. - - the complexity here is to support more than one level deep of - inheritance, where we have to link columns to each other across - the inheritance hierarchy. + As of 2.0.36 the function strips away non-PK cols and provides the + PK cols for the table in mapper PK order. """ - if mapper.local_table is not mapper.base_mapper.local_table: - return rows - - # this starts as a mapping of - # local_pk_col: local_pk_col. - # we will then iteratively rewrite the "value" of the dict with - # each successive superclass column - local_pk_to_base_pk = {pk: pk for pk in mapper.local_table.primary_key} - - for mp in mapper.iterate_to_root(): - if mp.inherits is None: - break - elif mp.local_table is mp.inherits.local_table: - continue - - t_to_e = dict(mp._table_to_equated[mp.inherits.local_table]) - col_to_col = {sub_pk: super_pk for super_pk, sub_pk in t_to_e[mp]} - for pk, super_ in local_pk_to_base_pk.items(): - local_pk_to_base_pk[pk] = col_to_col[super_] + try: + if mapper.local_table is not mapper.base_mapper.local_table: + # TODO: dive more into how a local table PK is used for fetch + # sync, not clear if this is correct as it depends on the + # downstream routine to fetch rows using + # local_table.primary_key order + pk_keys = result._tuple_getter(mapper.local_table.primary_key) + else: + pk_keys = result._tuple_getter(mapper.primary_key) + except KeyError: + # can't use these rows, they don't have PK cols in them + # this is an unusual case where the user would have used + # .return_defaults() + return [] - lookup = { - local_pk_to_base_pk[lpk]: idx - for idx, lpk in enumerate(mapper.local_table.primary_key) - } - primary_key_convert = [ - lookup[bpk] for bpk in mapper.base_mapper.primary_key - ] - return [tuple(row[idx] for idx in primary_key_convert) for row in rows] + return [pk_keys(row) for row in rows] @classmethod def _get_matched_objects_on_criteria(cls, update_options, states): @@ -1776,9 +1762,8 @@ def _do_post_synchronize_fetch( returned_defaults_rows = result.returned_defaults_rows if returned_defaults_rows: pk_rows = cls._interpret_returning_rows( - target_mapper, returned_defaults_rows + result, target_mapper, returned_defaults_rows ) - matched_rows = [ tuple(row) + (update_options._identity_token,) for row in pk_rows @@ -2108,7 +2093,7 @@ def _do_post_synchronize_fetch( if returned_defaults_rows: pk_rows = cls._interpret_returning_rows( - target_mapper, returned_defaults_rows + result, target_mapper, returned_defaults_rows ) matched_rows = [ diff --git a/test/orm/dml/test_update_delete_where.py b/test/orm/dml/test_update_delete_where.py index 8d9feaf63c2..da8efa44fa4 100644 --- a/test/orm/dml/test_update_delete_where.py +++ b/test/orm/dml/test_update_delete_where.py @@ -3329,6 +3329,7 @@ class OnUpdatePopulationTest(fixtures.TestBase): ], ) @testing.variation("synchronize", ["auto", "fetch", "evaluate"]) + 
@testing.variation("pk_order", ["first", "middle"]) def test_update_populate_existing( self, decl_base, @@ -3336,15 +3337,20 @@ def test_update_populate_existing( use_onupdate, use_returning, synchronize, + pk_order, ): """test #11912 and #11917""" class Employee(ComparableEntity, decl_base): __tablename__ = "employee" - uuid: Mapped[uuid.UUID] = mapped_column(primary_key=True) + if pk_order.first: + uuid: Mapped[uuid.UUID] = mapped_column(primary_key=True) user_name: Mapped[str] = mapped_column(String(200), nullable=False) + if pk_order.middle: + uuid: Mapped[uuid.UUID] = mapped_column(primary_key=True) + if use_onupdate.server: some_server_value: Mapped[str] = mapped_column( server_onupdate=FetchedValue() From 55e1440fbdca3c6234d3fe971fe5307c5cbd696d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 15 Oct 2024 08:20:25 -0400 Subject: [PATCH 362/544] add tests for pickling types inside an expression, some reduce methods Fixed regression from 1.4 where some datatypes such as those derived from :class:`.TypeDecorator` could not be pickled when they were part of a larger SQL expression composition due to internal supporting structures themselves not being pickleable. Fixes: #12002 Change-Id: I016e37b0c62071413f24c9aac35f6ecf475becaa (cherry picked from commit fa568215788c274eb2d178b6eb180ab1f7955c01) --- doc/build/changelog/unreleased_20/12002.rst | 8 +++++ lib/sqlalchemy/sql/type_api.py | 35 ++++++++++++++++----- test/sql/test_types.py | 33 +++++++++++++++++++ 3 files changed, 69 insertions(+), 7 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12002.rst diff --git a/doc/build/changelog/unreleased_20/12002.rst b/doc/build/changelog/unreleased_20/12002.rst new file mode 100644 index 00000000000..49ac7017592 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12002.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, sql, regression + :tickets: 12002 + + Fixed regression from 1.4 where some datatypes such as those derived from + :class:`.TypeDecorator` could not be pickled when they were part of a + larger SQL expression composition due to internal supporting structures + themselves not being pickleable. 
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index e4f5f3f20a7..2f6494e61cb 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -183,6 +183,9 @@ def __init__(self, expr: ColumnElement[_CT]): self.expr = expr self.type = expr.type + def __reduce__(self) -> Any: + return self.__class__, (self.expr,) + @util.preload_module("sqlalchemy.sql.default_comparator") def operate( self, op: OperatorType, *other: Any, **kwargs: Any @@ -1721,20 +1724,38 @@ def reverse_operate( kwargs["_python_is_types"] = self.expr.type.coerce_to_is_types return super().reverse_operate(op, other, **kwargs) + @staticmethod + def _reduce_td_comparator( + impl: TypeEngine[Any], expr: ColumnElement[_T] + ) -> Any: + return TypeDecorator._create_td_comparator_type(impl)(expr) + + @staticmethod + def _create_td_comparator_type( + impl: TypeEngine[Any], + ) -> _ComparatorFactory[Any]: + + def __reduce__(self: TypeDecorator.Comparator[Any]) -> Any: + return (TypeDecorator._reduce_td_comparator, (impl, self.expr)) + + return type( + "TDComparator", + (TypeDecorator.Comparator, impl.comparator_factory), # type: ignore # noqa: E501 + {"__reduce__": __reduce__}, + ) + @property def comparator_factory( # type: ignore # mypy properties bug self, ) -> _ComparatorFactory[Any]: if TypeDecorator.Comparator in self.impl.comparator_factory.__mro__: # type: ignore # noqa: E501 - return self.impl.comparator_factory + return self.impl_instance.comparator_factory else: # reconcile the Comparator class on the impl with that - # of TypeDecorator - return type( - "TDComparator", - (TypeDecorator.Comparator, self.impl.comparator_factory), # type: ignore # noqa: E501 - {}, - ) + # of TypeDecorator. + # the use of multiple staticmethods is to support repeated + # pickling of the Comparator itself + return TypeDecorator._create_td_comparator_type(self.impl_instance) def _copy_with_check(self) -> Self: tt = self.copy() diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 702f70edc78..88c3b3a2540 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -512,6 +512,11 @@ def test_as_generic_all_types_custom(self, type_): assert isinstance(gentype, TypeEngine) +class SomeTypeDecorator(TypeDecorator): + impl = String() + cache_ok = True + + class PickleTypesTest(fixtures.TestBase): @testing.combinations( ("Boo", Boolean()), @@ -530,6 +535,7 @@ class PickleTypesTest(fixtures.TestBase): ("Lar", LargeBinary()), ("Pic", PickleType()), ("Int", Interval()), + ("Dec", SomeTypeDecorator()), argnames="name,type_", id_="ar", ) @@ -543,10 +549,37 @@ def test_pickle_types(self, name, type_, use_adapt): meta = MetaData() Table("foo", meta, column_type) + expr = select(1).where(column_type == bindparam("q")) + for loads, dumps in picklers(): loads(dumps(column_type)) loads(dumps(meta)) + expr_str_one = str(expr) + ne = loads(dumps(expr)) + + eq_(str(ne), expr_str_one) + + re_pickle_it = loads(dumps(ne)) + eq_(str(re_pickle_it), expr_str_one) + + def test_pickle_td_comparator(self): + comparator = SomeTypeDecorator().comparator_factory(column("q")) + + expected_mro = ( + TypeDecorator.Comparator, + sqltypes.Concatenable.Comparator, + TypeEngine.Comparator, + ) + eq_(comparator.__class__.__mro__[1:4], expected_mro) + + for loads, dumps in picklers(): + unpickled = loads(dumps(comparator)) + eq_(unpickled.__class__.__mro__[1:4], expected_mro) + + reunpickled = loads(dumps(unpickled)) + eq_(reunpickled.__class__.__mro__[1:4], expected_mro) + @testing.combinations( ("Str", String()), 
("Tex", Text()), From 2a817a1ff09f3d35720832ac218242580ded3b01 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 15 Oct 2024 15:19:44 -0400 Subject: [PATCH 363/544] changelog updates for 2.0.36 Change-Id: Iffaa6c5556d7b41c8a7537333b7ea58d83ce4771 (cherry picked from commit 4ec37835a66192271171fbba3b6b178641902a18) --- doc/build/changelog/unreleased_20/11317.rst | 7 ++++--- doc/build/changelog/unreleased_20/11912.rst | 4 ++-- doc/build/changelog/unreleased_20/11917.rst | 2 +- doc/build/changelog/unreleased_20/11923.rst | 6 ++++-- doc/build/changelog/unreleased_20/11961.rst | 4 ++-- doc/build/changelog/unreleased_20/11965.rst | 8 ++++---- doc/build/changelog/unreleased_20/11975.rst | 7 ++++--- doc/build/changelog/unreleased_20/11994.rst | 9 ++++++--- doc/build/changelog/unreleased_20/11995.rst | 4 ++-- 9 files changed, 29 insertions(+), 22 deletions(-) diff --git a/doc/build/changelog/unreleased_20/11317.rst b/doc/build/changelog/unreleased_20/11317.rst index e41a0733d2c..445012ea01d 100644 --- a/doc/build/changelog/unreleased_20/11317.rst +++ b/doc/build/changelog/unreleased_20/11317.rst @@ -2,6 +2,7 @@ :tags: bug, schema :tickets: 11317 - Fixed a bug that caused a syntax error when a function was specified - to server_default when creating a column in MySQL or MariaDB. - Pull request courtesy of huuya. + Fixed bug where SQL functions passed to + :paramref:`_schema.Column.server_default` would not be rendered with the + particular form of parenthesization now required by newer versions of MySQL + and MariaDB. Pull request courtesy of huuya. diff --git a/doc/build/changelog/unreleased_20/11912.rst b/doc/build/changelog/unreleased_20/11912.rst index c0814b6cba1..a6bc1ae55d3 100644 --- a/doc/build/changelog/unreleased_20/11912.rst +++ b/doc/build/changelog/unreleased_20/11912.rst @@ -3,5 +3,5 @@ :tickets: 11912 Fixed bug in ORM bulk update/delete where using RETURNING with bulk - update/delete in combination with populate existing would fail to - accommodate the populate_existing option. + update/delete in combination with ``populate_existing`` would fail to + accommodate the ``populate_existing`` option. diff --git a/doc/build/changelog/unreleased_20/11917.rst b/doc/build/changelog/unreleased_20/11917.rst index 951b191605f..91702f011d7 100644 --- a/doc/build/changelog/unreleased_20/11917.rst +++ b/doc/build/changelog/unreleased_20/11917.rst @@ -7,4 +7,4 @@ :paramref:`.mapped_column.server_onupdate`, or :class:`.Computed` are now refreshed in ORM instances when running an ORM enabled UPDATE with WHERE criteria, even if the statement does not use RETURNING or - populate_existing. + ``populate_existing``. diff --git a/doc/build/changelog/unreleased_20/11923.rst b/doc/build/changelog/unreleased_20/11923.rst index 5b5fbceee31..fdd2d6d3c16 100644 --- a/doc/build/changelog/unreleased_20/11923.rst +++ b/doc/build/changelog/unreleased_20/11923.rst @@ -2,5 +2,7 @@ :tags: usecase, orm :tickets: 11923 - Added the dataclass field ``hash`` parameter to the orm field-like methods, - like :meth:`_orn.mapped_column`, :meth:`_orm.relationship`, etc. + Added new parameter :paramref:`_orm.mapped_column.hash` to ORM constructs + such as :meth:`_orm.mapped_column`, :meth:`_orm.relationship`, etc., + which is interpreted for ORM Native Dataclasses in the same way as other + dataclass-specific field parameters. 
diff --git a/doc/build/changelog/unreleased_20/11961.rst b/doc/build/changelog/unreleased_20/11961.rst index c6ffceb0364..8aa862d04f9 100644 --- a/doc/build/changelog/unreleased_20/11961.rst +++ b/doc/build/changelog/unreleased_20/11961.rst @@ -3,5 +3,5 @@ :tickets: 11961 Fixed bug in reflection of table comments where unrelated text would be - returned if an entry in the pg_description table happened to share the - same oid (objoid) as the table being reflected. \ No newline at end of file + returned if an entry in the ``pg_description`` table happened to share the + same oid (objoid) as the table being reflected. diff --git a/doc/build/changelog/unreleased_20/11965.rst b/doc/build/changelog/unreleased_20/11965.rst index 1f9294c0d90..f8e4ce0ad13 100644 --- a/doc/build/changelog/unreleased_20/11965.rst +++ b/doc/build/changelog/unreleased_20/11965.rst @@ -2,8 +2,8 @@ :tags: bug, orm :tickets: 11965 - Fixed regression caused by fixes to joined eager loading in - :ticket:`11449`, where a particular joinedload case could not be asserted - correctly. We now have an example of that case so the assertion has been - repaired to allow for it. + Fixed regression caused by fixes to joined eager loading in :ticket:`11449` + released in 2.0.31, where a particular joinedload case could not be + asserted correctly. We now have an example of that case so the assertion + has been repaired to allow for it. diff --git a/doc/build/changelog/unreleased_20/11975.rst b/doc/build/changelog/unreleased_20/11975.rst index 708a23aa0b3..69e4bdc6b87 100644 --- a/doc/build/changelog/unreleased_20/11975.rst +++ b/doc/build/changelog/unreleased_20/11975.rst @@ -2,6 +2,7 @@ :tags: mysql, performance :tickets: 11975 - Improved foreign keys reflection logic in MySQL 8+ to use a better - optimized query. The previous query could be quite slow in databases - with a large number of columns. + Improved a query used for the MySQL 8 backend when reflecting foreign keys + to be better optimized. Previously, for a database that had millions of + columns across all tables, the query could be prohibitively slow; the query + has been reworked to take better advantage of existing indexes. diff --git a/doc/build/changelog/unreleased_20/11994.rst b/doc/build/changelog/unreleased_20/11994.rst index efcb8e97b66..ca599148c56 100644 --- a/doc/build/changelog/unreleased_20/11994.rst +++ b/doc/build/changelog/unreleased_20/11994.rst @@ -1,6 +1,9 @@ .. change:: - :tags: postgresql, usecase + :tags: postgresql, bug :tickets: 11994 - Render bind cast for ``JSON`` and ``JSONB`` datatype on every dialect. - Previously this was only enabled in a subset of dialects. + The :class:`.postgresql.JSON` and :class:`.postgresql.JSONB` datatypes will + now render a "bind cast" in all cases for all PostgreSQL backends, + including psycopg2, whereas previously it was only enabled for some + backends. This allows greater accuracy in allowing the database server to + recognize when a string value is to be interpreted as JSON. diff --git a/doc/build/changelog/unreleased_20/11995.rst b/doc/build/changelog/unreleased_20/11995.rst index a748a1c5dfa..4387b9cf7ea 100644 --- a/doc/build/changelog/unreleased_20/11995.rst +++ b/doc/build/changelog/unreleased_20/11995.rst @@ -6,7 +6,7 @@ loading by primary key and the primary key is NULL, skip loading" to take into account the current setting for the :paramref:`.orm.Mapper.allow_partial_pks` parameter. If this parameter is - False, then a composite PK value that has partial NULL elements should also - be skipped. 
This can apply to some composite overlapping foreign key + ``False``, then a composite PK value that has partial NULL elements should + also be skipped. This can apply to some composite overlapping foreign key configurations. From 66e31814644bad95c8cda853f03088f59be0053d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 15 Oct 2024 15:28:23 -0400 Subject: [PATCH 364/544] - 2.0.36 --- doc/build/changelog/changelog_20.rst | 124 +++++++++++++++++++- doc/build/changelog/unreleased_20/11317.rst | 8 -- doc/build/changelog/unreleased_20/11912.rst | 7 -- doc/build/changelog/unreleased_20/11917.rst | 10 -- doc/build/changelog/unreleased_20/11923.rst | 8 -- doc/build/changelog/unreleased_20/11961.rst | 7 -- doc/build/changelog/unreleased_20/11965.rst | 9 -- doc/build/changelog/unreleased_20/11973.rst | 7 -- doc/build/changelog/unreleased_20/11975.rst | 8 -- doc/build/changelog/unreleased_20/11978.rst | 7 -- doc/build/changelog/unreleased_20/11994.rst | 9 -- doc/build/changelog/unreleased_20/11995.rst | 12 -- doc/build/changelog/unreleased_20/11997.rst | 9 -- doc/build/changelog/unreleased_20/12002.rst | 8 -- doc/build/conf.py | 4 +- 15 files changed, 125 insertions(+), 112 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/11317.rst delete mode 100644 doc/build/changelog/unreleased_20/11912.rst delete mode 100644 doc/build/changelog/unreleased_20/11917.rst delete mode 100644 doc/build/changelog/unreleased_20/11923.rst delete mode 100644 doc/build/changelog/unreleased_20/11961.rst delete mode 100644 doc/build/changelog/unreleased_20/11965.rst delete mode 100644 doc/build/changelog/unreleased_20/11973.rst delete mode 100644 doc/build/changelog/unreleased_20/11975.rst delete mode 100644 doc/build/changelog/unreleased_20/11978.rst delete mode 100644 doc/build/changelog/unreleased_20/11994.rst delete mode 100644 doc/build/changelog/unreleased_20/11995.rst delete mode 100644 doc/build/changelog/unreleased_20/11997.rst delete mode 100644 doc/build/changelog/unreleased_20/12002.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 0270cee9998..980cd4d61de 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,129 @@ .. changelog:: :version: 2.0.36 - :include_notes_from: unreleased_20 + :released: October 15, 2024 + + .. change:: + :tags: bug, schema + :tickets: 11317 + + Fixed bug where SQL functions passed to + :paramref:`_schema.Column.server_default` would not be rendered with the + particular form of parenthesization now required by newer versions of MySQL + and MariaDB. Pull request courtesy of huuya. + + .. change:: + :tags: bug, orm + :tickets: 11912 + + Fixed bug in ORM bulk update/delete where using RETURNING with bulk + update/delete in combination with ``populate_existing`` would fail to + accommodate the ``populate_existing`` option. + + .. change:: + :tags: bug, orm + :tickets: 11917 + + Continuing from :ticket:`11912`, columns marked with + :paramref:`.mapped_column.onupdate`, + :paramref:`.mapped_column.server_onupdate`, or :class:`.Computed` are now + refreshed in ORM instances when running an ORM enabled UPDATE with WHERE + criteria, even if the statement does not use RETURNING or + ``populate_existing``. + + .. 
change:: + :tags: usecase, orm + :tickets: 11923 + + Added new parameter :paramref:`_orm.mapped_column.hash` to ORM constructs + such as :meth:`_orm.mapped_column`, :meth:`_orm.relationship`, etc., + which is interpreted for ORM Native Dataclasses in the same way as other + dataclass-specific field parameters. + + .. change:: + :tags: bug, postgresql, reflection + :tickets: 11961 + + Fixed bug in reflection of table comments where unrelated text would be + returned if an entry in the ``pg_description`` table happened to share the + same oid (objoid) as the table being reflected. + + .. change:: + :tags: bug, orm + :tickets: 11965 + + Fixed regression caused by fixes to joined eager loading in :ticket:`11449` + released in 2.0.31, where a particular joinedload case could not be + asserted correctly. We now have an example of that case so the assertion + has been repaired to allow for it. + + + .. change:: + :tags: orm, bug + :tickets: 11973 + + Improved the error message emitted when trying to map as dataclass a class + while also manually providing the ``__table__`` attribute. + This usage is currently not supported. + + .. change:: + :tags: mysql, performance + :tickets: 11975 + + Improved a query used for the MySQL 8 backend when reflecting foreign keys + to be better optimized. Previously, for a database that had millions of + columns across all tables, the query could be prohibitively slow; the query + has been reworked to take better advantage of existing indexes. + + .. change:: + :tags: usecase, sql + :tickets: 11978 + + Datatypes that are binary based such as :class:`.VARBINARY` will resolve to + :class:`.LargeBinary` when the :meth:`.TypeEngine.as_generic()` method is + called. + + .. change:: + :tags: postgresql, bug + :tickets: 11994 + + The :class:`.postgresql.JSON` and :class:`.postgresql.JSONB` datatypes will + now render a "bind cast" in all cases for all PostgreSQL backends, + including psycopg2, whereas previously it was only enabled for some + backends. This allows greater accuracy in allowing the database server to + recognize when a string value is to be interpreted as JSON. + + .. change:: + :tags: bug, orm + :tickets: 11995 + + Refined the check which the ORM lazy loader uses to detect "this would be + loading by primary key and the primary key is NULL, skip loading" to take + into account the current setting for the + :paramref:`.orm.Mapper.allow_partial_pks` parameter. If this parameter is + ``False``, then a composite PK value that has partial NULL elements should + also be skipped. This can apply to some composite overlapping foreign key + configurations. + + + .. change:: + :tags: bug, orm + :tickets: 11997 + + Fixed bug in ORM "update with WHERE clause" feature where an explicit + ``.returning()`` would interfere with the "fetch" synchronize strategy due + to an assumption that the ORM mapped class featured the primary key columns + in a specific position within the RETURNING. This has been fixed to use + appropriate ORM column targeting. + + .. change:: + :tags: bug, sql, regression + :tickets: 12002 + + Fixed regression from 1.4 where some datatypes such as those derived from + :class:`.TypeDecorator` could not be pickled when they were part of a + larger SQL expression composition due to internal supporting structures + themselves not being pickleable. .. 
changelog:: :version: 2.0.35 diff --git a/doc/build/changelog/unreleased_20/11317.rst b/doc/build/changelog/unreleased_20/11317.rst deleted file mode 100644 index 445012ea01d..00000000000 --- a/doc/build/changelog/unreleased_20/11317.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, schema - :tickets: 11317 - - Fixed bug where SQL functions passed to - :paramref:`_schema.Column.server_default` would not be rendered with the - particular form of parenthesization now required by newer versions of MySQL - and MariaDB. Pull request courtesy of huuya. diff --git a/doc/build/changelog/unreleased_20/11912.rst b/doc/build/changelog/unreleased_20/11912.rst deleted file mode 100644 index a6bc1ae55d3..00000000000 --- a/doc/build/changelog/unreleased_20/11912.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11912 - - Fixed bug in ORM bulk update/delete where using RETURNING with bulk - update/delete in combination with ``populate_existing`` would fail to - accommodate the ``populate_existing`` option. diff --git a/doc/build/changelog/unreleased_20/11917.rst b/doc/build/changelog/unreleased_20/11917.rst deleted file mode 100644 index 91702f011d7..00000000000 --- a/doc/build/changelog/unreleased_20/11917.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11917 - - Continuing from :ticket:`11912`, columns marked with - :paramref:`.mapped_column.onupdate`, - :paramref:`.mapped_column.server_onupdate`, or :class:`.Computed` are now - refreshed in ORM instances when running an ORM enabled UPDATE with WHERE - criteria, even if the statement does not use RETURNING or - ``populate_existing``. diff --git a/doc/build/changelog/unreleased_20/11923.rst b/doc/build/changelog/unreleased_20/11923.rst deleted file mode 100644 index fdd2d6d3c16..00000000000 --- a/doc/build/changelog/unreleased_20/11923.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: usecase, orm - :tickets: 11923 - - Added new parameter :paramref:`_orm.mapped_column.hash` to ORM constructs - such as :meth:`_orm.mapped_column`, :meth:`_orm.relationship`, etc., - which is interpreted for ORM Native Dataclasses in the same way as other - dataclass-specific field parameters. diff --git a/doc/build/changelog/unreleased_20/11961.rst b/doc/build/changelog/unreleased_20/11961.rst deleted file mode 100644 index 8aa862d04f9..00000000000 --- a/doc/build/changelog/unreleased_20/11961.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, postgresql, reflection - :tickets: 11961 - - Fixed bug in reflection of table comments where unrelated text would be - returned if an entry in the ``pg_description`` table happened to share the - same oid (objoid) as the table being reflected. diff --git a/doc/build/changelog/unreleased_20/11965.rst b/doc/build/changelog/unreleased_20/11965.rst deleted file mode 100644 index f8e4ce0ad13..00000000000 --- a/doc/build/changelog/unreleased_20/11965.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11965 - - Fixed regression caused by fixes to joined eager loading in :ticket:`11449` - released in 2.0.31, where a particular joinedload case could not be - asserted correctly. We now have an example of that case so the assertion - has been repaired to allow for it. - diff --git a/doc/build/changelog/unreleased_20/11973.rst b/doc/build/changelog/unreleased_20/11973.rst deleted file mode 100644 index bad0f220885..00000000000 --- a/doc/build/changelog/unreleased_20/11973.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. 
change:: - :tags: orm, bug - :tickets: 11973 - - Improved the error message emitted when trying to map as dataclass a class - while also manually providing the ``__table__`` attribute. - This usage is currently not supported. diff --git a/doc/build/changelog/unreleased_20/11975.rst b/doc/build/changelog/unreleased_20/11975.rst deleted file mode 100644 index 69e4bdc6b87..00000000000 --- a/doc/build/changelog/unreleased_20/11975.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: mysql, performance - :tickets: 11975 - - Improved a query used for the MySQL 8 backend when reflecting foreign keys - to be better optimized. Previously, for a database that had millions of - columns across all tables, the query could be prohibitively slow; the query - has been reworked to take better advantage of existing indexes. diff --git a/doc/build/changelog/unreleased_20/11978.rst b/doc/build/changelog/unreleased_20/11978.rst deleted file mode 100644 index a8a9cdaf579..00000000000 --- a/doc/build/changelog/unreleased_20/11978.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: usecase, sql - :tickets: 11978 - - Datatypes that are binary based such as :class:`.VARBINARY` will resolve to - :class:`.LargeBinary` when the :meth:`.TypeEngine.as_generic()` method is - called. diff --git a/doc/build/changelog/unreleased_20/11994.rst b/doc/build/changelog/unreleased_20/11994.rst deleted file mode 100644 index ca599148c56..00000000000 --- a/doc/build/changelog/unreleased_20/11994.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: postgresql, bug - :tickets: 11994 - - The :class:`.postgresql.JSON` and :class:`.postgresql.JSONB` datatypes will - now render a "bind cast" in all cases for all PostgreSQL backends, - including psycopg2, whereas previously it was only enabled for some - backends. This allows greater accuracy in allowing the database server to - recognize when a string value is to be interpreted as JSON. diff --git a/doc/build/changelog/unreleased_20/11995.rst b/doc/build/changelog/unreleased_20/11995.rst deleted file mode 100644 index 4387b9cf7ea..00000000000 --- a/doc/build/changelog/unreleased_20/11995.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11995 - - Refined the check which the ORM lazy loader uses to detect "this would be - loading by primary key and the primary key is NULL, skip loading" to take - into account the current setting for the - :paramref:`.orm.Mapper.allow_partial_pks` parameter. If this parameter is - ``False``, then a composite PK value that has partial NULL elements should - also be skipped. This can apply to some composite overlapping foreign key - configurations. - diff --git a/doc/build/changelog/unreleased_20/11997.rst b/doc/build/changelog/unreleased_20/11997.rst deleted file mode 100644 index b2390977e16..00000000000 --- a/doc/build/changelog/unreleased_20/11997.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11997 - - Fixed bug in ORM "update with WHERE clause" feature where an explicit - ``.returning()`` would interfere with the "fetch" synchronize strategy due - to an assumption that the ORM mapped class featured the primary key columns - in a specific position within the RETURNING. This has been fixed to use - appropriate ORM column targeting. diff --git a/doc/build/changelog/unreleased_20/12002.rst b/doc/build/changelog/unreleased_20/12002.rst deleted file mode 100644 index 49ac7017592..00000000000 --- a/doc/build/changelog/unreleased_20/12002.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
change:: - :tags: bug, sql, regression - :tickets: 12002 - - Fixed regression from 1.4 where some datatypes such as those derived from - :class:`.TypeDecorator` could not be pickled when they were part of a - larger SQL expression composition due to internal supporting structures - themselves not being pickleable. diff --git a/doc/build/conf.py b/doc/build/conf.py index 6e8a9ee9432..ea0585dc49e 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.35" +release = "2.0.36" -release_date = "September 16, 2024" +release_date = "October 15, 2024" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 01490a0a7004a60b2ed644c3a7f39a9f14d684fd Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 15 Oct 2024 15:42:11 -0400 Subject: [PATCH 365/544] Version 2.0.37 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 980cd4d61de..15db3ab9a6a 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.37 + :include_notes_from: unreleased_20 + .. changelog:: :version: 2.0.36 :released: October 15, 2024 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index f94278fe927..6360f47a5b9 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.36" +__version__ = "2.0.37" def __go(lcls: Any) -> None: From 978d3a4c066aefa9f2d93ecf935e52887bc28685 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 21 Oct 2024 13:10:54 -0400 Subject: [PATCH 366/544] pin older pip for tox under py37 this is suddenly failing due to a non-py37 syntax in pip/_vendor/typing_extensions.py. Not clear why the failure has just started however as there is no new version of pip recently. Change-Id: I783e2341424a1dad206a7ab668900475fd8d30c5 --- tox.ini | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tox.ini b/tox.ini index 26254801d72..d872bbc8fe8 100644 --- a/tox.ini +++ b/tox.ini @@ -98,6 +98,9 @@ setenv= PYTHONNOUSERSITE=1 PYTEST_EXCLUDES=-m "not memory_intensive and not mypy" + # ensure older pip is installed for EOL python versions + py37: VIRTUALENV_PIP=24.0 + PYTEST_COLOR={tty:--color=yes} MYPY_COLOR={tty:--color-output} From f5225046b69262f1e5f28da84986e9d51dc7b609 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 21 Oct 2024 10:03:01 -0400 Subject: [PATCH 367/544] refine in_() check to use proper duck-typing for __clause_element__ Fixed regression caused by an internal code change in response to recent Mypy releases that caused the very unusual case of a list of ORM-mapped attribute expressions passed to :meth:`.ColumnOperators.in_` to no longer be accepted. in this commit we had to revisit d8dd28c42e where mypy typing didn't accept ColumnOperartors. the type here is the _HasClauseElement[_T] protocol which means we need to use a duck type for a runtime check. 
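A minimal sketch of the case being restored (the mapping is illustrative, not
part of the patch): ORM-mapped attributes implement ``__clause_element__()``
rather than subclassing ``ColumnElement``, so the duck-typed check accepts
them inside the list again:

    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class User(Base):
        __tablename__ = "users"

        id: Mapped[int] = mapped_column(primary_key=True)
        name: Mapped[str] = mapped_column()

    # renders: users.id IN (users.id, users.name)
    print(User.id.in_([User.id, User.name]))
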
Fixes: #12019 Change-Id: Ib378e9cb8defb49d5ac4d726ec93d6bdc581b6a9 (cherry picked from commit aaddd7c8403e9ca2f77113467b5e2ae279a542c4) --- doc/build/changelog/unreleased_20/12019.rst | 8 ++++++++ lib/sqlalchemy/sql/coercions.py | 6 +++--- test/orm/test_query.py | 9 +++++++++ test/sql/test_operators.py | 17 +++++++++++++++++ 4 files changed, 37 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12019.rst diff --git a/doc/build/changelog/unreleased_20/12019.rst b/doc/build/changelog/unreleased_20/12019.rst new file mode 100644 index 00000000000..3c7c1f4d01b --- /dev/null +++ b/doc/build/changelog/unreleased_20/12019.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: orm, bug + :tickets: 12019 + + Fixed regression caused by an internal code change in response to recent + Mypy releases that caused the very unusual case of a list of ORM-mapped + attribute expressions passed to :meth:`.ColumnOperators.in_` to no longer + be accepted. diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index 1d11cbbd3d2..136fc486463 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -846,15 +846,15 @@ def _warn_for_implicit_coercion(self, elem): def _literal_coercion(self, element, *, expr, operator, **kw): if util.is_non_string_iterable(element): non_literal_expressions: Dict[ - Optional[ColumnElement[Any]], - ColumnElement[Any], + Optional[_ColumnExpressionArgument[Any]], + _ColumnExpressionArgument[Any], ] = {} element = list(element) for o in element: if not _is_literal(o): if not isinstance( o, util.preloaded.sql_elements.ColumnElement - ): + ) and not hasattr(o, "__clause_element__"): self._raise_for_expected(element, **kw) else: diff --git a/test/orm/test_query.py b/test/orm/test_query.py index 9dc26bc1e27..0e30f58ca16 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -1976,6 +1976,15 @@ def test_in_on_relationship_not_supported(self): assert_raises(NotImplementedError, Address.user.in_, [User(id=5)]) + def test_in_instrumented_attribute(self): + """test #12019""" + User = self.classes.User + + self._test( + User.id.in_([User.id, User.name]), + "users.id IN (users.id, users.name)", + ) + def test_neg(self): User = self.classes.User diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py index 9c87b355776..8afe091925a 100644 --- a/test/sql/test_operators.py +++ b/test/sql/test_operators.py @@ -83,6 +83,14 @@ def operate(self, op, *other, **kwargs): return op +class ColExpressionDuckTypeOnly: + def __init__(self, expr): + self.expr = expr + + def __clause_element__(self): + return self.expr + + class DefaultColumnComparatorTest( testing.AssertsCompiledSQL, fixtures.TestBase ): @@ -2198,6 +2206,15 @@ def test_in_14(self): "mytable.myid IN (mytable.myid)", ) + def test_in_14_5(self): + """test #12019""" + self.assert_compile( + self.table1.c.myid.in_( + [ColExpressionDuckTypeOnly(self.table1.c.myid)] + ), + "mytable.myid IN (mytable.myid)", + ) + def test_in_15(self): self.assert_compile( self.table1.c.myid.in_(["a", self.table1.c.myid]), From 003a6821e2e1c939c90b5ae8c121ee9ae3ad4638 Mon Sep 17 00:00:00 2001 From: JaySon Date: Thu, 24 Oct 2024 00:48:59 +0800 Subject: [PATCH 368/544] Add TiDB dialect (#12025) * Add sqlalchemy-tidb to index.rst * Update index.rst (cherry picked from commit a9a85f8b2167d7a4313ea49b76e1b2a01a346d7f) --- doc/build/dialects/index.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 
eff7d91de80..59b2c13b0d6 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -130,6 +130,8 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Teradata Vantage | teradatasqlalchemy_ | +------------------------------------------------+---------------------------------------+ +| TiDB | sqlalchemy-tidb_ | ++------------------------------------------------+---------------------------------------+ | YugabyteDB | sqlalchemy-yugabytedb_ | +------------------------------------------------+---------------------------------------+ @@ -170,3 +172,4 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _databricks: https://docs.databricks.com/en/dev-tools/sqlalchemy.html .. _clickhouse-sqlalchemy: https://pypi.org/project/clickhouse-sqlalchemy/ .. _sqlalchemy-kinetica: https://github.com/kineticadb/sqlalchemy-kinetica/ +.. _sqlalchemy-tidb: https://github.com/pingcap/sqlalchemy-tidb From 6b282bb6a001fd56781f22d0e83de99720780bbd Mon Sep 17 00:00:00 2001 From: Gord Thompson Date: Wed, 30 Oct 2024 11:30:40 -0600 Subject: [PATCH 369/544] Update bigquery dialect link (#12048) (cherry picked from commit 58822b9e2412dfefdced95164943fdb515e2f52c) --- doc/build/dialects/index.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 59b2c13b0d6..436e535245d 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -94,7 +94,7 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Firebolt | firebolt-sqlalchemy_ | +------------------------------------------------+---------------------------------------+ -| Google BigQuery | pybigquery_ | +| Google BigQuery | sqlalchemy-bigquery_ | +------------------------------------------------+---------------------------------------+ | Google Sheets | gsheets_ | +------------------------------------------------+---------------------------------------+ @@ -144,7 +144,7 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _ibm-db-sa: https://pypi.org/project/ibm-db-sa/ .. _PyHive: https://github.com/dropbox/PyHive#sqlalchemy .. _teradatasqlalchemy: https://pypi.org/project/teradatasqlalchemy/ -.. _pybigquery: https://github.com/mxmzdlv/pybigquery/ +.. _sqlalchemy-bigquery: https://pypi.org/project/sqlalchemy-bigquery/ .. _sqlalchemy-redshift: https://pypi.org/project/sqlalchemy-redshift .. _sqlalchemy-drill: https://github.com/JohnOmernik/sqlalchemy-drill .. 
_sqlalchemy-hana: https://github.com/SAP/sqlalchemy-hana From a0b19dbbcbd6da730433556e738edb14fddb4bec Mon Sep 17 00:00:00 2001 From: Dennis Magnusson Date: Wed, 30 Oct 2024 19:33:40 +0200 Subject: [PATCH 370/544] Update declarative_styles.rst: add missing import from typing in the example (#12034) * Update declarative_styles.rst: add missing import * Update declarative_styles.rst: make import statement style consistent (cherry picked from commit f418ac8d38a26936aa9a8ed9befe210c88b97f33) --- doc/build/orm/declarative_styles.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/build/orm/declarative_styles.rst b/doc/build/orm/declarative_styles.rst index 48897ee6d6d..8feb5398b10 100644 --- a/doc/build/orm/declarative_styles.rst +++ b/doc/build/orm/declarative_styles.rst @@ -51,6 +51,7 @@ With the declarative base class, new mapped classes are declared as subclasses of the base:: from datetime import datetime + from typing import List from typing import Optional from sqlalchemy import ForeignKey From 7fa90bf798f02dbb77f45a788b829b6009dfd53d Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 30 Oct 2024 21:17:45 +0100 Subject: [PATCH 371/544] Improve mutable typing. References: #12046 Change-Id: If950f7e2090a0f637c2c28cf21a40dc345acc89a (cherry picked from commit 732698427e822b6ba81f1214864685903cdc13a4) --- lib/sqlalchemy/ext/mutable.py | 3 ++- lib/sqlalchemy/sql/elements.py | 4 +++- lib/sqlalchemy/sql/type_api.py | 5 ++--- test/typing/plain_files/ext/misc_ext.py | 17 +++++++++++++++++ 4 files changed, 24 insertions(+), 5 deletions(-) create mode 100644 test/typing/plain_files/ext/misc_ext.py diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index 7da5075a177..e005e4d63f1 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -391,6 +391,7 @@ def __setstate__(self, state): from ..orm.decl_api import DeclarativeAttributeIntercept from ..orm.state import InstanceState from ..orm.unitofwork import UOWTransaction +from ..sql._typing import _TypeEngineArgument from ..sql.base import SchemaEventTarget from ..sql.schema import Column from ..sql.type_api import TypeEngine @@ -638,7 +639,7 @@ def listen_for_type(mapper: Mapper[_O], class_: type) -> None: event.listen(Mapper, "mapper_configured", listen_for_type) @classmethod - def as_mutable(cls, sqltype: TypeEngine[_T]) -> TypeEngine[_T]: + def as_mutable(cls, sqltype: _TypeEngineArgument[_T]) -> TypeEngine[_T]: """Associate a SQL type with this mutable Python type. 
This establishes listeners that will detect ORM mappings against diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 45c1674d9fb..241aa4231e9 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -2431,7 +2431,9 @@ def bindparams( @util.preload_module("sqlalchemy.sql.selectable") def columns( - self, *cols: _ColumnExpressionArgument[Any], **types: TypeEngine[Any] + self, + *cols: _ColumnExpressionArgument[Any], + **types: _TypeEngineArgument[Any], ) -> TextualSelect: r"""Turn this :class:`_expression.TextClause` object into a :class:`_expression.TextualSelect` diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 2f6494e61cb..633763aaa51 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -2315,11 +2315,10 @@ def to_instance( def adapt_type( - typeobj: TypeEngine[Any], + typeobj: _TypeEngineArgument[Any], colspecs: Mapping[Type[Any], Type[TypeEngine[Any]]], ) -> TypeEngine[Any]: - if isinstance(typeobj, type): - typeobj = typeobj() + typeobj = to_instance(typeobj) for t in typeobj.__class__.__mro__[0:-1]: try: impltype = colspecs[t] diff --git a/test/typing/plain_files/ext/misc_ext.py b/test/typing/plain_files/ext/misc_ext.py new file mode 100644 index 00000000000..c44d09bb3e6 --- /dev/null +++ b/test/typing/plain_files/ext/misc_ext.py @@ -0,0 +1,17 @@ +from typing import Any + +from sqlalchemy import JSON +from sqlalchemy import Select +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.ext.mutable import MutableDict +from sqlalchemy.ext.mutable import MutableList +from sqlalchemy.sql.compiler import SQLCompiler + + +@compiles(Select[Any], "my_cool_driver") +def go(sel: Select[Any], compiler: SQLCompiler, **kw: Any) -> str: + return "select 42" + + +MutableList.as_mutable(JSON) +MutableDict.as_mutable(JSON()) From 60927fc03eeedf78c81a515f656e888531738e0d Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 31 Oct 2024 22:14:57 +0100 Subject: [PATCH 372/544] Mention no_autoflush in warning References: #12049 Change-Id: I057140b2fe2f5fc60d5d27a79ddf19a6196fff7b (cherry picked from commit b4fdfd489755848f4945c31f0686ad9eaf128b08) --- lib/sqlalchemy/orm/session.py | 3 ++- test/orm/test_utils.py | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index eb81f16e01e..f18299b3eaf 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -3041,7 +3041,8 @@ def no_autoflush(self) -> Iterator[Session]: @util.langhelpers.tag_method_for_warnings( "This warning originated from the Session 'autoflush' process, " "which was invoked automatically in response to a user-initiated " - "operation.", + "operation. Consider using ``no_autoflush`` context manager if this " + "warning happended while initializing objects.", sa_exc.SAWarning, ) def _autoflush(self) -> None: diff --git a/test/orm/test_utils.py b/test/orm/test_utils.py index 4d6c148639f..35a5060da27 100644 --- a/test/orm/test_utils.py +++ b/test/orm/test_utils.py @@ -140,7 +140,9 @@ def emit_a_warning(mapper, connection, state): + re.escape( "(This warning originated from the Session 'autoflush' " "process, which was invoked automatically in response to a " - "user-initiated operation.)" + "user-initiated operation. 
Consider using ``no_autoflush`` " + "context manager if this warning happended while " + "initializing objects.)" ), ): sess.execute(select(Foo)) From 243a5c12b87e73b39220ff27f106e16e67cd7140 Mon Sep 17 00:00:00 2001 From: Miguel Grillo Date: Thu, 24 Oct 2024 14:32:33 -0400 Subject: [PATCH 373/544] Fixed: 12012: Add Support for `TABLESPACE` Specification in Table Definitions for Oracle Fixes: #12016 **Description** This PR adds support for specifying the `TABLESPACE` in table definitions in SQLAlchemy, specifically for Oracle. This feature is particularly useful for Oracle users who need to specify the tablespace where the table data will be stored. **Changes Made** 1. Updated `construct_arguments` in `OracleDialect`: - The `construct_arguments` list in the `OracleDialect` class has been updated to include the `tablespace` argument for the `Table` class. ```Python construct_arguments = [ ( sa_schema.Table, # old {"resolve_synonyms": False, "on_commit": None, "compress": False}, # new {"resolve_synonyms": False, "on_commit": None, "compress": False, "tablespace": None}, ), (sa_schema.Index, {"bitmap": False, "compress": False}), ] ``` **Path**: `lib/sqlalchemy/dialects/oracle/base.py` 2. Modified OracleDDLCompiler to Include TABLESPACE in post_create_table: - The OracleDDLCompiler class has been modified to include the TABLESPACE clause at the end of the CREATE TABLE statement if the tablespace option is provided. ```Python if opts["tablespace"]: tablespace_name = opts["tablespace"] table_opts.append( "\n TABLESPACE %s" % self.preparer.quote(tablespace_name) ) ``` **Path**: `lib/sqlalchemy/dialects/oracle/base.py` 3. Added tablespace Argument to the Table Class: - A new tablespace argument has been added to the Table class to allow specifying the tablespace in the table definition. 4. Documentation Update: - The documentation has been updated to reflect the new feature and provide usage examples. **Usage Example** ```Python from sqlalchemy import create_engine, Table, Column, Integer, String, MetaData engine = create_engine('oracle+cx_oracle://user:password@dsn') metadata = MetaData() users = Table('users', metadata, Column('id', Integer, primary_key=True), Column('name', String), Column('email', String, unique=True), oracle_tablespace='my_tablespace' # New tablespace argument optional ) metadata.create_all(engine) ``` ### Description ### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [x] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. 
**Have a nice day!** Closes: #12013 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12013 Pull-request-sha: e542dea6ced269cb188b06cbd04cecf1c400e29a Change-Id: I4733b466f9486289e13dd7503d18b3b5c866e836 (cherry picked from commit 44fa4a55bad2bc1bd20047275c366385ba3d4b1f) --- doc/build/changelog/unreleased_20/12016.rst | 7 +++++ lib/sqlalchemy/dialects/oracle/base.py | 33 +++++++++++++++++---- test/dialect/oracle/test_compiler.py | 20 +++++++++++++ test/dialect/oracle/test_dialect.py | 3 +- test/dialect/oracle/test_reflection.py | 18 +++++++++-- 5 files changed, 72 insertions(+), 9 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12016.rst diff --git a/doc/build/changelog/unreleased_20/12016.rst b/doc/build/changelog/unreleased_20/12016.rst new file mode 100644 index 00000000000..5fa68d03723 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12016.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: feature, oracle + :tickets: 12016 + + Added a new table option ``oracle_tablespace`` to specify the ``TABLESPACE`` option + when creating a table in Oracle. This allows users to define the tablespace in + which the table should be created. Pull request courtesy of Miguel Grillo. diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index abf3645c768..be0718450bf 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -488,6 +488,17 @@ The ``oracle_compress`` parameter accepts either an integer compression level, or ``True`` to use the default compression level. +* ``TABLESPACE``:: + + Table('mytable', metadata, ..., + oracle_tablespace="EXAMPLE_TABLESPACE") + + The ``oracle_tablespace`` parameter specifies the tablespace in which the + table is to be created. This is useful when you want to create a table in a + tablespace other than the default tablespace of the user. + + .. versionadded:: 2.0.37 + .. 
_oracle_index_options: Oracle Specific Index Options @@ -1333,7 +1344,10 @@ def post_create_table(self, table): table_opts.append("\n COMPRESS") else: table_opts.append("\n COMPRESS FOR %s" % (opts["compress"])) - + if opts["tablespace"]: + table_opts.append( + "\n TABLESPACE %s" % self.preparer.quote(opts["tablespace"]) + ) return "".join(table_opts) def get_identity_options(self, identity_options): @@ -1457,7 +1471,12 @@ class OracleDialect(default.DefaultDialect): construct_arguments = [ ( sa_schema.Table, - {"resolve_synonyms": False, "on_commit": None, "compress": False}, + { + "resolve_synonyms": False, + "on_commit": None, + "compress": False, + "tablespace": None, + }, ), (sa_schema.Index, {"bitmap": False, "compress": False}), ] @@ -2069,6 +2088,7 @@ def _table_options_query( if self._supports_table_compress_for else sql.null().label("compress_for") ), + dictionary.all_tables.c.tablespace_name, ).where(dictionary.all_tables.c.owner == owner) if has_filter_names: query = query.where( @@ -2160,11 +2180,12 @@ def get_multi_table_options( connection, query, dblink, returns_long=False, params=params ) - for table, compression, compress_for in result: + for table, compression, compress_for, tablespace in result: + data = default() if compression == "ENABLED": - data = {"oracle_compress": compress_for} - else: - data = default() + data["oracle_compress"] = compress_for + if tablespace: + data["oracle_tablespace"] = tablespace options[(schema, self.normalize_name(table))] = data if ObjectKind.VIEW in kind and ObjectScope.DEFAULT in scope: # add the views (no temporary views) diff --git a/test/dialect/oracle/test_compiler.py b/test/dialect/oracle/test_compiler.py index 2165aa0909d..560298800e7 100644 --- a/test/dialect/oracle/test_compiler.py +++ b/test/dialect/oracle/test_compiler.py @@ -1627,6 +1627,26 @@ def test_double_to_oracle_double(self): cast(column("foo"), d1), "CAST(foo AS DOUBLE PRECISION)" ) + @testing.combinations( + ("TEST_TABLESPACE", 'TABLESPACE "TEST_TABLESPACE"'), + ("test_tablespace", "TABLESPACE test_tablespace"), + ("TestTableSpace", 'TABLESPACE "TestTableSpace"'), + argnames="tablespace, expected_sql", + ) + def test_table_tablespace(self, tablespace, expected_sql): + m = MetaData() + + t = Table( + "table1", + m, + Column("x", Integer), + oracle_tablespace=tablespace, + ) + self.assert_compile( + schema.CreateTable(t), + f"CREATE TABLE table1 (x INTEGER) {expected_sql}", + ) + class SequenceTest(fixtures.TestBase, AssertsCompiledSQL): def test_basic(self): diff --git a/test/dialect/oracle/test_dialect.py b/test/dialect/oracle/test_dialect.py index 0c4b894f89d..684f9d49458 100644 --- a/test/dialect/oracle/test_dialect.py +++ b/test/dialect/oracle/test_dialect.py @@ -879,6 +879,7 @@ def test_sequences_are_integers(self, connection): def test_limit_offset_for_update(self, metadata, connection): # oracle can't actually do the ROWNUM thing with FOR UPDATE # very well. + # Seems to be fixed in 23. t = Table( "t1", @@ -903,7 +904,7 @@ def test_limit_offset_for_update(self, metadata, connection): # as of #8221, this fails also. limit w/o order by is useless # in any case. 
stmt = t.select().with_for_update().limit(2) - if testing.against("oracle>=12"): + if testing.against("oracle>=12") and testing.against("oracle<23"): with expect_raises_message(exc.DatabaseError, "ORA-02014"): connection.execute(stmt).fetchall() else: diff --git a/test/dialect/oracle/test_reflection.py b/test/dialect/oracle/test_reflection.py index 00d83637201..a17b53895f1 100644 --- a/test/dialect/oracle/test_reflection.py +++ b/test/dialect/oracle/test_reflection.py @@ -684,6 +684,20 @@ def test_reflect_hidden_column(self): finally: conn.exec_driver_sql("DROP TABLE my_table") + def test_tablespace(self, connection, metadata): + tbl = Table( + "test_tablespace", + metadata, + Column("data", Integer), + oracle_tablespace="temp", + ) + metadata.create_all(connection) + + m2 = MetaData() + + tbl = Table("test_tablespace", m2, autoload_with=connection) + assert tbl.dialect_options["oracle"]["tablespace"] == "TEMP" + class ViewReflectionTest(fixtures.TestBase): __only_on__ = "oracle" @@ -1540,8 +1554,8 @@ def setup_test(self): (schema, "parent"): [], } self.options[schema] = { - (schema, "my_table"): {}, - (schema, "parent"): {}, + (schema, "my_table"): {"oracle_tablespace": "USERS"}, + (schema, "parent"): {"oracle_tablespace": "USERS"}, } def test_tables(self, connection): From a3d6a26a81ebe7629d3931857f5b0bf5c406cd38 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 26 Oct 2024 21:50:36 +0200 Subject: [PATCH 374/544] Improve Oracle identifier length detection Use the connection attribute ``max_identifier_length`` available in oracledb since version 2.5 when determining the identifier length in the Oracle dialect. Fixes: #12032 Change-Id: If16db93e0df25776295bc521706dbad1cc541f4a (cherry picked from commit 90bf575b81c5396b364908547551b6592a333bf7) --- doc/build/changelog/unreleased_20/12032.rst | 7 +++++++ lib/sqlalchemy/dialects/oracle/base.py | 15 +++++++++------ lib/sqlalchemy/dialects/oracle/oracledb.py | 14 ++++++++++++-- 3 files changed, 28 insertions(+), 8 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12032.rst diff --git a/doc/build/changelog/unreleased_20/12032.rst b/doc/build/changelog/unreleased_20/12032.rst new file mode 100644 index 00000000000..5a407329807 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12032.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: oracle, usecase + :tickets: 12032 + + Use the connection attribute ``max_identifier_length`` available + in oracledb since version 2.5 when determining the identifier length + in the Oracle dialect. diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index abf3645c768..30b575140f2 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -176,12 +176,15 @@ actual server version in order to assist with migration of Oracle databases, and may be configured within the Oracle server itself. This compatibility version is retrieved using the query ``SELECT value FROM v$parameter WHERE -name = 'compatible';``. The SQLAlchemy Oracle dialect, when tasked with -determining the default max identifier length, will attempt to use this query -upon first connect in order to determine the effective compatibility version of -the server, which determines what the maximum allowed identifier length is for -the server. If the table is not available, the server version information is -used instead. +name = 'compatible';``. 
+The SQLAlchemy Oracle dialect, when tasked with determining the default max +identifier length, will use the ``max_identifier_length`` attribute available +in the connection of the oracledb driver since version 2.5. When using an older +version or the cx_Oracle driver, SQLAlchemy will instead attempt to use the query +mentioned above upon first connect in order to determine the effective +compatibility version of the server, which determines what the maximum allowed +identifier length is for the server. If the table is not available, the server +version information is used instead. As of SQLAlchemy 1.4, the default max identifier length for the Oracle dialect is 128 characters. Upon first connect, the compatibility version is detected diff --git a/lib/sqlalchemy/dialects/oracle/oracledb.py b/lib/sqlalchemy/dialects/oracle/oracledb.py index 0667ed768e8..bdeb535e689 100644 --- a/lib/sqlalchemy/dialects/oracle/oracledb.py +++ b/lib/sqlalchemy/dialects/oracle/oracledb.py @@ -221,6 +221,12 @@ def do_recover_twophase(self, connection): for fi, gti, bq in connection.connection.tpc_recover() ] + def _check_max_identifier_length(self, connection): + if self.oracledb_ver >= (2, 5): + return connection.connection.max_identifier_length + else: + super()._check_max_identifier_length(connection) + class AsyncAdapt_oracledb_cursor(AsyncAdapt_dbapi_cursor): _cursor: AsyncCursor @@ -251,7 +257,7 @@ def _aenter_cursor(self, cursor: AsyncCursor) -> AsyncCursor: self._adapt_connection._handle_exception(error) async def _execute_async(self, operation, parameters): - # override to not use mutex, oracledb already has mutex + # override to not use mutex, oracledb already has a mutex if parameters is None: result = await self._cursor.execute(operation) @@ -267,7 +273,7 @@ async def _executemany_async( operation, seq_of_parameters, ): - # override to not use mutex, oracledb already has mutex + # override to not use mutex, oracledb already has a mutex return await self._cursor.executemany(operation, seq_of_parameters) def __enter__(self): @@ -325,6 +331,10 @@ def stmtcachesize(self): def stmtcachesize(self, value): self._connection.stmtcachesize = value + @property + def max_identifier_length(self): + return self._connection.max_identifier_length + def cursor(self): return AsyncAdapt_oracledb_cursor(self) From 53f500ce274d30162e1246b455f18871db31d7b6 Mon Sep 17 00:00:00 2001 From: Yeongbae Jeon Date: Sun, 10 Nov 2024 01:49:20 +0900 Subject: [PATCH 375/544] Fix source comment/doc typos (#12072) minor spelling corrections in comments and doc (cherry picked from commit ef23611a6bf8358dd05e0fc13384f1eb1925e1ff) --- lib/sqlalchemy/engine/default.py | 2 +- lib/sqlalchemy/engine/interfaces.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index d42a3138bc1..9b769265fa0 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -254,7 +254,7 @@ class DefaultDialect(Dialect): default_schema_name: Optional[str] = None # indicates symbol names are - # UPPERCASEd if they are case insensitive + # UPPERCASED if they are case insensitive # within the database. # if this is True, the methods normalize_name() # and denormalize_name() must be provided. 
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 17a133f27a9..6696a787064 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -1117,7 +1117,7 @@ def loaded_dbapi(self) -> ModuleType: supports_constraint_comments: bool """Indicates if the dialect supports comment DDL on constraints. - .. versionadded: 2.0 + .. versionadded:: 2.0 """ _has_events = False @@ -2498,7 +2498,7 @@ def get_isolation_level_values( ``REPEATABLE READ``. isolation level names will have underscores converted to spaces before being passed along to the dialect. * The names for the four standard isolation names to the extent that - they are supported by the backend should be ``READ UNCOMMITTED`` + they are supported by the backend should be ``READ UNCOMMITTED``, ``READ COMMITTED``, ``REPEATABLE READ``, ``SERIALIZABLE`` * if the dialect supports an autocommit option it should be provided using the isolation level name ``AUTOCOMMIT``. From 2174426599e68ebef4a1eab7f090de0674c25c82 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 12 Nov 2024 14:50:50 -0500 Subject: [PATCH 376/544] dont leak mutating bindparams list into AnalyzedFunction Fixed issue in "lambda SQL" feature where the tracking of bound parameters could be corrupted if the same lambda were evaluated across multiple compile phases, including when using the same lambda across multiple engine instances or with statement caching disabled. Fixes: #12084 Change-Id: I327aa93ce7feb2326a22113164bd834b96b6b889 (cherry picked from commit 5bbefc41b7b2695c95c9c93bcaabd8c4731e348e) --- doc/build/changelog/unreleased_20/12084.rst | 9 +++++ lib/sqlalchemy/sql/lambdas.py | 2 +- test/sql/test_lambdas.py | 41 +++++++++++++++++++++ 3 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/12084.rst diff --git a/doc/build/changelog/unreleased_20/12084.rst b/doc/build/changelog/unreleased_20/12084.rst new file mode 100644 index 00000000000..0eef5c9a1cb --- /dev/null +++ b/doc/build/changelog/unreleased_20/12084.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, sql + :tickets: 12084 + + Fixed issue in "lambda SQL" feature where the tracking of bound parameters + could be corrupted if the same lambda were evaluated across multiple + compile phases, including when using the same lambda across multiple engine + instances or with statement caching disabled. 
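    A minimal sketch of the affected pattern (adapted loosely from the regression test added in this patch; the table and filter criteria are illustrative only):

```Python
from sqlalchemy import Column, Integer, MetaData, String, Table, lambda_stmt, select

metadata = MetaData()
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)

# each added criterion becomes a closure-tracked bound parameter of the
# lambda; the fix keeps that tracking intact when the same lambda is
# evaluated again in a later compile phase (e.g. with caching disabled)
stmt = lambda_stmt(lambda: select(users))
stmt += lambda s: s.where(users.c.name == "bar")
stmt += lambda s: s.where(users.c.name == "foo")
```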
+ diff --git a/lib/sqlalchemy/sql/lambdas.py b/lib/sqlalchemy/sql/lambdas.py index 7a6b7b8f776..2657b2c243d 100644 --- a/lib/sqlalchemy/sql/lambdas.py +++ b/lib/sqlalchemy/sql/lambdas.py @@ -278,7 +278,7 @@ def _retrieve_tracker_rec(self, fn, apply_propagate_attrs, opts): rec = AnalyzedFunction( tracker, self, apply_propagate_attrs, fn ) - rec.closure_bindparams = bindparams + rec.closure_bindparams = list(bindparams) lambda_cache[key] = rec else: rec = lambda_cache[key] diff --git a/test/sql/test_lambdas.py b/test/sql/test_lambdas.py index 17991ea2e35..9eb20dd4e59 100644 --- a/test/sql/test_lambdas.py +++ b/test/sql/test_lambdas.py @@ -1889,6 +1889,47 @@ def upd(id_, newname): (7, "foo"), ) + def test_bindparam_not_cached(self, user_address_fixture, testing_engine): + """test #12084""" + + users, addresses = user_address_fixture + + engine = testing_engine( + share_pool=True, options={"query_cache_size": 0} + ) + with engine.begin() as conn: + conn.execute( + users.insert(), + [{"id": 7, "name": "bar"}, {"id": 8, "name": "foo"}], + ) + + def make_query(stmt, *criteria): + for crit in criteria: + stmt += lambda s: s.where(crit) + + return stmt + + for i in range(2): + with engine.connect() as conn: + stmt = lambda_stmt(lambda: select(users)) + # create a filter criterion that will never match anything + stmt1 = make_query( + stmt, + users.c.name == "bar", + users.c.name == "foo", + ) + + assert len(conn.scalars(stmt1).all()) == 0 + + stmt2 = make_query( + stmt, + users.c.name == "bar", + users.c.name == "bar", + users.c.name == "foo", + ) + + assert len(conn.scalars(stmt2).all()) == 0 + class DeferredLambdaElementTest( fixtures.TestBase, testing.AssertsExecutionResults, AssertsCompiledSQL From 3e6712ae071df36b3fa65ef240468a4839892768 Mon Sep 17 00:00:00 2001 From: Oleg Ovcharuk Date: Thu, 14 Nov 2024 21:49:34 +0300 Subject: [PATCH 377/544] Add YDB to external dialect list (#12088) (cherry picked from commit 2f559b2d5290827a75c59dc59bc3cdafaaa5ea23) --- doc/build/dialects/index.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 436e535245d..d0710ef346e 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -132,6 +132,8 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | TiDB | sqlalchemy-tidb_ | +------------------------------------------------+---------------------------------------+ +| YDB | ydb-sqlalchemy_ | ++------------------------------------------------+---------------------------------------+ | YugabyteDB | sqlalchemy-yugabytedb_ | +------------------------------------------------+---------------------------------------+ @@ -173,3 +175,4 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _clickhouse-sqlalchemy: https://pypi.org/project/clickhouse-sqlalchemy/ .. _sqlalchemy-kinetica: https://github.com/kineticadb/sqlalchemy-kinetica/ .. _sqlalchemy-tidb: https://github.com/pingcap/sqlalchemy-tidb +.. 
_ydb-sqlalchemy: https://github.com/ydb-platform/ydb-sqlalchemy/ From ec80c7e14ea33ab32ae5c0103096a28fdcf06678 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 14 Nov 2024 19:55:32 +0100 Subject: [PATCH 378/544] Improve oracle max id length again Adjust 90bf575b81c5396b364908547551b6592a333bf7 to handle the none case Fix missing return Fixes: #12032 Change-Id: I166efbde1a0cc88673ad3cdfbda70c737dcafcc8 (cherry picked from commit 1a7c5772623546f8ba182adef48a8bb20f9c50c6) --- lib/sqlalchemy/dialects/oracle/oracledb.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/dialects/oracle/oracledb.py b/lib/sqlalchemy/dialects/oracle/oracledb.py index bdeb535e689..37e07daf1d5 100644 --- a/lib/sqlalchemy/dialects/oracle/oracledb.py +++ b/lib/sqlalchemy/dialects/oracle/oracledb.py @@ -223,9 +223,10 @@ def do_recover_twophase(self, connection): def _check_max_identifier_length(self, connection): if self.oracledb_ver >= (2, 5): - return connection.connection.max_identifier_length - else: - super()._check_max_identifier_length(connection) + max_len = connection.connection.max_identifier_length + if max_len is not None: + return max_len + return super()._check_max_identifier_length(connection) class AsyncAdapt_oracledb_cursor(AsyncAdapt_dbapi_cursor): From 5e9a64f037cffba9ddd6046f097b80c6ee5ff907 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 14 Nov 2024 20:27:35 +0100 Subject: [PATCH 379/544] Support table function in oracle Fixed compilation of ``TABLE`` function when used in a from clause in Oracle Database dialect. Fixes: #12100 Change-Id: I862e5be9685611dc74338c37b7537505fc2194e5 (cherry picked from commit 564de4661fce3274d71c32676a735a250821fc0f) --- doc/build/changelog/unreleased_20/12100.rst | 6 ++++++ lib/sqlalchemy/dialects/oracle/base.py | 2 +- test/dialect/oracle/test_compiler.py | 12 ++++++++++++ 3 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/12100.rst diff --git a/doc/build/changelog/unreleased_20/12100.rst b/doc/build/changelog/unreleased_20/12100.rst new file mode 100644 index 00000000000..5fc111ae495 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12100.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, oracle + :tickets: 12100 + + Fixed compilation of ``TABLE`` function when used in a from clause + in Oracle Database dialect. 
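    For illustration, a sketch of the construct involved (the PL/SQL collection name is hypothetical, following the new test in this patch):

```Python
from sqlalchemy import func, select

# a function literally named "table" used in the FROM clause; with this fix
# the Oracle compiler no longer wraps it in an additional TABLE (...) clause
fn_call = func.table("simulate_name_array")
stmt = select(1).select_from(fn_call)
# compiles on Oracle roughly as: SELECT 1 FROM table(:table_1)
```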
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index b03fbc9d06b..14a88d1c629 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -851,7 +851,7 @@ def function_argspec(self, fn, **kw): def visit_function(self, func, **kw): text = super().visit_function(func, **kw) - if kw.get("asfrom", False): + if kw.get("asfrom", False) and func.name.lower() != "table": text = "TABLE (%s)" % text return text diff --git a/test/dialect/oracle/test_compiler.py b/test/dialect/oracle/test_compiler.py index 560298800e7..532d08c6626 100644 --- a/test/dialect/oracle/test_compiler.py +++ b/test/dialect/oracle/test_compiler.py @@ -1852,3 +1852,15 @@ def test_table_valued(self): "SELECT anon_1.string1, anon_1.string2 " "FROM TABLE (three_pairs()) anon_1", ) + + @testing.combinations(func.TABLE, func.table, func.Table) + def test_table_function(self, fn): + """Issue #12100 Use case is: + https://python-oracledb.readthedocs.io/en/latest/user_guide/bind.html#binding-a-large-number-of-items-in-an-in-list + """ + fn_call = fn("simulate_name_array") + stmt = select(1).select_from(fn_call) + self.assert_compile( + stmt, + f"SELECT 1 FROM {fn_call.name}(:{fn_call.name}_1)", + ) From 6ee6b7b9c5d0c918b4b8b5c26cbbe48288048871 Mon Sep 17 00:00:00 2001 From: Christopher Jones Date: Tue, 12 Nov 2024 17:28:38 -0500 Subject: [PATCH 380/544] Update Oracle dialect doc, mostly to prefer python-oracledb ### Description Small updates for Oracle Database dialect documentation. - prefer python-oracledb over cx_Oracle - Prefer the product name 'Oracle Database' over the company name 'Oracle' - update links - modernize This is a refresh of existing content. I decided the apparently now duplicated sections between cx_Oracle and python-oracledb were justified for clarity due to the inevitable differences. 
This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed **Have a nice day!** Closes: #12078 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12078 Pull-request-sha: 7c4dcf94032af295a6836e9412a4367b716f2de5 Change-Id: I3678976f5524ee164dc31b3122b224ae37060b71 (cherry picked from commit 855d03cc15ac30d458d8d2e501df5f324238f43e) --- doc/build/core/connections.rst | 51 +- doc/build/core/defaults.rst | 18 +- doc/build/core/engines.rst | 19 +- doc/build/core/metadata.rst | 39 +- doc/build/core/pooling.rst | 21 +- doc/build/core/type_basics.rst | 8 +- doc/build/dialects/oracle.rst | 25 +- doc/build/glossary.rst | 24 +- doc/build/index.rst | 3 +- doc/build/orm/persistence_techniques.rst | 12 +- doc/build/orm/queryguide/relationships.rst | 8 +- doc/build/orm/relationship_persistence.rst | 23 +- doc/build/orm/versioning.rst | 3 +- doc/build/tutorial/data_select.rst | 20 +- lib/sqlalchemy/dialects/oracle/base.py | 361 +++++++------ lib/sqlalchemy/dialects/oracle/cx_oracle.py | 230 ++++---- lib/sqlalchemy/dialects/oracle/oracledb.py | 528 +++++++++++++++++-- lib/sqlalchemy/dialects/oracle/provision.py | 2 +- lib/sqlalchemy/dialects/oracle/types.py | 37 +- lib/sqlalchemy/engine/base.py | 11 +- lib/sqlalchemy/engine/default.py | 14 +- lib/sqlalchemy/engine/events.py | 3 +- lib/sqlalchemy/engine/interfaces.py | 19 +- lib/sqlalchemy/engine/reflection.py | 4 +- lib/sqlalchemy/ext/compiler.py | 3 +- lib/sqlalchemy/orm/context.py | 8 +- lib/sqlalchemy/sql/_elements_constructors.py | 4 +- lib/sqlalchemy/sql/compiler.py | 3 +- lib/sqlalchemy/sql/elements.py | 10 +- lib/sqlalchemy/sql/operators.py | 14 +- lib/sqlalchemy/sql/schema.py | 20 +- lib/sqlalchemy/sql/selectable.py | 44 +- lib/sqlalchemy/sql/sqltypes.py | 34 +- reap_dbs.py | 2 +- test/dialect/oracle/_oracledb_mode.py | 2 +- test/dialect/oracle/test_compiler.py | 28 +- test/dialect/oracle/test_dialect.py | 4 +- test/dialect/oracle/test_types.py | 11 +- 38 files changed, 1080 insertions(+), 590 deletions(-) diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst index 597d317f072..030d41cd3b3 100644 --- a/doc/build/core/connections.rst +++ b/doc/build/core/connections.rst @@ -419,7 +419,7 @@ reverted when a connection is returned to the connection pool. :ref:`SQL Server Transaction Isolation ` - :ref:`Oracle Transaction Isolation ` + :ref:`Oracle Database Transaction Isolation ` :ref:`session_transaction_isolation` - for the ORM @@ -588,17 +588,17 @@ To sum up: Using Server Side Cursors (a.k.a. stream results) ------------------------------------------------- -Some backends feature explicit support for the concept of "server -side cursors" versus "client side cursors". A client side cursor here -means that the database driver fully fetches all rows from a result set -into memory before returning from a statement execution. Drivers such as -those of PostgreSQL and MySQL/MariaDB generally use client side cursors -by default. A server side cursor, by contrast, indicates that result rows -remain pending within the database server's state as result rows are consumed -by the client. The drivers for Oracle generally use a "server side" model, -for example, and the SQLite dialect, while not using a real "client / server" -architecture, still uses an unbuffered result fetching approach that will -leave result rows outside of process memory before they are consumed. 
+Some backends feature explicit support for the concept of "server side cursors" +versus "client side cursors". A client side cursor here means that the +database driver fully fetches all rows from a result set into memory before +returning from a statement execution. Drivers such as those of PostgreSQL and +MySQL/MariaDB generally use client side cursors by default. A server side +cursor, by contrast, indicates that result rows remain pending within the +database server's state as result rows are consumed by the client. The drivers +for Oracle Database generally use a "server side" model, for example, and the +SQLite dialect, while not using a real "client / server" architecture, still +uses an unbuffered result fetching approach that will leave result rows outside +of process memory before they are consumed. .. topic:: What we really mean is "buffered" vs. "unbuffered" results @@ -1807,17 +1807,18 @@ Current Support ~~~~~~~~~~~~~~~ The feature is enabled for all backend included in SQLAlchemy that support -RETURNING, with the exception of Oracle for which both the cx_Oracle and -OracleDB drivers offer their own equivalent feature. The feature normally takes -place when making use of the :meth:`_dml.Insert.returning` method of an -:class:`_dml.Insert` construct in conjunction with :term:`executemany` -execution, which occurs when passing a list of dictionaries to the -:paramref:`_engine.Connection.execute.parameters` parameter of the -:meth:`_engine.Connection.execute` or :meth:`_orm.Session.execute` methods (as -well as equivalent methods under :ref:`asyncio ` and -shorthand methods like :meth:`_orm.Session.scalars`). It also takes place -within the ORM :term:`unit of work` process when using methods such as -:meth:`_orm.Session.add` and :meth:`_orm.Session.add_all` to add rows. +RETURNING, with the exception of Oracle Database for which both the +python-oracledb and cx_Oracle drivers offer their own equivalent feature. The +feature normally takes place when making use of the +:meth:`_dml.Insert.returning` method of an :class:`_dml.Insert` construct in +conjunction with :term:`executemany` execution, which occurs when passing a +list of dictionaries to the :paramref:`_engine.Connection.execute.parameters` +parameter of the :meth:`_engine.Connection.execute` or +:meth:`_orm.Session.execute` methods (as well as equivalent methods under +:ref:`asyncio ` and shorthand methods like +:meth:`_orm.Session.scalars`). It also takes place within the ORM :term:`unit +of work` process when using methods such as :meth:`_orm.Session.add` and +:meth:`_orm.Session.add_all` to add rows. For SQLAlchemy's included dialects, support or equivalent support is currently as follows: @@ -1827,8 +1828,8 @@ as follows: * SQL Server - all supported SQL Server versions [#]_ * MariaDB - supported for MariaDB versions 10.5 and above * MySQL - no support, no RETURNING feature is present -* Oracle - supports RETURNING with executemany using native cx_Oracle / OracleDB - APIs, for all supported Oracle versions 9 and above, using multi-row OUT +* Oracle Database - supports RETURNING with executemany using native python-oracledb / cx_Oracle + APIs, for all supported Oracle Database versions 9 and above, using multi-row OUT parameters. This is not the same implementation as "executemanyvalues", however has the same usage patterns and equivalent performance benefits. 
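A brief sketch of the execution style being described (backend-agnostic; the table, values, and SQLite URL are illustrative stand-ins for any RETURNING-capable backend):

```Python
from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, insert

engine = create_engine("sqlite://")  # stand-in for any RETURNING-capable backend
metadata = MetaData()
items = Table(
    "items",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("data", String(50)),
)
metadata.create_all(engine)

# passing a list of parameter dictionaries together with .returning()
# engages the "insertmanyvalues" path (or, on Oracle Database, the driver's
# native executemany RETURNING) while still handing back the generated keys
with engine.begin() as conn:
    result = conn.execute(
        insert(items).returning(items.c.id),
        [{"data": "d1"}, {"data": "d2"}],
    )
    new_ids = result.scalars().all()
```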
diff --git a/doc/build/core/defaults.rst b/doc/build/core/defaults.rst index ef5ad208159..586f0531438 100644 --- a/doc/build/core/defaults.rst +++ b/doc/build/core/defaults.rst @@ -349,7 +349,7 @@ SQLAlchemy represents database sequences using the :class:`~sqlalchemy.schema.Sequence` object, which is considered to be a special case of "column default". It only has an effect on databases which have explicit support for sequences, which among SQLAlchemy's included dialects -includes PostgreSQL, Oracle, MS SQL Server, and MariaDB. The +includes PostgreSQL, Oracle Database, MS SQL Server, and MariaDB. The :class:`~sqlalchemy.schema.Sequence` object is otherwise ignored. .. tip:: @@ -466,8 +466,8 @@ column:: In the above example, ``CREATE TABLE`` for PostgreSQL will make use of the ``SERIAL`` datatype for the ``cart_id`` column, and the ``cart_id_seq`` -sequence will be ignored. However on Oracle, the ``cart_id_seq`` sequence -will be created explicitly. +sequence will be ignored. However on Oracle Database, the ``cart_id_seq`` +sequence will be created explicitly. .. tip:: @@ -544,7 +544,7 @@ Associating a Sequence as the Server Side Default ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. note:: The following technique is known to work only with the PostgreSQL - database. It does not work with Oracle. + database. It does not work with Oracle Database. The preceding sections illustrate how to associate a :class:`.Sequence` with a :class:`_schema.Column` as the **Python side default generator**:: @@ -627,7 +627,7 @@ including the default schema, if any. :ref:`postgresql_sequences` - in the PostgreSQL dialect documentation - :ref:`oracle_returning` - in the Oracle dialect documentation + :ref:`oracle_returning` - in the Oracle Database dialect documentation .. _computed_ddl: @@ -704,9 +704,9 @@ eagerly fetched. * PostgreSQL as of version 12 -* Oracle - with the caveat that RETURNING does not work correctly with UPDATE - (a warning will be emitted to this effect when the UPDATE..RETURNING that - includes a computed column is rendered) +* Oracle Database - with the caveat that RETURNING does not work correctly with + UPDATE (a warning will be emitted to this effect when the UPDATE..RETURNING + that includes a computed column is rendered) * Microsoft SQL Server @@ -792,7 +792,7 @@ The :class:`.Identity` construct is currently known to be supported by: * PostgreSQL as of version 10. -* Oracle as of version 12. It also supports passing ``always=None`` to +* Oracle Database as of version 12. It also supports passing ``always=None`` to enable the default generated mode and the parameter ``on_null=True`` to specify "ON NULL" in conjunction with a "BY DEFAULT" identity column. diff --git a/doc/build/core/engines.rst b/doc/build/core/engines.rst index ed9c2b1e4dd..108a939a9ea 100644 --- a/doc/build/core/engines.rst +++ b/doc/build/core/engines.rst @@ -200,13 +200,23 @@ More notes on connecting to MySQL at :ref:`mysql_toplevel`. Oracle ^^^^^^^^^^ -The Oracle dialect uses cx_oracle as the default DBAPI:: +The preferred Oracle Database dialect uses the python-oracledb driver as the +DBAPI:: - engine = create_engine("oracle://scott:tiger@127.0.0.1:1521/sidname") + engine = create_engine( + "oracle+oracledb://scott:tiger@127.0.0.1:1521/?service_name=freepdb1" + ) - engine = create_engine("oracle+cx_oracle://scott:tiger@tnsname") + engine = create_engine("oracle+oracledb://scott:tiger@tnsalias") -More notes on connecting to Oracle at :ref:`oracle_toplevel`. 
+For historical reasons, the Oracle dialect uses the obsolete cx_Oracle driver +as the default DBAPI:: + + engine = create_engine("oracle://scott:tiger@127.0.0.1:1521/?service_name=freepdb1") + + engine = create_engine("oracle+cx_oracle://scott:tiger@tnsalias") + +More notes on connecting to Oracle Database at :ref:`oracle_toplevel`. Microsoft SQL Server ^^^^^^^^^^^^^^^^^^^^ @@ -693,4 +703,3 @@ these parameters from being logged for privacy purposes, enable the ... conn.execute(text("select :some_private_name"), {"some_private_name": "pii"}) 2020-10-24 12:48:32,808 INFO sqlalchemy.engine.Engine select ? 2020-10-24 12:48:32,808 INFO sqlalchemy.engine.Engine [SQL parameters hidden due to hide_parameters=True] - diff --git a/doc/build/core/metadata.rst b/doc/build/core/metadata.rst index 1a933828856..318509bbdac 100644 --- a/doc/build/core/metadata.rst +++ b/doc/build/core/metadata.rst @@ -296,9 +296,9 @@ refer to alternate sets of tables and other constructs. The server-side geometry of a "schema" takes many forms, including names of "schemas" under the scope of a particular database (e.g. PostgreSQL schemas), named sibling databases (e.g. MySQL / MariaDB access to other databases on the same server), -as well as other concepts like tables owned by other usernames (Oracle, SQL -Server) or even names that refer to alternate database files (SQLite ATTACH) or -remote servers (Oracle DBLINK with synonyms). +as well as other concepts like tables owned by other usernames (Oracle +Database, SQL Server) or even names that refer to alternate database files +(SQLite ATTACH) or remote servers (Oracle Database DBLINK with synonyms). What all of the above approaches have (mostly) in common is that there's a way of referencing this alternate set of tables using a string name. SQLAlchemy @@ -328,14 +328,15 @@ schema names on a per-connection or per-statement basis. "database" that typically has a single "owner". Within this database there can be any number of "schemas" which then contain the actual table objects. - A table within a specific schema is referenced explicitly using the - syntax ".". Contrast this to an architecture such - as that of MySQL, where there are only "databases", however SQL statements - can refer to multiple databases at once, using the same syntax except it - is ".". On Oracle, this syntax refers to yet another - concept, the "owner" of a table. Regardless of which kind of database is - in use, SQLAlchemy uses the phrase "schema" to refer to the qualifying - identifier within the general syntax of ".". + A table within a specific schema is referenced explicitly using the syntax + ".". Contrast this to an architecture such as that + of MySQL, where there are only "databases", however SQL statements can + refer to multiple databases at once, using the same syntax except it is + ".". On Oracle Database, this syntax refers to yet + another concept, the "owner" of a table. Regardless of which kind of + database is in use, SQLAlchemy uses the phrase "schema" to refer to the + qualifying identifier within the general syntax of + ".". .. seealso:: @@ -510,17 +511,19 @@ These names are usually configured at the login level, such as when connecting to a PostgreSQL database, the default "schema" is called "public". There are often cases where the default "schema" cannot be set via the login -itself and instead would usefully be configured each time a connection -is made, using a statement such as "SET SEARCH_PATH" on PostgreSQL or -"ALTER SESSION" on Oracle. 
These approaches may be achieved by using -the :meth:`_pool.PoolEvents.connect` event, which allows access to the -DBAPI connection when it is first created. For example, to set the -Oracle CURRENT_SCHEMA variable to an alternate name:: +itself and instead would usefully be configured each time a connection is made, +using a statement such as "SET SEARCH_PATH" on PostgreSQL or "ALTER SESSION" on +Oracle Database. These approaches may be achieved by using the +:meth:`_pool.PoolEvents.connect` event, which allows access to the DBAPI +connection when it is first created. For example, to set the Oracle Database +CURRENT_SCHEMA variable to an alternate name:: from sqlalchemy import event from sqlalchemy import create_engine - engine = create_engine("oracle+cx_oracle://scott:tiger@tsn_name") + engine = create_engine( + "oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1" + ) @event.listens_for(engine, "connect", insert=True) diff --git a/doc/build/core/pooling.rst b/doc/build/core/pooling.rst index f3ea6e86238..526782b0551 100644 --- a/doc/build/core/pooling.rst +++ b/doc/build/core/pooling.rst @@ -509,30 +509,32 @@ particular error should be considered a "disconnect" situation or not, as well as if this disconnect should cause the entire connection pool to be invalidated or not. -For example, to add support to consider the Oracle error codes -``DPY-1001`` and ``DPY-4011`` to be handled as disconnect codes, apply an -event handler to the engine after creation:: +For example, to add support to consider the Oracle Database driver error codes +``DPY-1001`` and ``DPY-4011`` to be handled as disconnect codes, apply an event +handler to the engine after creation:: import re from sqlalchemy import create_engine - engine = create_engine("oracle://scott:tiger@dnsname") + engine = create_engine( + "oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1" + ) @event.listens_for(engine, "handle_error") def handle_exception(context: ExceptionContext) -> None: if not context.is_disconnect and re.match( - r"^(?:DPI-1001|DPI-4011)", str(context.original_exception) + r"^(?:DPY-1001|DPY-4011)", str(context.original_exception) ): context.is_disconnect = True return None -The above error processing function will be invoked for all Oracle errors -raised, including those caught when using the -:ref:`pool pre ping ` feature for those backends -that rely upon disconnect error handling (new in 2.0). +The above error processing function will be invoked for all Oracle Database +errors raised, including those caught when using the :ref:`pool pre ping +` feature for those backends that rely upon +disconnect error handling (new in 2.0). .. seealso:: @@ -760,4 +762,3 @@ API Documentation - Available Pool Implementations .. autoclass:: _ConnectionFairy .. autoclass:: _ConnectionRecord - diff --git a/doc/build/core/type_basics.rst b/doc/build/core/type_basics.rst index f3817fe0c99..817bca601aa 100644 --- a/doc/build/core/type_basics.rst +++ b/doc/build/core/type_basics.rst @@ -63,9 +63,9 @@ not every backend has a real "boolean" datatype; some make use of integers or BIT values 0 and 1, some have boolean literal constants ``true`` and ``false`` while others dont. For this datatype, :class:`_types.Boolean` may render ``BOOLEAN`` on a backend such as PostgreSQL, ``BIT`` on the -MySQL backend and ``SMALLINT`` on Oracle. As data is sent and received -from the database using this type, based on the dialect in use it may be -interpreting Python numeric or boolean values. 
+MySQL backend and ``SMALLINT`` on Oracle Database. As data is sent and +received from the database using this type, based on the dialect in use it +may be interpreting Python numeric or boolean values. The typical SQLAlchemy application will likely wish to use primarily "CamelCase" types in the general case, as they will generally provide the best @@ -336,5 +336,3 @@ its exact name in DDL with ``CREATE TABLE`` is issued. .. autoclass:: VARCHAR - - diff --git a/doc/build/dialects/oracle.rst b/doc/build/dialects/oracle.rst index 8187e714798..b3d44858ced 100644 --- a/doc/build/dialects/oracle.rst +++ b/doc/build/dialects/oracle.rst @@ -5,12 +5,12 @@ Oracle .. automodule:: sqlalchemy.dialects.oracle.base -Oracle Data Types ------------------ +Oracle Database Data Types +-------------------------- -As with all SQLAlchemy dialects, all UPPERCASE types that are known to be -valid with Oracle are importable from the top level dialect, whether -they originate from :mod:`sqlalchemy.types` or from the local dialect:: +As with all SQLAlchemy dialects, all UPPERCASE types that are known to be valid +with Oracle Database are importable from the top level dialect, whether they +originate from :mod:`sqlalchemy.types` or from the local dialect:: from sqlalchemy.dialects.oracle import ( BFILE, @@ -36,7 +36,7 @@ they originate from :mod:`sqlalchemy.types` or from the local dialect:: .. versionadded:: 1.2.19 Added :class:`_types.NCHAR` to the list of datatypes exported by the Oracle dialect. -Types which are specific to Oracle, or have Oracle-specific +Types which are specific to Oracle Database, or have Oracle-specific construction arguments, are as follows: .. currentmodule:: sqlalchemy.dialects.oracle @@ -80,13 +80,6 @@ construction arguments, are as follows: .. autoclass:: TIMESTAMP :members: __init__ -.. _cx_oracle: - -cx_Oracle ---------- - -.. automodule:: sqlalchemy.dialects.oracle.cx_oracle - .. _oracledb: python-oracledb @@ -94,3 +87,9 @@ python-oracledb .. automodule:: sqlalchemy.dialects.oracle.oracledb +.. _cx_oracle: + +cx_Oracle +--------- + +.. automodule:: sqlalchemy.dialects.oracle.cx_oracle diff --git a/doc/build/glossary.rst b/doc/build/glossary.rst index a7422bd97ba..1d8ac29aabe 100644 --- a/doc/build/glossary.rst +++ b/doc/build/glossary.rst @@ -298,7 +298,7 @@ Glossary A key limitation of the ``cursor.executemany()`` method as used with all known DBAPIs is that the ``cursor`` is not configured to return rows when this method is used. For **most** backends (a notable - exception being the cx_Oracle, / OracleDB DBAPIs), this means that + exception being the python-oracledb / cx_Oracle DBAPIs), this means that statements like ``INSERT..RETURNING`` typically cannot be used with ``cursor.executemany()`` directly, since DBAPIs typically do not aggregate the single row from each INSERT execution together. @@ -1158,16 +1158,17 @@ Glossary values as they are not included otherwise (but note any series of columns or SQL expressions can be placed into RETURNING, not just default-value columns). - The backends that currently support - RETURNING or a similar construct are PostgreSQL, SQL Server, Oracle, - and Firebird. The PostgreSQL and Firebird implementations are generally - full featured, whereas the implementations of SQL Server and Oracle - have caveats. On SQL Server, the clause is known as "OUTPUT INSERTED" - for INSERT and UPDATE statements and "OUTPUT DELETED" for DELETE statements; - the key caveat is that triggers are not supported in conjunction with this - keyword. 
On Oracle, it is known as "RETURNING...INTO", and requires that the - value be placed into an OUT parameter, meaning not only is the syntax awkward, - but it can also only be used for one row at a time. + The backends that currently support RETURNING or a similar construct + are PostgreSQL, SQL Server, Oracle Database, and Firebird. The + PostgreSQL and Firebird implementations are generally full featured, + whereas the implementations of SQL Server and Oracle Database have + caveats. On SQL Server, the clause is known as "OUTPUT INSERTED" for + INSERT and UPDATE statements and "OUTPUT DELETED" for DELETE + statements; the key caveat is that triggers are not supported in + conjunction with this keyword. In Oracle Database, it is known as + "RETURNING...INTO", and requires that the value be placed into an OUT + parameter, meaning not only is the syntax awkward, but it can also only + be used for one row at a time. SQLAlchemy's :meth:`.UpdateBase.returning` system provides a layer of abstraction on top of the RETURNING systems of these backends to provide a consistent @@ -1702,4 +1703,3 @@ Glossary .. seealso:: :ref:`session_object_states` - diff --git a/doc/build/index.rst b/doc/build/index.rst index 8214b867311..44914b0bb54 100644 --- a/doc/build/index.rst +++ b/doc/build/index.rst @@ -151,7 +151,7 @@ SQLAlchemy Documentation :doc:`PostgreSQL ` | :doc:`MySQL and MariaDB ` | :doc:`SQLite ` | - :doc:`Oracle ` | + :doc:`Oracle Database ` | :doc:`Microsoft SQL Server ` :doc:`More Dialects ... ` @@ -171,4 +171,3 @@ SQLAlchemy Documentation * :doc:`Error Message Guide ` - Explanations of many SQLAlchemy Errors * :doc:`Complete table of of contents ` * :ref:`Index ` - diff --git a/doc/build/orm/persistence_techniques.rst b/doc/build/orm/persistence_techniques.rst index c7741ef9c2f..a877fcd0e0e 100644 --- a/doc/build/orm/persistence_techniques.rst +++ b/doc/build/orm/persistence_techniques.rst @@ -37,7 +37,7 @@ from the database. The feature also has conditional support to work in conjunction with primary key columns. For backends that have RETURNING support -(including Oracle, SQL Server, MariaDB 10.5, SQLite 3.35) a +(including Oracle Database, SQL Server, MariaDB 10.5, SQLite 3.35) a SQL expression may be assigned to a primary key column as well. This allows both the SQL expression to be evaluated, as well as allows any server side triggers that modify the primary key value on INSERT, to be successfully @@ -274,7 +274,7 @@ answered are, 1. is this column part of the primary key or not, and 2. does the database support RETURNING or an equivalent, such as "OUTPUT inserted"; these are SQL phrases which return a server-generated value at the same time as the INSERT or UPDATE statement is invoked. RETURNING is currently supported -by PostgreSQL, Oracle, MariaDB 10.5, SQLite 3.35, and SQL Server. +by PostgreSQL, Oracle Database, MariaDB 10.5, SQLite 3.35, and SQL Server. Case 1: non primary key, RETURNING or equivalent is supported ------------------------------------------------------------- @@ -438,7 +438,7 @@ PostgreSQL SERIAL, these types are handled automatically by the Core; databases include functions for fetching the "last inserted id" where RETURNING is not supported, and where RETURNING is supported SQLAlchemy will use that. 
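As a minimal illustration of the RETURNING behavior described above (the table
and values here are illustrative, not part of this patch), a Core INSERT can
ask the database to hand back the server-generated primary key in the same
round trip::

    from sqlalchemy import Column, Integer, MetaData, String, Table, insert

    metadata_obj = MetaData()

    # illustrative table definition
    user_table = Table(
        "user_account",
        metadata_obj,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
    )

    # on RETURNING-capable backends (PostgreSQL, Oracle Database, SQL Server,
    # MariaDB 10.5, SQLite 3.35) the new primary key is delivered with the
    # INSERT itself, with no second SELECT required
    stmt = insert(user_table).values(name="spongebob").returning(user_table.c.id)

Executing ``stmt`` with :meth:`_engine.Connection.execute` returns a result
whose single row contains the newly generated ``id`` value.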
-For example, using Oracle with a column marked as :class:`.Identity`, +For example, using Oracle Database with a column marked as :class:`.Identity`, RETURNING is used automatically to fetch the new primary key value:: class MyOracleModel(Base): @@ -447,7 +447,7 @@ RETURNING is used automatically to fetch the new primary key value:: id: Mapped[int] = mapped_column(Identity(), primary_key=True) data: Mapped[str] = mapped_column(String(50)) -The INSERT for a model as above on Oracle looks like: +The INSERT for a model as above on Oracle Database looks like: .. sourcecode:: sql @@ -460,7 +460,7 @@ place and the new value will be returned immediately. For non-integer values generated by server side functions or triggers, as well as for integer values that come from constructs outside the table itself, including explicit sequences and triggers, the server default generation must -be marked in the table metadata. Using Oracle as the example again, we can +be marked in the table metadata. Using Oracle Database as the example again, we can illustrate a similar table as above naming an explicit sequence using the :class:`.Sequence` construct:: @@ -470,7 +470,7 @@ illustrate a similar table as above naming an explicit sequence using the id: Mapped[int] = mapped_column(Sequence("my_oracle_seq"), primary_key=True) data: Mapped[str] = mapped_column(String(50)) -An INSERT for this version of the model on Oracle would look like: +An INSERT for this version of the model on Oracle Database would look like: .. sourcecode:: sql diff --git a/doc/build/orm/queryguide/relationships.rst b/doc/build/orm/queryguide/relationships.rst index bf6f692b98a..d63ae67ac74 100644 --- a/doc/build/orm/queryguide/relationships.rst +++ b/doc/build/orm/queryguide/relationships.rst @@ -828,10 +828,10 @@ will JOIN across all three tables to match rows from one side to the other. Things to know about this kind of loading include: * The strategy emits a SELECT for up to 500 parent primary key values at a - time, as the primary keys are rendered into a large IN expression in the - SQL statement. Some databases like Oracle have a hard limit on how large - an IN expression can be, and overall the size of the SQL string shouldn't - be arbitrarily large. + time, as the primary keys are rendered into a large IN expression in the SQL + statement. Some databases like Oracle Database have a hard limit on how + large an IN expression can be, and overall the size of the SQL string + shouldn't be arbitrarily large. * As "selectin" loading relies upon IN, for a mapping with composite primary keys, it must use the "tuple" form of IN, which looks like ``WHERE diff --git a/doc/build/orm/relationship_persistence.rst b/doc/build/orm/relationship_persistence.rst index 9a5a036c695..ba686d691d1 100644 --- a/doc/build/orm/relationship_persistence.rst +++ b/doc/build/orm/relationship_persistence.rst @@ -35,12 +35,13 @@ Or: 1 'somewidget' 5 5 'someentry' 1 In the first case, a row points to itself. Technically, a database that uses -sequences such as PostgreSQL or Oracle can INSERT the row at once using a -previously generated value, but databases which rely upon autoincrement-style -primary key identifiers cannot. The :func:`~sqlalchemy.orm.relationship` -always assumes a "parent/child" model of row population during flush, so -unless you are populating the primary key/foreign key columns directly, -:func:`~sqlalchemy.orm.relationship` needs to use two statements. 
+sequences such as PostgreSQL or Oracle Database can INSERT the row at once +using a previously generated value, but databases which rely upon +autoincrement-style primary key identifiers cannot. The +:func:`~sqlalchemy.orm.relationship` always assumes a "parent/child" model of +row population during flush, so unless you are populating the primary +key/foreign key columns directly, :func:`~sqlalchemy.orm.relationship` needs to +use two statements. In the second case, the "widget" row must be inserted before any referring "entry" rows, but then the "favorite_entry_id" column of that "widget" row @@ -243,7 +244,7 @@ by emitting an UPDATE statement against foreign key columns that immediately reference a primary key column whose value has changed. The primary platforms without referential integrity features are MySQL when the ``MyISAM`` storage engine is used, and SQLite when the -``PRAGMA foreign_keys=ON`` pragma is not used. The Oracle database also +``PRAGMA foreign_keys=ON`` pragma is not used. Oracle Database also has no support for ``ON UPDATE CASCADE``, but because it still enforces referential integrity, needs constraints to be marked as deferrable so that SQLAlchemy can emit UPDATE statements. @@ -297,7 +298,7 @@ Key limitations of ``passive_updates=False`` include: map for objects that may be referencing the one with a mutating primary key, not throughout the database. -As virtually all databases other than Oracle now support ``ON UPDATE CASCADE``, -it is highly recommended that traditional ``ON UPDATE CASCADE`` support be used -in the case that natural and mutable primary key values are in use. - +As virtually all databases other than Oracle Database now support ``ON UPDATE +CASCADE``, it is highly recommended that traditional ``ON UPDATE CASCADE`` +support be used in the case that natural and mutable primary key values are in +use. diff --git a/doc/build/orm/versioning.rst b/doc/build/orm/versioning.rst index 87865917cdf..7f209e24b26 100644 --- a/doc/build/orm/versioning.rst +++ b/doc/build/orm/versioning.rst @@ -207,7 +207,8 @@ missed version counters: It is *strongly recommended* that server side version counters only be used when absolutely necessary and only on backends that support :term:`RETURNING`, -currently PostgreSQL, Oracle, MariaDB 10.5, SQLite 3.35, and SQL Server. +currently PostgreSQL, Oracle Database, MariaDB 10.5, SQLite 3.35, and SQL +Server. Programmatic or Conditional Version Counters diff --git a/doc/build/tutorial/data_select.rst b/doc/build/tutorial/data_select.rst index d9d51c7f51f..5052a5bae32 100644 --- a/doc/build/tutorial/data_select.rst +++ b/doc/build/tutorial/data_select.rst @@ -1387,8 +1387,8 @@ At the same time, a relatively small set of extremely common SQL functions such as :class:`_functions.count`, :class:`_functions.now`, :class:`_functions.max`, :class:`_functions.concat` include pre-packaged versions of themselves which provide for proper typing information as well as backend-specific SQL -generation in some cases. The example below contrasts the SQL generation -that occurs for the PostgreSQL dialect compared to the Oracle dialect for +generation in some cases. The example below contrasts the SQL generation that +occurs for the PostgreSQL dialect compared to the Oracle Database dialect for the :class:`_functions.now` function:: >>> from sqlalchemy.dialects import postgresql @@ -1683,10 +1683,10 @@ Table-Valued Functions Table-valued SQL functions support a scalar representation that contains named sub-elements. 
Often used for JSON and ARRAY-oriented functions as well as functions like ``generate_series()``, the table-valued function is specified in -the FROM clause, and is then referenced as a table, or sometimes even as -a column. Functions of this form are prominent within the PostgreSQL database, +the FROM clause, and is then referenced as a table, or sometimes even as a +column. Functions of this form are prominent within the PostgreSQL database, however some forms of table valued functions are also supported by SQLite, -Oracle, and SQL Server. +Oracle Database, and SQL Server. .. seealso:: @@ -1735,9 +1735,9 @@ towards as ``value``, and then selected two of its three rows. Column Valued Functions - Table Valued Function as a Scalar Column ################################################################## -A special syntax supported by PostgreSQL and Oracle is that of referring -towards a function in the FROM clause, which then delivers itself as a -single column in the columns clause of a SELECT statement or other column +A special syntax supported by PostgreSQL and Oracle Database is that of +referring towards a function in the FROM clause, which then delivers itself as +a single column in the columns clause of a SELECT statement or other column expression context. PostgreSQL makes great use of this syntax for such functions as ``json_array_elements()``, ``json_object_keys()``, ``json_each_text()``, ``json_each()``, etc. @@ -1752,8 +1752,8 @@ to a :class:`_functions.Function` construct:: {printsql}SELECT x FROM json_array_elements(:json_array_elements_1) AS x -The "column valued" form is also supported by the Oracle dialect, where -it is usable for custom SQL functions:: +The "column valued" form is also supported by the Oracle Database dialects, +where it is usable for custom SQL functions:: >>> from sqlalchemy.dialects import oracle >>> stmt = select(func.scalar_strings(5).column_valued("s")) diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index b03fbc9d06b..ce20a6a11f0 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -9,7 +9,7 @@ r""" .. dialect:: oracle - :name: Oracle + :name: Oracle Database :normal_support: 11+ :best_effort: 9+ @@ -17,17 +17,17 @@ Auto Increment Behavior ----------------------- -SQLAlchemy Table objects which include integer primary keys are usually -assumed to have "autoincrementing" behavior, meaning they can generate their -own primary key values upon INSERT. For use within Oracle, two options are -available, which are the use of IDENTITY columns (Oracle 12 and above only) -or the association of a SEQUENCE with the column. +SQLAlchemy Table objects which include integer primary keys are usually assumed +to have "autoincrementing" behavior, meaning they can generate their own +primary key values upon INSERT. For use within Oracle Database, two options are +available, which are the use of IDENTITY columns (Oracle Database 12 and above +only) or the association of a SEQUENCE with the column. 
-Specifying GENERATED AS IDENTITY (Oracle 12 and above) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Specifying GENERATED AS IDENTITY (Oracle Database 12 and above) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Starting from version 12 Oracle can make use of identity columns using -the :class:`_sql.Identity` to specify the autoincrementing behavior:: +Starting from version 12, Oracle Database can make use of identity columns +using the :class:`_sql.Identity` to specify the autoincrementing behavior:: t = Table('mytable', metadata, Column('id', Integer, Identity(start=3), primary_key=True), @@ -46,23 +46,22 @@ The :class:`_schema.Identity` object support many options to control the "autoincrementing" behavior of the column, like the starting value, the -incrementing value, etc. -In addition to the standard options, Oracle supports setting -:paramref:`_schema.Identity.always` to ``None`` to use the default -generated mode, rendering GENERATED AS IDENTITY in the DDL. It also supports +incrementing value, etc. In addition to the standard options, Oracle Database +supports setting :paramref:`_schema.Identity.always` to ``None`` to use the +default generated mode, rendering GENERATED AS IDENTITY in the DDL. It also supports setting :paramref:`_schema.Identity.on_null` to ``True`` to specify ON NULL in conjunction with a 'BY DEFAULT' identity column. -Using a SEQUENCE (all Oracle versions) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Using a SEQUENCE (all Oracle Database versions) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Older version of Oracle had no "autoincrement" -feature, SQLAlchemy relies upon sequences to produce these values. With the -older Oracle versions, *a sequence must always be explicitly specified to -enable autoincrement*. This is divergent with the majority of documentation -examples which assume the usage of an autoincrement-capable database. To -specify sequences, use the sqlalchemy.schema.Sequence object which is passed -to a Column construct:: +Older version of Oracle Database had no "autoincrement" feature: SQLAlchemy +relies upon sequences to produce these values. With the older Oracle Database +versions, *a sequence must always be explicitly specified to enable +autoincrement*. This is divergent with the majority of documentation examples +which assume the usage of an autoincrement-capable database. To specify +sequences, use the sqlalchemy.schema.Sequence object which is passed to a +Column construct:: t = Table('mytable', metadata, Column('id', Integer, Sequence('id_seq', start=1), primary_key=True), @@ -85,9 +84,9 @@ Transaction Isolation Level / Autocommit ---------------------------------------- -The Oracle database supports "READ COMMITTED" and "SERIALIZABLE" modes of -isolation. The AUTOCOMMIT isolation level is also supported by the cx_Oracle -dialect. +Oracle Database supports "READ COMMITTED" and "SERIALIZABLE" modes of +isolation. The AUTOCOMMIT isolation level is also supported by the +python-oracledb and cx_Oracle dialects. To set using per-connection execution options:: @@ -96,10 +95,9 @@ isolation_level="AUTOCOMMIT" ) -For ``READ COMMITTED`` and ``SERIALIZABLE``, the Oracle dialect sets the -level at the session level using ``ALTER SESSION``, which is reverted back -to its default setting when the connection is returned to the connection -pool. 
+For ``READ COMMITTED`` and ``SERIALIZABLE``, the Oracle Database dialects sets +the level at the session level using ``ALTER SESSION``, which is reverted back +to its default setting when the connection is returned to the connection pool. Valid values for ``isolation_level`` include: @@ -109,28 +107,28 @@ .. note:: The implementation for the :meth:`_engine.Connection.get_isolation_level` method as implemented by the - Oracle dialect necessarily forces the start of a transaction using the - Oracle LOCAL_TRANSACTION_ID function; otherwise no level is normally - readable. + Oracle Database dialects necessarily force the start of a transaction using the + Oracle Database DBMS_TRANSACTION.LOCAL_TRANSACTION_ID function; otherwise no + level is normally readable. Additionally, the :meth:`_engine.Connection.get_isolation_level` method will raise an exception if the ``v$transaction`` view is not available due to - permissions or other reasons, which is a common occurrence in Oracle + permissions or other reasons, which is a common occurrence in Oracle Database installations. - The cx_Oracle dialect attempts to call the + The python-oracledb and cx_Oracle dialects attempt to call the :meth:`_engine.Connection.get_isolation_level` method when the dialect makes its first connection to the database in order to acquire the "default"isolation level. This default level is necessary so that the level can be reset on a connection after it has been temporarily modified using - :meth:`_engine.Connection.execution_options` method. In the common event + :meth:`_engine.Connection.execution_options` method. In the common event that the :meth:`_engine.Connection.get_isolation_level` method raises an exception due to ``v$transaction`` not being readable as well as any other database-related failure, the level is assumed to be "READ COMMITTED". No warning is emitted for this initial first-connect condition as it is expected to be a common restriction on Oracle databases. -.. versionadded:: 1.3.16 added support for AUTOCOMMIT to the cx_oracle dialect +.. versionadded:: 1.3.16 added support for AUTOCOMMIT to the cx_Oracle dialect as well as the notion of a default isolation level .. versionadded:: 1.3.21 Added support for SERIALIZABLE as well as live @@ -148,59 +146,56 @@ Identifier Casing ----------------- -In Oracle, the data dictionary represents all case insensitive identifier -names using UPPERCASE text. SQLAlchemy on the other hand considers an -all-lower case identifier name to be case insensitive. The Oracle dialect -converts all case insensitive identifiers to and from those two formats during -schema level communication, such as reflection of tables and indexes. Using -an UPPERCASE name on the SQLAlchemy side indicates a case sensitive +In Oracle Database, the data dictionary represents all case insensitive +identifier names using UPPERCASE text. SQLAlchemy on the other hand considers +an all-lower case identifier name to be case insensitive. The Oracle Database +dialects convert all case insensitive identifiers to and from those two formats +during schema level communication, such as reflection of tables and indexes. +Using an UPPERCASE name on the SQLAlchemy side indicates a case sensitive identifier, and SQLAlchemy will quote the name - this will cause mismatches -against data dictionary data received from Oracle, so unless identifier names -have been truly created as case sensitive (i.e. using quoted names), all -lowercase names should be used on the SQLAlchemy side. 
+against data dictionary data received from Oracle Database, so unless +identifier names have been truly created as case sensitive (i.e. using quoted +names), all lowercase names should be used on the SQLAlchemy side. .. _oracle_max_identifier_lengths: -Max Identifier Lengths ----------------------- +Maximum Identifier Lengths +-------------------------- -Oracle has changed the default max identifier length as of Oracle Server -version 12.2. Prior to this version, the length was 30, and for 12.2 and -greater it is now 128. This change impacts SQLAlchemy in the area of -generated SQL label names as well as the generation of constraint names, -particularly in the case where the constraint naming convention feature -described at :ref:`constraint_naming_conventions` is being used. - -To assist with this change and others, Oracle includes the concept of a -"compatibility" version, which is a version number that is independent of the -actual server version in order to assist with migration of Oracle databases, -and may be configured within the Oracle server itself. This compatibility -version is retrieved using the query ``SELECT value FROM v$parameter WHERE -name = 'compatible';``. -The SQLAlchemy Oracle dialect, when tasked with determining the default max -identifier length, will use the ``max_identifier_length`` attribute available -in the connection of the oracledb driver since version 2.5. When using an older -version or cx_oracle SQLAlchemy will instead attempted to use the query -mentioned above upon first connect in order to determine the effective -compatibility version of the server, which determines what the maximum allowed -identifier length is for the server. If the table is not available, the server -version information is used instead. - -As of SQLAlchemy 1.4, the default max identifier length for the Oracle dialect -is 128 characters. Upon first connect, the compatibility version is detected -and if it is less than Oracle version 12.2, the max identifier length is -changed to be 30 characters. In all cases, setting the +SQLAlchemy is sensitive to the maximum identifier length supported by Oracle +Database. This affects generated SQL label names as well as the generation of +constraint names, particularly in the case where the constraint naming +convention feature described at :ref:`constraint_naming_conventions` is being +used. + +Oracle Database 12.2 increased the default maximum identifier length from 30 to +128. As of SQLAlchemy 1.4, the default maximum identifier length for the Oracle +dialects is 128 characters. Upon first connection, the maximum length actually +supported by the database is obtained. In all cases, setting the :paramref:`_sa.create_engine.max_identifier_length` parameter will bypass this change and the value given will be used as is:: engine = create_engine( - "oracle+cx_oracle://scott:tiger@oracle122", + "oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1", max_identifier_length=30) +If :paramref:`_sa.create_engine.max_identifier_length` is not set, the oracledb +dialect internally uses the ``max_identifier_length`` attribute available on +driver connections since python-oracledb version 2.5. When using an older +driver version, or using the cx_Oracle dialect, SQLAlchemy will instead attempt +to use the query ``SELECT value FROM v$parameter WHERE name = 'compatible'`` +upon first connect in order to determine the effective compatibility version of +the database. 
The "compatibility" version is a version number that is +independent of the actual database version. It is used to assist database +migration. It is configured by an Oracle Database initialization parameter. The +compatibility version then determines the maximum allowed identifier length for +the database. If the V$ view is not available, the database version information +is used instead. + The maximum identifier length comes into play both when generating anonymized SQL labels in SELECT statements, but more crucially when generating constraint names from a naming convention. It is this area that has created the need for -SQLAlchemy to change this default conservatively. For example, the following +SQLAlchemy to change this default conservatively. For example, the following naming convention produces two very different constraint names based on the identifier length:: @@ -237,63 +232,62 @@ CREATE INDEX ix_some_column_name_1s_70cd ON t (some_column_name_1, some_column_name_2, some_column_name_3) -However with length=128, it becomes:: +However with length of 128, it becomes:: CREATE INDEX ix_some_column_name_1some_column_name_2some_column_name_3 ON t (some_column_name_1, some_column_name_2, some_column_name_3) -Applications which have run versions of SQLAlchemy prior to 1.4 on an Oracle -server version 12.2 or greater are therefore subject to the scenario of a +Applications which have run versions of SQLAlchemy prior to 1.4 on Oracle +Database version 12.2 or greater are therefore subject to the scenario of a database migration that wishes to "DROP CONSTRAINT" on a name that was previously generated with the shorter length. This migration will fail when the identifier length is changed without the name of the index or constraint first being adjusted. Such applications are strongly advised to make use of -:paramref:`_sa.create_engine.max_identifier_length` -in order to maintain control -of the generation of truncated names, and to fully review and test all database -migrations in a staging environment when changing this value to ensure that the -impact of this change has been mitigated. +:paramref:`_sa.create_engine.max_identifier_length` in order to maintain +control of the generation of truncated names, and to fully review and test all +database migrations in a staging environment when changing this value to ensure +that the impact of this change has been mitigated. -.. versionchanged:: 1.4 the default max_identifier_length for Oracle is 128 - characters, which is adjusted down to 30 upon first connect if an older - version of Oracle server (compatibility version < 12.2) is detected. +.. versionchanged:: 1.4 the default max_identifier_length for Oracle Database + is 128 characters, which is adjusted down to 30 upon first connect if the + Oracle Database, or its compatibility setting, are lower than version 12.2. LIMIT/OFFSET/FETCH Support -------------------------- -Methods like :meth:`_sql.Select.limit` and :meth:`_sql.Select.offset` make -use of ``FETCH FIRST N ROW / OFFSET N ROWS`` syntax assuming -Oracle 12c or above, and assuming the SELECT statement is not embedded within -a compound statement like UNION. This syntax is also available directly by using -the :meth:`_sql.Select.fetch` method. - -.. versionchanged:: 2.0 the Oracle dialect now uses - ``FETCH FIRST N ROW / OFFSET N ROWS`` for all - :meth:`_sql.Select.limit` and :meth:`_sql.Select.offset` usage including - within the ORM and legacy :class:`_orm.Query`. 
To force the legacy - behavior using window functions, specify the ``enable_offset_fetch=False`` - dialect parameter to :func:`_sa.create_engine`. - -The use of ``FETCH FIRST / OFFSET`` may be disabled on any Oracle version -by passing ``enable_offset_fetch=False`` to :func:`_sa.create_engine`, which -will force the use of "legacy" mode that makes use of window functions. +Methods like :meth:`_sql.Select.limit` and :meth:`_sql.Select.offset` make use +of ``FETCH FIRST N ROW / OFFSET N ROWS`` syntax assuming Oracle Database 12c or +above, and assuming the SELECT statement is not embedded within a compound +statement like UNION. This syntax is also available directly by using the +:meth:`_sql.Select.fetch` method. + +.. versionchanged:: 2.0 the Oracle Database dialects now use ``FETCH FIRST N + ROW / OFFSET N ROWS`` for all :meth:`_sql.Select.limit` and + :meth:`_sql.Select.offset` usage including within the ORM and legacy + :class:`_orm.Query`. To force the legacy behavior using window functions, + specify the ``enable_offset_fetch=False`` dialect parameter to + :func:`_sa.create_engine`. + +The use of ``FETCH FIRST / OFFSET`` may be disabled on any Oracle Database +version by passing ``enable_offset_fetch=False`` to :func:`_sa.create_engine`, +which will force the use of "legacy" mode that makes use of window functions. This mode is also selected automatically when using a version of Oracle -prior to 12c. +Database prior to 12c. -When using legacy mode, or when a :class:`.Select` statement -with limit/offset is embedded in a compound statement, an emulated approach for -LIMIT / OFFSET based on window functions is used, which involves creation of a -subquery using ``ROW_NUMBER`` that is prone to performance issues as well as -SQL construction issues for complex statements. However, this approach is -supported by all Oracle versions. See notes below. +When using legacy mode, or when a :class:`.Select` statement with limit/offset +is embedded in a compound statement, an emulated approach for LIMIT / OFFSET +based on window functions is used, which involves creation of a subquery using +``ROW_NUMBER`` that is prone to performance issues as well as SQL construction +issues for complex statements. However, this approach is supported by all +Oracle Database versions. See notes below. Notes on LIMIT / OFFSET emulation (when fetch() method cannot be used) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If using :meth:`_sql.Select.limit` and :meth:`_sql.Select.offset`, or with the ORM the :meth:`_orm.Query.limit` and :meth:`_orm.Query.offset` methods on an -Oracle version prior to 12c, the following notes apply: +Oracle Database version prior to 12c, the following notes apply: * SQLAlchemy currently makes use of ROWNUM to achieve LIMIT/OFFSET; the exact methodology is taken from @@ -304,10 +298,11 @@ to :func:`_sa.create_engine`. .. versionchanged:: 1.4 - The Oracle dialect renders limit/offset integer values using a "post - compile" scheme which renders the integer directly before passing the - statement to the cursor for execution. The ``use_binds_for_limits`` flag - no longer has an effect. + + The Oracle Database dialect renders limit/offset integer values using a + "post compile" scheme which renders the integer directly before passing + the statement to the cursor for execution. The ``use_binds_for_limits`` + flag no longer has an effect. .. 
seealso:: @@ -318,21 +313,21 @@ RETURNING Support ----------------- -The Oracle database supports RETURNING fully for INSERT, UPDATE and DELETE -statements that are invoked with a single collection of bound parameters -(that is, a ``cursor.execute()`` style statement; SQLAlchemy does not generally +Oracle Database supports RETURNING fully for INSERT, UPDATE and DELETE +statements that are invoked with a single collection of bound parameters (that +is, a ``cursor.execute()`` style statement; SQLAlchemy does not generally support RETURNING with :term:`executemany` statements). Multiple rows may be returned as well. -.. versionchanged:: 2.0 the Oracle backend has full support for RETURNING - on parity with other backends. +.. versionchanged:: 2.0 the Oracle Database backend has full support for + RETURNING on parity with other backends. ON UPDATE CASCADE ----------------- -Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based -solution is available at +Oracle Database doesn't have native ON UPDATE CASCADE functionality. A trigger +based solution is available at https://web.archive.org/web/20090317041251/https://asktom.oracle.com/tkyte/update_cascade/index.html When using the SQLAlchemy ORM, the ORM has limited ability to manually issue @@ -340,14 +335,14 @@ "deferrable=True, initially='deferred'" keyword arguments, and specify "passive_updates=False" on each relationship(). -Oracle 8 Compatibility ----------------------- +Oracle Database 8 Compatibility +------------------------------- -.. warning:: The status of Oracle 8 compatibility is not known for SQLAlchemy - 2.0. +.. warning:: The status of Oracle Database 8 compatibility is not known for + SQLAlchemy 2.0. -When Oracle 8 is detected, the dialect internally configures itself to the -following behaviors: +When Oracle Database 8 is detected, the dialect internally configures itself to +the following behaviors: * the use_ansi flag is set to False. This has the effect of converting all JOIN phrases into the WHERE clause, and in the case of LEFT OUTER JOIN @@ -372,11 +367,11 @@ some_table = Table('some_table', autoload_with=some_engine, oracle_resolve_synonyms=True) -When this flag is set, the given name (such as ``some_table`` above) will -be searched not just in the ``ALL_TABLES`` view, but also within the +When this flag is set, the given name (such as ``some_table`` above) will be +searched not just in the ``ALL_TABLES`` view, but also within the ``ALL_SYNONYMS`` view to see if this name is actually a synonym to another -name. If the synonym is located and refers to a DBLINK, the oracle dialect -knows how to locate the table's information using DBLINK syntax(e.g. +name. If the synonym is located and refers to a DBLINK, the Oracle Database +dialects know how to locate the table's information using DBLINK syntax(e.g. ``@dblink``). ``oracle_resolve_synonyms`` is accepted wherever reflection arguments are @@ -390,8 +385,8 @@ Constraint Reflection --------------------- -The Oracle dialect can return information about foreign key, unique, and -CHECK constraints, as well as indexes on tables. +The Oracle Database dialects can return information about foreign key, unique, +and CHECK constraints, as well as indexes on tables. Raw information regarding these constraints can be acquired using :meth:`_reflection.Inspector.get_foreign_keys`, @@ -399,7 +394,7 @@ :meth:`_reflection.Inspector.get_check_constraints`, and :meth:`_reflection.Inspector.get_indexes`. -.. 
versionchanged:: 1.2 The Oracle dialect can now reflect UNIQUE and +.. versionchanged:: 1.2 The Oracle Database dialect can now reflect UNIQUE and CHECK constraints. When using reflection at the :class:`_schema.Table` level, the @@ -409,29 +404,26 @@ Note the following caveats: * When using the :meth:`_reflection.Inspector.get_check_constraints` method, - Oracle - builds a special "IS NOT NULL" constraint for columns that specify - "NOT NULL". This constraint is **not** returned by default; to include - the "IS NOT NULL" constraints, pass the flag ``include_all=True``:: + Oracle Database builds a special "IS NOT NULL" constraint for columns that + specify "NOT NULL". This constraint is **not** returned by default; to + include the "IS NOT NULL" constraints, pass the flag ``include_all=True``:: from sqlalchemy import create_engine, inspect - engine = create_engine("oracle+cx_oracle://s:t@dsn") + engine = create_engine("oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1") inspector = inspect(engine) all_check_constraints = inspector.get_check_constraints( "some_table", include_all=True) -* in most cases, when reflecting a :class:`_schema.Table`, - a UNIQUE constraint will - **not** be available as a :class:`.UniqueConstraint` object, as Oracle - mirrors unique constraints with a UNIQUE index in most cases (the exception - seems to be when two or more unique constraints represent the same columns); - the :class:`_schema.Table` will instead represent these using - :class:`.Index` - with the ``unique=True`` flag set. +* in most cases, when reflecting a :class:`_schema.Table`, a UNIQUE constraint + will **not** be available as a :class:`.UniqueConstraint` object, as Oracle + Database mirrors unique constraints with a UNIQUE index in most cases (the + exception seems to be when two or more unique constraints represent the same + columns); the :class:`_schema.Table` will instead represent these using + :class:`.Index` with the ``unique=True`` flag set. -* Oracle creates an implicit index for the primary key of a table; this index - is **excluded** from all index results. +* Oracle Database creates an implicit index for the primary key of a table; + this index is **excluded** from all index results. * the list of columns reflected for an index will not include column names that start with SYS_NC. @@ -451,27 +443,27 @@ # exclude SYSAUX and SOME_TABLESPACE, but not SYSTEM e = create_engine( - "oracle+cx_oracle://scott:tiger@xe", + "oracle+oracledb://scott:tiger@localhost:1521/?service_name=freepdb1", exclude_tablespaces=["SYSAUX", "SOME_TABLESPACE"]) DateTime Compatibility ---------------------- -Oracle has no datatype known as ``DATETIME``, it instead has only ``DATE``, -which can actually store a date and time value. For this reason, the Oracle -dialect provides a type :class:`_oracle.DATE` which is a subclass of -:class:`.DateTime`. This type has no special behavior, and is only -present as a "marker" for this type; additionally, when a database column -is reflected and the type is reported as ``DATE``, the time-supporting +Oracle Database has no datatype known as ``DATETIME``, it instead has only +``DATE``, which can actually store a date and time value. For this reason, the +Oracle Database dialects provide a type :class:`_oracle.DATE` which is a +subclass of :class:`.DateTime`. 
This type has no special behavior, and is only +present as a "marker" for this type; additionally, when a database column is +reflected and the type is reported as ``DATE``, the time-supporting :class:`_oracle.DATE` type is used. .. _oracle_table_options: -Oracle Table Options --------------------- +Oracle Database Table Options +----------------------------- -The CREATE TABLE phrase supports the following options with Oracle -in conjunction with the :class:`_schema.Table` construct: +The CREATE TABLE phrase supports the following options with Oracle Database +dialects in conjunction with the :class:`_schema.Table` construct: * ``ON COMMIT``:: @@ -504,8 +496,8 @@ .. _oracle_index_options: -Oracle Specific Index Options ------------------------------ +Oracle Database Specific Index Options +-------------------------------------- Bitmap Indexes ~~~~~~~~~~~~~~ @@ -521,8 +513,8 @@ Index compression ~~~~~~~~~~~~~~~~~ -Oracle has a more efficient storage mode for indexes containing lots of -repeated values. Use the ``oracle_compress`` parameter to turn on key +Oracle Database has a more efficient storage mode for indexes containing lots +of repeated values. Use the ``oracle_compress`` parameter to turn on key compression:: Index('my_index', my_table.c.data, oracle_compress=True) @@ -720,16 +712,16 @@ def _generate_numeric( # https://www.oracletutorial.com/oracle-basics/oracle-float/ estimated_binary_precision = int(precision / 0.30103) raise exc.ArgumentError( - "Oracle FLOAT types use 'binary precision', which does " - "not convert cleanly from decimal 'precision'. Please " - "specify " - f"this type with a separate Oracle variant, such as " - f"{type_.__class__.__name__}(precision={precision})." + "Oracle Database FLOAT types use 'binary precision', " + "which does not convert cleanly from decimal " + "'precision'. Please specify " + "this type with a separate Oracle Database variant, such " + f"as {type_.__class__.__name__}(precision={precision})." f"with_variant(oracle.FLOAT" f"(binary_precision=" f"{estimated_binary_precision}), 'oracle'), so that the " - "Oracle specific 'binary_precision' may be specified " - "accurately." + "Oracle Database specific 'binary_precision' may be " + "specified accurately." ) else: precision = binary_precision @@ -958,13 +950,13 @@ def returning_clause( and not self.dialect._supports_update_returning_computed_cols ): util.warn( - "Computed columns don't work with Oracle UPDATE " + "Computed columns don't work with Oracle Database UPDATE " "statements that use RETURNING; the value of the column " "*before* the UPDATE takes place is returned. It is " - "advised to not use RETURNING with an Oracle computed " - "column. Consider setting implicit_returning to False on " - "the Table object in order to avoid implicit RETURNING " - "clauses from being generated for this Table." + "advised to not use RETURNING with an Oracle Database " + "computed column. Consider setting implicit_returning " + "to False on the Table object in order to avoid implicit " + "RETURNING clauses from being generated for this Table." ) if column.type._has_column_expression: col_expr = column.type.column_expression(column) @@ -988,7 +980,7 @@ def returning_clause( raise exc.InvalidRequestError( "Using explicit outparam() objects with " "UpdateBase.returning() in the same Core DML statement " - "is not supported in the Oracle dialect." + "is not supported in the Oracle Database dialects." 
) self._oracle_returning = True @@ -1009,7 +1001,7 @@ def returning_clause( return "RETURNING " + ", ".join(columns) + " INTO " + ", ".join(binds) def _row_limit_clause(self, select, **kw): - """ORacle 12c supports OFFSET/FETCH operators + """Oracle Database 12c supports OFFSET/FETCH operators Use it instead subquery with row_number """ @@ -1293,7 +1285,7 @@ def define_constraint_cascades(self, constraint): # https://web.archive.org/web/20090317041251/https://asktom.oracle.com/tkyte/update_cascade/index.html if constraint.onupdate is not None: util.warn( - "Oracle does not contain native UPDATE CASCADE " + "Oracle Database does not contain native UPDATE CASCADE " "functionality - onupdates will not be rendered for foreign " "keys. Consider using deferrable=True, initially='deferred' " "or triggers." @@ -1368,8 +1360,9 @@ def visit_computed_column(self, generated, **kw): ) if generated.persisted is True: raise exc.CompileError( - "Oracle computed columns do not support 'stored' persistence; " - "set the 'persisted' flag to None or False for Oracle support." + "Oracle Database computed columns do not support 'stored' " + "persistence; set the 'persisted' flag to None or False for " + "Oracle Database support." ) elif generated.persisted is False: text += " VIRTUAL" @@ -1487,8 +1480,8 @@ class OracleDialect(default.DefaultDialect): @util.deprecated_params( use_binds_for_limits=( "1.4", - "The ``use_binds_for_limits`` Oracle dialect parameter is " - "deprecated. The dialect now renders LIMIT /OFFSET integers " + "The ``use_binds_for_limits`` Oracle Database dialect parameter " + "is deprecated. The dialect now renders LIMIT / OFFSET integers " "inline in all cases using a post-compilation hook, so that the " "value is still represented by a 'bound parameter' on the Core " "Expression side.", diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index ed9b02d3fb1..babb916a602 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -7,13 +7,18 @@ # mypy: ignore-errors -r""" -.. dialect:: oracle+cx_oracle +r""".. dialect:: oracle+cx_oracle :name: cx-Oracle :dbapi: cx_oracle :connectstring: oracle+cx_oracle://user:pass@hostname:port[/dbname][?service_name=[&key=value&key=value...]] :url: https://oracle.github.io/python-cx_Oracle/ +Description +----------- + +cx_Oracle was the original driver for Oracle Database. It was superseded by +python-oracledb which should be used instead. + DSN vs. Hostname connections ----------------------------- @@ -23,27 +28,36 @@ Hostname Connections with Easy Connect Syntax ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Given a hostname, port and service name of the target Oracle Database, for -example from Oracle's `Easy Connect syntax -`_, -then connect in SQLAlchemy using the ``service_name`` query string parameter:: +Given a hostname, port and service name of the target database, for example +from Oracle Database's Easy Connect syntax then connect in SQLAlchemy using the +``service_name`` query string parameter:: - engine = create_engine("oracle+cx_oracle://scott:tiger@hostname:port/?service_name=myservice&encoding=UTF-8&nencoding=UTF-8") + engine = create_engine("oracle+cx_oracle://scott:tiger@hostname:port?service_name=myservice&encoding=UTF-8&nencoding=UTF-8") -The `full Easy Connect syntax -`_ -is not supported. Instead, use a ``tnsnames.ora`` file and connect using a -DSN. 
+Note that the default driver value for encoding and nencoding was changed to +“UTF-8” in cx_Oracle 8.0 so these parameters can be omitted when using that +version, or later. -Connections with tnsnames.ora or Oracle Cloud -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +To use a full Easy Connect string, pass it as the ``dsn`` key value in a +:paramref:`_sa.create_engine.connect_args` dictionary:: -Alternatively, if no port, database name, or ``service_name`` is provided, the -dialect will use an Oracle DSN "connection string". This takes the "hostname" -portion of the URL as the data source name. For example, if the -``tnsnames.ora`` file contains a `Net Service Name -`_ -of ``myalias`` as below:: + import cx_Oracle + e = create_engine( + "oracle+cx_oracle://@", + connect_args={ + "user": "scott", + "password": "tiger", + "dsn": "hostname:port/myservice?transport_connect_timeout=30&expire_time=60" + } + ) + +Connections with tnsnames.ora or to Oracle Autonomous Database +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Alternatively, if no port, database name, or service name is provided, the +dialect will use an Oracle Database DSN "connection string". This takes the +"hostname" portion of the URL as the data source name. For example, if the +``tnsnames.ora`` file contains a TNS Alias of ``myalias`` as below:: myalias = (DESCRIPTION = @@ -58,19 +72,20 @@ hostname portion of the URL, without specifying a port, database name or ``service_name``:: - engine = create_engine("oracle+cx_oracle://scott:tiger@myalias/?encoding=UTF-8&nencoding=UTF-8") + engine = create_engine("oracle+cx_oracle://scott:tiger@myalias") -Users of Oracle Cloud should use this syntax and also configure the cloud +Users of Oracle Autonomous Database should use this syntax. If the database is +configured for mutural TLS ("mTLS"), then you must also configure the cloud wallet as shown in cx_Oracle documentation `Connecting to Autononmous Databases `_. SID Connections ^^^^^^^^^^^^^^^ -To use Oracle's obsolete SID connection syntax, the SID can be passed in a -"database name" portion of the URL as below:: +To use Oracle Database's obsolete System Identifier connection syntax, the SID +can be passed in a "database name" portion of the URL:: - engine = create_engine("oracle+cx_oracle://scott:tiger@hostname:1521/dbname?encoding=UTF-8&nencoding=UTF-8") + engine = create_engine("oracle+cx_oracle://scott:tiger@hostname:port/dbname") Above, the DSN passed to cx_Oracle is created by ``cx_Oracle.makedsn()`` as follows:: @@ -79,17 +94,22 @@ >>> cx_Oracle.makedsn("hostname", 1521, sid="dbname") '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=hostname)(PORT=1521))(CONNECT_DATA=(SID=dbname)))' +Note that although the SQLAlchemy syntax ``hostname:port/dbname`` looks like +Oracle's Easy Connect syntax it is different. It uses a SID in place of the +service name required by Easy Connect. The Easy Connect syntax does not +support SIDs. + Passing cx_Oracle connect arguments ----------------------------------- -Additional connection arguments can usually be passed via the URL -query string; particular symbols like ``cx_Oracle.SYSDBA`` are intercepted -and converted to the correct symbol:: +Additional connection arguments can usually be passed via the URL query string; +particular symbols like ``SYSDBA`` are intercepted and converted to the correct +symbol:: e = create_engine( "oracle+cx_oracle://user:pass@dsn?encoding=UTF-8&nencoding=UTF-8&mode=SYSDBA&events=true") -.. 
versionchanged:: 1.3 the cx_oracle dialect now accepts all argument names +.. versionchanged:: 1.3 the cx_Oracle dialect now accepts all argument names within the URL string itself, to be passed to the cx_Oracle DBAPI. As was the case earlier but not correctly documented, the :paramref:`_sa.create_engine.connect_args` parameter also accepts all @@ -110,9 +130,9 @@ } ) -Note that the default value for ``encoding`` and ``nencoding`` was changed to -"UTF-8" in cx_Oracle 8.0 so these parameters can be omitted when using that -version, or later. +Note that the default driver value for ``encoding`` and ``nencoding`` was +changed to "UTF-8" in cx_Oracle 8.0 so these parameters can be omitted when +using that version, or later. Options consumed by the SQLAlchemy cx_Oracle dialect outside of the driver -------------------------------------------------------------------------- @@ -130,8 +150,7 @@ to ``None``, indicating that the driver default should be used (typically the value is 100). This setting controls how many rows are buffered when fetching rows, and can have a significant effect on performance when - modified. The setting is used for both ``cx_Oracle`` as well as - ``oracledb``. + modified. .. versionchanged:: 2.0.26 - changed the default value from 50 to None, to use the default value of the driver itself. @@ -147,10 +166,16 @@ Using cx_Oracle SessionPool --------------------------- -The cx_Oracle library provides its own connection pool implementation that may -be used in place of SQLAlchemy's pooling functionality. This can be achieved -by using the :paramref:`_sa.create_engine.creator` parameter to provide a -function that returns a new connection, along with setting +The cx_Oracle driver provides its own connection pool implementation that may +be used in place of SQLAlchemy's pooling functionality. The driver pool +supports Oracle Database features such dead connection detection, connection +draining for planned database downtime, support for Oracle Application +Continuity and Transparent Application Continuity, and gives support for +Database Resident Connection Pooling (DRCP). + +Using the driver pool can be achieved by using the +:paramref:`_sa.create_engine.creator` parameter to provide a function that +returns a new connection, along with setting :paramref:`_sa.create_engine.pool_class` to ``NullPool`` to disable SQLAlchemy's pooling:: @@ -160,8 +185,8 @@ pool = cx_Oracle.SessionPool( user="scott", password="tiger", dsn="orclpdb", - min=2, max=5, increment=1, threaded=True, - encoding="UTF-8", nencoding="UTF-8" + min=1, max=4, increment=1, threaded=True, + encoding="UTF-8", nencoding="UTF-8" ) engine = create_engine("oracle+cx_oracle://", creator=pool.acquire, poolclass=NullPool) @@ -170,21 +195,22 @@ connection pooling:: with engine.connect() as conn: - print(conn.scalar("select 1 FROM dual")) - + print(conn.scalar("select 1 from dual")) As well as providing a scalable solution for multi-user applications, the cx_Oracle session pool supports some Oracle features such as DRCP and `Application Continuity `_. +Note that the pool creation parameters ``threaded``, ``encoding`` and +``nencoding`` were deprecated in later cx_Oracle releases. + Using Oracle Database Resident Connection Pooling (DRCP) -------------------------------------------------------- -When using Oracle's `DRCP -`_, -the best practice is to pass a connection class and "purity" when acquiring a -connection from the SessionPool. 
Refer to the `cx_Oracle DRCP documentation +When using Oracle Database's DRCP, the best practice is to pass a connection +class and "purity" when acquiring a connection from the SessionPool. Refer to +the `cx_Oracle DRCP documentation `_. This can be achieved by wrapping ``pool.acquire()``:: @@ -196,7 +222,7 @@ pool = cx_Oracle.SessionPool( user="scott", password="tiger", dsn="orclpdb", min=2, max=5, increment=1, threaded=True, - encoding="UTF-8", nencoding="UTF-8" + encoding="UTF-8", nencoding="UTF-8" ) def creator(): @@ -208,7 +234,7 @@ def creator(): pooling and Oracle Database additionally uses DRCP:: with engine.connect() as conn: - print(conn.scalar("select 1 FROM dual")) + print(conn.scalar("select 1 from dual")) .. _cx_oracle_unicode: @@ -216,24 +242,26 @@ def creator(): ------- As is the case for all DBAPIs under Python 3, all strings are inherently -Unicode strings. In all cases however, the driver requires an explicit +Unicode strings. In all cases however, the driver requires an explicit encoding configuration. Ensuring the Correct Client Encoding ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The long accepted standard for establishing client encoding for nearly all -Oracle related software is via the `NLS_LANG `_ -environment variable. cx_Oracle like most other Oracle drivers will use -this environment variable as the source of its encoding configuration. The -format of this variable is idiosyncratic; a typical value would be -``AMERICAN_AMERICA.AL32UTF8``. +Oracle Database related software is via the `NLS_LANG +`_ environment +variable. Older versions of cx_Oracle use this environment variable as the +source of its encoding configuration. The format of this variable is +Territory_Country.CharacterSet; a typical value would be +``AMERICAN_AMERICA.AL32UTF8``. cx_Oracle version 8 and later use the character +set "UTF-8" by default, and ignore the character set component of NLS_LANG. -The cx_Oracle driver also supports a programmatic alternative which is to -pass the ``encoding`` and ``nencoding`` parameters directly to its -``.connect()`` function. These can be present in the URL as follows:: +The cx_Oracle driver also supported a programmatic alternative which is to pass +the ``encoding`` and ``nencoding`` parameters directly to its ``.connect()`` +function. These can be present in the URL as follows:: - engine = create_engine("oracle+cx_oracle://scott:tiger@orclpdb/?encoding=UTF-8&nencoding=UTF-8") + engine = create_engine("oracle+cx_oracle://scott:tiger@tnsalias?encoding=UTF-8&nencoding=UTF-8") For the meaning of the ``encoding`` and ``nencoding`` parameters, please consult @@ -248,25 +276,24 @@ def creator(): Unicode-specific Column datatypes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The Core expression language handles unicode data by use of the :class:`.Unicode` -and :class:`.UnicodeText` -datatypes. These types correspond to the VARCHAR2 and CLOB Oracle datatypes by -default. When using these datatypes with Unicode data, it is expected that -the Oracle database is configured with a Unicode-aware character set, as well -as that the ``NLS_LANG`` environment variable is set appropriately, so that -the VARCHAR2 and CLOB datatypes can accommodate the data. +The Core expression language handles unicode data by use of the +:class:`.Unicode` and :class:`.UnicodeText` datatypes. These types correspond +to the VARCHAR2 and CLOB Oracle Database datatypes by default. 
When using +these datatypes with Unicode data, it is expected that the database is +configured with a Unicode-aware character set, as well as that the ``NLS_LANG`` +environment variable is set appropriately (this applies to older versions of +cx_Oracle), so that the VARCHAR2 and CLOB datatypes can accommodate the data. -In the case that the Oracle database is not configured with a Unicode character +In the case that Oracle Database is not configured with a Unicode character set, the two options are to use the :class:`_types.NCHAR` and :class:`_oracle.NCLOB` datatypes explicitly, or to pass the flag -``use_nchar_for_unicode=True`` to :func:`_sa.create_engine`, -which will cause the -SQLAlchemy dialect to use NCHAR/NCLOB for the :class:`.Unicode` / +``use_nchar_for_unicode=True`` to :func:`_sa.create_engine`, which will cause +the SQLAlchemy dialect to use NCHAR/NCLOB for the :class:`.Unicode` / :class:`.UnicodeText` datatypes instead of VARCHAR/CLOB. -.. versionchanged:: 1.3 The :class:`.Unicode` and :class:`.UnicodeText` - datatypes now correspond to the ``VARCHAR2`` and ``CLOB`` Oracle datatypes - unless the ``use_nchar_for_unicode=True`` is passed to the dialect +.. versionchanged:: 1.3 The :class:`.Unicode` and :class:`.UnicodeText` + datatypes now correspond to the ``VARCHAR2`` and ``CLOB`` Oracle Database + datatypes unless the ``use_nchar_for_unicode=True`` is passed to the dialect when :func:`_sa.create_engine` is called. @@ -275,7 +302,7 @@ def creator(): Encoding Errors ^^^^^^^^^^^^^^^ -For the unusual case that data in the Oracle database is present with a broken +For the unusual case that data in Oracle Database is present with a broken encoding, the dialect accepts a parameter ``encoding_errors`` which will be passed to Unicode decoding functions in order to affect how decoding errors are handled. The value is ultimately consumed by the Python `decode @@ -293,13 +320,13 @@ def creator(): ------------------------------------------------------------------------------- The cx_Oracle DBAPI has a deep and fundamental reliance upon the usage of the -DBAPI ``setinputsizes()`` call. The purpose of this call is to establish the +DBAPI ``setinputsizes()`` call. The purpose of this call is to establish the datatypes that are bound to a SQL statement for Python values being passed as parameters. While virtually no other DBAPI assigns any use to the ``setinputsizes()`` call, the cx_Oracle DBAPI relies upon it heavily in its -interactions with the Oracle client interface, and in some scenarios it is not -possible for SQLAlchemy to know exactly how data should be bound, as some -settings can cause profoundly different performance characteristics, while +interactions with the Oracle Database client interface, and in some scenarios +it is not possible for SQLAlchemy to know exactly how data should be bound, as +some settings can cause profoundly different performance characteristics, while altering the type coercion behavior at the same time. Users of the cx_Oracle dialect are **strongly encouraged** to read through @@ -354,35 +381,35 @@ def _remove_clob(inputsizes, cursor, statement, parameters, context): if dbapitype is CLOB: del inputsizes[bindparam] -.. _cx_oracle_returning: - -RETURNING Support ------------------ - -The cx_Oracle dialect implements RETURNING using OUT parameters. -The dialect supports RETURNING fully. - .. _cx_oracle_lob: LOB Datatypes -------------- LOB datatypes refer to the "large object" datatypes such as CLOB, NCLOB and -BLOB. 
Modern versions of cx_Oracle and oracledb are optimized for these -datatypes to be delivered as a single buffer. As such, SQLAlchemy makes use of -these newer type handlers by default. +BLOB. Modern versions of cx_Oracle is optimized for these datatypes to be +delivered as a single buffer. As such, SQLAlchemy makes use of these newer type +handlers by default. To disable the use of newer type handlers and deliver LOB objects as classic buffered objects with a ``read()`` method, the parameter ``auto_convert_lobs=False`` may be passed to :func:`_sa.create_engine`, which takes place only engine-wide. -Two Phase Transactions Not Supported (use oracledb) ---------------------------------------------------- +.. _cx_oracle_returning: + +RETURNING Support +----------------- + +The cx_Oracle dialect implements RETURNING using OUT parameters. +The dialect supports RETURNING fully. + +Two Phase Transactions Not Supported +------------------------------------ Two phase transactions are **not supported** under cx_Oracle due to poor driver -support. The newer :ref:`oracledb` dialect however **does** support two phase -transactions and should be preferred. +support. The newer :ref:`oracledb` dialect however **does** support two phase +transactions. .. _cx_oracle_numeric: @@ -393,20 +420,21 @@ def _remove_clob(inputsizes, cursor, statement, parameters, context): ``Decimal`` objects or float objects. When a :class:`.Numeric` object, or a subclass such as :class:`.Float`, :class:`_oracle.DOUBLE_PRECISION` etc. is in use, the :paramref:`.Numeric.asdecimal` flag determines if values should be -coerced to ``Decimal`` upon return, or returned as float objects. To make -matters more complicated under Oracle, Oracle's ``NUMBER`` type can also -represent integer values if the "scale" is zero, so the Oracle-specific -:class:`_oracle.NUMBER` type takes this into account as well. +coerced to ``Decimal`` upon return, or returned as float objects. To make +matters more complicated under Oracle Database, the ``NUMBER`` type can also +represent integer values if the "scale" is zero, so the Oracle +Database-specific :class:`_oracle.NUMBER` type takes this into account as well. The cx_Oracle dialect makes extensive use of connection- and cursor-level "outputtypehandler" callables in order to coerce numeric values as requested. These callables are specific to the specific flavor of :class:`.Numeric` in -use, as well as if no SQLAlchemy typing objects are present. There are -observed scenarios where Oracle may sends incomplete or ambiguous information -about the numeric types being returned, such as a query where the numeric types -are buried under multiple levels of subquery. The type handlers do their best -to make the right decision in all cases, deferring to the underlying cx_Oracle -DBAPI for all those cases where the driver can make the best decision. +use, as well as if no SQLAlchemy typing objects are present. There are +observed scenarios where Oracle Database may send incomplete or ambiguous +information about the numeric types being returned, such as a query where the +numeric types are buried under multiple levels of subquery. The type handlers +do their best to make the right decision in all cases, deferring to the +underlying cx_Oracle DBAPI for all those cases where the driver can make the +best decision. 
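+
+For example (a sketch only; the table, the column names and the connection URL
+here are hypothetical), the :paramref:`.Numeric.asdecimal` flag is what
+determines the Python type ultimately delivered by these type handlers::
+
+    from decimal import Decimal
+
+    from sqlalchemy import create_engine, select
+    from sqlalchemy import Column, Float, Integer, MetaData, Numeric, Table
+
+    engine = create_engine("oracle+cx_oracle://scott:tiger@tnsalias")
+
+    metadata = MetaData()
+    account = Table(
+        "account",
+        metadata,
+        Column("id", Integer, primary_key=True),
+        # Numeric defaults to asdecimal=True; values return as Decimal
+        Column("balance", Numeric(10, 2)),
+        # Float defaults to asdecimal=False; values return as float
+        Column("ratio", Float),
+    )
+
+    with engine.connect() as conn:
+        row = conn.execute(select(account.c.balance, account.c.ratio)).first()
+        assert isinstance(row.balance, Decimal)
+        assert isinstance(row.ratio, float)
+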
When no typing objects are present, as when executing plain SQL strings, a default "outputtypehandler" is present which will generally return numeric diff --git a/lib/sqlalchemy/dialects/oracle/oracledb.py b/lib/sqlalchemy/dialects/oracle/oracledb.py index 37e07daf1d5..a6f52ecec5d 100644 --- a/lib/sqlalchemy/dialects/oracle/oracledb.py +++ b/lib/sqlalchemy/dialects/oracle/oracledb.py @@ -6,8 +6,7 @@ # the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors -r""" -.. dialect:: oracle+oracledb +r""".. dialect:: oracle+oracledb :name: python-oracledb :dbapi: oracledb :connectstring: oracle+oracledb://user:pass@hostname:port[/dbname][?service_name=[&key=value&key=value...]] @@ -16,75 +15,526 @@ Description ----------- -python-oracledb is released by Oracle to supersede the cx_Oracle driver. -It is fully compatible with cx_Oracle and features both a "thin" client -mode that requires no dependencies, as well as a "thick" mode that uses -the Oracle Client Interface in the same way as cx_Oracle. +Python-oracledb is the Oracle Database driver for Python. It features a default +"thin" client mode that requires no dependencies, and an optional "thick" mode +that uses Oracle Client libraries. It supports SQLAlchemy features including +two phase transactions and Asyncio. -.. seealso:: - - :ref:`cx_oracle` - all of cx_Oracle's notes apply to the oracledb driver - as well, with the exception that oracledb supports two phase transactions. +Python-oracle is the renamed, updated cx_Oracle driver. Oracle is no longer +doing any releases in the cx_Oracle namespace. The SQLAlchemy ``oracledb`` dialect provides both a sync and an async implementation under the same dialect name. The proper version is selected depending on how the engine is created: * calling :func:`_sa.create_engine` with ``oracle+oracledb://...`` will - automatically select the sync version, e.g.:: + automatically select the sync version:: from sqlalchemy import create_engine - sync_engine = create_engine("oracle+oracledb://scott:tiger@localhost/?service_name=XEPDB1") + sync_engine = create_engine("oracle+oracledb://scott:tiger@localhost?service_name=FREEPDB1") -* calling :func:`_asyncio.create_async_engine` with - ``oracle+oracledb://...`` will automatically select the async version, - e.g.:: +* calling :func:`_asyncio.create_async_engine` with ``oracle+oracledb://...`` + will automatically select the async version:: from sqlalchemy.ext.asyncio import create_async_engine - asyncio_engine = create_async_engine("oracle+oracledb://scott:tiger@localhost/?service_name=XEPDB1") + asyncio_engine = create_async_engine("oracle+oracledb://scott:tiger@localhost?service_name=FREEPDB1") -The asyncio version of the dialect may also be specified explicitly using the -``oracledb_async`` suffix, as:: + The asyncio version of the dialect may also be specified explicitly using the + ``oracledb_async`` suffix:: - from sqlalchemy.ext.asyncio import create_async_engine - asyncio_engine = create_async_engine("oracle+oracledb_async://scott:tiger@localhost/?service_name=XEPDB1") + from sqlalchemy.ext.asyncio import create_async_engine + asyncio_engine = create_async_engine("oracle+oracledb_async://scott:tiger@localhost?service_name=FREEPDB1") .. versionadded:: 2.0.25 added support for the async version of oracledb. Thick mode support ------------------ -By default the ``python-oracledb`` is started in thin mode, that does not -require oracle client libraries to be installed in the system. 
The -``python-oracledb`` driver also support a "thick" mode, that behaves -similarly to ``cx_oracle`` and requires that Oracle Client Interface (OCI) -is installed. +By default, the python-oracledb driver runs in a "thin" mode that does not +require Oracle Client libraries to be installed. The driver also supports a +"thick" mode that uses Oracle Client libraries to get functionality such as +Oracle Application Continuity. -To enable this mode, the user may call ``oracledb.init_oracle_client`` -manually, or by passing the parameter ``thick_mode=True`` to -:func:`_sa.create_engine`. To pass custom arguments to ``init_oracle_client``, -like the ``lib_dir`` path, a dict may be passed to this parameter, as in:: +To enable thick mode, call `oracledb.init_oracle_client() +`_ +explicitly, or pass the parameter ``thick_mode=True`` to +:func:`_sa.create_engine`. To pass custom arguments to +``init_oracle_client()``, like the ``lib_dir`` path, a dict may be passed, for +example:: engine = sa.create_engine("oracle+oracledb://...", thick_mode={ - "lib_dir": "/path/to/oracle/client/lib", "driver_name": "my-app" + "lib_dir": "/path/to/oracle/client/lib", + "config_dir": "/path/to/network_config_file_directory", + "driver_name": "my-app : 1.0.0" }) +Note that passing a ``lib_dir`` path should only be done on macOS or +Windows. On Linux it does not behave as you might expect. + .. seealso:: - https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#oracledb.init_oracle_client + python-oracledb documentation `Enabling python-oracledb Thick mode + `_ + +Connecting to Oracle Database +----------------------------- + +python-oracledb provides several methods of indicating the target database. +The dialect translates from a series of different URL forms. + +Given the hostname, port and service name of the target database, you can +connect in SQLAlchemy using the ``service_name`` query string parameter:: + + engine = create_engine("oracle+oracledb://scott:tiger@hostname:port?service_name=myservice") + +Connecting with Easy Connect strings +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can pass any valid python-oracledb connection string as the ``dsn`` key +value in a :paramref:`_sa.create_engine.connect_args` dictionary. See +python-oracledb documentation `Oracle Net Services Connection Strings +`_. + +For example to use an `Easy Connect string +`_ +with a timeout to prevent connection establishment from hanging if the network +transport to the database cannot be establishd in 30 seconds, and also setting +a keep-alive time of 60 seconds to stop idle network connections from being +terminated by a firewall:: + + e = create_engine( + "oracle+oracledb://@", + connect_args={ + "user": "scott", + "password": "tiger", + "dsn": "hostname:port/myservice?transport_connect_timeout=30&expire_time=60" + } + ) + +The Easy Connect syntax has been enhanced during the life of Oracle Database. +Review the documentation for your database version. The current documentation +is at `Understanding the Easy Connect Naming Method +`_. + +The general syntax is similar to:: + + [[protocol:]//]host[:port][/[service_name]][?parameter_name=value{¶meter_name=value}] + +Note that although the SQLAlchemy URL syntax ``hostname:port/dbname`` looks +like Oracle's Easy Connect syntax, it is different. SQLAlchemy's URL requires a +system identifier (SID) for the ``dbname`` component:: + + engine = create_engine("oracle+oracledb://scott:tiger@hostname:port/sid") + +Easy Connect syntax does not support SIDs. 
It uses services names, which are +the preferred choice for connecting to Oracle Database. + +Passing python-oracledb connect arguments +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Other python-oracledb driver `connection options +`_ +can be passed in ``connect_args``. For example:: + + e = create_engine( + "oracle+oracledb://@", + connect_args={ + "user": "scott", + "password": "tiger", + "dsn": "hostname:port/myservice", + "events": True, + "mode": oracledb.AUTH_MODE_SYSDBA + } + ) + +Connecting with tnsnames.ora TNS aliases +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If no port, database name, or service name is provided, the dialect will use an +Oracle Database DSN "connection string". This takes the "hostname" portion of +the URL as the data source name. For example, if the ``tnsnames.ora`` file +contains a `TNS Alias +`_ +of ``myalias`` as below:: + + myalias = + (DESCRIPTION = + (ADDRESS = (PROTOCOL = TCP)(HOST = mymachine.example.com)(PORT = 1521)) + (CONNECT_DATA = + (SERVER = DEDICATED) + (SERVICE_NAME = orclpdb1) + ) + ) + +The python-oracledb dialect connects to this database service when ``myalias`` is the +hostname portion of the URL, without specifying a port, database name or +``service_name``:: + + engine = create_engine("oracle+oracledb://scott:tiger@myalias") + +Connecting to Oracle Autonomous Database +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Users of Oracle Autonomous Database should use either use the TNS Alias URL +shown above, or pass the TNS Alias as the ``dsn`` key value in a +:paramref:`_sa.create_engine.connect_args` dictionary. + +If Oracle Autonomous Database is configured for mutual TLS ("mTLS") +connections, then additional configuration is required as shown in `Connecting +to Oracle Cloud Autonomous Databases +`_. In +summary, Thick mode users should configure file locations and set the wallet +path in ``sqlnet.ora`` appropriately:: + + e = create_engine( + "oracle+oracledb://@", + thick_mode={ + # directory containing tnsnames.ora and cwallet.so + "config_dir": "/opt/oracle/wallet_dir", + }, + connect_args={ + "user": "scott", + "password": "tiger", + "dsn": "mydb_high" + } + ) + +Thin mode users of mTLS should pass the appropriate directories and PEM wallet +password when creating the engine, similar to:: + + e = create_engine( + "oracle+oracledb://@", + connect_args={ + "user": "scott", + "password": "tiger", + "dsn": "mydb_high", + "config_dir": "/opt/oracle/wallet_dir", # directory containing tnsnames.ora + "wallet_location": "/opt/oracle/wallet_dir", # directory containing ewallet.pem + "wallet_password": "top secret" # password for the PEM file + } + ) + +Typically ``config_dir`` and ``wallet_location`` are the same directory, which +is where the Oracle Autonomous Database wallet zip file was extracted. Note +this directory should be protected. + +Connection Pooling +------------------ + +Applications with multiple concurrent users should use connection pooling. A +minimal sized connection pool is also beneficial for long-running, single-user +applications that do not frequently use a connection. + +The python-oracledb driver provides its own connection pool implementation that +may be used in place of SQLAlchemy's pooling functionality. The driver pool +gives support for high availability features such as dead connection detection, +connection draining for planned database downtime, support for Oracle +Application Continuity and Transparent Application Continuity, and gives +support for `Database Resident Connection Pooling (DRCP) +`_. 
+ +To take advantage of python-oracledb's pool, use the +:paramref:`_sa.create_engine.creator` parameter to provide a function that +returns a new connection, along with setting +:paramref:`_sa.create_engine.pool_class` to ``NullPool`` to disable +SQLAlchemy's pooling:: + + import oracledb + from sqlalchemy import create_engine + from sqlalchemy import text + from sqlalchemy.pool import NullPool + + # Uncomment to use the optional python-oracledb Thick mode. + # Review the python-oracledb doc for the appropriate parameters + #oracledb.init_oracle_client() + + pool = oracledb.create_pool(user="scott", password="tiger", dsn="localhost:1521/freepdb1", + min=1, max=4, increment=1) + engine = create_engine("oracle+oracledb://", creator=pool.acquire, poolclass=NullPool) + +The above engine may then be used normally. Internally, python-oracledb handles +connection pooling:: + + with engine.connect() as conn: + print(conn.scalar(text("select 1 from dual"))) + +Refer to the python-oracledb documentation for `oracledb.create_pool() +`_ +for the arguments that can be used when creating a connection pool. + +.. _drcp: + +Using Oracle Database Resident Connection Pooling (DRCP) +-------------------------------------------------------- + +When using Oracle Database's Database Resident Connection Pooling (DRCP), the +best practice is to specify a connection class and "purity". Refer to the +`python-oracledb documentation on DRCP +`_. +For example:: + + import oracledb + from sqlalchemy import create_engine + from sqlalchemy import text + from sqlalchemy.pool import NullPool + + # Uncomment to use the optional python-oracledb Thick mode. + # Review the python-oracledb doc for the appropriate parameters + #oracledb.init_oracle_client() + + pool = oracledb.create_pool(user="scott", password="tiger", dsn="localhost:1521/freepdb1", + min=1, max=4, increment=1, + cclass="MYCLASS", purity=oracledb.PURITY_SELF) + engine = create_engine("oracle+oracledb://", creator=pool.acquire, poolclass=NullPool) + +The above engine may then be used normally where python-oracledb handles +application connection pooling and Oracle Database additionally uses DRCP:: + + with engine.connect() as conn: + print(conn.scalar(text("select 1 from dual"))) + +If you wish to use different connection classes or purities for different +connections, then wrap ``pool.acquire()``:: + + import oracledb + from sqlalchemy import create_engine + from sqlalchemy import text + from sqlalchemy.pool import NullPool + + # Uncomment to use python-oracledb Thick mode. + # Review the python-oracledb doc for the appropriate parameters + #oracledb.init_oracle_client() + + pool = oracledb.create_pool(user="scott", password="tiger", dsn="localhost:1521/freepdb1", + min=1, max=4, increment=1, + cclass="MYCLASS", purity=oracledb.PURITY_SELF) + + def creator(): + return pool.acquire(cclass="MYOTHERCLASS", purity=oracledb.PURITY_NEW) + + engine = create_engine("oracle+oracledb://", creator=creator, poolclass=NullPool) + +Engine Options consumed by the SQLAlchemy oracledb dialect outside of the driver +-------------------------------------------------------------------------------- + +There are also options that are consumed by the SQLAlchemy oracledb dialect +itself. These options are always passed directly to :func:`_sa.create_engine`, +such as:: + + e = create_engine( + "oracle+oracledb://user:pass@tnsalias", arraysize=500) + +The parameters accepted by the oracledb dialect are as follows: + +* ``arraysize`` - set the driver cursor.arraysize value. 
It defaults to + ``None``, indicating that the driver default value of 100 should be used. + This setting controls how many rows are buffered when fetching rows, and can + have a significant effect on performance if increased for queries that return + large numbers of rows. + + .. versionchanged:: 2.0.26 - changed the default value from 50 to None, + to use the default value of the driver itself. + +* ``auto_convert_lobs`` - defaults to True; See :ref:`oracledb_lob`. + +* ``coerce_to_decimal`` - see :ref:`oracledb_numeric` for detail. + +* ``encoding_errors`` - see :ref:`oracledb_unicode_encoding_errors` for detail. -Two Phase Transactions Supported --------------------------------- +.. _oracledb_unicode: -Two phase transactions are fully supported under oracledb. Starting with -oracledb 2.3 two phase transactions are supported also in thin mode. APIs -for two phase transactions are provided at the Core level via -:meth:`_engine.Connection.begin_twophase` and :paramref:`_orm.Session.twophase` -for transparent ORM use. +Unicode +------- + +As is the case for all DBAPIs under Python 3, all strings are inherently +Unicode strings. + +Ensuring the Correct Client Encoding +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In python-oracledb, the encoding used for all character data is "UTF-8". + +Unicode-specific Column datatypes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The Core expression language handles unicode data by use of the +:class:`.Unicode` and :class:`.UnicodeText` datatypes. These types correspond +to the VARCHAR2 and CLOB Oracle Database datatypes by default. When using +these datatypes with Unicode data, it is expected that the database is +configured with a Unicode-aware character set so that the VARCHAR2 and CLOB +datatypes can accommodate the data. + +In the case that Oracle Database is not configured with a Unicode character +set, the two options are to use the :class:`_types.NCHAR` and +:class:`_oracle.NCLOB` datatypes explicitly, or to pass the flag +``use_nchar_for_unicode=True`` to :func:`_sa.create_engine`, which will cause +the SQLAlchemy dialect to use NCHAR/NCLOB for the :class:`.Unicode` / +:class:`.UnicodeText` datatypes instead of VARCHAR/CLOB. + +.. versionchanged:: 1.3 The :class:`.Unicode` and :class:`.UnicodeText` + datatypes now correspond to the ``VARCHAR2`` and ``CLOB`` Oracle Database + datatypes unless the ``use_nchar_for_unicode=True`` is passed to the dialect + when :func:`_sa.create_engine` is called. + + +.. _oracledb_unicode_encoding_errors: + +Encoding Errors +^^^^^^^^^^^^^^^ + +For the unusual case that data in Oracle Database is present with a broken +encoding, the dialect accepts a parameter ``encoding_errors`` which will be +passed to Unicode decoding functions in order to affect how decoding errors are +handled. The value is ultimately consumed by the Python `decode +`_ function, and +is passed both via python-oracledb's ``encodingErrors`` parameter consumed by +``Cursor.var()``, as well as SQLAlchemy's own decoding function, as the +python-oracledb dialect makes use of both under different circumstances. + +.. versionadded:: 1.3.11 + + +.. _oracledb_setinputsizes: + +Fine grained control over python-oracledb data binding with setinputsizes +------------------------------------------------------------------------- + +The python-oracle DBAPI has a deep and fundamental reliance upon the usage of +the DBAPI ``setinputsizes()`` call. 
The purpose of this call is to establish +the datatypes that are bound to a SQL statement for Python values being passed +as parameters. While virtually no other DBAPI assigns any use to the +``setinputsizes()`` call, the python-oracledb DBAPI relies upon it heavily in +its interactions with the Oracle Database, and in some scenarios it is not +possible for SQLAlchemy to know exactly how data should be bound, as some +settings can cause profoundly different performance characteristics, while +altering the type coercion behavior at the same time. + +Users of the oracledb dialect are **strongly encouraged** to read through +python-oracledb's list of built-in datatype symbols at `Database Types +`_ +Note that in some cases, significant performance degradation can occur when +using these types vs. not. + +On the SQLAlchemy side, the :meth:`.DialectEvents.do_setinputsizes` event can +be used both for runtime visibility (e.g. logging) of the setinputsizes step as +well as to fully control how ``setinputsizes()`` is used on a per-statement +basis. + +.. versionadded:: 1.2.9 Added :meth:`.DialectEvents.setinputsizes` + + +Example 1 - logging all setinputsizes calls +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The following example illustrates how to log the intermediary values from a +SQLAlchemy perspective before they are converted to the raw ``setinputsizes()`` +parameter dictionary. The keys of the dictionary are :class:`.BindParameter` +objects which have a ``.key`` and a ``.type`` attribute:: + + from sqlalchemy import create_engine, event + + engine = create_engine("oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1") + + @event.listens_for(engine, "do_setinputsizes") + def _log_setinputsizes(inputsizes, cursor, statement, parameters, context): + for bindparam, dbapitype in inputsizes.items(): + log.info( + "Bound parameter name: %s SQLAlchemy type: %r " + "DBAPI object: %s", + bindparam.key, bindparam.type, dbapitype) + +Example 2 - remove all bindings to CLOB +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +For performance, fetching LOB datatypes from Oracle Database is set by default +for the ``Text`` type within SQLAlchemy. This setting can be modified as +follows:: + + + from sqlalchemy import create_engine, event + from oracledb import CLOB + + engine = create_engine("oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1") + + @event.listens_for(engine, "do_setinputsizes") + def _remove_clob(inputsizes, cursor, statement, parameters, context): + for bindparam, dbapitype in list(inputsizes.items()): + if dbapitype is CLOB: + del inputsizes[bindparam] + +.. _oracledb_lob: + +LOB Datatypes +-------------- + +LOB datatypes refer to the "large object" datatypes such as CLOB, NCLOB and +BLOB. Oracle Database can efficiently return these datatypes as a single +buffer. SQLAlchemy makes use of type handlers to do this by default. + +To disable the use of the type handlers and deliver LOB objects as classic +buffered objects with a ``read()`` method, the parameter +``auto_convert_lobs=False`` may be passed to :func:`_sa.create_engine`. + +.. _oracledb_returning: + +RETURNING Support +----------------- + +The oracledb dialect implements RETURNING using OUT parameters. The dialect +supports RETURNING fully. + +Two Phase Transaction Support +----------------------------- + +Two phase transactions are fully supported with python-oracledb. (Thin mode +requires python-oracledb 2.3). 
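+
+A minimal Core-level sketch, assuming an ``engine`` created elsewhere with the
+``oracle+oracledb`` dialect and a hypothetical ``some_table``, might look
+like::
+
+    from sqlalchemy import text
+
+    with engine.connect() as conn:
+        xa = conn.begin_twophase()
+        conn.execute(text("INSERT INTO some_table (x) VALUES (:x)"), {"x": 10})
+        xa.prepare()
+        xa.commit()
+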
APIs for two phase transactions are provided at +the Core level via :meth:`_engine.Connection.begin_twophase` and +:paramref:`_orm.Session.twophase` for transparent ORM use. .. versionchanged:: 2.0.32 added support for two phase transactions -.. versionadded:: 2.0.0 added support for oracledb driver. +.. _oracledb_numeric: + +Precision Numerics +------------------ + +SQLAlchemy's numeric types can handle receiving and returning values as Python +``Decimal`` objects or float objects. When a :class:`.Numeric` object, or a +subclass such as :class:`.Float`, :class:`_oracle.DOUBLE_PRECISION` etc. is in +use, the :paramref:`.Numeric.asdecimal` flag determines if values should be +coerced to ``Decimal`` upon return, or returned as float objects. To make +matters more complicated under Oracle Database, the ``NUMBER`` type can also +represent integer values if the "scale" is zero, so the Oracle +Database-specific :class:`_oracle.NUMBER` type takes this into account as well. + +The oracledb dialect makes extensive use of connection- and cursor-level +"outputtypehandler" callables in order to coerce numeric values as requested. +These callables are specific to the specific flavor of :class:`.Numeric` in +use, as well as if no SQLAlchemy typing objects are present. There are +observed scenarios where Oracle Database may send incomplete or ambiguous +information about the numeric types being returned, such as a query where the +numeric types are buried under multiple levels of subquery. The type handlers +do their best to make the right decision in all cases, deferring to the +underlying python-oracledb DBAPI for all those cases where the driver can make +the best decision. + +When no typing objects are present, as when executing plain SQL strings, a +default "outputtypehandler" is present which will generally return numeric +values which specify precision and scale as Python ``Decimal`` objects. To +disable this coercion to decimal for performance reasons, pass the flag +``coerce_to_decimal=False`` to :func:`_sa.create_engine`:: + + engine = create_engine("oracle+oracledb://scott:tiger@tnsalias", coerce_to_decimal=False) + +The ``coerce_to_decimal`` flag only impacts the results of plain string +SQL statements that are not otherwise associated with a :class:`.Numeric` +SQLAlchemy type (or a subclass of such). + +.. versionchanged:: 1.2 The numeric handling system for the oracle dialects has + been reworked to take advantage of newer driver features as well as better + integration of outputtypehandlers. + +.. versionadded:: 2.0.0 added support for the python-oracledb driver. """ # noqa from __future__ import annotations diff --git a/lib/sqlalchemy/dialects/oracle/provision.py b/lib/sqlalchemy/dialects/oracle/provision.py index b33c1525cd5..0eb6273a8c6 100644 --- a/lib/sqlalchemy/dialects/oracle/provision.py +++ b/lib/sqlalchemy/dialects/oracle/provision.py @@ -89,7 +89,7 @@ def _oracle_drop_db(cfg, eng, ident): # cx_Oracle seems to occasionally leak open connections when a large # suite it run, even if we confirm we have zero references to # connection objects. - # while there is a "kill session" command in Oracle, + # while there is a "kill session" command in Oracle Database, # it unfortunately does not release the connection sufficiently. 
_ora_drop_ignore(conn, ident) _ora_drop_ignore(conn, "%s_ts1" % ident) diff --git a/lib/sqlalchemy/dialects/oracle/types.py b/lib/sqlalchemy/dialects/oracle/types.py index 36caaa05e60..2f84415ea8f 100644 --- a/lib/sqlalchemy/dialects/oracle/types.py +++ b/lib/sqlalchemy/dialects/oracle/types.py @@ -64,17 +64,18 @@ def _type_affinity(self): class FLOAT(sqltypes.FLOAT): - """Oracle FLOAT. + """Oracle Database FLOAT. This is the same as :class:`_sqltypes.FLOAT` except that - an Oracle-specific :paramref:`_oracle.FLOAT.binary_precision` + an Oracle Database -specific :paramref:`_oracle.FLOAT.binary_precision` parameter is accepted, and the :paramref:`_sqltypes.Float.precision` parameter is not accepted. - Oracle FLOAT types indicate precision in terms of "binary precision", which - defaults to 126. For a REAL type, the value is 63. This parameter does not - cleanly map to a specific number of decimal places but is roughly - equivalent to the desired number of decimal places divided by 0.3103. + Oracle Database FLOAT types indicate precision in terms of "binary + precision", which defaults to 126. For a REAL type, the value is 63. This + parameter does not cleanly map to a specific number of decimal places but + is roughly equivalent to the desired number of decimal places divided by + 0.3103. .. versionadded:: 2.0 @@ -91,10 +92,11 @@ def __init__( r""" Construct a FLOAT - :param binary_precision: Oracle binary precision value to be rendered - in DDL. This may be approximated to the number of decimal characters - using the formula "decimal precision = 0.30103 * binary precision". - The default value used by Oracle for FLOAT / DOUBLE PRECISION is 126. + :param binary_precision: Oracle Database binary precision value to be + rendered in DDL. This may be approximated to the number of decimal + characters using the formula "decimal precision = 0.30103 * binary + precision". The default value used by Oracle Database for FLOAT / + DOUBLE PRECISION is 126. :param asdecimal: See :paramref:`_sqltypes.Float.asdecimal` @@ -163,10 +165,10 @@ def process(value): class DATE(_OracleDateLiteralRender, sqltypes.DateTime): - """Provide the oracle DATE type. + """Provide the Oracle Database DATE type. This type has no special Python behavior, except that it subclasses - :class:`_types.DateTime`; this is to suit the fact that the Oracle + :class:`_types.DateTime`; this is to suit the fact that the Oracle Database ``DATE`` type supports a time value. """ @@ -246,8 +248,8 @@ def process(value: dt.timedelta) -> str: class TIMESTAMP(sqltypes.TIMESTAMP): - """Oracle implementation of ``TIMESTAMP``, which supports additional - Oracle-specific modes + """Oracle Database implementation of ``TIMESTAMP``, which supports + additional Oracle Database-specific modes .. versionadded:: 2.0 @@ -257,10 +259,11 @@ def __init__(self, timezone: bool = False, local_timezone: bool = False): """Construct a new :class:`_oracle.TIMESTAMP`. :param timezone: boolean. Indicates that the TIMESTAMP type should - use Oracle's ``TIMESTAMP WITH TIME ZONE`` datatype. + use Oracle Database's ``TIMESTAMP WITH TIME ZONE`` datatype. :param local_timezone: boolean. Indicates that the TIMESTAMP type - should use Oracle's ``TIMESTAMP WITH LOCAL TIME ZONE`` datatype. + should use Oracle Database's ``TIMESTAMP WITH LOCAL TIME ZONE`` + datatype. """ @@ -273,7 +276,7 @@ def __init__(self, timezone: bool = False, local_timezone: bool = False): class ROWID(sqltypes.TypeEngine): - """Oracle ROWID type. + """Oracle Database ROWID type. 
When used in a cast() or similar, generates ROWID. diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 983bdae037f..4fd4e30896e 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -378,12 +378,11 @@ def execution_options(self, **opt: Any) -> Connection: :param stream_results: Available on: :class:`_engine.Connection`, :class:`_sql.Executable`. - Indicate to the dialect that results should be - "streamed" and not pre-buffered, if possible. For backends - such as PostgreSQL, MySQL and MariaDB, this indicates the use of - a "server side cursor" as opposed to a client side cursor. - Other backends such as that of Oracle may already use server - side cursors by default. + Indicate to the dialect that results should be "streamed" and not + pre-buffered, if possible. For backends such as PostgreSQL, MySQL + and MariaDB, this indicates the use of a "server side cursor" as + opposed to a client side cursor. Other backends such as that of + Oracle Database may already use server side cursors by default. The usage of :paramref:`_engine.Connection.execution_options.stream_results` is diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 9b769265fa0..dc4c1a61263 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -392,7 +392,8 @@ def insert_executemany_returning(self): available if the dialect in use has opted into using the "use_insertmanyvalues" feature. If they haven't opted into that, then this attribute is False, unless the dialect in question overrides this - and provides some other implementation (such as the Oracle dialect). + and provides some other implementation (such as the Oracle Database + dialects). """ return self.insert_returning and self.use_insertmanyvalues @@ -415,7 +416,7 @@ def insert_executemany_returning_sort_by_parameter_order(self): If the dialect in use hasn't opted into that, then this attribute is False, unless the dialect in question overrides this and provides some - other implementation (such as the Oracle dialect). + other implementation (such as the Oracle Database dialects). """ return self.insert_returning and self.use_insertmanyvalues @@ -2050,10 +2051,11 @@ def _prepare_set_input_sizes( style of ``setinputsizes()`` on the cursor, using DB-API types from the bind parameter's ``TypeEngine`` objects. - This method only called by those dialects which set - the :attr:`.Dialect.bind_typing` attribute to - :attr:`.BindTyping.SETINPUTSIZES`. cx_Oracle is the only DBAPI - that requires setinputsizes(), pyodbc offers it as an option. + This method only called by those dialects which set the + :attr:`.Dialect.bind_typing` attribute to + :attr:`.BindTyping.SETINPUTSIZES`. Python-oracledb and cx_Oracle are + the only DBAPIs that requires setinputsizes(); pyodbc offers it as an + option. Prior to SQLAlchemy 2.0, the setinputsizes() approach was also used for pg8000 and asyncpg, which has been changed to inline rendering diff --git a/lib/sqlalchemy/engine/events.py b/lib/sqlalchemy/engine/events.py index b8e8936b94c..cbc08063d55 100644 --- a/lib/sqlalchemy/engine/events.py +++ b/lib/sqlalchemy/engine/events.py @@ -928,7 +928,8 @@ def do_setinputsizes( The setinputsizes hook overall is only used for dialects which include the flag ``use_setinputsizes=True``. Dialects which use this - include cx_Oracle, pg8000, asyncpg, and pyodbc dialects. + include python-oracledb, cx_Oracle, pg8000, asyncpg, and pyodbc + dialects. .. 
note:: diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 6696a787064..861c5deae3f 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -580,8 +580,8 @@ class BindTyping(Enum): """Use the pep-249 setinputsizes method. This is only implemented for DBAPIs that support this method and for which - the SQLAlchemy dialect has the appropriate infrastructure for that - dialect set up. Current dialects include cx_Oracle as well as + the SQLAlchemy dialect has the appropriate infrastructure for that dialect + set up. Current dialects include python-oracledb, cx_Oracle as well as optional support for SQL Server using pyodbc. When using setinputsizes, dialects also have a means of only using the @@ -871,12 +871,12 @@ def loaded_dbapi(self) -> ModuleType: the statement multiple times for a series of batches when large numbers of rows are given. - The parameter is False for the default dialect, and is set to - True for SQLAlchemy internal dialects SQLite, MySQL/MariaDB, PostgreSQL, - SQL Server. It remains at False for Oracle, which provides native - "executemany with RETURNING" support and also does not support - ``supports_multivalues_insert``. For MySQL/MariaDB, those MySQL - dialects that don't support RETURNING will not report + The parameter is False for the default dialect, and is set to True for + SQLAlchemy internal dialects SQLite, MySQL/MariaDB, PostgreSQL, SQL Server. + It remains at False for Oracle Database, which provides native "executemany + with RETURNING" support and also does not support + ``supports_multivalues_insert``. For MySQL/MariaDB, those MySQL dialects + that don't support RETURNING will not report ``insert_executemany_returning`` as True. .. versionadded:: 2.0 @@ -1093,7 +1093,8 @@ def loaded_dbapi(self) -> ModuleType: established on a :class:`.Table` object which will be passed as "reflection options" when using :paramref:`.Table.autoload_with`. - Current example is "oracle_resolve_synonyms" in the Oracle dialect. + Current example is "oracle_resolve_synonyms" in the Oracle Database + dialects. """ diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 09b09880350..7e2586e1e18 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -630,7 +630,7 @@ def get_temp_table_names(self, **kw: Any) -> List[str]: r"""Return a list of temporary table names for the current bind. This method is unsupported by most dialects; currently - only Oracle, PostgreSQL and SQLite implements it. + only Oracle Database, PostgreSQL and SQLite implements it. :param \**kw: Additional keyword argument to pass to the dialect specific implementation. See the documentation of the dialect @@ -666,7 +666,7 @@ def get_table_options( given name was created. This currently includes some options that apply to MySQL and Oracle - tables. + Database tables. :param table_name: string name of the table. For special quoting, use :class:`.quoted_name`. 
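+
+        For example, a dialect-specific reflection option such as the
+        ``oracle_resolve_synonyms`` option mentioned above is established
+        directly on the :class:`.Table` being reflected (a sketch only; the
+        synonym name and connection URL are hypothetical)::
+
+            from sqlalchemy import create_engine, MetaData, Table
+
+            engine = create_engine("oracle+oracledb://scott:tiger@tnsalias")
+            metadata = MetaData()
+
+            # the dialect-specific option is forwarded to the reflection
+            # process along with autoload_with
+            some_table = Table(
+                "some_synonym",
+                metadata,
+                autoload_with=engine,
+                oracle_resolve_synonyms=True,
+            )
+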
diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py index b870adce92c..9d4be255c0d 100644 --- a/lib/sqlalchemy/ext/compiler.py +++ b/lib/sqlalchemy/ext/compiler.py @@ -226,7 +226,8 @@ def compile(element, compiler, **kw): @compiles(coalesce, 'oracle') def compile(element, compiler, **kw): if len(element.clauses) > 2: - raise TypeError("coalesce only supports two arguments on Oracle") + raise TypeError("coalesce only supports two arguments on " + "Oracle Database") return "nvl(%s)" % compiler.process(element.clauses, **kw) * :class:`.ExecutableDDLElement` - The root of all DDL expressions, diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index 6bacd77ebac..4f119e35caf 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -1570,10 +1570,10 @@ def _compound_eager_statement(self): ) statement._label_style = self.label_style - # Oracle however does not allow FOR UPDATE on the subquery, - # and the Oracle dialect ignores it, plus for PostgreSQL, MySQL - # we expect that all elements of the row are locked, so also put it - # on the outside (except in the case of PG when OF is used) + # Oracle Database however does not allow FOR UPDATE on the subquery, + # and the Oracle Database dialects ignore it, plus for PostgreSQL, + # MySQL we expect that all elements of the row are locked, so also put + # it on the outside (except in the case of PG when OF is used) if ( self._for_update_arg is not None and self._for_update_arg.of is None diff --git a/lib/sqlalchemy/sql/_elements_constructors.py b/lib/sqlalchemy/sql/_elements_constructors.py index bdc0534abe2..55e92dd0c4f 100644 --- a/lib/sqlalchemy/sql/_elements_constructors.py +++ b/lib/sqlalchemy/sql/_elements_constructors.py @@ -647,12 +647,12 @@ def bindparam( :param quote: True if this parameter name requires quoting and is not currently known as a SQLAlchemy reserved word; this currently - only applies to the Oracle backend, where bound names must + only applies to the Oracle Database backends, where bound names must sometimes be quoted. :param isoutparam: if True, the parameter should be treated like a stored procedure - "OUT" parameter. This applies to backends such as Oracle which + "OUT" parameter. This applies to backends such as Oracle Database which support OUT parameters. :param expanding: diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 634e5ce118d..46110eeae7f 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -2346,7 +2346,8 @@ def default_from(self): """Called when a SELECT statement has no froms, and no FROM clause is to be appended. - Gives Oracle a chance to tack on a ``FROM DUAL`` to the string output. + Gives Oracle Database a chance to tack on a ``FROM DUAL`` to the string + output. """ return "" diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 241aa4231e9..441974707b9 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -4296,7 +4296,7 @@ class WithinGroup(ColumnElement[_T]): ``rank()``, ``dense_rank()``, etc. It's supported only by certain database backends, such as PostgreSQL, - Oracle and MS SQL Server. + Oracle Database and MS SQL Server. The :class:`.WithinGroup` construct extracts its type from the method :meth:`.FunctionElement.within_group_type`. 
If this returns @@ -5172,7 +5172,7 @@ class quoted_name(util.MemoizedSlots, str): A :class:`.quoted_name` object with ``quote=True`` is also prevented from being modified in the case of a so-called "name normalize" option. Certain database backends, such as - Oracle, Firebird, and DB2 "normalize" case-insensitive names + Oracle Database, Firebird, and DB2 "normalize" case-insensitive names as uppercase. The SQLAlchemy dialects for these backends convert from SQLAlchemy's lower-case-means-insensitive convention to the upper-case-means-insensitive conventions of those backends. @@ -5193,11 +5193,11 @@ class quoted_name(util.MemoizedSlots, str): from sqlalchemy import inspect from sqlalchemy.sql import quoted_name - engine = create_engine("oracle+cx_oracle://some_dsn") + engine = create_engine("oracle+oracledb://some_dsn") print(inspect(engine).has_table(quoted_name("some_table", True))) - The above logic will run the "has table" logic against the Oracle backend, - passing the name exactly as ``"some_table"`` without converting to + The above logic will run the "has table" logic against the Oracle Database + backend, passing the name exactly as ``"some_table"`` without converting to upper case. .. versionchanged:: 1.2 The :class:`.quoted_name` construct is now diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index 5939f124948..c611004b97e 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -1569,7 +1569,7 @@ def match(self, other: Any, **kwargs: Any) -> ColumnOperators: :class:`_mysql.match` - MySQL specific construct with additional features. - * Oracle - renders ``CONTAINS(x, y)`` + * Oracle Database - renders ``CONTAINS(x, y)`` * other backends may provide special implementations. * Backends without any special implementation will emit the operator as "MATCH". This is compatible with SQLite, for @@ -1597,7 +1597,7 @@ def regexp_match( Examples include: * PostgreSQL - renders ``x ~ y`` or ``x !~ y`` when negated. - * Oracle - renders ``REGEXP_LIKE(x, y)`` + * Oracle Database - renders ``REGEXP_LIKE(x, y)`` * SQLite - uses SQLite's ``REGEXP`` placeholder operator and calls into the Python ``re.match()`` builtin. * other backends may provide special implementations. @@ -1605,9 +1605,9 @@ def regexp_match( the operator as "REGEXP" or "NOT REGEXP". This is compatible with SQLite and MySQL, for example. - Regular expression support is currently implemented for Oracle, - PostgreSQL, MySQL and MariaDB. Partial support is available for - SQLite. Support among third-party dialects may vary. + Regular expression support is currently implemented for Oracle + Database, PostgreSQL, MySQL and MariaDB. Partial support is available + for SQLite. Support among third-party dialects may vary. :param pattern: The regular expression pattern string or column clause. @@ -1658,8 +1658,8 @@ def regexp_replace( **not backend agnostic**. Regular expression replacement support is currently implemented for - Oracle, PostgreSQL, MySQL 8 or greater and MariaDB. Support among - third-party dialects may vary. + Oracle Database, PostgreSQL, MySQL 8 or greater and MariaDB. Support + among third-party dialects may vary. :param pattern: The regular expression pattern string or column clause. diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 65c12b308fe..c4b7f0d3132 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -1544,7 +1544,7 @@ def __init__( unless they are a reserved word. 
Names with any number of upper case characters will be quoted and sent exactly. Note that this behavior applies even for databases which standardize upper - case names as case insensitive such as Oracle. + case names as case insensitive such as Oracle Database. The name field may be omitted at construction time and applied later, at any time before the Column is associated with a @@ -1616,8 +1616,8 @@ def __init__( will imply that database-specific keywords such as PostgreSQL ``SERIAL``, MySQL ``AUTO_INCREMENT``, or ``IDENTITY`` on SQL Server should also be rendered. Not every database backend has an - "implied" default generator available; for example the Oracle - backend always needs an explicit construct such as + "implied" default generator available; for example the Oracle Database + backends alway needs an explicit construct such as :class:`.Identity` to be included with a :class:`.Column` in order for the DDL rendered to include auto-generating constructs to also be produced in the database. @@ -1691,7 +1691,7 @@ def __init__( is not included as this is unnecessary and not recommended by the database vendor. See the section :ref:`sqlite_autoincrement` for more background. - * Oracle - The Oracle dialect has no default "autoincrement" + * Oracle Database - The Oracle Database dialects have no default "autoincrement" feature available at this time, instead the :class:`.Identity` construct is recommended to achieve this (the :class:`.Sequence` construct may also be used). @@ -1708,10 +1708,10 @@ def __init__( (see `https://www.python.org/dev/peps/pep-0249/#lastrowid `_) - * PostgreSQL, SQL Server, Oracle - use RETURNING or an equivalent + * PostgreSQL, SQL Server, Oracle Database - use RETURNING or an equivalent construct when rendering an INSERT statement, and then retrieving the newly generated primary key values after execution - * PostgreSQL, Oracle for :class:`_schema.Table` objects that + * PostgreSQL, Oracle Database for :class:`_schema.Table` objects that set :paramref:`_schema.Table.implicit_returning` to False - for a :class:`.Sequence` only, the :class:`.Sequence` is invoked explicitly before the INSERT statement takes place so that the @@ -3793,11 +3793,11 @@ def __init__( :param cache: optional integer value; number of future values in the sequence which are calculated in advance. Renders the CACHE keyword - understood by Oracle and PostgreSQL. + understood by Oracle Database and PostgreSQL. :param order: optional boolean value; if ``True``, renders the - ORDER keyword, understood by Oracle, indicating the sequence is - definitively ordered. May be necessary to provide deterministic + ORDER keyword, understood by Oracle Database, indicating the sequence + is definitively ordered. May be necessary to provide deterministic ordering using Oracle RAC. :param data_type: The type to be returned by the sequence, for @@ -6075,7 +6075,7 @@ def __init__( :param on_null: Set to ``True`` to specify ON NULL in conjunction with a ``always=False`` identity column. This option is only supported on - some backends, like Oracle. + some backends, like Oracle Database. :param start: the starting index of the sequence. :param increment: the increment value of the sequence. 
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index c6aa8a18815..922a8e4a682 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -503,7 +503,7 @@ def with_statement_hint(self, text: str, dialect_name: str = "*") -> Self: :meth:`_expression.Select.prefix_with` - generic SELECT prefixing which also can suit some database-specific HINT syntaxes such as - MySQL or Oracle optimizer hints + MySQL or Oracle Database optimizer hints """ return self._with_hint(None, text, dialect_name) @@ -525,9 +525,9 @@ def with_hint( **specific to a single table** to a statement, in a location that is **dialect-specific**. To add generic optimizer hints to the **beginning** of a statement ahead of the SELECT keyword such as - for MySQL or Oracle, use the :meth:`_expression.Select.prefix_with` - method. To add optimizer hints to the **end** of a statement such - as for PostgreSQL, use the + for MySQL or Oracle Database, use the + :meth:`_expression.Select.prefix_with` method. To add optimizer + hints to the **end** of a statement such as for PostgreSQL, use the :meth:`_expression.Select.with_statement_hint` method. The text of the hint is rendered in the appropriate @@ -537,7 +537,7 @@ def with_hint( ``selectable`` argument. The dialect implementation typically uses Python string substitution syntax with the token ``%(name)s`` to render the name of - the table or alias. E.g. when using Oracle, the + the table or alias. E.g. when using Oracle Database, the following:: select(mytable).\ @@ -549,7 +549,7 @@ def with_hint( The ``dialect_name`` option will limit the rendering of a particular hint to a particular backend. Such as, to add hints for both Oracle - and Sybase simultaneously:: + Database and Sybase simultaneously:: select(mytable).\ with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\ @@ -561,7 +561,7 @@ def with_hint( :meth:`_expression.Select.prefix_with` - generic SELECT prefixing which also can suit some database-specific HINT syntaxes such as - MySQL or Oracle optimizer hints + MySQL or Oracle Database optimizer hints """ @@ -1036,7 +1036,7 @@ def table_valued(self) -> TableValuedColumn[Any]: A :class:`_sql.TableValuedColumn` is a :class:`_sql.ColumnElement` that represents a complete row in a table. Support for this construct is backend dependent, and is supported in various forms by backends - such as PostgreSQL, Oracle and SQL Server. + such as PostgreSQL, Oracle Database and SQL Server. E.g.: @@ -1720,7 +1720,7 @@ class Alias(roles.DMLTableRole, FromClauseAlias): Represents an alias, as typically applied to any table or sub-select within a SQL statement using the ``AS`` keyword (or - without the keyword on certain databases such as Oracle). + without the keyword on certain databases such as Oracle Database). This object is constructed from the :func:`_expression.alias` module level function as well as the :meth:`_expression.FromClause.alias` @@ -3836,8 +3836,8 @@ def with_for_update( stmt = select(table).with_for_update(nowait=True) - On a database like PostgreSQL or Oracle, the above would render a - statement like:: + On a database like PostgreSQL or Oracle Database, the above would + render a statement like:: SELECT table.a, table.b FROM table FOR UPDATE NOWAIT @@ -3852,7 +3852,7 @@ def with_for_update( variants. :param nowait: boolean; will render ``FOR UPDATE NOWAIT`` on Oracle - and PostgreSQL dialects. + Database and PostgreSQL dialects. 
:param read: boolean; will render ``LOCK IN SHARE MODE`` on MySQL, ``FOR SHARE`` on PostgreSQL. On PostgreSQL, when combined with @@ -3861,13 +3861,13 @@ def with_for_update( :param of: SQL expression or list of SQL expression elements, (typically :class:`_schema.Column` objects or a compatible expression, for some backends may also be a table expression) which will render - into a ``FOR UPDATE OF`` clause; supported by PostgreSQL, Oracle, some - MySQL versions and possibly others. May render as a table or as a - column depending on backend. + into a ``FOR UPDATE OF`` clause; supported by PostgreSQL, Oracle + Database, some MySQL versions and possibly others. May render as a + table or as a column depending on backend. - :param skip_locked: boolean, will render ``FOR UPDATE SKIP LOCKED`` - on Oracle and PostgreSQL dialects or ``FOR SHARE SKIP LOCKED`` if - ``read=True`` is also specified. + :param skip_locked: boolean, will render ``FOR UPDATE SKIP LOCKED`` on + Oracle Database and PostgreSQL dialects or ``FOR SHARE SKIP LOCKED`` + if ``read=True`` is also specified. :param key_share: boolean, will render ``FOR NO KEY UPDATE``, or if combined with ``read=True`` will render ``FOR KEY SHARE``, @@ -4073,10 +4073,10 @@ def fetch( """Return a new selectable with the given FETCH FIRST criterion applied. - This is a numeric value which usually renders as - ``FETCH {FIRST | NEXT} [ count ] {ROW | ROWS} {ONLY | WITH TIES}`` - expression in the resulting select. This functionality is - is currently implemented for Oracle, PostgreSQL, MSSQL. + This is a numeric value which usually renders as ``FETCH {FIRST | NEXT} + [ count ] {ROW | ROWS} {ONLY | WITH TIES}`` expression in the resulting + select. This functionality is is currently implemented for Oracle + Database, PostgreSQL, MSSQL. Use :meth:`_sql.GenerativeSelect.offset` to specify the offset. diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index dd7110e8801..c181f24d91b 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -274,8 +274,8 @@ class Unicode(String): The :class:`.Unicode` type is a :class:`.String` subclass that assumes input and output strings that may contain non-ASCII characters, and for some backends implies an underlying column type that is explicitly - supporting of non-ASCII data, such as ``NVARCHAR`` on Oracle and SQL - Server. This will impact the output of ``CREATE TABLE`` statements and + supporting of non-ASCII data, such as ``NVARCHAR`` on Oracle Database and + SQL Server. This will impact the output of ``CREATE TABLE`` statements and ``CAST`` functions at the dialect level. The character encoding used by the :class:`.Unicode` type that is used to @@ -306,7 +306,6 @@ class Unicode(String): :meth:`.DialectEvents.do_setinputsizes` - """ __visit_name__ = "unicode" @@ -634,16 +633,16 @@ def __init__( indicates a number of digits for the generic :class:`_sqltypes.Float` datatype. - .. note:: For the Oracle backend, the + .. note:: For the Oracle Database backend, the :paramref:`_sqltypes.Float.precision` parameter is not accepted - when rendering DDL, as Oracle does not support float precision + when rendering DDL, as Oracle Database does not support float precision specified as a number of decimal places. Instead, use the - Oracle-specific :class:`_oracle.FLOAT` datatype and specify the + Oracle Database-specific :class:`_oracle.FLOAT` datatype and specify the :paramref:`_oracle.FLOAT.binary_precision` parameter. This is new in version 2.0 of SQLAlchemy. 
To create a database agnostic :class:`_types.Float` that - separately specifies binary precision for Oracle, use + separately specifies binary precision for Oracle Database, use :meth:`_types.TypeEngine.with_variant` as follows:: from sqlalchemy import Column @@ -754,7 +753,7 @@ def __init__(self, timezone: bool = False): to make use of the :class:`_types.TIMESTAMP` datatype directly when using this flag, as some databases include separate generic date/time-holding types distinct from the timezone-capable - TIMESTAMP datatype, such as Oracle. + TIMESTAMP datatype, such as Oracle Database. """ @@ -2030,10 +2029,9 @@ def _type_affinity(self) -> Type[Interval]: class Interval(Emulated, _AbstractInterval, TypeDecorator[dt.timedelta]): """A type for ``datetime.timedelta()`` objects. - The Interval type deals with ``datetime.timedelta`` objects. In - PostgreSQL and Oracle, the native ``INTERVAL`` type is used; for others, - the value is stored as a date which is relative to the "epoch" - (Jan. 1, 1970). + The Interval type deals with ``datetime.timedelta`` objects. In PostgreSQL + and Oracle Database, the native ``INTERVAL`` type is used; for others, the + value is stored as a date which is relative to the "epoch" (Jan. 1, 1970). Note that the ``Interval`` type does not currently provide date arithmetic operations on platforms which do not support interval types natively. Such @@ -2058,16 +2056,16 @@ def __init__( :param native: when True, use the actual INTERVAL type provided by the database, if - supported (currently PostgreSQL, Oracle). + supported (currently PostgreSQL, Oracle Database). Otherwise, represent the interval data as an epoch value regardless. :param second_precision: For native interval types which support a "fractional seconds precision" parameter, - i.e. Oracle and PostgreSQL + i.e. Oracle Database and PostgreSQL :param day_precision: for native interval types which - support a "day precision" parameter, i.e. Oracle. + support a "day precision" parameter, i.e. Oracle Database. """ super().__init__() @@ -3323,8 +3321,8 @@ class BIGINT(BigInteger): class TIMESTAMP(DateTime): """The SQL TIMESTAMP type. - :class:`_types.TIMESTAMP` datatypes have support for timezone - storage on some backends, such as PostgreSQL and Oracle. Use the + :class:`_types.TIMESTAMP` datatypes have support for timezone storage on + some backends, such as PostgreSQL and Oracle Database. Use the :paramref:`~types.TIMESTAMP.timezone` argument in order to enable "TIMESTAMP WITH TIMEZONE" for these backends. @@ -3376,7 +3374,7 @@ class TEXT(Text): class CLOB(Text): """The CLOB type. - This type is found in Oracle and Informix. + This type is found in Oracle Database and Informix. """ __visit_name__ = "CLOB" diff --git a/reap_dbs.py b/reap_dbs.py index 81f9b8f26ee..11a09ab67fb 100644 --- a/reap_dbs.py +++ b/reap_dbs.py @@ -1,4 +1,4 @@ -"""Drop Oracle, SQL Server databases that are left over from a +"""Drop Oracle Database, SQL Server databases that are left over from a multiprocessing test run. 
Currently the cx_Oracle driver seems to sometimes not release a diff --git a/test/dialect/oracle/_oracledb_mode.py b/test/dialect/oracle/_oracledb_mode.py index a02a5389b2c..d9c426b4bb9 100644 --- a/test/dialect/oracle/_oracledb_mode.py +++ b/test/dialect/oracle/_oracledb_mode.py @@ -5,7 +5,7 @@ def _get_version(conn): # this is the suggested way of finding the mode, from - # https://python-oracledb.readthedocs.io/en/latest/user_guide/tracing.html#vsessconinfo + # https://python-oracledb.readthedocs.io/en/latest/user_guide/tracing.html#finding-the-python-oracledb-mode sql = ( "SELECT UNIQUE CLIENT_DRIVER " "FROM V$SESSION_CONNECT_INFO " diff --git a/test/dialect/oracle/test_compiler.py b/test/dialect/oracle/test_compiler.py index 560298800e7..7f02540c5df 100644 --- a/test/dialect/oracle/test_compiler.py +++ b/test/dialect/oracle/test_compiler.py @@ -810,8 +810,8 @@ class MyType(TypeDecorator): def test_use_binds_for_limits_disabled_one_legacy(self): t = table("sometable", column("col1"), column("col2")) with testing.expect_deprecated( - "The ``use_binds_for_limits`` Oracle dialect parameter is " - "deprecated." + "The ``use_binds_for_limits`` Oracle Database dialect parameter " + "is deprecated." ): dialect = oracle.OracleDialect( use_binds_for_limits=False, enable_offset_fetch=False @@ -829,8 +829,8 @@ def test_use_binds_for_limits_disabled_one_legacy(self): def test_use_binds_for_limits_disabled_two_legacy(self): t = table("sometable", column("col1"), column("col2")) with testing.expect_deprecated( - "The ``use_binds_for_limits`` Oracle dialect parameter is " - "deprecated." + "The ``use_binds_for_limits`` Oracle Database dialect parameter " + "is deprecated." ): dialect = oracle.OracleDialect( use_binds_for_limits=False, enable_offset_fetch=False @@ -849,8 +849,8 @@ def test_use_binds_for_limits_disabled_two_legacy(self): def test_use_binds_for_limits_disabled_three_legacy(self): t = table("sometable", column("col1"), column("col2")) with testing.expect_deprecated( - "The ``use_binds_for_limits`` Oracle dialect parameter is " - "deprecated." + "The ``use_binds_for_limits`` Oracle Database dialect parameter " + "is deprecated." ): dialect = oracle.OracleDialect( use_binds_for_limits=False, enable_offset_fetch=False @@ -871,8 +871,8 @@ def test_use_binds_for_limits_disabled_three_legacy(self): def test_use_binds_for_limits_enabled_one_legacy(self): t = table("sometable", column("col1"), column("col2")) with testing.expect_deprecated( - "The ``use_binds_for_limits`` Oracle dialect parameter is " - "deprecated." + "The ``use_binds_for_limits`` Oracle Database dialect parameter " + "is deprecated." ): dialect = oracle.OracleDialect( use_binds_for_limits=True, enable_offset_fetch=False @@ -890,8 +890,8 @@ def test_use_binds_for_limits_enabled_one_legacy(self): def test_use_binds_for_limits_enabled_two_legacy(self): t = table("sometable", column("col1"), column("col2")) with testing.expect_deprecated( - "The ``use_binds_for_limits`` Oracle dialect parameter is " - "deprecated." + "The ``use_binds_for_limits`` Oracle Database dialect parameter " + "is deprecated." ): dialect = oracle.OracleDialect( use_binds_for_limits=True, enable_offset_fetch=False @@ -911,8 +911,8 @@ def test_use_binds_for_limits_enabled_two_legacy(self): def test_use_binds_for_limits_enabled_three_legacy(self): t = table("sometable", column("col1"), column("col2")) with testing.expect_deprecated( - "The ``use_binds_for_limits`` Oracle dialect parameter is " - "deprecated." 
+ "The ``use_binds_for_limits`` Oracle Database dialect parameter " + "is deprecated." ): dialect = oracle.OracleDialect( use_binds_for_limits=True, enable_offset_fetch=False @@ -1416,7 +1416,7 @@ def test_returning_update_computed_warning(self): ) with testing.expect_warnings( - "Computed columns don't work with Oracle UPDATE" + "Computed columns don't work with Oracle Database UPDATE" ): self.assert_compile( t1.update().values(id=1, foo=5).returning(t1.c.bar), @@ -1552,7 +1552,7 @@ def test_column_computed_persisted_true(self): ) assert_raises_message( exc.CompileError, - r".*Oracle computed columns do not support 'stored' ", + r".*Oracle Database computed columns do not support 'stored' ", schema.CreateTable(t).compile, dialect=oracle.dialect(), ) diff --git a/test/dialect/oracle/test_dialect.py b/test/dialect/oracle/test_dialect.py index 684f9d49458..8ea523fb7e5 100644 --- a/test/dialect/oracle/test_dialect.py +++ b/test/dialect/oracle/test_dialect.py @@ -485,7 +485,7 @@ def test_computed_update_warning(self, connection): eq_(result.returned_defaults, (52,)) else: with testing.expect_warnings( - "Computed columns don't work with Oracle UPDATE" + "Computed columns don't work with Oracle Database UPDATE" ): result = conn.execute( test.update().values(foo=10).return_defaults() @@ -556,7 +556,7 @@ def test_no_out_params_w_returning(self, connection, metadata): exc.InvalidRequestError, r"Using explicit outparam\(\) objects with " r"UpdateBase.returning\(\) in the same Core DML statement " - "is not supported in the Oracle dialect.", + "is not supported in the Oracle Database dialects.", ): connection.execute(stmt) diff --git a/test/dialect/oracle/test_types.py b/test/dialect/oracle/test_types.py index d236bf2841e..7b03de88c53 100644 --- a/test/dialect/oracle/test_types.py +++ b/test/dialect/oracle/test_types.py @@ -376,12 +376,13 @@ def test_interval_literal_processor(self, connection): def test_no_decimal_float_precision(self): with expect_raises_message( exc.ArgumentError, - "Oracle FLOAT types use 'binary precision', which does not " - "convert cleanly from decimal 'precision'. Please specify this " - "type with a separate Oracle variant, such as " + "Oracle Database FLOAT types use 'binary precision', which does " + "not convert cleanly from decimal 'precision'. Please specify " + "this type with a separate Oracle Database variant, such as " r"FLOAT\(precision=5\).with_variant\(oracle.FLOAT\(" r"binary_precision=16\), 'oracle'\), so that the Oracle " - "specific 'binary_precision' may be specified accurately.", + "Database specific 'binary_precision' may be specified " + "accurately.", ): FLOAT(5).compile(dialect=oracle.dialect()) @@ -572,7 +573,7 @@ def _dont_test_numeric_nan_decimal(self, metadata, connection): ) def test_numerics_broken_inspection(self, metadata, connection): - """Numeric scenarios where Oracle type info is 'broken', + """Numeric scenarios where Oracle Database type info is 'broken', returning us precision, scale of the form (0, 0) or (0, -127). We convert to Decimal and let int()/float() processors take over. 
From 9732f152939e42a70bd279c780d2f3333481fa8e Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Fri, 15 Nov 2024 13:07:00 -0500 Subject: [PATCH 381/544] Add Range.__contains__ ### Description Fixes #12093 ### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [x] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #12094 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12094 Pull-request-sha: 3f900e96b95c6dbd20ee6f5aa3f49dd6124ffba9 Change-Id: I4c3945eec6a931acd0a8c1682988c5f26e96a499 (cherry picked from commit 31975cfa38689dc0a45fe26d0563eb7b5b3bda6c) --- doc/build/changelog/unreleased_20/12093.rst | 6 ++++++ lib/sqlalchemy/dialects/postgresql/ranges.py | 2 ++ test/dialect/postgresql/test_types.py | 15 +++++++++++++++ 3 files changed, 23 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/12093.rst diff --git a/doc/build/changelog/unreleased_20/12093.rst b/doc/build/changelog/unreleased_20/12093.rst new file mode 100644 index 00000000000..b9ec3b1f88b --- /dev/null +++ b/doc/build/changelog/unreleased_20/12093.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: usecase, postgresql + :ticket: 12093 + + The :class:`_postgresql.Range` type now supports ``__contains__``. + Pull request courtesy of Frazer McLean. diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py index b793ca49f18..fa0c0c5df81 100644 --- a/lib/sqlalchemy/dialects/postgresql/ranges.py +++ b/lib/sqlalchemy/dialects/postgresql/ranges.py @@ -360,6 +360,8 @@ def contains(self, value: Union[_T, Range[_T]]) -> bool: else: return self._contains_value(value) + __contains__ = contains + def overlaps(self, other: Range[_T]) -> bool: "Determine whether this range overlaps with `other`." 
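A minimal sketch of the behavior enabled by ``__contains__ = contains`` above,
assuming the default ``'[)'`` bounds of :class:`_postgresql.Range`::

    from sqlalchemy.dialects.postgresql import Range

    r = Range(1, 10)

    assert 5 in r  # equivalent to r.contains(5)
    assert 15 not in r
    assert Range(2, 5) in r  # equivalent to r.contains(Range(2, 5))
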
diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index 25237656735..2c5bd98fde1 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -4377,12 +4377,14 @@ def test_basic_py_sanity(self): ) is_true(range_.contains(values["il"])) + is_true(values["il"] in range_) is_false( range_.contains(Range(lower=values["ll"], upper=values["ih"])) ) is_false(range_.contains(values["rh"])) + is_false(values["rh"] in range_) is_true(range_ == range_) is_false(range_ != range_) @@ -4430,6 +4432,7 @@ def test_contains_value( ) r, expected = connection.execute(q).first() eq_(r.contains(v), expected) + eq_(v in r, expected) _common_ranges_to_test = ( lambda r, e: Range(empty=True), @@ -4490,6 +4493,12 @@ def test_contains_range(self, connection, r1t, r2t): f"{r1}.contains({r2}): got {py_contains}," f" expected {pg_contains}", ) + r2_in_r1 = r2 in r1 + eq_( + r2_in_r1, + pg_contains, + f"{r2} in {r1}: got {r2_in_r1}, expected {pg_contains}", + ) py_contained = r1.contained_by(r2) eq_( py_contained, @@ -4503,6 +4512,12 @@ def test_contains_range(self, connection, r1t, r2t): f"{r2}.contains({r1}: got {r2.contains(r1)}," f" expected {pg_contained})", ) + r1_in_r2 = r1 in r2 + eq_( + r1_in_r2, + pg_contained, + f"{r1} in {r2}: got {r1_in_r2}, expected {pg_contained}", + ) @testing.combinations( *_common_ranges_to_test, From 2ee82dfe97e304bee5a20f2badbb9f22764a090c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 16 Nov 2024 19:15:10 -0500 Subject: [PATCH 382/544] correct pep-593/pep-681 doc section as of 73a273c90cda2369ec071435edd9c6dc5c1d31c4 and later 4c6429d068 we have decided that Annotated should not allow dataclass arguments in mapped_column(), which emits a depreaction warning. the docs in this section were never updated Fixes: #12108 Change-Id: I6f301c4bac621d5ca1afb1b1dadf754ec929d179 (cherry picked from commit bc4174a15572f134bbdc5fc154078bd992573f10) --- doc/build/orm/dataclasses.rst | 33 ++++++++++++++++++++------------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/doc/build/orm/dataclasses.rst b/doc/build/orm/dataclasses.rst index 910d6a21c55..7f6c2670d96 100644 --- a/doc/build/orm/dataclasses.rst +++ b/doc/build/orm/dataclasses.rst @@ -278,17 +278,24 @@ parameter for ``created_at`` were passed proceeds as: Integration with Annotated ~~~~~~~~~~~~~~~~~~~~~~~~~~ -The approach introduced at :ref:`orm_declarative_mapped_column_pep593` illustrates -how to use :pep:`593` ``Annotated`` objects to package whole -:func:`_orm.mapped_column` constructs for re-use. This feature is supported -with the dataclasses feature. One aspect of the feature however requires -a workaround when working with typing tools, which is that the -:pep:`681`-specific arguments ``init``, ``default``, ``repr``, and ``default_factory`` -**must** be on the right hand side, packaged into an explicit :func:`_orm.mapped_column` -construct, in order for the typing tool to interpret the attribute correctly. -As an example, the approach below will work perfectly fine at runtime, -however typing tools will consider the ``User()`` construction to be -invalid, as they do not see the ``init=False`` parameter present:: +The approach introduced at :ref:`orm_declarative_mapped_column_pep593` +illustrates how to use :pep:`593` ``Annotated`` objects to package whole +:func:`_orm.mapped_column` constructs for re-use. 
While ``Annotated`` objects +can be combined with the use of dataclasses, **dataclass-specific keyword +arguments unfortunately cannot be used within the Annotated construct**. This +includes :pep:`681`-specific arguments ``init``, ``default``, ``repr``, and +``default_factory``, which **must** be present in a :func:`_orm.mapped_column` +or similar construct inline with the class attribute. + +.. versionchanged:: 2.0.14/2.0.22 the ``Annotated`` construct when used with + an ORM construct like :func:`_orm.mapped_column` cannot accommodate dataclass + field parameters such as ``init`` and ``repr`` - this use goes against the + design of Python dataclasses and is not supported by :pep:`681`, and therefore + is also rejected by the SQLAlchemy ORM at runtime. A deprecation warning + is now emitted and the attribute will be ignored. + +As an example, the ``init=False`` parameter below will be ignored and additionally +emit a deprecation warning:: from typing import Annotated @@ -296,7 +303,7 @@ invalid, as they do not see the ``init=False`` parameter present:: from sqlalchemy.orm import mapped_column from sqlalchemy.orm import registry - # typing tools will ignore init=False here + # typing tools as well as SQLAlchemy will ignore init=False here intpk = Annotated[int, mapped_column(init=False, primary_key=True)] reg = registry() @@ -308,7 +315,7 @@ invalid, as they do not see the ``init=False`` parameter present:: id: Mapped[intpk] - # typing error: Argument missing for parameter "id" + # typing error as well as runtime error: Argument missing for parameter "id" u1 = User() Instead, :func:`_orm.mapped_column` must be present on the right side From d206a2635084d25de9011eceac204cdc8448c397 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20=C5=BDurek?= Date: Fri, 15 Nov 2024 13:12:54 -0500 Subject: [PATCH 383/544] Added INET4 and INET6 types for MariaDB Added sql types ``INET4`` and ``INET6`` in the MariaDB dialect. Fixes: #10720 Closes: #12028 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12028 Pull-request-sha: 25f939076eda0a763bc33fb0455d45ef00002110 Change-Id: I2efa53420aa5566f61a19f228cb421116b2e2720 (cherry picked from commit 0f81c14b7cd9ac821205d6c48cf2393447058394) --- doc/build/changelog/unreleased_20/10720.rst | 5 ++++ doc/build/dialects/mysql.rst | 13 ++++++++- lib/sqlalchemy/dialects/mysql/__init__.py | 5 +++- lib/sqlalchemy/dialects/mysql/mariadb.py | 29 +++++++++++++++++++++ test/dialect/mysql/test_types.py | 12 +++++++++ 5 files changed, 62 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10720.rst diff --git a/doc/build/changelog/unreleased_20/10720.rst b/doc/build/changelog/unreleased_20/10720.rst new file mode 100644 index 00000000000..d676a4425d8 --- /dev/null +++ b/doc/build/changelog/unreleased_20/10720.rst @@ -0,0 +1,5 @@ +.. change:: + :tags: usecase, mariadb + :ticket: 10720 + + Added sql types ``INET4`` and ``INET6`` in the MariaDB dialect. diff --git a/doc/build/dialects/mysql.rst b/doc/build/dialects/mysql.rst index a46bf721e21..657cd2a4189 100644 --- a/doc/build/dialects/mysql.rst +++ b/doc/build/dialects/mysql.rst @@ -56,7 +56,14 @@ valid with MySQL are importable from the top level dialect:: YEAR, ) -Types which are specific to MySQL, or have MySQL-specific +In addition to the above types, MariaDB also supports the following:: + + from sqlalchemy.dialects.mysql import ( + INET4, + INET6, + ) + +Types which are specific to MySQL or MariaDB, or have specific construction arguments, are as follows: .. 
note: where :noindex: is used, indicates a type that is not redefined @@ -117,6 +124,10 @@ construction arguments, are as follows: :members: __init__ +.. autoclass:: INET4 + +.. autoclass:: INET6 + .. autoclass:: INTEGER :members: __init__ diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py index 60bac87443d..05f41cf3512 100644 --- a/lib/sqlalchemy/dialects/mysql/__init__.py +++ b/lib/sqlalchemy/dialects/mysql/__init__.py @@ -53,7 +53,8 @@ from .dml import Insert from .dml import insert from .expression import match -from ...util import compat +from .mariadb import INET4 +from .mariadb import INET6 # default dialect base.dialect = dialect = mysqldb.dialect @@ -71,6 +72,8 @@ "DOUBLE", "ENUM", "FLOAT", + "INET4", + "INET6", "INTEGER", "INTEGER", "JSON", diff --git a/lib/sqlalchemy/dialects/mysql/mariadb.py b/lib/sqlalchemy/dialects/mysql/mariadb.py index 10a05f9cb36..be7aebeaeb4 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadb.py +++ b/lib/sqlalchemy/dialects/mysql/mariadb.py @@ -7,6 +7,34 @@ # mypy: ignore-errors from .base import MariaDBIdentifierPreparer from .base import MySQLDialect +from .base import MySQLTypeCompiler +from ...sql import sqltypes + + +class INET4(sqltypes.TypeEngine[str]): + """INET4 column type for MariaDB + + .. versionadded:: 2.0.37 + """ + + __visit_name__ = "INET4" + + +class INET6(sqltypes.TypeEngine[str]): + """INET6 column type for MariaDB + + .. versionadded:: 2.0.37 + """ + + __visit_name__ = "INET6" + + +class MariaDBTypeCompiler(MySQLTypeCompiler): + def visit_INET4(self, type_, **kwargs) -> str: + return "INET4" + + def visit_INET6(self, type_, **kwargs) -> str: + return "INET6" class MariaDBDialect(MySQLDialect): @@ -14,6 +42,7 @@ class MariaDBDialect(MySQLDialect): supports_statement_cache = True name = "mariadb" preparer = MariaDBIdentifierPreparer + type_compiler_cls = MariaDBTypeCompiler def loader(driver): diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py index c73e82a945b..2e5033ec571 100644 --- a/test/dialect/mysql/test_types.py +++ b/test/dialect/mysql/test_types.py @@ -21,6 +21,7 @@ from sqlalchemy import types as sqltypes from sqlalchemy import UnicodeText from sqlalchemy.dialects.mysql import base as mysql +from sqlalchemy.dialects.mysql import mariadb from sqlalchemy.testing import assert_raises from sqlalchemy.testing import assert_raises_message from sqlalchemy.testing import AssertsCompiledSQL @@ -474,6 +475,17 @@ def test_float_type_compile(self, type_, sql_text): self.assert_compile(type_, sql_text) +class INETMariadbTest(fixtures.TestBase, AssertsCompiledSQL): + __dialect__ = mariadb.MariaDBDialect() + + @testing.combinations( + (mariadb.INET4(), "INET4"), + (mariadb.INET6(), "INET6"), + ) + def test_mariadb_inet6(self, type_, res): + self.assert_compile(type_, res) + + class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults): __dialect__ = mysql.dialect() __only_on__ = "mysql", "mariadb" From 61fdc82958ef607ed675701eba82947700a0270a Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 30 Nov 2024 19:50:38 +0100 Subject: [PATCH 384/544] update the format_docs_code to also work on python files Change-Id: I0a6c9610b3fd85365ed4c2c199e3cad87ee64022 (cherry picked from commit d539bc3a0fecdc2deb5b952e410fbd4f382a1ff4) --- README.dialects.rst | 20 +- README.unittests.rst | 2 +- examples/adjacency_list/__init__.py | 6 +- examples/dogpile_caching/__init__.py | 8 +- examples/performance/__init__.py | 19 +- examples/space_invaders/__init__.py | 4 +- 
examples/versioned_history/__init__.py | 20 +- examples/vertical/__init__.py | 19 +- examples/vertical/dictlike-polymorphic.py | 20 +- examples/vertical/dictlike.py | 28 +- lib/sqlalchemy/dialects/mssql/aioodbc.py | 3 +- lib/sqlalchemy/dialects/mssql/base.py | 214 ++++++---- lib/sqlalchemy/dialects/mssql/json.py | 8 +- lib/sqlalchemy/dialects/mssql/pyodbc.py | 35 +- lib/sqlalchemy/dialects/mysql/aiomysql.py | 4 +- lib/sqlalchemy/dialects/mysql/asyncmy.py | 4 +- lib/sqlalchemy/dialects/mysql/base.py | 246 ++++++----- lib/sqlalchemy/dialects/mysql/dml.py | 6 +- lib/sqlalchemy/dialects/mysql/enumerated.py | 5 +- lib/sqlalchemy/dialects/mysql/expression.py | 4 +- lib/sqlalchemy/dialects/mysql/mysqldb.py | 8 +- lib/sqlalchemy/dialects/mysql/pymysql.py | 1 - lib/sqlalchemy/dialects/mysql/pyodbc.py | 15 +- lib/sqlalchemy/dialects/oracle/base.py | 106 +++-- lib/sqlalchemy/dialects/oracle/cx_oracle.py | 82 +++- lib/sqlalchemy/dialects/oracle/oracledb.py | 177 +++++--- lib/sqlalchemy/dialects/postgresql/array.py | 38 +- lib/sqlalchemy/dialects/postgresql/asyncpg.py | 15 +- lib/sqlalchemy/dialects/postgresql/base.py | 327 +++++++------- lib/sqlalchemy/dialects/postgresql/ext.py | 39 +- lib/sqlalchemy/dialects/postgresql/hstore.py | 31 +- lib/sqlalchemy/dialects/postgresql/json.py | 39 +- .../dialects/postgresql/named_types.py | 26 +- lib/sqlalchemy/dialects/postgresql/pg8000.py | 12 +- lib/sqlalchemy/dialects/postgresql/psycopg.py | 15 +- .../dialects/postgresql/psycopg2.py | 46 +- lib/sqlalchemy/dialects/postgresql/types.py | 8 +- lib/sqlalchemy/dialects/sqlite/aiosqlite.py | 3 + lib/sqlalchemy/dialects/sqlite/base.py | 239 ++++++----- lib/sqlalchemy/dialects/sqlite/pysqlcipher.py | 10 +- lib/sqlalchemy/dialects/sqlite/pysqlite.py | 57 ++- lib/sqlalchemy/engine/base.py | 29 +- lib/sqlalchemy/engine/create.py | 7 +- lib/sqlalchemy/engine/cursor.py | 25 +- lib/sqlalchemy/engine/events.py | 51 ++- lib/sqlalchemy/engine/interfaces.py | 62 +-- lib/sqlalchemy/engine/mock.py | 4 +- lib/sqlalchemy/engine/reflection.py | 7 +- lib/sqlalchemy/engine/result.py | 10 +- lib/sqlalchemy/engine/row.py | 5 +- lib/sqlalchemy/engine/url.py | 28 +- lib/sqlalchemy/event/api.py | 21 +- lib/sqlalchemy/exc.py | 4 +- lib/sqlalchemy/ext/associationproxy.py | 8 +- lib/sqlalchemy/ext/asyncio/base.py | 4 +- lib/sqlalchemy/ext/asyncio/engine.py | 23 +- lib/sqlalchemy/ext/asyncio/scoping.py | 22 +- lib/sqlalchemy/ext/asyncio/session.py | 49 ++- lib/sqlalchemy/ext/automap.py | 20 +- lib/sqlalchemy/ext/baked.py | 14 +- lib/sqlalchemy/ext/compiler.py | 119 ++++-- lib/sqlalchemy/ext/declarative/extensions.py | 62 ++- lib/sqlalchemy/ext/horizontal_shard.py | 13 +- lib/sqlalchemy/ext/hybrid.py | 79 ++-- lib/sqlalchemy/ext/indexable.py | 54 +-- lib/sqlalchemy/ext/mutable.py | 57 ++- lib/sqlalchemy/ext/mypy/apply.py | 4 + lib/sqlalchemy/ext/mypy/infer.py | 4 +- lib/sqlalchemy/ext/orderinglist.py | 33 +- lib/sqlalchemy/ext/serializer.py | 10 +- lib/sqlalchemy/orm/_orm_constructors.py | 87 ++-- lib/sqlalchemy/orm/attributes.py | 2 +- lib/sqlalchemy/orm/collections.py | 23 +- lib/sqlalchemy/orm/decl_api.py | 51 ++- lib/sqlalchemy/orm/events.py | 108 ++--- lib/sqlalchemy/orm/interfaces.py | 52 ++- lib/sqlalchemy/orm/mapper.py | 40 +- lib/sqlalchemy/orm/properties.py | 4 +- lib/sqlalchemy/orm/query.py | 273 ++++++------ lib/sqlalchemy/orm/relationships.py | 42 +- lib/sqlalchemy/orm/scoping.py | 14 +- lib/sqlalchemy/orm/session.py | 38 +- lib/sqlalchemy/orm/strategy_options.py | 75 ++-- lib/sqlalchemy/orm/util.py | 61 ++- 
lib/sqlalchemy/pool/events.py | 6 +- lib/sqlalchemy/sql/_dml_constructors.py | 18 +- lib/sqlalchemy/sql/_elements_constructors.py | 284 +++++++------ .../sql/_selectable_constructors.py | 38 +- lib/sqlalchemy/sql/base.py | 21 +- lib/sqlalchemy/sql/ddl.py | 70 +-- lib/sqlalchemy/sql/dml.py | 30 +- lib/sqlalchemy/sql/elements.py | 176 ++++---- lib/sqlalchemy/sql/events.py | 29 +- lib/sqlalchemy/sql/functions.py | 49 ++- lib/sqlalchemy/sql/lambdas.py | 14 +- lib/sqlalchemy/sql/operators.py | 186 ++++---- lib/sqlalchemy/sql/schema.py | 220 ++++++---- lib/sqlalchemy/sql/selectable.py | 401 +++++++++++------- lib/sqlalchemy/sql/sqltypes.py | 135 +++--- lib/sqlalchemy/sql/type_api.py | 57 +-- lib/sqlalchemy/sql/util.py | 39 +- lib/sqlalchemy/sql/visitors.py | 4 +- lib/sqlalchemy/testing/config.py | 16 +- lib/sqlalchemy/testing/provision.py | 10 +- lib/sqlalchemy/testing/requirements.py | 26 +- lib/sqlalchemy/testing/util.py | 15 +- lib/sqlalchemy/util/_collections.py | 4 +- lib/sqlalchemy/util/deprecations.py | 4 +- lib/sqlalchemy/util/langhelpers.py | 12 +- reap_dbs.py | 1 + test/dialect/test_sqlite.py | 3 +- .../declarative/test_tm_future_annotations.py | 2 +- test/orm/inheritance/_poly_fixtures.py | 50 +-- test/orm/test_relationships.py | 8 +- test/requirements.py | 12 +- test/sql/test_cte.py | 4 +- test/sql/test_from_linter.py | 2 +- test/sql/test_functions.py | 8 +- test/sql/test_quote.py | 4 +- tools/format_docs_code.py | 54 ++- tools/generate_proxy_methods.py | 13 +- tools/generate_sql_functions.py | 13 +- tools/generate_tuple_map_overloads.py | 13 +- tools/trace_orm_adapter.py | 10 +- 124 files changed, 3292 insertions(+), 2436 deletions(-) diff --git a/README.dialects.rst b/README.dialects.rst index 810267a20cf..798ed21fbd3 100644 --- a/README.dialects.rst +++ b/README.dialects.rst @@ -26,7 +26,9 @@ compliance suite" should be viewed as the primary target for new dialects. Dialect Layout =============== -The file structure of a dialect is typically similar to the following:: +The file structure of a dialect is typically similar to the following: + +.. sourcecode:: text sqlalchemy-/ setup.py @@ -52,9 +54,9 @@ Key aspects of this file layout include: dialect to be usable from create_engine(), e.g.:: entry_points = { - 'sqlalchemy.dialects': [ - 'access.pyodbc = sqlalchemy_access.pyodbc:AccessDialect_pyodbc', - ] + "sqlalchemy.dialects": [ + "access.pyodbc = sqlalchemy_access.pyodbc:AccessDialect_pyodbc", + ] } Above, the entrypoint ``access.pyodbc`` allow URLs to be used such as:: @@ -63,7 +65,9 @@ Key aspects of this file layout include: * setup.cfg - this file contains the traditional contents such as [tool:pytest] directives, but also contains new directives that are used - by SQLAlchemy's testing framework. E.g. for Access:: + by SQLAlchemy's testing framework. E.g. for Access: + + .. sourcecode:: text [tool:pytest] addopts= --tb native -v -r fxX --maxfail=25 -p no:warnings @@ -129,6 +133,7 @@ Key aspects of this file layout include: from sqlalchemy.testing import exclusions + class Requirements(SuiteRequirements): @property def nullable_booleans(self): @@ -148,7 +153,9 @@ Key aspects of this file layout include: The requirements system can also be used when running SQLAlchemy's primary test suite against the external dialect. In this use case, a ``--dburi`` as well as a ``--requirements`` flag are passed to SQLAlchemy's - test runner so that exclusions specific to the dialect take place:: + test runner so that exclusions specific to the dialect take place: + + .. 
sourcecode:: text cd /path/to/sqlalchemy pytest -v \ @@ -175,6 +182,7 @@ Key aspects of this file layout include: from sqlalchemy.testing.suite import IntegerTest as _IntegerTest + class IntegerTest(_IntegerTest): @testing.skip("access") diff --git a/README.unittests.rst b/README.unittests.rst index 2ce4f0fff12..ce280bb4d23 100644 --- a/README.unittests.rst +++ b/README.unittests.rst @@ -49,7 +49,7 @@ database options and test selection. A generic pytest run looks like:: - pytest -n4 + pytest - n4 Above, the full test suite will run against SQLite, using four processes. If the "-n" flag is not used, the pytest-xdist is skipped and the tests will diff --git a/examples/adjacency_list/__init__.py b/examples/adjacency_list/__init__.py index 65ce311e6de..b029e421b93 100644 --- a/examples/adjacency_list/__init__.py +++ b/examples/adjacency_list/__init__.py @@ -4,9 +4,9 @@ E.g.:: - node = TreeNode('rootnode') - node.append('node1') - node.append('node3') + node = TreeNode("rootnode") + node.append("node1") + node.append("node3") session.add(node) session.commit() diff --git a/examples/dogpile_caching/__init__.py b/examples/dogpile_caching/__init__.py index f8c1bb582bc..7fd6dba7217 100644 --- a/examples/dogpile_caching/__init__.py +++ b/examples/dogpile_caching/__init__.py @@ -44,13 +44,13 @@ The demo scripts themselves, in order of complexity, are run as Python modules so that relative imports work:: - python -m examples.dogpile_caching.helloworld + $ python -m examples.dogpile_caching.helloworld - python -m examples.dogpile_caching.relationship_caching + $ python -m examples.dogpile_caching.relationship_caching - python -m examples.dogpile_caching.advanced + $ python -m examples.dogpile_caching.advanced - python -m examples.dogpile_caching.local_session_caching + $ python -m examples.dogpile_caching.local_session_caching .. 
autosource:: :files: environment.py, caching_query.py, model.py, fixture_data.py, \ diff --git a/examples/performance/__init__.py b/examples/performance/__init__.py index 34db251e5c7..3854fdbea52 100644 --- a/examples/performance/__init__.py +++ b/examples/performance/__init__.py @@ -129,15 +129,15 @@ class Parent(Base): - __tablename__ = 'parent' + __tablename__ = "parent" id = Column(Integer, primary_key=True) children = relationship("Child") class Child(Base): - __tablename__ = 'child' + __tablename__ = "child" id = Column(Integer, primary_key=True) - parent_id = Column(Integer, ForeignKey('parent.id')) + parent_id = Column(Integer, ForeignKey("parent.id")) # Init with name of file, default number of items @@ -152,10 +152,12 @@ def setup_once(dburl, echo, num): Base.metadata.drop_all(engine) Base.metadata.create_all(engine) sess = Session(engine) - sess.add_all([ - Parent(children=[Child() for j in range(100)]) - for i in range(num) - ]) + sess.add_all( + [ + Parent(children=[Child() for j in range(100)]) + for i in range(num) + ] + ) sess.commit() @@ -191,7 +193,8 @@ def test_subqueryload(n): for parent in session.query(Parent).options(subqueryload("children")): parent.children - if __name__ == '__main__': + + if __name__ == "__main__": Profiler.main() We can run our new script directly:: diff --git a/examples/space_invaders/__init__.py b/examples/space_invaders/__init__.py index 944f8bb466c..993d1e45431 100644 --- a/examples/space_invaders/__init__.py +++ b/examples/space_invaders/__init__.py @@ -11,11 +11,11 @@ To run:: - python -m examples.space_invaders.space_invaders + $ python -m examples.space_invaders.space_invaders While it runs, watch the SQL output in the log:: - tail -f space_invaders.log + $ tail -f space_invaders.log enjoy! diff --git a/examples/versioned_history/__init__.py b/examples/versioned_history/__init__.py index 2fa281b8dd1..a872a63c034 100644 --- a/examples/versioned_history/__init__.py +++ b/examples/versioned_history/__init__.py @@ -9,18 +9,20 @@ class which represents historical versions of the target object. Usage is illustrated via a unit test module ``test_versioning.py``, which is run using SQLAlchemy's internal pytest plugin:: - pytest test/base/test_examples.py + $ pytest test/base/test_examples.py A fragment of example usage, using declarative:: from history_meta import Versioned, versioned_session + class Base(DeclarativeBase): pass + class SomeClass(Versioned, Base): - __tablename__ = 'sometable' + __tablename__ = "sometable" id = Column(Integer, primary_key=True) name = Column(String(50)) @@ -28,25 +30,25 @@ class SomeClass(Versioned, Base): def __eq__(self, other): assert type(other) is SomeClass and other.id == self.id + Session = sessionmaker(bind=engine) versioned_session(Session) sess = Session() - sc = SomeClass(name='sc1') + sc = SomeClass(name="sc1") sess.add(sc) sess.commit() - sc.name = 'sc1modified' + sc.name = "sc1modified" sess.commit() assert sc.version == 2 SomeClassHistory = SomeClass.__history_mapper__.class_ - assert sess.query(SomeClassHistory).\\ - filter(SomeClassHistory.version == 1).\\ - all() \\ - == [SomeClassHistory(version=1, name='sc1')] + assert sess.query(SomeClassHistory).filter( + SomeClassHistory.version == 1 + ).all() == [SomeClassHistory(version=1, name="sc1")] The ``Versioned`` mixin is designed to work with declarative. 
To use the extension with classical mappers, the ``_history_mapper`` function @@ -64,7 +66,7 @@ def __eq__(self, other): set the flag ``Versioned.use_mapper_versioning`` to True:: class SomeClass(Versioned, Base): - __tablename__ = 'sometable' + __tablename__ = "sometable" use_mapper_versioning = True diff --git a/examples/vertical/__init__.py b/examples/vertical/__init__.py index b0c00b664e7..997510e1b07 100644 --- a/examples/vertical/__init__.py +++ b/examples/vertical/__init__.py @@ -15,19 +15,20 @@ Example:: - shrew = Animal(u'shrew') - shrew[u'cuteness'] = 5 - shrew[u'weasel-like'] = False - shrew[u'poisonous'] = True + shrew = Animal("shrew") + shrew["cuteness"] = 5 + shrew["weasel-like"] = False + shrew["poisonous"] = True session.add(shrew) session.flush() - q = (session.query(Animal). - filter(Animal.facts.any( - and_(AnimalFact.key == u'weasel-like', - AnimalFact.value == True)))) - print('weasel-like animals', q.all()) + q = session.query(Animal).filter( + Animal.facts.any( + and_(AnimalFact.key == "weasel-like", AnimalFact.value == True) + ) + ) + print("weasel-like animals", q.all()) .. autosource:: diff --git a/examples/vertical/dictlike-polymorphic.py b/examples/vertical/dictlike-polymorphic.py index 69f32cf4a8e..7de8fa80d9f 100644 --- a/examples/vertical/dictlike-polymorphic.py +++ b/examples/vertical/dictlike-polymorphic.py @@ -3,15 +3,17 @@ Builds upon the dictlike.py example to also add differently typed columns to the "fact" table, e.g.:: - Table('properties', metadata - Column('owner_id', Integer, ForeignKey('owner.id'), - primary_key=True), - Column('key', UnicodeText), - Column('type', Unicode(16)), - Column('int_value', Integer), - Column('char_value', UnicodeText), - Column('bool_value', Boolean), - Column('decimal_value', Numeric(10,2))) + Table( + "properties", + metadata, + Column("owner_id", Integer, ForeignKey("owner.id"), primary_key=True), + Column("key", UnicodeText), + Column("type", Unicode(16)), + Column("int_value", Integer), + Column("char_value", UnicodeText), + Column("bool_value", Boolean), + Column("decimal_value", Numeric(10, 2)), + ) For any given properties row, the value of the 'type' column will point to the '_value' column active for that row. diff --git a/examples/vertical/dictlike.py b/examples/vertical/dictlike.py index f561499e8fd..bd1701c89c6 100644 --- a/examples/vertical/dictlike.py +++ b/examples/vertical/dictlike.py @@ -6,24 +6,30 @@ example, instead of:: # A regular ("horizontal") table has columns for 'species' and 'size' - Table('animal', metadata, - Column('id', Integer, primary_key=True), - Column('species', Unicode), - Column('size', Unicode)) + Table( + "animal", + metadata, + Column("id", Integer, primary_key=True), + Column("species", Unicode), + Column("size", Unicode), + ) A vertical table models this as two tables: one table for the base or parent entity, and another related table holding key/value pairs:: - Table('animal', metadata, - Column('id', Integer, primary_key=True)) + Table("animal", metadata, Column("id", Integer, primary_key=True)) # The properties table will have one row for a 'species' value, and # another row for the 'size' value. 
- Table('properties', metadata - Column('animal_id', Integer, ForeignKey('animal.id'), - primary_key=True), - Column('key', UnicodeText), - Column('value', UnicodeText)) + Table( + "properties", + metadata, + Column( + "animal_id", Integer, ForeignKey("animal.id"), primary_key=True + ), + Column("key", UnicodeText), + Column("value", UnicodeText), + ) Because the key/value pairs in a vertical scheme are not fixed in advance, accessing them like a Python dict can be very convenient. The example below diff --git a/lib/sqlalchemy/dialects/mssql/aioodbc.py b/lib/sqlalchemy/dialects/mssql/aioodbc.py index 65945d97559..518d7ce0669 100644 --- a/lib/sqlalchemy/dialects/mssql/aioodbc.py +++ b/lib/sqlalchemy/dialects/mssql/aioodbc.py @@ -32,13 +32,12 @@ styles are otherwise equivalent to those documented in the pyodbc section:: from sqlalchemy.ext.asyncio import create_async_engine + engine = create_async_engine( "mssql+aioodbc://scott:tiger@mssql2017:1433/test?" "driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes" ) - - """ from __future__ import annotations diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 07dbe401d43..e304073535b 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -39,9 +39,12 @@ from sqlalchemy import Table, MetaData, Column, Integer m = MetaData() - t = Table('t', m, - Column('id', Integer, primary_key=True), - Column('x', Integer)) + t = Table( + "t", + m, + Column("id", Integer, primary_key=True), + Column("x", Integer), + ) m.create_all(engine) The above example will generate DDL as: @@ -59,9 +62,12 @@ on the first integer primary key column:: m = MetaData() - t = Table('t', m, - Column('id', Integer, primary_key=True, autoincrement=False), - Column('x', Integer)) + t = Table( + "t", + m, + Column("id", Integer, primary_key=True, autoincrement=False), + Column("x", Integer), + ) m.create_all(engine) To add the ``IDENTITY`` keyword to a non-primary key column, specify @@ -71,9 +77,12 @@ is set to ``False`` on any integer primary key column:: m = MetaData() - t = Table('t', m, - Column('id', Integer, primary_key=True, autoincrement=False), - Column('x', Integer, autoincrement=True)) + t = Table( + "t", + m, + Column("id", Integer, primary_key=True, autoincrement=False), + Column("x", Integer, autoincrement=True), + ) m.create_all(engine) .. versionchanged:: 1.4 Added :class:`_schema.Identity` construct @@ -136,14 +145,12 @@ from sqlalchemy import Table, Integer, Column, Identity test = Table( - 'test', metadata, + "test", + metadata, Column( - 'id', - Integer, - primary_key=True, - Identity(start=100, increment=10) + "id", Integer, primary_key=True, Identity(start=100, increment=10) ), - Column('name', String(20)) + Column("name", String(20)), ) The CREATE TABLE for the above :class:`_schema.Table` object would be: @@ -153,7 +160,7 @@ CREATE TABLE test ( id INTEGER NOT NULL IDENTITY(100,10) PRIMARY KEY, name VARCHAR(20) NULL, - ) + ) .. 
note:: @@ -186,6 +193,7 @@ Base = declarative_base() + class TestTable(Base): __tablename__ = "test" id = Column( @@ -211,8 +219,9 @@ class TestTable(Base): from sqlalchemy import TypeDecorator + class NumericAsInteger(TypeDecorator): - '''normalize floating point return values into ints''' + "normalize floating point return values into ints" impl = Numeric(10, 0, asdecimal=False) cache_ok = True @@ -222,6 +231,7 @@ def process_result_value(self, value, dialect): value = int(value) return value + class TestTable(Base): __tablename__ = "test" id = Column( @@ -270,11 +280,11 @@ class TestTable(Base): fetched in order to receive the value. Given a table as:: t = Table( - 't', + "t", metadata, - Column('id', Integer, primary_key=True), - Column('x', Integer), - implicit_returning=False + Column("id", Integer, primary_key=True), + Column("x", Integer), + implicit_returning=False, ) an INSERT will look like: @@ -300,12 +310,13 @@ class TestTable(Base): execution. Given this example:: m = MetaData() - t = Table('t', m, Column('id', Integer, primary_key=True), - Column('x', Integer)) + t = Table( + "t", m, Column("id", Integer, primary_key=True), Column("x", Integer) + ) m.create_all(engine) with engine.begin() as conn: - conn.execute(t.insert(), {'id': 1, 'x':1}, {'id':2, 'x':2}) + conn.execute(t.insert(), {"id": 1, "x": 1}, {"id": 2, "x": 2}) The above column will be created with IDENTITY, however the INSERT statement we emit is specifying explicit values. In the echo output we can see @@ -341,7 +352,11 @@ class TestTable(Base): >>> from sqlalchemy import Sequence >>> from sqlalchemy.schema import CreateSequence >>> from sqlalchemy.dialects import mssql - >>> print(CreateSequence(Sequence("my_seq", start=1)).compile(dialect=mssql.dialect())) + >>> print( + ... CreateSequence(Sequence("my_seq", start=1)).compile( + ... dialect=mssql.dialect() + ... ) + ... ) {printsql}CREATE SEQUENCE my_seq START WITH 1 For integer primary key generation, SQL Server's ``IDENTITY`` construct should @@ -375,12 +390,12 @@ class TestTable(Base): To build a SQL Server VARCHAR or NVARCHAR with MAX length, use None:: my_table = Table( - 'my_table', metadata, - Column('my_data', VARCHAR(None)), - Column('my_n_data', NVARCHAR(None)) + "my_table", + metadata, + Column("my_data", VARCHAR(None)), + Column("my_n_data", NVARCHAR(None)), ) - Collation Support ----------------- @@ -388,10 +403,13 @@ class TestTable(Base): specified by the string argument "collation":: from sqlalchemy import VARCHAR - Column('login', VARCHAR(32, collation='Latin1_General_CI_AS')) + + Column("login", VARCHAR(32, collation="Latin1_General_CI_AS")) When such a column is associated with a :class:`_schema.Table`, the -CREATE TABLE statement for this column will yield:: +CREATE TABLE statement for this column will yield: + +.. sourcecode:: sql login VARCHAR(32) COLLATE Latin1_General_CI_AS NULL @@ -411,7 +429,9 @@ class TestTable(Base): select(some_table).limit(5) -will render similarly to:: +will render similarly to: + +.. sourcecode:: sql SELECT TOP 5 col1, col2.. FROM table @@ -421,7 +441,9 @@ class TestTable(Base): select(some_table).order_by(some_table.c.col3).limit(5).offset(10) -will render similarly to:: +will render similarly to: + +.. 
sourcecode:: sql SELECT anon_1.col1, anon_1.col2 FROM (SELECT col1, col2, ROW_NUMBER() OVER (ORDER BY col3) AS @@ -474,16 +496,13 @@ class TestTable(Base): To set isolation level using :func:`_sa.create_engine`:: engine = create_engine( - "mssql+pyodbc://scott:tiger@ms_2008", - isolation_level="REPEATABLE READ" + "mssql+pyodbc://scott:tiger@ms_2008", isolation_level="REPEATABLE READ" ) To set using per-connection execution options:: connection = engine.connect() - connection = connection.execution_options( - isolation_level="READ COMMITTED" - ) + connection = connection.execution_options(isolation_level="READ COMMITTED") Valid values for ``isolation_level`` include: @@ -533,7 +552,6 @@ class TestTable(Base): mssql_engine = create_engine( "mssql+pyodbc://scott:tiger^5HHH@mssql2017:1433/test?driver=ODBC+Driver+17+for+SQL+Server", - # disable default reset-on-return scheme pool_reset_on_return=None, ) @@ -562,13 +580,17 @@ def _reset_mssql(dbapi_connection, connection_record, reset_state): ----------- MSSQL has support for three levels of column nullability. The default nullability allows nulls and is explicit in the CREATE TABLE -construct:: +construct: + +.. sourcecode:: sql name VARCHAR(20) NULL If ``nullable=None`` is specified then no specification is made. In other words the database's configured default is used. This will -render:: +render: + +.. sourcecode:: sql name VARCHAR(20) @@ -624,8 +646,9 @@ def _reset_mssql(dbapi_connection, connection_record, reset_state): * The flag can be set to either ``True`` or ``False`` when the dialect is created, typically via :func:`_sa.create_engine`:: - eng = create_engine("mssql+pymssql://user:pass@host/db", - deprecate_large_types=True) + eng = create_engine( + "mssql+pymssql://user:pass@host/db", deprecate_large_types=True + ) * Complete control over whether the "old" or "new" types are rendered is available in all SQLAlchemy versions by using the UPPERCASE type objects @@ -647,9 +670,10 @@ def _reset_mssql(dbapi_connection, connection_record, reset_state): :class:`_schema.Table`:: Table( - "some_table", metadata, + "some_table", + metadata, Column("q", String(50)), - schema="mydatabase.dbo" + schema="mydatabase.dbo", ) When performing operations such as table or component reflection, a schema @@ -661,9 +685,10 @@ def _reset_mssql(dbapi_connection, connection_record, reset_state): special characters. Given an argument as below:: Table( - "some_table", metadata, + "some_table", + metadata, Column("q", String(50)), - schema="MyDataBase.dbo" + schema="MyDataBase.dbo", ) The above schema would be rendered as ``[MyDataBase].dbo``, and also in @@ -676,21 +701,22 @@ def _reset_mssql(dbapi_connection, connection_record, reset_state): "database" will be None:: Table( - "some_table", metadata, + "some_table", + metadata, Column("q", String(50)), - schema="[MyDataBase.dbo]" + schema="[MyDataBase.dbo]", ) To individually specify both database and owner name with special characters or embedded dots, use two sets of brackets:: Table( - "some_table", metadata, + "some_table", + metadata, Column("q", String(50)), - schema="[MyDataBase.Period].[MyOwner.Dot]" + schema="[MyDataBase.Period].[MyOwner.Dot]", ) - .. versionchanged:: 1.2 the SQL Server dialect now treats brackets as identifier delimiters splitting the schema into separate database and owner tokens, to allow dots within either name itself. 
@@ -705,10 +731,11 @@ def _reset_mssql(dbapi_connection, connection_record, reset_state): SELECT statement; given a table:: account_table = Table( - 'account', metadata, - Column('id', Integer, primary_key=True), - Column('info', String(100)), - schema="customer_schema" + "account", + metadata, + Column("id", Integer, primary_key=True), + Column("info", String(100)), + schema="customer_schema", ) this legacy mode of rendering would assume that "customer_schema.account" @@ -751,37 +778,55 @@ def _reset_mssql(dbapi_connection, connection_record, reset_state): To generate a clustered primary key use:: - Table('my_table', metadata, - Column('x', ...), - Column('y', ...), - PrimaryKeyConstraint("x", "y", mssql_clustered=True)) + Table( + "my_table", + metadata, + Column("x", ...), + Column("y", ...), + PrimaryKeyConstraint("x", "y", mssql_clustered=True), + ) -which will render the table, for example, as:: +which will render the table, for example, as: - CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL, - PRIMARY KEY CLUSTERED (x, y)) +.. sourcecode:: sql + + CREATE TABLE my_table ( + x INTEGER NOT NULL, + y INTEGER NOT NULL, + PRIMARY KEY CLUSTERED (x, y) + ) Similarly, we can generate a clustered unique constraint using:: - Table('my_table', metadata, - Column('x', ...), - Column('y', ...), - PrimaryKeyConstraint("x"), - UniqueConstraint("y", mssql_clustered=True), - ) + Table( + "my_table", + metadata, + Column("x", ...), + Column("y", ...), + PrimaryKeyConstraint("x"), + UniqueConstraint("y", mssql_clustered=True), + ) To explicitly request a non-clustered primary key (for example, when a separate clustered index is desired), use:: - Table('my_table', metadata, - Column('x', ...), - Column('y', ...), - PrimaryKeyConstraint("x", "y", mssql_clustered=False)) + Table( + "my_table", + metadata, + Column("x", ...), + Column("y", ...), + PrimaryKeyConstraint("x", "y", mssql_clustered=False), + ) -which will render the table, for example, as:: +which will render the table, for example, as: + +.. sourcecode:: sql - CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL, - PRIMARY KEY NONCLUSTERED (x, y)) + CREATE TABLE my_table ( + x INTEGER NOT NULL, + y INTEGER NOT NULL, + PRIMARY KEY NONCLUSTERED (x, y) + ) Columnstore Index Support ------------------------- @@ -819,7 +864,7 @@ def _reset_mssql(dbapi_connection, connection_record, reset_state): The ``mssql_include`` option renders INCLUDE(colname) for the given string names:: - Index("my_index", table.c.x, mssql_include=['y']) + Index("my_index", table.c.x, mssql_include=["y"]) would render the index as ``CREATE INDEX my_index ON table (x) INCLUDE (y)`` @@ -874,18 +919,19 @@ def _reset_mssql(dbapi_connection, connection_record, reset_state): specify ``implicit_returning=False`` for each :class:`_schema.Table` which has triggers:: - Table('mytable', metadata, - Column('id', Integer, primary_key=True), + Table( + "mytable", + metadata, + Column("id", Integer, primary_key=True), # ..., - implicit_returning=False + implicit_returning=False, ) Declarative form:: class MyClass(Base): # ... - __table_args__ = {'implicit_returning':False} - + __table_args__ = {"implicit_returning": False} .. _mssql_rowcount_versioning: @@ -919,7 +965,9 @@ class MyClass(Base): applications to have long held locks and frequent deadlocks. Enabling snapshot isolation for the database as a whole is recommended for modern levels of concurrency support. 
This is accomplished via the -following ALTER DATABASE commands executed at the SQL prompt:: +following ALTER DATABASE commands executed at the SQL prompt: + +.. sourcecode:: sql ALTER DATABASE MyDatabase SET ALLOW_SNAPSHOT_ISOLATION ON diff --git a/lib/sqlalchemy/dialects/mssql/json.py b/lib/sqlalchemy/dialects/mssql/json.py index 18bea09d0f1..305aef77d10 100644 --- a/lib/sqlalchemy/dialects/mssql/json.py +++ b/lib/sqlalchemy/dialects/mssql/json.py @@ -54,9 +54,7 @@ class JSON(sqltypes.JSON): dictionary or list, the :meth:`_types.JSON.Comparator.as_json` accessor should be used:: - stmt = select( - data_table.c.data["some key"].as_json() - ).where( + stmt = select(data_table.c.data["some key"].as_json()).where( data_table.c.data["some key"].as_json() == {"sub": "structure"} ) @@ -67,9 +65,7 @@ class JSON(sqltypes.JSON): :meth:`_types.JSON.Comparator.as_integer`, :meth:`_types.JSON.Comparator.as_float`:: - stmt = select( - data_table.c.data["some key"].as_string() - ).where( + stmt = select(data_table.c.data["some key"].as_string()).where( data_table.c.data["some key"].as_string() == "some string" ) diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py index 76ea046de99..421472c2552 100644 --- a/lib/sqlalchemy/dialects/mssql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py @@ -30,7 +30,9 @@ engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn") -Which above, will pass the following connection string to PyODBC:: +Which above, will pass the following connection string to PyODBC: + +.. sourcecode:: text DSN=some_dsn;UID=scott;PWD=tiger @@ -49,7 +51,9 @@ query parameters of the URL. As these names usually have spaces in them, the name must be URL encoded which means using plus signs for spaces:: - engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=ODBC+Driver+17+for+SQL+Server") + engine = create_engine( + "mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=ODBC+Driver+17+for+SQL+Server" + ) The ``driver`` keyword is significant to the pyodbc dialect and must be specified in lowercase. 
@@ -69,6 +73,7 @@ The equivalent URL can be constructed using :class:`_sa.engine.URL`:: from sqlalchemy.engine import URL + connection_url = URL.create( "mssql+pyodbc", username="scott", @@ -83,7 +88,6 @@ }, ) - Pass through exact Pyodbc string ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -94,8 +98,11 @@ can help make this easier:: from sqlalchemy.engine import URL + connection_string = "DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password" - connection_url = URL.create("mssql+pyodbc", query={"odbc_connect": connection_string}) + connection_url = URL.create( + "mssql+pyodbc", query={"odbc_connect": connection_string} + ) engine = create_engine(connection_url) @@ -127,7 +134,8 @@ from sqlalchemy.engine.url import URL from azure import identity - SQL_COPT_SS_ACCESS_TOKEN = 1256 # Connection option for access tokens, as defined in msodbcsql.h + # Connection option for access tokens, as defined in msodbcsql.h + SQL_COPT_SS_ACCESS_TOKEN = 1256 TOKEN_URL = "https://database.windows.net/" # The token URL for any Azure SQL database connection_string = "mssql+pyodbc://@my-server.database.windows.net/myDb?driver=ODBC+Driver+17+for+SQL+Server" @@ -136,14 +144,19 @@ azure_credentials = identity.DefaultAzureCredential() + @event.listens_for(engine, "do_connect") def provide_token(dialect, conn_rec, cargs, cparams): # remove the "Trusted_Connection" parameter that SQLAlchemy adds cargs[0] = cargs[0].replace(";Trusted_Connection=Yes", "") # create token credential - raw_token = azure_credentials.get_token(TOKEN_URL).token.encode("utf-16-le") - token_struct = struct.pack(f">> t = Table('mytable', metadata, - ... Column('mytable_id', Integer, primary_key=True) + >>> t = Table( + ... "mytable", metadata, Column("mytable_id", Integer, primary_key=True) ... ) >>> t.create() CREATE TABLE mytable ( @@ -270,10 +272,12 @@ can also be used to enable auto-increment on a secondary column in a multi-column key for some storage engines:: - Table('mytable', metadata, - Column('gid', Integer, primary_key=True, autoincrement=False), - Column('id', Integer, primary_key=True) - ) + Table( + "mytable", + metadata, + Column("gid", Integer, primary_key=True, autoincrement=False), + Column("id", Integer, primary_key=True), + ) .. _mysql_ss_cursors: @@ -291,7 +295,9 @@ option:: with engine.connect() as conn: - result = conn.execution_options(stream_results=True).execute(text("select * from table")) + result = conn.execution_options(stream_results=True).execute( + text("select * from table") + ) Note that some kinds of SQL statements may not be supported with server side cursors; generally, only SQL statements that return rows should be @@ -319,7 +325,8 @@ in the URL, such as:: e = create_engine( - "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4") + "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4" + ) This charset is the **client character set** for the connection. Some MySQL DBAPIs will default this to a value such as ``latin1``, and some @@ -339,7 +346,8 @@ DBAPI, as in:: e = create_engine( - "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4") + "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4" + ) All modern DBAPIs should support the ``utf8mb4`` charset. 
@@ -361,7 +369,9 @@ MySQL versions 5.6, 5.7 and later (not MariaDB at the time of this writing) now emit a warning when attempting to pass binary data to the database, while a character set encoding is also in place, when the binary data itself is not -valid for that encoding:: +valid for that encoding: + +.. sourcecode:: text default.py:509: Warning: (1300, "Invalid utf8mb4 character string: 'F9876A'") @@ -371,7 +381,9 @@ interpret the binary string as a unicode object even if a datatype such as :class:`.LargeBinary` is in use. To resolve this, the SQL statement requires a binary "character set introducer" be present before any non-NULL value -that renders like this:: +that renders like this: + +.. sourcecode:: sql INSERT INTO table (data) VALUES (_binary %s) @@ -381,12 +393,13 @@ # mysqlclient engine = create_engine( - "mysql+mysqldb://scott:tiger@localhost/test?charset=utf8mb4&binary_prefix=true") + "mysql+mysqldb://scott:tiger@localhost/test?charset=utf8mb4&binary_prefix=true" + ) # PyMySQL engine = create_engine( - "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4&binary_prefix=true") - + "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4&binary_prefix=true" + ) The ``binary_prefix`` flag may or may not be supported by other MySQL drivers. @@ -429,7 +442,10 @@ from sqlalchemy import create_engine, event - eng = create_engine("mysql+mysqldb://scott:tiger@localhost/test", echo='debug') + eng = create_engine( + "mysql+mysqldb://scott:tiger@localhost/test", echo="debug" + ) + # `insert=True` will ensure this is the very first listener to run @event.listens_for(eng, "connect", insert=True) @@ -437,6 +453,7 @@ def connect(dbapi_connection, connection_record): cursor = dbapi_connection.cursor() cursor.execute("SET sql_mode = 'STRICT_ALL_TABLES'") + conn = eng.connect() In the example illustrated above, the "connect" event will invoke the "SET" @@ -453,8 +470,8 @@ def connect(dbapi_connection, connection_record): Many of the MySQL / MariaDB SQL extensions are handled through SQLAlchemy's generic function and operator support:: - table.select(table.c.password==func.md5('plaintext')) - table.select(table.c.username.op('regexp')('^[a-d]')) + table.select(table.c.password == func.md5("plaintext")) + table.select(table.c.username.op("regexp")("^[a-d]")) And of course any valid SQL statement can be executed as a string as well. @@ -467,7 +484,7 @@ def connect(dbapi_connection, connection_record): * SELECT pragma, use :meth:`_expression.Select.prefix_with` and :meth:`_query.Query.prefix_with`:: - select(...).prefix_with(['HIGH_PRIORITY', 'SQL_SMALL_RESULT']) + select(...).prefix_with(["HIGH_PRIORITY", "SQL_SMALL_RESULT"]) * UPDATE with LIMIT:: @@ -483,14 +500,16 @@ def connect(dbapi_connection, connection_record): select(...).with_hint(some_table, "USE INDEX xyz") -* MATCH operator support:: +* MATCH + operator support:: + + from sqlalchemy.dialects.mysql import match - from sqlalchemy.dialects.mysql import match - select(...).where(match(col1, col2, against="some expr").in_boolean_mode()) + select(...).where(match(col1, col2, against="some expr").in_boolean_mode()) - .. seealso:: + .. seealso:: - :class:`_mysql.match` + :class:`_mysql.match` INSERT/DELETE...RETURNING ------------------------- @@ -507,17 +526,15 @@ def connect(dbapi_connection, connection_record): # INSERT..RETURNING result = connection.execute( - table.insert(). - values(name='foo'). 
- returning(table.c.col1, table.c.col2) + table.insert().values(name="foo").returning(table.c.col1, table.c.col2) ) print(result.all()) # DELETE..RETURNING result = connection.execute( - table.delete(). - where(table.c.name=='foo'). - returning(table.c.col1, table.c.col2) + table.delete() + .where(table.c.name == "foo") + .returning(table.c.col1, table.c.col2) ) print(result.all()) @@ -544,12 +561,11 @@ def connect(dbapi_connection, connection_record): >>> from sqlalchemy.dialects.mysql import insert >>> insert_stmt = insert(my_table).values( - ... id='some_existing_id', - ... data='inserted value') + ... id="some_existing_id", data="inserted value" + ... ) >>> on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update( - ... data=insert_stmt.inserted.data, - ... status='U' + ... data=insert_stmt.inserted.data, status="U" ... ) >>> print(on_duplicate_key_stmt) {printsql}INSERT INTO my_table (id, data) VALUES (%s, %s) @@ -574,8 +590,8 @@ def connect(dbapi_connection, connection_record): .. sourcecode:: pycon+sql >>> insert_stmt = insert(my_table).values( - ... id='some_existing_id', - ... data='inserted value') + ... id="some_existing_id", data="inserted value" + ... ) >>> on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update( ... data="some data", @@ -638,13 +654,11 @@ def connect(dbapi_connection, connection_record): .. sourcecode:: pycon+sql >>> stmt = insert(my_table).values( - ... id='some_id', - ... data='inserted value', - ... author='jlh') + ... id="some_id", data="inserted value", author="jlh" + ... ) >>> do_update_stmt = stmt.on_duplicate_key_update( - ... data="updated value", - ... author=stmt.inserted.author + ... data="updated value", author=stmt.inserted.author ... ) >>> print(do_update_stmt) @@ -689,13 +703,13 @@ def connect(dbapi_connection, connection_record): become part of the index. SQLAlchemy provides this feature via the ``mysql_length`` and/or ``mariadb_length`` parameters:: - Index('my_index', my_table.c.data, mysql_length=10, mariadb_length=10) + Index("my_index", my_table.c.data, mysql_length=10, mariadb_length=10) - Index('a_b_idx', my_table.c.a, my_table.c.b, mysql_length={'a': 4, - 'b': 9}) + Index("a_b_idx", my_table.c.a, my_table.c.b, mysql_length={"a": 4, "b": 9}) - Index('a_b_idx', my_table.c.a, my_table.c.b, mariadb_length={'a': 4, - 'b': 9}) + Index( + "a_b_idx", my_table.c.a, my_table.c.b, mariadb_length={"a": 4, "b": 9} + ) Prefix lengths are given in characters for nonbinary string types and in bytes for binary string types. The value passed to the keyword argument *must* be @@ -712,7 +726,7 @@ def connect(dbapi_connection, connection_record): an index. SQLAlchemy provides this feature via the ``mysql_prefix`` parameter on :class:`.Index`:: - Index('my_index', my_table.c.data, mysql_prefix='FULLTEXT') + Index("my_index", my_table.c.data, mysql_prefix="FULLTEXT") The value passed to the keyword argument will be simply passed through to the underlying CREATE INDEX, so it *must* be a valid index prefix for your MySQL @@ -729,11 +743,13 @@ def connect(dbapi_connection, connection_record): an index or primary key constraint. 
SQLAlchemy provides this feature via the ``mysql_using`` parameter on :class:`.Index`:: - Index('my_index', my_table.c.data, mysql_using='hash', mariadb_using='hash') + Index( + "my_index", my_table.c.data, mysql_using="hash", mariadb_using="hash" + ) As well as the ``mysql_using`` parameter on :class:`.PrimaryKeyConstraint`:: - PrimaryKeyConstraint("data", mysql_using='hash', mariadb_using='hash') + PrimaryKeyConstraint("data", mysql_using="hash", mariadb_using="hash") The value passed to the keyword argument will be simply passed through to the underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index @@ -752,9 +768,12 @@ def connect(dbapi_connection, connection_record): is available using the keyword argument ``mysql_with_parser``:: Index( - 'my_index', my_table.c.data, - mysql_prefix='FULLTEXT', mysql_with_parser="ngram", - mariadb_prefix='FULLTEXT', mariadb_with_parser="ngram", + "my_index", + my_table.c.data, + mysql_prefix="FULLTEXT", + mysql_with_parser="ngram", + mariadb_prefix="FULLTEXT", + mariadb_with_parser="ngram", ) .. versionadded:: 1.3 @@ -781,6 +800,7 @@ def connect(dbapi_connection, connection_record): from sqlalchemy.ext.compiler import compiles from sqlalchemy.schema import ForeignKeyConstraint + @compiles(ForeignKeyConstraint, "mysql", "mariadb") def process(element, compiler, **kw): element.deferrable = element.initially = None @@ -802,10 +822,12 @@ def process(element, compiler, **kw): reflection will not include foreign keys. For these tables, you may supply a :class:`~sqlalchemy.ForeignKeyConstraint` at reflection time:: - Table('mytable', metadata, - ForeignKeyConstraint(['other_id'], ['othertable.other_id']), - autoload_with=engine - ) + Table( + "mytable", + metadata, + ForeignKeyConstraint(["other_id"], ["othertable.other_id"]), + autoload_with=engine, + ) .. seealso:: @@ -877,13 +899,15 @@ def process(element, compiler, **kw): mytable = Table( "mytable", metadata, - Column('id', Integer, primary_key=True), - Column('data', String(50)), + Column("id", Integer, primary_key=True), + Column("data", String(50)), Column( - 'last_updated', + "last_updated", TIMESTAMP, - server_default=text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - ) + server_default=text( + "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP" + ), + ), ) The same instructions apply to use of the :class:`_types.DateTime` and @@ -894,34 +918,37 @@ def process(element, compiler, **kw): mytable = Table( "mytable", metadata, - Column('id', Integer, primary_key=True), - Column('data', String(50)), + Column("id", Integer, primary_key=True), + Column("data", String(50)), Column( - 'last_updated', + "last_updated", DateTime, - server_default=text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") - ) + server_default=text( + "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP" + ), + ), ) - Even though the :paramref:`_schema.Column.server_onupdate` feature does not generate this DDL, it still may be desirable to signal to the ORM that this updated value should be fetched. This syntax looks like the following:: from sqlalchemy.schema import FetchedValue + class MyClass(Base): - __tablename__ = 'mytable' + __tablename__ = "mytable" id = Column(Integer, primary_key=True) data = Column(String(50)) last_updated = Column( TIMESTAMP, - server_default=text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"), - server_onupdate=FetchedValue() + server_default=text( + "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP" + ), + server_onupdate=FetchedValue(), ) - .. 
_mysql_timestamp_null: TIMESTAMP Columns and NULL @@ -931,7 +958,9 @@ class MyClass(Base): TIMESTAMP datatype implicitly includes a default value of CURRENT_TIMESTAMP, even though this is not stated, and additionally sets the column as NOT NULL, the opposite behavior vs. that of all -other datatypes:: +other datatypes: + +.. sourcecode:: text mysql> CREATE TABLE ts_test ( -> a INTEGER, @@ -976,19 +1005,24 @@ class MyClass(Base): from sqlalchemy.dialects.mysql import TIMESTAMP m = MetaData() - t = Table('ts_test', m, - Column('a', Integer), - Column('b', Integer, nullable=False), - Column('c', TIMESTAMP), - Column('d', TIMESTAMP, nullable=False) - ) + t = Table( + "ts_test", + m, + Column("a", Integer), + Column("b", Integer, nullable=False), + Column("c", TIMESTAMP), + Column("d", TIMESTAMP, nullable=False), + ) from sqlalchemy import create_engine + e = create_engine("mysql+mysqldb://scott:tiger@localhost/test", echo=True) m.create_all(e) -output:: +output: + +.. sourcecode:: sql CREATE TABLE ts_test ( a INTEGER, diff --git a/lib/sqlalchemy/dialects/mysql/dml.py b/lib/sqlalchemy/dialects/mysql/dml.py index e4005c267e4..d9164317b09 100644 --- a/lib/sqlalchemy/dialects/mysql/dml.py +++ b/lib/sqlalchemy/dialects/mysql/dml.py @@ -141,7 +141,11 @@ def on_duplicate_key_update(self, *args: _UpdateArg, **kw: Any) -> Self: in :ref:`tutorial_parameter_ordered_updates`:: insert().on_duplicate_key_update( - [("name", "some name"), ("value", "some value")]) + [ + ("name", "some name"), + ("value", "some value"), + ] + ) .. versionchanged:: 1.3 parameters can be specified as a dictionary or list of 2-tuples; the latter form provides for parameter diff --git a/lib/sqlalchemy/dialects/mysql/enumerated.py b/lib/sqlalchemy/dialects/mysql/enumerated.py index 96499d7bee2..d3c10c0021b 100644 --- a/lib/sqlalchemy/dialects/mysql/enumerated.py +++ b/lib/sqlalchemy/dialects/mysql/enumerated.py @@ -28,7 +28,7 @@ def __init__(self, *enums, **kw): E.g.:: - Column('myenum', ENUM("foo", "bar", "baz")) + Column("myenum", ENUM("foo", "bar", "baz")) :param enums: The range of valid values for this ENUM. Values in enums are not quoted, they will be escaped and surrounded by single @@ -102,8 +102,7 @@ def __init__(self, *values, **kw): E.g.:: - Column('myset', SET("foo", "bar", "baz")) - + Column("myset", SET("foo", "bar", "baz")) The list of potential values is required in the case that this set will be used to generate DDL for a table, or if the diff --git a/lib/sqlalchemy/dialects/mysql/expression.py b/lib/sqlalchemy/dialects/mysql/expression.py index b81b58afc28..8c21c748c96 100644 --- a/lib/sqlalchemy/dialects/mysql/expression.py +++ b/lib/sqlalchemy/dialects/mysql/expression.py @@ -38,7 +38,9 @@ class match(Generative, elements.BinaryExpression): .order_by(desc(match_expr)) ) - Would produce SQL resembling:: + Would produce SQL resembling: + + .. 
sourcecode:: sql SELECT id, firstname, lastname FROM user diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py index 0baf10f7056..6e7ccaa1525 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py @@ -48,9 +48,9 @@ "ssl": { "ca": "/home/gord/client-ssl/ca.pem", "cert": "/home/gord/client-ssl/client-cert.pem", - "key": "/home/gord/client-ssl/client-key.pem" + "key": "/home/gord/client-ssl/client-key.pem", } - } + }, ) For convenience, the following keys may also be specified inline within the URL @@ -74,7 +74,9 @@ ----------------------------------- Google Cloud SQL now recommends use of the MySQLdb dialect. Connect -using a URL like the following:: +using a URL like the following: + +.. sourcecode:: text mysql+mysqldb://root@/?unix_socket=/cloudsql/: diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py index 830e4416c79..ff62e4f0282 100644 --- a/lib/sqlalchemy/dialects/mysql/pymysql.py +++ b/lib/sqlalchemy/dialects/mysql/pymysql.py @@ -41,7 +41,6 @@ "&ssl_check_hostname=false" ) - MySQL-Python Compatibility -------------------------- diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py index 428c8dfd385..9ad360bd995 100644 --- a/lib/sqlalchemy/dialects/mysql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py @@ -30,14 +30,15 @@ Pass through exact pyodbc connection string:: import urllib + connection_string = ( - 'DRIVER=MySQL ODBC 8.0 ANSI Driver;' - 'SERVER=localhost;' - 'PORT=3307;' - 'DATABASE=mydb;' - 'UID=root;' - 'PWD=(whatever);' - 'charset=utf8mb4;' + "DRIVER=MySQL ODBC 8.0 ANSI Driver;" + "SERVER=localhost;" + "PORT=3307;" + "DATABASE=mydb;" + "UID=root;" + "PWD=(whatever);" + "charset=utf8mb4;" ) params = urllib.parse.quote_plus(connection_string) connection_uri = "mysql+pyodbc:///?odbc_connect=%s" % params diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index f69247185c9..b0b9032c00d 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -29,9 +29,12 @@ Starting from version 12, Oracle Database can make use of identity columns using the :class:`_sql.Identity` to specify the autoincrementing behavior:: - t = Table('mytable', metadata, - Column('id', Integer, Identity(start=3), primary_key=True), - Column(...), ... + t = Table( + "mytable", + metadata, + Column("id", Integer, Identity(start=3), primary_key=True), + Column(...), + ..., ) The CREATE TABLE for the above :class:`_schema.Table` object would be: @@ -63,16 +66,21 @@ sequences, use the sqlalchemy.schema.Sequence object which is passed to a Column construct:: - t = Table('mytable', metadata, - Column('id', Integer, Sequence('id_seq', start=1), primary_key=True), - Column(...), ... + t = Table( + "mytable", + metadata, + Column("id", Integer, Sequence("id_seq", start=1), primary_key=True), + Column(...), + ..., ) This step is also required when using table reflection, i.e. autoload_with=engine:: - t = Table('mytable', metadata, - Column('id', Integer, Sequence('id_seq', start=1), primary_key=True), - autoload_with=engine + t = Table( + "mytable", + metadata, + Column("id", Integer, Sequence("id_seq", start=1), primary_key=True), + autoload_with=engine, ) .. 
versionchanged:: 1.4 Added :class:`_schema.Identity` construct @@ -91,9 +99,7 @@ To set using per-connection execution options:: connection = engine.connect() - connection = connection.execution_options( - isolation_level="AUTOCOMMIT" - ) + connection = connection.execution_options(isolation_level="AUTOCOMMIT") For ``READ COMMITTED`` and ``SERIALIZABLE``, the Oracle Database dialects sets the level at the session level using ``ALTER SESSION``, which is reverted back @@ -177,7 +183,8 @@ engine = create_engine( "oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1", - max_identifier_length=30) + max_identifier_length=30, + ) If :paramref:`_sa.create_engine.max_identifier_length` is not set, the oracledb dialect internally uses the ``max_identifier_length`` attribute available on @@ -227,13 +234,17 @@ oracle_dialect = oracle.dialect(max_identifier_length=30) print(CreateIndex(ix).compile(dialect=oracle_dialect)) -With an identifier length of 30, the above CREATE INDEX looks like:: +With an identifier length of 30, the above CREATE INDEX looks like: + +.. sourcecode:: sql CREATE INDEX ix_some_column_name_1s_70cd ON t (some_column_name_1, some_column_name_2, some_column_name_3) However with length of 128, it becomes:: +.. sourcecode:: sql + CREATE INDEX ix_some_column_name_1some_column_name_2some_column_name_3 ON t (some_column_name_1, some_column_name_2, some_column_name_3) @@ -364,8 +375,9 @@ accessed over DBLINK, by passing the flag ``oracle_resolve_synonyms=True`` as a keyword argument to the :class:`_schema.Table` construct:: - some_table = Table('some_table', autoload_with=some_engine, - oracle_resolve_synonyms=True) + some_table = Table( + "some_table", autoload_with=some_engine, oracle_resolve_synonyms=True + ) When this flag is set, the given name (such as ``some_table`` above) will be searched not just in the ``ALL_TABLES`` view, but also within the @@ -410,10 +422,13 @@ from sqlalchemy import create_engine, inspect - engine = create_engine("oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1") + engine = create_engine( + "oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1" + ) inspector = inspect(engine) all_check_constraints = inspector.get_check_constraints( - "some_table", include_all=True) + "some_table", include_all=True + ) * in most cases, when reflecting a :class:`_schema.Table`, a UNIQUE constraint will **not** be available as a :class:`.UniqueConstraint` object, as Oracle @@ -443,8 +458,9 @@ # exclude SYSAUX and SOME_TABLESPACE, but not SYSTEM e = create_engine( - "oracle+oracledb://scott:tiger@localhost:1521/?service_name=freepdb1", - exclude_tablespaces=["SYSAUX", "SOME_TABLESPACE"]) + "oracle+oracledb://scott:tiger@localhost:1521/?service_name=freepdb1", + exclude_tablespaces=["SYSAUX", "SOME_TABLESPACE"], + ) DateTime Compatibility ---------------------- @@ -469,30 +485,35 @@ * ``ON COMMIT``:: Table( - "some_table", metadata, ..., - prefixes=['GLOBAL TEMPORARY'], oracle_on_commit='PRESERVE ROWS') + "some_table", + metadata, + ..., + prefixes=["GLOBAL TEMPORARY"], + oracle_on_commit="PRESERVE ROWS", + ) -* ``COMPRESS``:: +* + ``COMPRESS``:: - Table('mytable', metadata, Column('data', String(32)), - oracle_compress=True) + Table( + "mytable", metadata, Column("data", String(32)), oracle_compress=True + ) - Table('mytable', metadata, Column('data', String(32)), - oracle_compress=6) + Table("mytable", metadata, Column("data", String(32)), oracle_compress=6) - The ``oracle_compress`` parameter accepts either an integer compression 
- level, or ``True`` to use the default compression level. + The ``oracle_compress`` parameter accepts either an integer compression + level, or ``True`` to use the default compression level. -* ``TABLESPACE``:: +* + ``TABLESPACE``:: - Table('mytable', metadata, ..., - oracle_tablespace="EXAMPLE_TABLESPACE") + Table("mytable", metadata, ..., oracle_tablespace="EXAMPLE_TABLESPACE") - The ``oracle_tablespace`` parameter specifies the tablespace in which the - table is to be created. This is useful when you want to create a table in a - tablespace other than the default tablespace of the user. + The ``oracle_tablespace`` parameter specifies the tablespace in which the + table is to be created. This is useful when you want to create a table in a + tablespace other than the default tablespace of the user. - .. versionadded:: 2.0.37 + .. versionadded:: 2.0.37 .. _oracle_index_options: @@ -505,7 +526,7 @@ You can specify the ``oracle_bitmap`` parameter to create a bitmap index instead of a B-tree index:: - Index('my_index', my_table.c.data, oracle_bitmap=True) + Index("my_index", my_table.c.data, oracle_bitmap=True) Bitmap indexes cannot be unique and cannot be compressed. SQLAlchemy will not check for such limitations, only the database will. @@ -517,10 +538,15 @@ of repeated values. Use the ``oracle_compress`` parameter to turn on key compression:: - Index('my_index', my_table.c.data, oracle_compress=True) + Index("my_index", my_table.c.data, oracle_compress=True) - Index('my_index', my_table.c.data1, my_table.c.data2, unique=True, - oracle_compress=1) + Index( + "my_index", + my_table.c.data1, + my_table.c.data2, + unique=True, + oracle_compress=1, + ) The ``oracle_compress`` parameter accepts either an integer specifying the number of prefix columns to compress, or ``True`` to use the default (all diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index babb916a602..9b66d7ea783 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -32,7 +32,9 @@ from Oracle Database's Easy Connect syntax then connect in SQLAlchemy using the ``service_name`` query string parameter:: - engine = create_engine("oracle+cx_oracle://scott:tiger@hostname:port?service_name=myservice&encoding=UTF-8&nencoding=UTF-8") + engine = create_engine( + "oracle+cx_oracle://scott:tiger@hostname:port?service_name=myservice&encoding=UTF-8&nencoding=UTF-8" + ) Note that the default driver value for encoding and nencoding was changed to “UTF-8” in cx_Oracle 8.0 so these parameters can be omitted when using that @@ -42,13 +44,14 @@ :paramref:`_sa.create_engine.connect_args` dictionary:: import cx_Oracle + e = create_engine( "oracle+cx_oracle://@", connect_args={ "user": "scott", "password": "tiger", - "dsn": "hostname:port/myservice?transport_connect_timeout=30&expire_time=60" - } + "dsn": "hostname:port/myservice?transport_connect_timeout=30&expire_time=60", + }, ) Connections with tnsnames.ora or to Oracle Autonomous Database @@ -57,7 +60,9 @@ Alternatively, if no port, database name, or service name is provided, the dialect will use an Oracle Database DSN "connection string". This takes the "hostname" portion of the URL as the data source name. For example, if the -``tnsnames.ora`` file contains a TNS Alias of ``myalias`` as below:: +``tnsnames.ora`` file contains a TNS Alias of ``myalias`` as below: + +.. 
sourcecode:: text myalias = (DESCRIPTION = @@ -85,7 +90,9 @@ To use Oracle Database's obsolete System Identifier connection syntax, the SID can be passed in a "database name" portion of the URL:: - engine = create_engine("oracle+cx_oracle://scott:tiger@hostname:port/dbname") + engine = create_engine( + "oracle+cx_oracle://scott:tiger@hostname:port/dbname" + ) Above, the DSN passed to cx_Oracle is created by ``cx_Oracle.makedsn()`` as follows:: @@ -107,7 +114,8 @@ symbol:: e = create_engine( - "oracle+cx_oracle://user:pass@dsn?encoding=UTF-8&nencoding=UTF-8&mode=SYSDBA&events=true") + "oracle+cx_oracle://user:pass@dsn?encoding=UTF-8&nencoding=UTF-8&mode=SYSDBA&events=true" + ) .. versionchanged:: 1.3 the cx_Oracle dialect now accepts all argument names within the URL string itself, to be passed to the cx_Oracle DBAPI. As @@ -120,14 +128,15 @@ Any cx_Oracle parameter value and/or constant may be passed, such as:: import cx_Oracle + e = create_engine( "oracle+cx_oracle://user:pass@dsn", connect_args={ "encoding": "UTF-8", "nencoding": "UTF-8", "mode": cx_Oracle.SYSDBA, - "events": True - } + "events": True, + }, ) Note that the default driver value for ``encoding`` and ``nencoding`` was @@ -142,7 +151,8 @@ , such as:: e = create_engine( - "oracle+cx_oracle://user:pass@dsn", coerce_to_decimal=False) + "oracle+cx_oracle://user:pass@dsn", coerce_to_decimal=False + ) The parameters accepted by the cx_oracle dialect are as follows: @@ -184,12 +194,20 @@ from sqlalchemy.pool import NullPool pool = cx_Oracle.SessionPool( - user="scott", password="tiger", dsn="orclpdb", - min=1, max=4, increment=1, threaded=True, - encoding="UTF-8", nencoding="UTF-8" + user="scott", + password="tiger", + dsn="orclpdb", + min=1, + max=4, + increment=1, + threaded=True, + encoding="UTF-8", + nencoding="UTF-8", ) - engine = create_engine("oracle+cx_oracle://", creator=pool.acquire, poolclass=NullPool) + engine = create_engine( + "oracle+cx_oracle://", creator=pool.acquire, poolclass=NullPool + ) The above engine may then be used normally where cx_Oracle's pool handles connection pooling:: @@ -220,15 +238,27 @@ class and "purity" when acquiring a connection from the SessionPool. Refer to from sqlalchemy.pool import NullPool pool = cx_Oracle.SessionPool( - user="scott", password="tiger", dsn="orclpdb", - min=2, max=5, increment=1, threaded=True, - encoding="UTF-8", nencoding="UTF-8" + user="scott", + password="tiger", + dsn="orclpdb", + min=2, + max=5, + increment=1, + threaded=True, + encoding="UTF-8", + nencoding="UTF-8", ) + def creator(): - return pool.acquire(cclass="MYCLASS", purity=cx_Oracle.ATTR_PURITY_SELF) + return pool.acquire( + cclass="MYCLASS", purity=cx_Oracle.ATTR_PURITY_SELF + ) + - engine = create_engine("oracle+cx_oracle://", creator=creator, poolclass=NullPool) + engine = create_engine( + "oracle+cx_oracle://", creator=creator, poolclass=NullPool + ) The above engine may then be used normally where cx_Oracle handles session pooling and Oracle Database additionally uses DRCP:: @@ -261,7 +291,9 @@ def creator(): the ``encoding`` and ``nencoding`` parameters directly to its ``.connect()`` function. 
These can be present in the URL as follows:: - engine = create_engine("oracle+cx_oracle://scott:tiger@tnsalias?encoding=UTF-8&nencoding=UTF-8") + engine = create_engine( + "oracle+cx_oracle://scott:tiger@tnsalias?encoding=UTF-8&nencoding=UTF-8" + ) For the meaning of the ``encoding`` and ``nencoding`` parameters, please consult @@ -355,13 +387,16 @@ def creator(): engine = create_engine("oracle+cx_oracle://scott:tiger@host/xe") + @event.listens_for(engine, "do_setinputsizes") def _log_setinputsizes(inputsizes, cursor, statement, parameters, context): for bindparam, dbapitype in inputsizes.items(): - log.info( - "Bound parameter name: %s SQLAlchemy type: %r " - "DBAPI object: %s", - bindparam.key, bindparam.type, dbapitype) + log.info( + "Bound parameter name: %s SQLAlchemy type: %r DBAPI object: %s", + bindparam.key, + bindparam.type, + dbapitype, + ) Example 2 - remove all bindings to CLOB ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -375,6 +410,7 @@ def _log_setinputsizes(inputsizes, cursor, statement, parameters, context): engine = create_engine("oracle+cx_oracle://scott:tiger@host/xe") + @event.listens_for(engine, "do_setinputsizes") def _remove_clob(inputsizes, cursor, statement, parameters, context): for bindparam, dbapitype in list(inputsizes.items()): diff --git a/lib/sqlalchemy/dialects/oracle/oracledb.py b/lib/sqlalchemy/dialects/oracle/oracledb.py index a6f52ecec5d..b8dd7a2f682 100644 --- a/lib/sqlalchemy/dialects/oracle/oracledb.py +++ b/lib/sqlalchemy/dialects/oracle/oracledb.py @@ -31,19 +31,28 @@ automatically select the sync version:: from sqlalchemy import create_engine - sync_engine = create_engine("oracle+oracledb://scott:tiger@localhost?service_name=FREEPDB1") + + sync_engine = create_engine( + "oracle+oracledb://scott:tiger@localhost?service_name=FREEPDB1" + ) * calling :func:`_asyncio.create_async_engine` with ``oracle+oracledb://...`` will automatically select the async version:: from sqlalchemy.ext.asyncio import create_async_engine - asyncio_engine = create_async_engine("oracle+oracledb://scott:tiger@localhost?service_name=FREEPDB1") + + asyncio_engine = create_async_engine( + "oracle+oracledb://scott:tiger@localhost?service_name=FREEPDB1" + ) The asyncio version of the dialect may also be specified explicitly using the ``oracledb_async`` suffix:: from sqlalchemy.ext.asyncio import create_async_engine - asyncio_engine = create_async_engine("oracle+oracledb_async://scott:tiger@localhost?service_name=FREEPDB1") + + asyncio_engine = create_async_engine( + "oracle+oracledb_async://scott:tiger@localhost?service_name=FREEPDB1" + ) .. versionadded:: 2.0.25 added support for the async version of oracledb. @@ -62,11 +71,14 @@ ``init_oracle_client()``, like the ``lib_dir`` path, a dict may be passed, for example:: - engine = sa.create_engine("oracle+oracledb://...", thick_mode={ - "lib_dir": "/path/to/oracle/client/lib", - "config_dir": "/path/to/network_config_file_directory", - "driver_name": "my-app : 1.0.0" - }) + engine = sa.create_engine( + "oracle+oracledb://...", + thick_mode={ + "lib_dir": "/path/to/oracle/client/lib", + "config_dir": "/path/to/network_config_file_directory", + "driver_name": "my-app : 1.0.0", + }, + ) Note that passing a ``lib_dir`` path should only be done on macOS or Windows. On Linux it does not behave as you might expect. 
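On Linux, where the Oracle Client libraries are typically located through
``LD_LIBRARY_PATH`` or ``ldconfig``, a minimal sketch (connection URL and
credentials are illustrative) enables thick mode without passing ``lib_dir``::

    from sqlalchemy import create_engine

    # assumption: the Oracle Instant Client libraries are already resolvable
    # by the dynamic loader, so thick mode needs no lib_dir on Linux
    engine = create_engine(
        "oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1",
        thick_mode=True,
    )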
@@ -85,7 +97,9 @@ Given the hostname, port and service name of the target database, you can connect in SQLAlchemy using the ``service_name`` query string parameter:: - engine = create_engine("oracle+oracledb://scott:tiger@hostname:port?service_name=myservice") + engine = create_engine( + "oracle+oracledb://scott:tiger@hostname:port?service_name=myservice" + ) Connecting with Easy Connect strings ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -107,8 +121,8 @@ connect_args={ "user": "scott", "password": "tiger", - "dsn": "hostname:port/myservice?transport_connect_timeout=30&expire_time=60" - } + "dsn": "hostname:port/myservice?transport_connect_timeout=30&expire_time=60", + }, ) The Easy Connect syntax has been enhanced during the life of Oracle Database. @@ -116,7 +130,9 @@ is at `Understanding the Easy Connect Naming Method `_. -The general syntax is similar to:: +The general syntax is similar to: + +.. sourcecode:: text [[protocol:]//]host[:port][/[service_name]][?parameter_name=value{¶meter_name=value}] @@ -143,8 +159,8 @@ "password": "tiger", "dsn": "hostname:port/myservice", "events": True, - "mode": oracledb.AUTH_MODE_SYSDBA - } + "mode": oracledb.AUTH_MODE_SYSDBA, + }, ) Connecting with tnsnames.ora TNS aliases @@ -155,7 +171,9 @@ the URL as the data source name. For example, if the ``tnsnames.ora`` file contains a `TNS Alias `_ -of ``myalias`` as below:: +of ``myalias`` as below: + +.. sourcecode:: text myalias = (DESCRIPTION = @@ -187,32 +205,32 @@ path in ``sqlnet.ora`` appropriately:: e = create_engine( - "oracle+oracledb://@", - thick_mode={ - # directory containing tnsnames.ora and cwallet.so - "config_dir": "/opt/oracle/wallet_dir", - }, - connect_args={ - "user": "scott", - "password": "tiger", - "dsn": "mydb_high" - } - ) + "oracle+oracledb://@", + thick_mode={ + # directory containing tnsnames.ora and cwallet.so + "config_dir": "/opt/oracle/wallet_dir", + }, + connect_args={ + "user": "scott", + "password": "tiger", + "dsn": "mydb_high", + }, + ) Thin mode users of mTLS should pass the appropriate directories and PEM wallet password when creating the engine, similar to:: e = create_engine( - "oracle+oracledb://@", - connect_args={ - "user": "scott", - "password": "tiger", - "dsn": "mydb_high", - "config_dir": "/opt/oracle/wallet_dir", # directory containing tnsnames.ora - "wallet_location": "/opt/oracle/wallet_dir", # directory containing ewallet.pem - "wallet_password": "top secret" # password for the PEM file - } - ) + "oracle+oracledb://@", + connect_args={ + "user": "scott", + "password": "tiger", + "dsn": "mydb_high", + "config_dir": "/opt/oracle/wallet_dir", # directory containing tnsnames.ora + "wallet_location": "/opt/oracle/wallet_dir", # directory containing ewallet.pem + "wallet_password": "top secret", # password for the PEM file + }, + ) Typically ``config_dir`` and ``wallet_location`` are the same directory, which is where the Oracle Autonomous Database wallet zip file was extracted. Note @@ -246,11 +264,19 @@ # Uncomment to use the optional python-oracledb Thick mode. 
# Review the python-oracledb doc for the appropriate parameters - #oracledb.init_oracle_client() - - pool = oracledb.create_pool(user="scott", password="tiger", dsn="localhost:1521/freepdb1", - min=1, max=4, increment=1) - engine = create_engine("oracle+oracledb://", creator=pool.acquire, poolclass=NullPool) + # oracledb.init_oracle_client() + + pool = oracledb.create_pool( + user="scott", + password="tiger", + dsn="localhost:1521/freepdb1", + min=1, + max=4, + increment=1, + ) + engine = create_engine( + "oracle+oracledb://", creator=pool.acquire, poolclass=NullPool + ) The above engine may then be used normally. Internally, python-oracledb handles connection pooling:: @@ -280,12 +306,21 @@ # Uncomment to use the optional python-oracledb Thick mode. # Review the python-oracledb doc for the appropriate parameters - #oracledb.init_oracle_client() - - pool = oracledb.create_pool(user="scott", password="tiger", dsn="localhost:1521/freepdb1", - min=1, max=4, increment=1, - cclass="MYCLASS", purity=oracledb.PURITY_SELF) - engine = create_engine("oracle+oracledb://", creator=pool.acquire, poolclass=NullPool) + # oracledb.init_oracle_client() + + pool = oracledb.create_pool( + user="scott", + password="tiger", + dsn="localhost:1521/freepdb1", + min=1, + max=4, + increment=1, + cclass="MYCLASS", + purity=oracledb.PURITY_SELF, + ) + engine = create_engine( + "oracle+oracledb://", creator=pool.acquire, poolclass=NullPool + ) The above engine may then be used normally where python-oracledb handles application connection pooling and Oracle Database additionally uses DRCP:: @@ -303,16 +338,27 @@ # Uncomment to use python-oracledb Thick mode. # Review the python-oracledb doc for the appropriate parameters - #oracledb.init_oracle_client() + # oracledb.init_oracle_client() + + pool = oracledb.create_pool( + user="scott", + password="tiger", + dsn="localhost:1521/freepdb1", + min=1, + max=4, + increment=1, + cclass="MYCLASS", + purity=oracledb.PURITY_SELF, + ) - pool = oracledb.create_pool(user="scott", password="tiger", dsn="localhost:1521/freepdb1", - min=1, max=4, increment=1, - cclass="MYCLASS", purity=oracledb.PURITY_SELF) def creator(): return pool.acquire(cclass="MYOTHERCLASS", purity=oracledb.PURITY_NEW) - engine = create_engine("oracle+oracledb://", creator=creator, poolclass=NullPool) + + engine = create_engine( + "oracle+oracledb://", creator=creator, poolclass=NullPool + ) Engine Options consumed by the SQLAlchemy oracledb dialect outside of the driver -------------------------------------------------------------------------------- @@ -321,8 +367,7 @@ def creator(): itself. 
These options are always passed directly to :func:`_sa.create_engine`, such as:: - e = create_engine( - "oracle+oracledb://user:pass@tnsalias", arraysize=500) + e = create_engine("oracle+oracledb://user:pass@tnsalias", arraysize=500) The parameters accepted by the oracledb dialect are as follows: @@ -433,15 +478,20 @@ def creator(): from sqlalchemy import create_engine, event - engine = create_engine("oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1") + engine = create_engine( + "oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1" + ) + @event.listens_for(engine, "do_setinputsizes") def _log_setinputsizes(inputsizes, cursor, statement, parameters, context): for bindparam, dbapitype in inputsizes.items(): - log.info( - "Bound parameter name: %s SQLAlchemy type: %r " - "DBAPI object: %s", - bindparam.key, bindparam.type, dbapitype) + log.info( + "Bound parameter name: %s SQLAlchemy type: %r DBAPI object: %s", + bindparam.key, + bindparam.type, + dbapitype, + ) Example 2 - remove all bindings to CLOB ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -454,7 +504,10 @@ def _log_setinputsizes(inputsizes, cursor, statement, parameters, context): from sqlalchemy import create_engine, event from oracledb import CLOB - engine = create_engine("oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1") + engine = create_engine( + "oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1" + ) + @event.listens_for(engine, "do_setinputsizes") def _remove_clob(inputsizes, cursor, statement, parameters, context): @@ -524,7 +577,9 @@ def _remove_clob(inputsizes, cursor, statement, parameters, context): disable this coercion to decimal for performance reasons, pass the flag ``coerce_to_decimal=False`` to :func:`_sa.create_engine`:: - engine = create_engine("oracle+oracledb://scott:tiger@tnsalias", coerce_to_decimal=False) + engine = create_engine( + "oracle+oracledb://scott:tiger@tnsalias", coerce_to_decimal=False + ) The ``coerce_to_decimal`` flag only impacts the results of plain string SQL statements that are not otherwise associated with a :class:`.Numeric` diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index 1d63655ee05..fcb98e65183 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -54,11 +54,13 @@ class array(expression.ExpressionClauseList[_T]): from sqlalchemy.dialects import postgresql from sqlalchemy import select, func - stmt = select(array([1,2]) + array([3,4,5])) + stmt = select(array([1, 2]) + array([3, 4, 5])) print(stmt.compile(dialect=postgresql.dialect())) - Produces the SQL:: + Produces the SQL: + + .. sourcecode:: sql SELECT ARRAY[%(param_1)s, %(param_2)s] || ARRAY[%(param_3)s, %(param_4)s, %(param_5)s]) AS anon_1 @@ -67,7 +69,7 @@ class array(expression.ExpressionClauseList[_T]): :class:`_types.ARRAY`. The "inner" type of the array is inferred from the values present, unless the ``type_`` keyword argument is passed:: - array(['foo', 'bar'], type_=CHAR) + array(["foo", "bar"], type_=CHAR) Multidimensional arrays are produced by nesting :class:`.array` constructs. 
The dimensionality of the final :class:`_types.ARRAY` @@ -76,16 +78,21 @@ class array(expression.ExpressionClauseList[_T]): type:: stmt = select( - array([ - array([1, 2]), array([3, 4]), array([column('q'), column('x')]) - ]) + array( + [array([1, 2]), array([3, 4]), array([column("q"), column("x")])] + ) ) print(stmt.compile(dialect=postgresql.dialect())) - Produces:: + Produces: - SELECT ARRAY[ARRAY[%(param_1)s, %(param_2)s], - ARRAY[%(param_3)s, %(param_4)s], ARRAY[q, x]] AS anon_1 + .. sourcecode:: sql + + SELECT ARRAY[ + ARRAY[%(param_1)s, %(param_2)s], + ARRAY[%(param_3)s, %(param_4)s], + ARRAY[q, x] + ] AS anon_1 .. versionadded:: 1.3.6 added support for multidimensional array literals @@ -93,7 +100,7 @@ class array(expression.ExpressionClauseList[_T]): :class:`_postgresql.ARRAY` - """ + """ # noqa: E501 __visit_name__ = "array" @@ -166,9 +173,11 @@ class ARRAY(sqltypes.ARRAY): from sqlalchemy.dialects import postgresql - mytable = Table("mytable", metadata, - Column("data", postgresql.ARRAY(Integer, dimensions=2)) - ) + mytable = Table( + "mytable", + metadata, + Column("data", postgresql.ARRAY(Integer, dimensions=2)), + ) The :class:`_postgresql.ARRAY` type provides all operations defined on the core :class:`_types.ARRAY` type, including support for "dimensions", @@ -204,6 +213,7 @@ class also from sqlalchemy.dialects.postgresql import ARRAY from sqlalchemy.ext.mutable import MutableList + class SomeOrmClass(Base): # ... @@ -236,7 +246,7 @@ def __init__( E.g.:: - Column('myarray', ARRAY(Integer)) + Column("myarray", ARRAY(Integer)) Arguments are: diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 27bd07ab077..0928a0f71ad 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -23,7 +23,10 @@ :func:`_asyncio.create_async_engine` engine creation function:: from sqlalchemy.ext.asyncio import create_async_engine - engine = create_async_engine("postgresql+asyncpg://user:pass@hostname/dbname") + + engine = create_async_engine( + "postgresql+asyncpg://user:pass@hostname/dbname" + ) .. versionadded:: 1.4 @@ -78,11 +81,15 @@ argument):: - engine = create_async_engine("postgresql+asyncpg://user:pass@hostname/dbname?prepared_statement_cache_size=500") + engine = create_async_engine( + "postgresql+asyncpg://user:pass@hostname/dbname?prepared_statement_cache_size=500" + ) To disable the prepared statement cache, use a value of zero:: - engine = create_async_engine("postgresql+asyncpg://user:pass@hostname/dbname?prepared_statement_cache_size=0") + engine = create_async_engine( + "postgresql+asyncpg://user:pass@hostname/dbname?prepared_statement_cache_size=0" + ) .. versionadded:: 1.4.0b2 Added ``prepared_statement_cache_size`` for asyncpg. 
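A rough usage sketch (URL, credentials, and query are illustrative) shows the
cache-size setting combined with the usual asyncio workflow::

    import asyncio

    from sqlalchemy import text
    from sqlalchemy.ext.asyncio import create_async_engine


    async def main():
        # assumption: a reachable PostgreSQL database with these credentials
        engine = create_async_engine(
            "postgresql+asyncpg://user:pass@hostname/dbname?prepared_statement_cache_size=500"
        )
        async with engine.connect() as conn:
            result = await conn.execute(text("SELECT 1"))
            print(result.scalar())
        await engine.dispose()


    asyncio.run(main())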
@@ -131,7 +138,7 @@ "postgresql+asyncpg://user:pass@somepgbouncer/dbname", poolclass=NullPool, connect_args={ - 'prepared_statement_name_func': lambda: f'__asyncpg_{uuid4()}__', + "prepared_statement_name_func": lambda: f"__asyncpg_{uuid4()}__", }, ) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index e5e7fceb188..e3920857a87 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -31,7 +31,7 @@ metadata, Column( "id", Integer, Sequence("some_id_seq", start=1), primary_key=True - ) + ), ) When SQLAlchemy issues a single INSERT statement, to fulfill the contract of @@ -63,9 +63,9 @@ "data", metadata, Column( - 'id', Integer, Identity(start=42, cycle=True), primary_key=True + "id", Integer, Identity(start=42, cycle=True), primary_key=True ), - Column('data', String) + Column("data", String), ) The CREATE TABLE for the above :class:`_schema.Table` object would be: @@ -92,23 +92,21 @@ from sqlalchemy.ext.compiler import compiles - @compiles(CreateColumn, 'postgresql') + @compiles(CreateColumn, "postgresql") def use_identity(element, compiler, **kw): text = compiler.visit_create_column(element, **kw) - text = text.replace( - "SERIAL", "INT GENERATED BY DEFAULT AS IDENTITY" - ) + text = text.replace("SERIAL", "INT GENERATED BY DEFAULT AS IDENTITY") return text Using the above, a table such as:: t = Table( - 't', m, - Column('id', Integer, primary_key=True), - Column('data', String) + "t", m, Column("id", Integer, primary_key=True), Column("data", String) ) - Will generate on the backing database as:: + Will generate on the backing database as: + + .. sourcecode:: sql CREATE TABLE t ( id INT GENERATED BY DEFAULT AS IDENTITY, @@ -129,7 +127,9 @@ def use_identity(element, compiler, **kw): option:: with engine.connect() as conn: - result = conn.execution_options(stream_results=True).execute(text("select * from table")) + result = conn.execution_options(stream_results=True).execute( + text("select * from table") + ) Note that some kinds of SQL statements may not be supported with server side cursors; generally, only SQL statements that return rows should be @@ -168,17 +168,15 @@ def use_identity(element, compiler, **kw): engine = create_engine( "postgresql+pg8000://scott:tiger@localhost/test", - isolation_level = "REPEATABLE READ" + isolation_level="REPEATABLE READ", ) To set using per-connection execution options:: with engine.connect() as conn: - conn = conn.execution_options( - isolation_level="REPEATABLE READ" - ) + conn = conn.execution_options(isolation_level="REPEATABLE READ") with conn.begin(): - # ... work with transaction + ... # work with transaction There are also more options for isolation level configurations, such as "sub-engine" objects linked to a main :class:`_engine.Engine` which each apply @@ -221,10 +219,10 @@ def use_identity(element, compiler, **kw): conn = conn.execution_options( isolation_level="SERIALIZABLE", postgresql_readonly=True, - postgresql_deferrable=True + postgresql_deferrable=True, ) with conn.begin(): - # ... work with transaction + ... # work with transaction Note that some DBAPIs such as asyncpg only support "readonly" with SERIALIZABLE isolation. 
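As a quick way to confirm these options took effect, a hedged sketch (engine
URL is illustrative) reads the transaction characteristics back from
PostgreSQL::

    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    with engine.connect() as conn:
        conn = conn.execution_options(
            isolation_level="SERIALIZABLE",
            postgresql_readonly=True,
            postgresql_deferrable=True,
        )
        with conn.begin():
            # SHOW reports the values in effect for the current transaction
            print(conn.execute(text("SHOW transaction_isolation")).scalar())
            print(conn.execute(text("SHOW transaction_read_only")).scalar())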
@@ -269,7 +267,6 @@ def use_identity(element, compiler, **kw): postgresql_engine = create_engine( "postgresql+pyscopg2://scott:tiger@hostname/dbname", - # disable default reset-on-return scheme pool_reset_on_return=None, ) @@ -316,6 +313,7 @@ def _reset_postgresql(dbapi_connection, connection_record, reset_state): engine = create_engine("postgresql+psycopg2://scott:tiger@host/dbname") + @event.listens_for(engine, "connect", insert=True) def set_search_path(dbapi_connection, connection_record): existing_autocommit = dbapi_connection.autocommit @@ -334,9 +332,6 @@ def set_search_path(dbapi_connection, connection_record): :ref:`schema_set_default_connections` - in the :ref:`metadata_toplevel` documentation - - - .. _postgresql_schema_reflection: Remote-Schema Table Introspection and PostgreSQL search_path @@ -360,7 +355,9 @@ def set_search_path(dbapi_connection, connection_record): to **determine the default schema for the current database connection**. It does this using the PostgreSQL ``current_schema()`` function, illustated below using a PostgreSQL client session (i.e. using -the ``psql`` tool):: +the ``psql`` tool): + +.. sourcecode:: sql test=> select current_schema(); current_schema @@ -374,7 +371,9 @@ def set_search_path(dbapi_connection, connection_record): However, if your database username **matches the name of a schema**, PostgreSQL's default is to then **use that name as the default schema**. Below, we log in using the username ``scott``. When we create a schema named ``scott``, **it -implicitly changes the default schema**:: +implicitly changes the default schema**: + +.. sourcecode:: sql test=> select current_schema(); current_schema @@ -393,7 +392,9 @@ def set_search_path(dbapi_connection, connection_record): The behavior of ``current_schema()`` is derived from the `PostgreSQL search path `_ -variable ``search_path``, which in modern PostgreSQL versions defaults to this:: +variable ``search_path``, which in modern PostgreSQL versions defaults to this: + +.. sourcecode:: sql test=> show search_path; search_path @@ -419,7 +420,9 @@ def set_search_path(dbapi_connection, connection_record): returns a sample definition for a particular foreign key constraint, omitting the referenced schema name from that definition when the name is also in the PostgreSQL schema search path. The interaction below -illustrates this behavior:: +illustrates this behavior: + +.. sourcecode:: sql test=> CREATE TABLE test_schema.referred(id INTEGER PRIMARY KEY); CREATE TABLE @@ -446,13 +449,17 @@ def set_search_path(dbapi_connection, connection_record): the function. On the other hand, if we set the search path back to the typical default -of ``public``:: +of ``public``: + +.. sourcecode:: sql test=> SET search_path TO public; SET The same query against ``pg_get_constraintdef()`` now returns the fully -schema-qualified name for us:: +schema-qualified name for us: + +.. sourcecode:: sql test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n @@ -474,16 +481,14 @@ def set_search_path(dbapi_connection, connection_record): >>> with engine.connect() as conn: ... conn.execute(text("SET search_path TO test_schema, public")) ... metadata_obj = MetaData() - ... referring = Table('referring', metadata_obj, - ... autoload_with=conn) - ... + ... 
referring = Table("referring", metadata_obj, autoload_with=conn) The above process would deliver to the :attr:`_schema.MetaData.tables` collection ``referred`` table named **without** the schema:: - >>> metadata_obj.tables['referred'].schema is None + >>> metadata_obj.tables["referred"].schema is None True To alter the behavior of reflection such that the referred schema is @@ -495,15 +500,17 @@ def set_search_path(dbapi_connection, connection_record): >>> with engine.connect() as conn: ... conn.execute(text("SET search_path TO test_schema, public")) ... metadata_obj = MetaData() - ... referring = Table('referring', metadata_obj, - ... autoload_with=conn, - ... postgresql_ignore_search_path=True) - ... + ... referring = Table( + ... "referring", + ... metadata_obj, + ... autoload_with=conn, + ... postgresql_ignore_search_path=True, + ... ) We will now have ``test_schema.referred`` stored as schema-qualified:: - >>> metadata_obj.tables['test_schema.referred'].schema + >>> metadata_obj.tables["test_schema.referred"].schema 'test_schema' .. sidebar:: Best Practices for PostgreSQL Schema reflection @@ -537,18 +544,26 @@ def set_search_path(dbapi_connection, connection_record): use the :meth:`._UpdateBase.returning` method on a per-statement basis:: # INSERT..RETURNING - result = table.insert().returning(table.c.col1, table.c.col2).\ - values(name='foo') + result = ( + table.insert().returning(table.c.col1, table.c.col2).values(name="foo") + ) print(result.fetchall()) # UPDATE..RETURNING - result = table.update().returning(table.c.col1, table.c.col2).\ - where(table.c.name=='foo').values(name='bar') + result = ( + table.update() + .returning(table.c.col1, table.c.col2) + .where(table.c.name == "foo") + .values(name="bar") + ) print(result.fetchall()) # DELETE..RETURNING - result = table.delete().returning(table.c.col1, table.c.col2).\ - where(table.c.name=='foo') + result = ( + table.delete() + .returning(table.c.col1, table.c.col2) + .where(table.c.name == "foo") + ) print(result.fetchall()) .. _postgresql_insert_on_conflict: @@ -578,19 +593,16 @@ def set_search_path(dbapi_connection, connection_record): >>> from sqlalchemy.dialects.postgresql import insert >>> insert_stmt = insert(my_table).values( - ... id='some_existing_id', - ... data='inserted value') - >>> do_nothing_stmt = insert_stmt.on_conflict_do_nothing( - ... index_elements=['id'] + ... id="some_existing_id", data="inserted value" ... ) + >>> do_nothing_stmt = insert_stmt.on_conflict_do_nothing(index_elements=["id"]) >>> print(do_nothing_stmt) {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) ON CONFLICT (id) DO NOTHING {stop} >>> do_update_stmt = insert_stmt.on_conflict_do_update( - ... constraint='pk_my_table', - ... set_=dict(data='updated value') + ... constraint="pk_my_table", set_=dict(data="updated value") ... ) >>> print(do_update_stmt) {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) @@ -616,8 +628,7 @@ def set_search_path(dbapi_connection, connection_record): .. sourcecode:: pycon+sql >>> do_update_stmt = insert_stmt.on_conflict_do_update( - ... index_elements=['id'], - ... set_=dict(data='updated value') + ... index_elements=["id"], set_=dict(data="updated value") ... ) >>> print(do_update_stmt) {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) @@ -625,8 +636,7 @@ def set_search_path(dbapi_connection, connection_record): {stop} >>> do_update_stmt = insert_stmt.on_conflict_do_update( - ... index_elements=[my_table.c.id], - ... set_=dict(data='updated value') + ... 
index_elements=[my_table.c.id], set_=dict(data="updated value") ... ) >>> print(do_update_stmt) {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) @@ -638,11 +648,11 @@ def set_search_path(dbapi_connection, connection_record): .. sourcecode:: pycon+sql - >>> stmt = insert(my_table).values(user_email='a@b.com', data='inserted data') + >>> stmt = insert(my_table).values(user_email="a@b.com", data="inserted data") >>> stmt = stmt.on_conflict_do_update( ... index_elements=[my_table.c.user_email], - ... index_where=my_table.c.user_email.like('%@gmail.com'), - ... set_=dict(data=stmt.excluded.data) + ... index_where=my_table.c.user_email.like("%@gmail.com"), + ... set_=dict(data=stmt.excluded.data), ... ) >>> print(stmt) {printsql}INSERT INTO my_table (data, user_email) @@ -656,8 +666,7 @@ def set_search_path(dbapi_connection, connection_record): .. sourcecode:: pycon+sql >>> do_update_stmt = insert_stmt.on_conflict_do_update( - ... constraint='my_table_idx_1', - ... set_=dict(data='updated value') + ... constraint="my_table_idx_1", set_=dict(data="updated value") ... ) >>> print(do_update_stmt) {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) @@ -665,8 +674,7 @@ def set_search_path(dbapi_connection, connection_record): {stop} >>> do_update_stmt = insert_stmt.on_conflict_do_update( - ... constraint='my_table_pk', - ... set_=dict(data='updated value') + ... constraint="my_table_pk", set_=dict(data="updated value") ... ) >>> print(do_update_stmt) {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) @@ -688,8 +696,7 @@ def set_search_path(dbapi_connection, connection_record): .. sourcecode:: pycon+sql >>> do_update_stmt = insert_stmt.on_conflict_do_update( - ... constraint=my_table.primary_key, - ... set_=dict(data='updated value') + ... constraint=my_table.primary_key, set_=dict(data="updated value") ... ) >>> print(do_update_stmt) {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) @@ -707,10 +714,9 @@ def set_search_path(dbapi_connection, connection_record): .. sourcecode:: pycon+sql - >>> stmt = insert(my_table).values(id='some_id', data='inserted value') + >>> stmt = insert(my_table).values(id="some_id", data="inserted value") >>> do_update_stmt = stmt.on_conflict_do_update( - ... index_elements=['id'], - ... set_=dict(data='updated value') + ... index_elements=["id"], set_=dict(data="updated value") ... ) >>> print(do_update_stmt) {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) @@ -739,13 +745,11 @@ def set_search_path(dbapi_connection, connection_record): .. sourcecode:: pycon+sql >>> stmt = insert(my_table).values( - ... id='some_id', - ... data='inserted value', - ... author='jlh' + ... id="some_id", data="inserted value", author="jlh" ... ) >>> do_update_stmt = stmt.on_conflict_do_update( - ... index_elements=['id'], - ... set_=dict(data='updated value', author=stmt.excluded.author) + ... index_elements=["id"], + ... set_=dict(data="updated value", author=stmt.excluded.author), ... ) >>> print(do_update_stmt) {printsql}INSERT INTO my_table (id, data, author) @@ -762,14 +766,12 @@ def set_search_path(dbapi_connection, connection_record): .. sourcecode:: pycon+sql >>> stmt = insert(my_table).values( - ... id='some_id', - ... data='inserted value', - ... author='jlh' + ... id="some_id", data="inserted value", author="jlh" ... ) >>> on_update_stmt = stmt.on_conflict_do_update( - ... index_elements=['id'], - ... set_=dict(data='updated value', author=stmt.excluded.author), - ... 
where=(my_table.c.status == 2) + ... index_elements=["id"], + ... set_=dict(data="updated value", author=stmt.excluded.author), + ... where=(my_table.c.status == 2), ... ) >>> print(on_update_stmt) {printsql}INSERT INTO my_table (id, data, author) @@ -787,8 +789,8 @@ def set_search_path(dbapi_connection, connection_record): .. sourcecode:: pycon+sql - >>> stmt = insert(my_table).values(id='some_id', data='inserted value') - >>> stmt = stmt.on_conflict_do_nothing(index_elements=['id']) + >>> stmt = insert(my_table).values(id="some_id", data="inserted value") + >>> stmt = stmt.on_conflict_do_nothing(index_elements=["id"]) >>> print(stmt) {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) ON CONFLICT (id) DO NOTHING @@ -799,7 +801,7 @@ def set_search_path(dbapi_connection, connection_record): .. sourcecode:: pycon+sql - >>> stmt = insert(my_table).values(id='some_id', data='inserted value') + >>> stmt = insert(my_table).values(id="some_id", data="inserted value") >>> stmt = stmt.on_conflict_do_nothing() >>> print(stmt) {printsql}INSERT INTO my_table (id, data) VALUES (%(id)s, %(data)s) @@ -830,7 +832,9 @@ def set_search_path(dbapi_connection, connection_record): select(sometable.c.text.match("search string")) -would emit to the database:: +would emit to the database: + +.. sourcecode:: sql SELECT text @@ plainto_tsquery('search string') FROM table @@ -846,11 +850,11 @@ def set_search_path(dbapi_connection, connection_record): from sqlalchemy import func - select( - sometable.c.text.bool_op("@@")(func.to_tsquery("search string")) - ) + select(sometable.c.text.bool_op("@@")(func.to_tsquery("search string"))) - Which would emit:: + Which would emit: + + .. sourcecode:: sql SELECT text @@ to_tsquery('search string') FROM table @@ -864,9 +868,7 @@ def set_search_path(dbapi_connection, connection_record): For example, the query:: - select( - func.to_tsquery('cat').bool_op("@>")(func.to_tsquery('cat & rat')) - ) + select(func.to_tsquery("cat").bool_op("@>")(func.to_tsquery("cat & rat"))) would generate: @@ -879,9 +881,12 @@ def set_search_path(dbapi_connection, connection_record): from sqlalchemy.dialects.postgresql import TSVECTOR from sqlalchemy import select, cast + select(cast("some text", TSVECTOR)) -produces a statement equivalent to:: +produces a statement equivalent to: + +.. sourcecode:: sql SELECT CAST('some text' AS TSVECTOR) AS anon_1 @@ -909,10 +914,12 @@ def set_search_path(dbapi_connection, connection_record): specified using the ``postgresql_regconfig`` parameter, such as:: select(mytable.c.id).where( - mytable.c.title.match('somestring', postgresql_regconfig='english') + mytable.c.title.match("somestring", postgresql_regconfig="english") ) -Which would emit:: +Which would emit: + +.. sourcecode:: sql SELECT mytable.id FROM mytable WHERE mytable.title @@ plainto_tsquery('english', 'somestring') @@ -926,7 +933,9 @@ def set_search_path(dbapi_connection, connection_record): ) ) -produces a statement equivalent to:: +produces a statement equivalent to: + +.. sourcecode:: sql SELECT mytable.id FROM mytable WHERE to_tsvector('english', mytable.title) @@ @@ -950,16 +959,16 @@ def set_search_path(dbapi_connection, connection_record): syntaxes. It uses SQLAlchemy's hints mechanism:: # SELECT ... FROM ONLY ... - result = table.select().with_hint(table, 'ONLY', 'postgresql') + result = table.select().with_hint(table, "ONLY", "postgresql") print(result.fetchall()) # UPDATE ONLY ... 
- table.update(values=dict(foo='bar')).with_hint('ONLY', - dialect_name='postgresql') + table.update(values=dict(foo="bar")).with_hint( + "ONLY", dialect_name="postgresql" + ) # DELETE FROM ONLY ... - table.delete().with_hint('ONLY', dialect_name='postgresql') - + table.delete().with_hint("ONLY", dialect_name="postgresql") .. _postgresql_indexes: @@ -975,7 +984,7 @@ def set_search_path(dbapi_connection, connection_record): The ``postgresql_include`` option renders INCLUDE(colname) for the given string names:: - Index("my_index", table.c.x, postgresql_include=['y']) + Index("my_index", table.c.x, postgresql_include=["y"]) would render the index as ``CREATE INDEX my_index ON table (x) INCLUDE (y)`` @@ -992,7 +1001,7 @@ def set_search_path(dbapi_connection, connection_record): applied to a subset of rows. These can be specified on :class:`.Index` using the ``postgresql_where`` keyword argument:: - Index('my_index', my_table.c.id, postgresql_where=my_table.c.value > 10) + Index("my_index", my_table.c.id, postgresql_where=my_table.c.value > 10) .. _postgresql_operator_classes: @@ -1006,11 +1015,11 @@ def set_search_path(dbapi_connection, connection_record): ``postgresql_ops`` keyword argument:: Index( - 'my_index', my_table.c.id, my_table.c.data, - postgresql_ops={ - 'data': 'text_pattern_ops', - 'id': 'int4_ops' - }) + "my_index", + my_table.c.id, + my_table.c.data, + postgresql_ops={"data": "text_pattern_ops", "id": "int4_ops"}, + ) Note that the keys in the ``postgresql_ops`` dictionaries are the "key" name of the :class:`_schema.Column`, i.e. the name used to access it from @@ -1022,12 +1031,11 @@ def set_search_path(dbapi_connection, connection_record): that is identified in the dictionary by name, e.g.:: Index( - 'my_index', my_table.c.id, - func.lower(my_table.c.data).label('data_lower'), - postgresql_ops={ - 'data_lower': 'text_pattern_ops', - 'id': 'int4_ops' - }) + "my_index", + my_table.c.id, + func.lower(my_table.c.data).label("data_lower"), + postgresql_ops={"data_lower": "text_pattern_ops", "id": "int4_ops"}, + ) Operator classes are also supported by the :class:`_postgresql.ExcludeConstraint` construct using the @@ -1046,7 +1054,7 @@ def set_search_path(dbapi_connection, connection_record): https://www.postgresql.org/docs/current/static/indexes-types.html). These can be specified on :class:`.Index` using the ``postgresql_using`` keyword argument:: - Index('my_index', my_table.c.data, postgresql_using='gin') + Index("my_index", my_table.c.data, postgresql_using="gin") The value passed to the keyword argument will be simply passed through to the underlying CREATE INDEX command, so it *must* be a valid index type for your @@ -1062,13 +1070,13 @@ def set_search_path(dbapi_connection, connection_record): parameters can be specified on :class:`.Index` using the ``postgresql_with`` keyword argument:: - Index('my_index', my_table.c.data, postgresql_with={"fillfactor": 50}) + Index("my_index", my_table.c.data, postgresql_with={"fillfactor": 50}) PostgreSQL allows to define the tablespace in which to create the index. The tablespace can be specified on :class:`.Index` using the ``postgresql_tablespace`` keyword argument:: - Index('my_index', my_table.c.data, postgresql_tablespace='my_tablespace') + Index("my_index", my_table.c.data, postgresql_tablespace="my_tablespace") Note that the same option is available on :class:`_schema.Table` as well. 
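As an illustrative sketch (table, column, and tablespace names are
hypothetical), several of the above per-:class:`.Index` options may be
combined on a single index definition::

    from sqlalchemy import Column, Index, Integer, MetaData, String, Table

    metadata = MetaData()

    my_table = Table(
        "my_table",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", String(50)),
    )

    # a partial btree index with an operator class, a storage parameter,
    # and a tablespace; the values here are purely illustrative
    Index(
        "ix_my_table_data",
        my_table.c.data,
        postgresql_where=my_table.c.id > 10,
        postgresql_ops={"data": "text_pattern_ops"},
        postgresql_with={"fillfactor": 50},
        postgresql_tablespace="my_tablespace",
    )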
@@ -1080,17 +1088,21 @@ def set_search_path(dbapi_connection, connection_record): The PostgreSQL index option CONCURRENTLY is supported by passing the flag ``postgresql_concurrently`` to the :class:`.Index` construct:: - tbl = Table('testtbl', m, Column('data', Integer)) + tbl = Table("testtbl", m, Column("data", Integer)) - idx1 = Index('test_idx1', tbl.c.data, postgresql_concurrently=True) + idx1 = Index("test_idx1", tbl.c.data, postgresql_concurrently=True) The above index construct will render DDL for CREATE INDEX, assuming -PostgreSQL 8.2 or higher is detected or for a connection-less dialect, as:: +PostgreSQL 8.2 or higher is detected or for a connection-less dialect, as: + +.. sourcecode:: sql CREATE INDEX CONCURRENTLY test_idx1 ON testtbl (data) For DROP INDEX, assuming PostgreSQL 9.2 or higher is detected or for -a connection-less dialect, it will emit:: +a connection-less dialect, it will emit: + +.. sourcecode:: sql DROP INDEX CONCURRENTLY test_idx1 @@ -1100,14 +1112,11 @@ def set_search_path(dbapi_connection, connection_record): construct, the DBAPI's "autocommit" mode must be used:: metadata = MetaData() - table = Table( - "foo", metadata, - Column("id", String)) - index = Index( - "foo_idx", table.c.id, postgresql_concurrently=True) + table = Table("foo", metadata, Column("id", String)) + index = Index("foo_idx", table.c.id, postgresql_concurrently=True) with engine.connect() as conn: - with conn.execution_options(isolation_level='AUTOCOMMIT'): + with conn.execution_options(isolation_level="AUTOCOMMIT"): table.create(conn) .. seealso:: @@ -1165,26 +1174,33 @@ def set_search_path(dbapi_connection, connection_record): * ``ON COMMIT``:: - Table("some_table", metadata, ..., postgresql_on_commit='PRESERVE ROWS') + Table("some_table", metadata, ..., postgresql_on_commit="PRESERVE ROWS") -* ``PARTITION BY``:: +* + ``PARTITION BY``:: - Table("some_table", metadata, ..., - postgresql_partition_by='LIST (part_column)') + Table( + "some_table", + metadata, + ..., + postgresql_partition_by="LIST (part_column)", + ) - .. versionadded:: 1.2.6 + .. versionadded:: 1.2.6 -* ``TABLESPACE``:: +* + ``TABLESPACE``:: - Table("some_table", metadata, ..., postgresql_tablespace='some_tablespace') + Table("some_table", metadata, ..., postgresql_tablespace="some_tablespace") The above option is also available on the :class:`.Index` construct. -* ``USING``:: +* + ``USING``:: - Table("some_table", metadata, ..., postgresql_using='heap') + Table("some_table", metadata, ..., postgresql_using="heap") - .. versionadded:: 2.0.26 + .. versionadded:: 2.0.26 * ``WITH OIDS``:: @@ -1225,7 +1241,7 @@ def update(): "user", ["user_id"], ["id"], - postgresql_not_valid=True + postgresql_not_valid=True, ) The keyword is ultimately accepted directly by the @@ -1236,7 +1252,9 @@ def update(): CheckConstraint("some_field IS NOT NULL", postgresql_not_valid=True) - ForeignKeyConstraint(["some_id"], ["some_table.some_id"], postgresql_not_valid=True) + ForeignKeyConstraint( + ["some_id"], ["some_table.some_id"], postgresql_not_valid=True + ) .. versionadded:: 1.4.32 @@ -1279,7 +1297,9 @@ def update(): .. sourcecode:: pycon+sql >>> from sqlalchemy import select, func - >>> stmt = select(func.json_each('{"a":"foo", "b":"bar"}').table_valued("key", "value")) + >>> stmt = select( + ... func.json_each('{"a":"foo", "b":"bar"}').table_valued("key", "value") + ... 
) >>> print(stmt) {printsql}SELECT anon_1.key, anon_1.value FROM json_each(:json_each_1) AS anon_1 @@ -1291,8 +1311,7 @@ def update(): >>> from sqlalchemy import select, func, literal_column >>> stmt = select( ... func.json_populate_record( - ... literal_column("null::myrowtype"), - ... '{"a":1,"b":2}' + ... literal_column("null::myrowtype"), '{"a":1,"b":2}' ... ).table_valued("a", "b", name="x") ... ) >>> print(stmt) @@ -1310,9 +1329,13 @@ def update(): >>> from sqlalchemy import select, func, column, Integer, Text >>> stmt = select( - ... func.json_to_record('{"a":1,"b":[1,2,3],"c":"bar"}').table_valued( - ... column("a", Integer), column("b", Text), column("d", Text), - ... ).render_derived(name="x", with_types=True) + ... func.json_to_record('{"a":1,"b":[1,2,3],"c":"bar"}') + ... .table_valued( + ... column("a", Integer), + ... column("b", Text), + ... column("d", Text), + ... ) + ... .render_derived(name="x", with_types=True) ... ) >>> print(stmt) {printsql}SELECT x.a, x.b, x.d @@ -1329,9 +1352,9 @@ def update(): >>> from sqlalchemy import select, func >>> stmt = select( - ... func.generate_series(4, 1, -1). - ... table_valued("value", with_ordinality="ordinality"). - ... render_derived() + ... func.generate_series(4, 1, -1) + ... .table_valued("value", with_ordinality="ordinality") + ... .render_derived() ... ) >>> print(stmt) {printsql}SELECT anon_1.value, anon_1.ordinality @@ -1360,7 +1383,9 @@ def update(): .. sourcecode:: pycon+sql >>> from sqlalchemy import select, func - >>> stmt = select(func.json_array_elements('["one", "two"]').column_valued("x")) + >>> stmt = select( + ... func.json_array_elements('["one", "two"]').column_valued("x") + ... ) >>> print(stmt) {printsql}SELECT x FROM json_array_elements(:json_array_elements_1) AS x @@ -1384,7 +1409,7 @@ def update(): >>> from sqlalchemy import table, column, ARRAY, Integer >>> from sqlalchemy import select, func - >>> t = table("t", column('value', ARRAY(Integer))) + >>> t = table("t", column("value", ARRAY(Integer))) >>> stmt = select(func.unnest(t.c.value).column_valued("unnested_value")) >>> print(stmt) {printsql}SELECT unnested_value @@ -1406,10 +1431,10 @@ def update(): >>> from sqlalchemy import table, column, func, tuple_ >>> t = table("t", column("id"), column("fk")) - >>> stmt = t.select().where( - ... tuple_(t.c.id, t.c.fk) > (1,2) - ... ).where( - ... func.ROW(t.c.id, t.c.fk) < func.ROW(3, 7) + >>> stmt = ( + ... t.select() + ... .where(tuple_(t.c.id, t.c.fk) > (1, 2)) + ... .where(func.ROW(t.c.id, t.c.fk) < func.ROW(3, 7)) ... ) >>> print(stmt) {printsql}SELECT t.id, t.fk @@ -1438,7 +1463,7 @@ def update(): .. sourcecode:: pycon+sql >>> from sqlalchemy import table, column, func, select - >>> a = table( "a", column("id"), column("x"), column("y")) + >>> a = table("a", column("id"), column("x"), column("y")) >>> stmt = select(func.row_to_json(a.table_valued())) >>> print(stmt) {printsql}SELECT row_to_json(a) AS row_to_json_1 diff --git a/lib/sqlalchemy/dialects/postgresql/ext.py b/lib/sqlalchemy/dialects/postgresql/ext.py index 7fc08953fcc..a760773e247 100644 --- a/lib/sqlalchemy/dialects/postgresql/ext.py +++ b/lib/sqlalchemy/dialects/postgresql/ext.py @@ -35,22 +35,26 @@ class aggregate_order_by(expression.ColumnElement): E.g.:: from sqlalchemy.dialects.postgresql import aggregate_order_by + expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc())) stmt = select(expr) - would represent the expression:: + would represent the expression: + + .. 
sourcecode:: sql SELECT array_agg(a ORDER BY b DESC) FROM table; Similarly:: expr = func.string_agg( - table.c.a, - aggregate_order_by(literal_column("','"), table.c.a) + table.c.a, aggregate_order_by(literal_column("','"), table.c.a) ) stmt = select(expr) - Would represent:: + Would represent: + + .. sourcecode:: sql SELECT string_agg(a, ',' ORDER BY a) FROM table; @@ -131,10 +135,10 @@ def __init__(self, *elements, **kw): E.g.:: const = ExcludeConstraint( - (Column('period'), '&&'), - (Column('group'), '='), - where=(Column('group') != 'some group'), - ops={'group': 'my_operator_class'} + (Column("period"), "&&"), + (Column("group"), "="), + where=(Column("group") != "some group"), + ops={"group": "my_operator_class"}, ) The constraint is normally embedded into the :class:`_schema.Table` @@ -142,19 +146,20 @@ def __init__(self, *elements, **kw): directly, or added later using :meth:`.append_constraint`:: some_table = Table( - 'some_table', metadata, - Column('id', Integer, primary_key=True), - Column('period', TSRANGE()), - Column('group', String) + "some_table", + metadata, + Column("id", Integer, primary_key=True), + Column("period", TSRANGE()), + Column("group", String), ) some_table.append_constraint( ExcludeConstraint( - (some_table.c.period, '&&'), - (some_table.c.group, '='), - where=some_table.c.group != 'some group', - name='some_table_excl_const', - ops={'group': 'my_operator_class'} + (some_table.c.period, "&&"), + (some_table.c.group, "="), + where=some_table.c.group != "some group", + name="some_table_excl_const", + ops={"group": "my_operator_class"}, ) ) diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py index 04c8cf16015..5a2d451316d 100644 --- a/lib/sqlalchemy/dialects/postgresql/hstore.py +++ b/lib/sqlalchemy/dialects/postgresql/hstore.py @@ -28,28 +28,29 @@ class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine): The :class:`.HSTORE` type stores dictionaries containing strings, e.g.:: - data_table = Table('data_table', metadata, - Column('id', Integer, primary_key=True), - Column('data', HSTORE) + data_table = Table( + "data_table", + metadata, + Column("id", Integer, primary_key=True), + Column("data", HSTORE), ) with engine.connect() as conn: conn.execute( - data_table.insert(), - data = {"key1": "value1", "key2": "value2"} + data_table.insert(), data={"key1": "value1", "key2": "value2"} ) :class:`.HSTORE` provides for a wide range of operations, including: * Index operations:: - data_table.c.data['some key'] == 'some value' + data_table.c.data["some key"] == "some value" * Containment operations:: - data_table.c.data.has_key('some key') + data_table.c.data.has_key("some key") - data_table.c.data.has_all(['one', 'two', 'three']) + data_table.c.data.has_all(["one", "two", "three"]) * Concatenation:: @@ -72,17 +73,19 @@ class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine): from sqlalchemy.ext.mutable import MutableDict + class MyClass(Base): - __tablename__ = 'data_table' + __tablename__ = "data_table" id = Column(Integer, primary_key=True) data = Column(MutableDict.as_mutable(HSTORE)) + my_object = session.query(MyClass).one() # in-place mutation, requires Mutable extension # in order for the ORM to detect - my_object.data['some_key'] = 'some value' + my_object.data["some_key"] = "some value" session.commit() @@ -96,7 +99,7 @@ class MyClass(Base): :class:`.hstore` - render the PostgreSQL ``hstore()`` function. 
- """ + """ # noqa: E501 __visit_name__ = "HSTORE" hashable = False @@ -221,12 +224,12 @@ class hstore(sqlfunc.GenericFunction): from sqlalchemy.dialects.postgresql import array, hstore - select(hstore('key1', 'value1')) + select(hstore("key1", "value1")) select( hstore( - array(['key1', 'key2', 'key3']), - array(['value1', 'value2', 'value3']) + array(["key1", "key2", "key3"]), + array(["value1", "value2", "value3"]), ) ) diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index 914d8423d4b..4e7c15ffe92 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -90,14 +90,14 @@ class JSON(sqltypes.JSON): * Index operations (the ``->`` operator):: - data_table.c.data['some key'] + data_table.c.data["some key"] data_table.c.data[5] + * Index operations returning text + (the ``->>`` operator):: - * Index operations returning text (the ``->>`` operator):: - - data_table.c.data['some key'].astext == 'some value' + data_table.c.data["some key"].astext == "some value" Note that equivalent functionality is available via the :attr:`.JSON.Comparator.as_string` accessor. @@ -105,18 +105,20 @@ class JSON(sqltypes.JSON): * Index operations with CAST (equivalent to ``CAST(col ->> ['some key'] AS )``):: - data_table.c.data['some key'].astext.cast(Integer) == 5 + data_table.c.data["some key"].astext.cast(Integer) == 5 Note that equivalent functionality is available via the :attr:`.JSON.Comparator.as_integer` and similar accessors. * Path index operations (the ``#>`` operator):: - data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')] + data_table.c.data[("key_1", "key_2", 5, ..., "key_n")] * Path index operations returning text (the ``#>>`` operator):: - data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')].astext == 'some value' + data_table.c.data[ + ("key_1", "key_2", 5, ..., "key_n") + ].astext == "some value" Index operations return an expression object whose type defaults to :class:`_types.JSON` by default, @@ -128,10 +130,11 @@ class JSON(sqltypes.JSON): using psycopg2, the DBAPI only allows serializers at the per-cursor or per-connection level. E.g.:: - engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test", - json_serializer=my_serialize_fn, - json_deserializer=my_deserialize_fn - ) + engine = create_engine( + "postgresql+psycopg2://scott:tiger@localhost/test", + json_serializer=my_serialize_fn, + json_deserializer=my_deserialize_fn, + ) When using the psycopg2 dialect, the json_deserializer is registered against the database using ``psycopg2.extras.register_default_json``. @@ -156,6 +159,7 @@ def __init__(self, none_as_null=False, astext_type=None): be used to persist a NULL value:: from sqlalchemy import null + conn.execute(table.insert(), {"data": null()}) .. seealso:: @@ -181,7 +185,7 @@ def astext(self): E.g.:: - select(data_table.c.data['some key'].astext) + select(data_table.c.data["some key"].astext) .. 
seealso:: @@ -208,15 +212,16 @@ class JSONB(JSON): The :class:`_postgresql.JSONB` type stores arbitrary JSONB format data, e.g.:: - data_table = Table('data_table', metadata, - Column('id', Integer, primary_key=True), - Column('data', JSONB) + data_table = Table( + "data_table", + metadata, + Column("id", Integer, primary_key=True), + Column("data", JSONB), ) with engine.connect() as conn: conn.execute( - data_table.insert(), - data = {"key1": "value1", "key2": "value2"} + data_table.insert(), data={"key1": "value1", "key2": "value2"} ) The :class:`_postgresql.JSONB` type includes all operations provided by diff --git a/lib/sqlalchemy/dialects/postgresql/named_types.py b/lib/sqlalchemy/dialects/postgresql/named_types.py index 16e5c867efc..320de440f86 100644 --- a/lib/sqlalchemy/dialects/postgresql/named_types.py +++ b/lib/sqlalchemy/dialects/postgresql/named_types.py @@ -185,8 +185,10 @@ class ENUM(NamedType, type_api.NativeForEmulated, sqltypes.Enum): :meth:`_schema.Table.drop` methods are called:: - table = Table('sometable', metadata, - Column('some_enum', ENUM('a', 'b', 'c', name='myenum')) + table = Table( + "sometable", + metadata, + Column("some_enum", ENUM("a", "b", "c", name="myenum")), ) table.create(engine) # will emit CREATE ENUM and CREATE TABLE @@ -197,21 +199,17 @@ class ENUM(NamedType, type_api.NativeForEmulated, sqltypes.Enum): :class:`_postgresql.ENUM` independently, and associate it with the :class:`_schema.MetaData` object itself:: - my_enum = ENUM('a', 'b', 'c', name='myenum', metadata=metadata) + my_enum = ENUM("a", "b", "c", name="myenum", metadata=metadata) - t1 = Table('sometable_one', metadata, - Column('some_enum', myenum) - ) + t1 = Table("sometable_one", metadata, Column("some_enum", myenum)) - t2 = Table('sometable_two', metadata, - Column('some_enum', myenum) - ) + t2 = Table("sometable_two", metadata, Column("some_enum", myenum)) When this pattern is used, care must still be taken at the level of individual table creates. Emitting CREATE TABLE without also specifying ``checkfirst=True`` will still cause issues:: - t1.create(engine) # will fail: no such type 'myenum' + t1.create(engine) # will fail: no such type 'myenum' If we specify ``checkfirst=True``, the individual table-level create operation will check for the ``ENUM`` and create if not exists:: @@ -387,14 +385,12 @@ class DOMAIN(NamedType, sqltypes.SchemaType): A domain is essentially a data type with optional constraints that restrict the allowed set of values. E.g.:: - PositiveInt = DOMAIN( - "pos_int", Integer, check="VALUE > 0", not_null=True - ) + PositiveInt = DOMAIN("pos_int", Integer, check="VALUE > 0", not_null=True) UsPostalCode = DOMAIN( "us_postal_code", Text, - check="VALUE ~ '^\d{5}$' OR VALUE ~ '^\d{5}-\d{4}$'" + check="VALUE ~ '^\d{5}$' OR VALUE ~ '^\d{5}-\d{4}$'", ) See the `PostgreSQL documentation`__ for additional details @@ -403,7 +399,7 @@ class DOMAIN(NamedType, sqltypes.SchemaType): .. versionadded:: 2.0 - """ + """ # noqa: E501 DDLGenerator = DomainGenerator DDLDropper = DomainDropper diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py index 0151be0253d..aa878c353e0 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg8000.py +++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py @@ -27,19 +27,21 @@ the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. 
Typically, this can be changed to ``utf-8``, as a more useful default:: - #client_encoding = sql_ascii # actually, defaults to database - # encoding + # client_encoding = sql_ascii # actually, defaults to database encoding client_encoding = utf8 The ``client_encoding`` can be overridden for a session by executing the SQL: -SET CLIENT_ENCODING TO 'utf8'; +.. sourcecode:: sql + + SET CLIENT_ENCODING TO 'utf8'; SQLAlchemy will execute this SQL on all new connections based on the value passed to :func:`_sa.create_engine` using the ``client_encoding`` parameter:: engine = create_engine( - "postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8') + "postgresql+pg8000://user:pass@host/dbname", client_encoding="utf8" + ) .. _pg8000_ssl: @@ -50,6 +52,7 @@ :paramref:`_sa.create_engine.connect_args` dictionary:: import ssl + ssl_context = ssl.create_default_context() engine = sa.create_engine( "postgresql+pg8000://scott:tiger@192.168.0.199/test", @@ -61,6 +64,7 @@ necessary to disable hostname checking:: import ssl + ssl_context = ssl.create_default_context() ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg.py b/lib/sqlalchemy/dialects/postgresql/psycopg.py index 66a2c774623..3c8a5e4c598 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg.py @@ -29,20 +29,29 @@ automatically select the sync version, e.g.:: from sqlalchemy import create_engine - sync_engine = create_engine("postgresql+psycopg://scott:tiger@localhost/test") + + sync_engine = create_engine( + "postgresql+psycopg://scott:tiger@localhost/test" + ) * calling :func:`_asyncio.create_async_engine` with ``postgresql+psycopg://...`` will automatically select the async version, e.g.:: from sqlalchemy.ext.asyncio import create_async_engine - asyncio_engine = create_async_engine("postgresql+psycopg://scott:tiger@localhost/test") + + asyncio_engine = create_async_engine( + "postgresql+psycopg://scott:tiger@localhost/test" + ) The asyncio version of the dialect may also be specified explicitly using the ``psycopg_async`` suffix, as:: from sqlalchemy.ext.asyncio import create_async_engine - asyncio_engine = create_async_engine("postgresql+psycopg_async://scott:tiger@localhost/test") + + asyncio_engine = create_async_engine( + "postgresql+psycopg_async://scott:tiger@localhost/test" + ) .. seealso:: diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index fc05aca9078..d7efc2eb974 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -88,7 +88,6 @@ "postgresql+psycopg2://scott:tiger@192.168.0.199:5432/test?sslmode=require" ) - Unix Domain Connections ------------------------ @@ -103,13 +102,17 @@ was built. This value can be overridden by passing a pathname to psycopg2, using ``host`` as an additional keyword argument:: - create_engine("postgresql+psycopg2://user:password@/dbname?host=/var/lib/postgresql") + create_engine( + "postgresql+psycopg2://user:password@/dbname?host=/var/lib/postgresql" + ) .. warning:: The format accepted here allows for a hostname in the main URL in addition to the "host" query string argument. **When using this URL format, the initial host is silently ignored**. 
That is, this URL:: - engine = create_engine("postgresql+psycopg2://user:password@myhost1/dbname?host=myhost2") + engine = create_engine( + "postgresql+psycopg2://user:password@myhost1/dbname?host=myhost2" + ) Above, the hostname ``myhost1`` is **silently ignored and discarded.** The host which is connected is the ``myhost2`` host. @@ -190,7 +193,7 @@ For this form, the URL can be passed without any elements other than the initial scheme:: - engine = create_engine('postgresql+psycopg2://') + engine = create_engine("postgresql+psycopg2://") In the above form, a blank "dsn" string is passed to the ``psycopg2.connect()`` function which in turn represents an empty DSN passed to libpq. @@ -264,8 +267,8 @@ engine = create_engine( "postgresql+psycopg2://scott:tiger@host/dbname", - executemany_mode='values_plus_batch') - + executemany_mode="values_plus_batch", + ) Possible options for ``executemany_mode`` include: @@ -311,8 +314,10 @@ engine = create_engine( "postgresql+psycopg2://scott:tiger@host/dbname", - executemany_mode='values_plus_batch', - insertmanyvalues_page_size=5000, executemany_batch_page_size=500) + executemany_mode="values_plus_batch", + insertmanyvalues_page_size=5000, + executemany_batch_page_size=500, + ) .. seealso:: @@ -338,7 +343,9 @@ passed in the database URL; this parameter is consumed by the underlying ``libpq`` PostgreSQL client library:: - engine = create_engine("postgresql+psycopg2://user:pass@host/dbname?client_encoding=utf8") + engine = create_engine( + "postgresql+psycopg2://user:pass@host/dbname?client_encoding=utf8" + ) Alternatively, the above ``client_encoding`` value may be passed using :paramref:`_sa.create_engine.connect_args` for programmatic establishment with @@ -346,7 +353,7 @@ engine = create_engine( "postgresql+psycopg2://user:pass@host/dbname", - connect_args={'client_encoding': 'utf8'} + connect_args={"client_encoding": "utf8"}, ) * For all PostgreSQL versions, psycopg2 supports a client-side encoding @@ -355,8 +362,7 @@ ``client_encoding`` parameter passed to :func:`_sa.create_engine`:: engine = create_engine( - "postgresql+psycopg2://user:pass@host/dbname", - client_encoding="utf8" + "postgresql+psycopg2://user:pass@host/dbname", client_encoding="utf8" ) .. tip:: The above ``client_encoding`` parameter admittedly is very similar @@ -375,11 +381,9 @@ # postgresql.conf file # client_encoding = sql_ascii # actually, defaults to database - # encoding + # encoding client_encoding = utf8 - - Transactions ------------ @@ -426,15 +430,15 @@ import logging - logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO) + logging.getLogger("sqlalchemy.dialects.postgresql").setLevel(logging.INFO) Above, it is assumed that logging is configured externally. If this is not the case, configuration such as ``logging.basicConfig()`` must be utilized:: import logging - logging.basicConfig() # log messages to stdout - logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO) + logging.basicConfig() # log messages to stdout + logging.getLogger("sqlalchemy.dialects.postgresql").setLevel(logging.INFO) .. 
seealso:: @@ -471,8 +475,10 @@ use of the hstore extension by setting ``use_native_hstore`` to ``False`` as follows:: - engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test", - use_native_hstore=False) + engine = create_engine( + "postgresql+psycopg2://scott:tiger@localhost/test", + use_native_hstore=False, + ) The ``HSTORE`` type is **still supported** when the ``psycopg2.extensions.register_hstore()`` extension is not used. It merely diff --git a/lib/sqlalchemy/dialects/postgresql/types.py b/lib/sqlalchemy/dialects/postgresql/types.py index 2acf63bef61..73f9d372ab2 100644 --- a/lib/sqlalchemy/dialects/postgresql/types.py +++ b/lib/sqlalchemy/dialects/postgresql/types.py @@ -94,12 +94,11 @@ class MONEY(sqltypes.TypeEngine[str]): from sqlalchemy import Dialect from sqlalchemy import TypeDecorator + class NumericMoney(TypeDecorator): impl = MONEY - def process_result_value( - self, value: Any, dialect: Dialect - ) -> None: + def process_result_value(self, value: Any, dialect: Dialect) -> None: if value is not None: # adjust this for the currency and numeric m = re.match(r"\$([\d.]+)", value) @@ -114,6 +113,7 @@ def process_result_value( from sqlalchemy import cast from sqlalchemy import TypeDecorator + class NumericMoney(TypeDecorator): impl = MONEY @@ -122,7 +122,7 @@ def column_expression(self, column: Any): .. versionadded:: 1.2 - """ + """ # noqa: E501 __visit_name__ = "MONEY" diff --git a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py index 796a80cf060..43ab2f0beef 100644 --- a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py @@ -31,6 +31,7 @@ :func:`_asyncio.create_async_engine` engine creation function:: from sqlalchemy.ext.asyncio import create_async_engine + engine = create_async_engine("sqlite+aiosqlite:///filename") The URL passes through all arguments to the ``pysqlite`` driver, so all @@ -58,12 +59,14 @@ engine = create_async_engine("sqlite+aiosqlite:///myfile.db") + @event.listens_for(engine.sync_engine, "connect") def do_connect(dbapi_connection, connection_record): # disable aiosqlite's emitting of the BEGIN statement entirely. # also stops it from emitting COMMIT before any DDL. dbapi_connection.isolation_level = None + @event.listens_for(engine.sync_engine, "begin") def do_begin(conn): # emit our own BEGIN diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 84bb8937e16..0e4c9694bbf 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -7,7 +7,7 @@ # mypy: ignore-errors -r""" +r''' .. 
dialect:: sqlite :name: SQLite :normal_support: 3.12+ @@ -69,9 +69,12 @@ when rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table construct:: - Table('sometable', metadata, - Column('id', Integer, primary_key=True), - sqlite_autoincrement=True) + Table( + "sometable", + metadata, + Column("id", Integer, primary_key=True), + sqlite_autoincrement=True, + ) Allowing autoincrement behavior SQLAlchemy types other than Integer/INTEGER ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -91,8 +94,13 @@ only using :meth:`.TypeEngine.with_variant`:: table = Table( - "my_table", metadata, - Column("id", BigInteger().with_variant(Integer, "sqlite"), primary_key=True) + "my_table", + metadata, + Column( + "id", + BigInteger().with_variant(Integer, "sqlite"), + primary_key=True, + ), ) Another is to use a subclass of :class:`.BigInteger` that overrides its DDL @@ -101,21 +109,23 @@ from sqlalchemy import BigInteger from sqlalchemy.ext.compiler import compiles + class SLBigInteger(BigInteger): pass - @compiles(SLBigInteger, 'sqlite') + + @compiles(SLBigInteger, "sqlite") def bi_c(element, compiler, **kw): return "INTEGER" + @compiles(SLBigInteger) def bi_c(element, compiler, **kw): return compiler.visit_BIGINT(element, **kw) table = Table( - "my_table", metadata, - Column("id", SLBigInteger(), primary_key=True) + "my_table", metadata, Column("id", SLBigInteger(), primary_key=True) ) .. seealso:: @@ -235,26 +245,24 @@ def bi_c(element, compiler, **kw): # INSERT..RETURNING result = connection.execute( - table.insert(). - values(name='foo'). - returning(table.c.col1, table.c.col2) + table.insert().values(name="foo").returning(table.c.col1, table.c.col2) ) print(result.all()) # UPDATE..RETURNING result = connection.execute( - table.update(). - where(table.c.name=='foo'). - values(name='bar'). - returning(table.c.col1, table.c.col2) + table.update() + .where(table.c.name == "foo") + .values(name="bar") + .returning(table.c.col1, table.c.col2) ) print(result.all()) # DELETE..RETURNING result = connection.execute( - table.delete(). - where(table.c.name=='foo'). - returning(table.c.col1, table.c.col2) + table.delete() + .where(table.c.name == "foo") + .returning(table.c.col1, table.c.col2) ) print(result.all()) @@ -317,6 +325,7 @@ def bi_c(element, compiler, **kw): from sqlalchemy.engine import Engine from sqlalchemy import event + @event.listens_for(Engine, "connect") def set_sqlite_pragma(dbapi_connection, connection_record): cursor = dbapi_connection.cursor() @@ -379,13 +388,16 @@ def set_sqlite_pragma(dbapi_connection, connection_record): that specifies the IGNORE algorithm:: some_table = Table( - 'some_table', metadata, - Column('id', Integer, primary_key=True), - Column('data', Integer), - UniqueConstraint('id', 'data', sqlite_on_conflict='IGNORE') + "some_table", + metadata, + Column("id", Integer, primary_key=True), + Column("data", Integer), + UniqueConstraint("id", "data", sqlite_on_conflict="IGNORE"), ) -The above renders CREATE TABLE DDL as:: +The above renders CREATE TABLE DDL as: + +.. 
sourcecode:: sql CREATE TABLE some_table ( id INTEGER NOT NULL, @@ -402,13 +414,17 @@ def set_sqlite_pragma(dbapi_connection, connection_record): UNIQUE constraint in the DDL:: some_table = Table( - 'some_table', metadata, - Column('id', Integer, primary_key=True), - Column('data', Integer, unique=True, - sqlite_on_conflict_unique='IGNORE') + "some_table", + metadata, + Column("id", Integer, primary_key=True), + Column( + "data", Integer, unique=True, sqlite_on_conflict_unique="IGNORE" + ), ) -rendering:: +rendering: + +.. sourcecode:: sql CREATE TABLE some_table ( id INTEGER NOT NULL, @@ -421,13 +437,17 @@ def set_sqlite_pragma(dbapi_connection, connection_record): ``sqlite_on_conflict_not_null`` is used:: some_table = Table( - 'some_table', metadata, - Column('id', Integer, primary_key=True), - Column('data', Integer, nullable=False, - sqlite_on_conflict_not_null='FAIL') + "some_table", + metadata, + Column("id", Integer, primary_key=True), + Column( + "data", Integer, nullable=False, sqlite_on_conflict_not_null="FAIL" + ), ) -this renders the column inline ON CONFLICT phrase:: +this renders the column inline ON CONFLICT phrase: + +.. sourcecode:: sql CREATE TABLE some_table ( id INTEGER NOT NULL, @@ -439,13 +459,20 @@ def set_sqlite_pragma(dbapi_connection, connection_record): Similarly, for an inline primary key, use ``sqlite_on_conflict_primary_key``:: some_table = Table( - 'some_table', metadata, - Column('id', Integer, primary_key=True, - sqlite_on_conflict_primary_key='FAIL') + "some_table", + metadata, + Column( + "id", + Integer, + primary_key=True, + sqlite_on_conflict_primary_key="FAIL", + ), ) SQLAlchemy renders the PRIMARY KEY constraint separately, so the conflict -resolution algorithm is applied to the constraint itself:: +resolution algorithm is applied to the constraint itself: + +.. sourcecode:: sql CREATE TABLE some_table ( id INTEGER NOT NULL, @@ -455,7 +482,7 @@ def set_sqlite_pragma(dbapi_connection, connection_record): .. _sqlite_on_conflict_insert: INSERT...ON CONFLICT (Upsert) ------------------------------------ +----------------------------- .. seealso:: This section describes the :term:`DML` version of "ON CONFLICT" for SQLite, which occurs within an INSERT statement. For "ON CONFLICT" as @@ -483,21 +510,18 @@ def set_sqlite_pragma(dbapi_connection, connection_record): >>> from sqlalchemy.dialects.sqlite import insert >>> insert_stmt = insert(my_table).values( - ... id='some_existing_id', - ... data='inserted value') + ... id="some_existing_id", data="inserted value" + ... ) >>> do_update_stmt = insert_stmt.on_conflict_do_update( - ... index_elements=['id'], - ... set_=dict(data='updated value') + ... index_elements=["id"], set_=dict(data="updated value") ... ) >>> print(do_update_stmt) {printsql}INSERT INTO my_table (id, data) VALUES (?, ?) ON CONFLICT (id) DO UPDATE SET data = ?{stop} - >>> do_nothing_stmt = insert_stmt.on_conflict_do_nothing( - ... index_elements=['id'] - ... ) + >>> do_nothing_stmt = insert_stmt.on_conflict_do_nothing(index_elements=["id"]) >>> print(do_nothing_stmt) {printsql}INSERT INTO my_table (id, data) VALUES (?, ?) @@ -528,13 +552,13 @@ def set_sqlite_pragma(dbapi_connection, connection_record): .. sourcecode:: pycon+sql - >>> stmt = insert(my_table).values(user_email='a@b.com', data='inserted data') + >>> stmt = insert(my_table).values(user_email="a@b.com", data="inserted data") >>> do_update_stmt = stmt.on_conflict_do_update( ... index_elements=[my_table.c.user_email], - ... 
index_where=my_table.c.user_email.like('%@gmail.com'), - ... set_=dict(data=stmt.excluded.data) - ... ) + ... index_where=my_table.c.user_email.like("%@gmail.com"), + ... set_=dict(data=stmt.excluded.data), + ... ) >>> print(do_update_stmt) {printsql}INSERT INTO my_table (data, user_email) VALUES (?, ?) @@ -554,11 +578,10 @@ def set_sqlite_pragma(dbapi_connection, connection_record): .. sourcecode:: pycon+sql - >>> stmt = insert(my_table).values(id='some_id', data='inserted value') + >>> stmt = insert(my_table).values(id="some_id", data="inserted value") >>> do_update_stmt = stmt.on_conflict_do_update( - ... index_elements=['id'], - ... set_=dict(data='updated value') + ... index_elements=["id"], set_=dict(data="updated value") ... ) >>> print(do_update_stmt) @@ -586,14 +609,12 @@ def set_sqlite_pragma(dbapi_connection, connection_record): .. sourcecode:: pycon+sql >>> stmt = insert(my_table).values( - ... id='some_id', - ... data='inserted value', - ... author='jlh' + ... id="some_id", data="inserted value", author="jlh" ... ) >>> do_update_stmt = stmt.on_conflict_do_update( - ... index_elements=['id'], - ... set_=dict(data='updated value', author=stmt.excluded.author) + ... index_elements=["id"], + ... set_=dict(data="updated value", author=stmt.excluded.author), ... ) >>> print(do_update_stmt) @@ -610,15 +631,13 @@ def set_sqlite_pragma(dbapi_connection, connection_record): .. sourcecode:: pycon+sql >>> stmt = insert(my_table).values( - ... id='some_id', - ... data='inserted value', - ... author='jlh' + ... id="some_id", data="inserted value", author="jlh" ... ) >>> on_update_stmt = stmt.on_conflict_do_update( - ... index_elements=['id'], - ... set_=dict(data='updated value', author=stmt.excluded.author), - ... where=(my_table.c.status == 2) + ... index_elements=["id"], + ... set_=dict(data="updated value", author=stmt.excluded.author), + ... where=(my_table.c.status == 2), ... ) >>> print(on_update_stmt) {printsql}INSERT INTO my_table (id, data, author) VALUES (?, ?, ?) @@ -635,8 +654,8 @@ def set_sqlite_pragma(dbapi_connection, connection_record): .. sourcecode:: pycon+sql - >>> stmt = insert(my_table).values(id='some_id', data='inserted value') - >>> stmt = stmt.on_conflict_do_nothing(index_elements=['id']) + >>> stmt = insert(my_table).values(id="some_id", data="inserted value") + >>> stmt = stmt.on_conflict_do_nothing(index_elements=["id"]) >>> print(stmt) {printsql}INSERT INTO my_table (id, data) VALUES (?, ?) ON CONFLICT (id) DO NOTHING @@ -647,7 +666,7 @@ def set_sqlite_pragma(dbapi_connection, connection_record): .. sourcecode:: pycon+sql - >>> stmt = insert(my_table).values(id='some_id', data='inserted value') + >>> stmt = insert(my_table).values(id="some_id", data="inserted value") >>> stmt = stmt.on_conflict_do_nothing() >>> print(stmt) {printsql}INSERT INTO my_table (id, data) VALUES (?, ?) ON CONFLICT DO NOTHING @@ -707,11 +726,16 @@ def set_sqlite_pragma(dbapi_connection, connection_record): A partial index, e.g. one which uses a WHERE clause, can be specified with the DDL system using the argument ``sqlite_where``:: - tbl = Table('testtbl', m, Column('data', Integer)) - idx = Index('test_idx1', tbl.c.data, - sqlite_where=and_(tbl.c.data > 5, tbl.c.data < 10)) + tbl = Table("testtbl", m, Column("data", Integer)) + idx = Index( + "test_idx1", + tbl.c.data, + sqlite_where=and_(tbl.c.data > 5, tbl.c.data < 10), + ) + +The index will be rendered at create time as: -The index will be rendered at create time as:: +.. 
sourcecode:: sql CREATE INDEX test_idx1 ON testtbl (data) WHERE data > 5 AND data < 10 @@ -731,7 +755,11 @@ def set_sqlite_pragma(dbapi_connection, connection_record): import sqlite3 - assert sqlite3.sqlite_version_info < (3, 10, 0), "bug is fixed in this version" + assert sqlite3.sqlite_version_info < ( + 3, + 10, + 0, + ), "bug is fixed in this version" conn = sqlite3.connect(":memory:") cursor = conn.cursor() @@ -741,17 +769,22 @@ def set_sqlite_pragma(dbapi_connection, connection_record): cursor.execute("insert into x (a, b) values (2, 2)") cursor.execute("select x.a, x.b from x") - assert [c[0] for c in cursor.description] == ['a', 'b'] + assert [c[0] for c in cursor.description] == ["a", "b"] - cursor.execute(''' + cursor.execute( + """ select x.a, x.b from x where a=1 union select x.a, x.b from x where a=2 - ''') - assert [c[0] for c in cursor.description] == ['a', 'b'], \ - [c[0] for c in cursor.description] + """ + ) + assert [c[0] for c in cursor.description] == ["a", "b"], [ + c[0] for c in cursor.description + ] -The second assertion fails:: +The second assertion fails: + +.. sourcecode:: text Traceback (most recent call last): File "test.py", line 19, in @@ -779,11 +812,13 @@ def set_sqlite_pragma(dbapi_connection, connection_record): result = conn.exec_driver_sql("select x.a, x.b from x") assert result.keys() == ["a", "b"] - result = conn.exec_driver_sql(''' + result = conn.exec_driver_sql( + """ select x.a, x.b from x where a=1 union select x.a, x.b from x where a=2 - ''') + """ + ) assert result.keys() == ["a", "b"] Note that above, even though SQLAlchemy filters out the dots, *both @@ -807,16 +842,20 @@ def set_sqlite_pragma(dbapi_connection, connection_record): the ``sqlite_raw_colnames`` execution option may be provided, either on a per-:class:`_engine.Connection` basis:: - result = conn.execution_options(sqlite_raw_colnames=True).exec_driver_sql(''' + result = conn.execution_options(sqlite_raw_colnames=True).exec_driver_sql( + """ select x.a, x.b from x where a=1 union select x.a, x.b from x where a=2 - ''') + """ + ) assert result.keys() == ["x.a", "x.b"] or on a per-:class:`_engine.Engine` basis:: - engine = create_engine("sqlite://", execution_options={"sqlite_raw_colnames": True}) + engine = create_engine( + "sqlite://", execution_options={"sqlite_raw_colnames": True} + ) When using the per-:class:`_engine.Engine` execution option, note that **Core and ORM queries that use UNION may not function properly**. @@ -865,7 +904,7 @@ def set_sqlite_pragma(dbapi_connection, connection_record): `SQLite Internal Schema Objects `_ - in the SQLite documentation. -""" # noqa +''' # noqa from __future__ import annotations import datetime @@ -979,7 +1018,9 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime): "%(year)04d-%(month)02d-%(day)02d %(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d" - e.g.:: + e.g.: + + .. 
sourcecode:: text 2021-03-15 12:05:57.105542 @@ -995,9 +1036,11 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime): import re from sqlalchemy.dialects.sqlite import DATETIME - dt = DATETIME(storage_format="%(year)04d/%(month)02d/%(day)02d " - "%(hour)02d:%(minute)02d:%(second)02d", - regexp=r"(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)" + dt = DATETIME( + storage_format=( + "%(year)04d/%(month)02d/%(day)02d %(hour)02d:%(minute)02d:%(second)02d" + ), + regexp=r"(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)", ) :param storage_format: format string which will be applied to the dict @@ -1087,7 +1130,9 @@ class DATE(_DateTimeMixin, sqltypes.Date): "%(year)04d-%(month)02d-%(day)02d" - e.g.:: + e.g.: + + .. sourcecode:: text 2011-03-15 @@ -1105,9 +1150,9 @@ class DATE(_DateTimeMixin, sqltypes.Date): from sqlalchemy.dialects.sqlite import DATE d = DATE( - storage_format="%(month)02d/%(day)02d/%(year)04d", - regexp=re.compile("(?P\d+)/(?P\d+)/(?P\d+)") - ) + storage_format="%(month)02d/%(day)02d/%(year)04d", + regexp=re.compile("(?P\d+)/(?P\d+)/(?P\d+)"), + ) :param storage_format: format string which will be applied to the dict with keys year, month, and day. @@ -1161,7 +1206,9 @@ class TIME(_DateTimeMixin, sqltypes.Time): "%(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d" - e.g.:: + e.g.: + + .. sourcecode:: text 12:05:57.10558 @@ -1177,9 +1224,9 @@ class TIME(_DateTimeMixin, sqltypes.Time): import re from sqlalchemy.dialects.sqlite import TIME - t = TIME(storage_format="%(hour)02d-%(minute)02d-" - "%(second)02d-%(microsecond)06d", - regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?") + t = TIME( + storage_format="%(hour)02d-%(minute)02d-%(second)02d-%(microsecond)06d", + regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?"), ) :param storage_format: format string which will be applied to the dict diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py index 388a4dff817..58471ac90ec 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py @@ -39,7 +39,7 @@ e = create_engine( "sqlite+pysqlcipher://:password@/dbname.db", - module=sqlcipher_compatible_driver + module=sqlcipher_compatible_driver, ) These drivers make use of the SQLCipher engine. This system essentially @@ -55,12 +55,12 @@ of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the "password" field is now accepted, which should contain a passphrase:: - e = create_engine('sqlite+pysqlcipher://:testing@/foo.db') + e = create_engine("sqlite+pysqlcipher://:testing@/foo.db") For an absolute file path, two leading slashes should be used for the database name:: - e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db') + e = create_engine("sqlite+pysqlcipher://:testing@//path/to/foo.db") A selection of additional encryption-related pragmas supported by SQLCipher as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be passed @@ -68,7 +68,9 @@ new connection. Currently, ``cipher``, ``kdf_iter`` ``cipher_page_size`` and ``cipher_use_hmac`` are supported:: - e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000') + e = create_engine( + "sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000" + ) .. 
warning:: Previous versions of sqlalchemy did not take into consideration the encryption-related pragmas passed in the url string, that were silently diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index 69a902c32ab..25e6da0f521 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -28,7 +28,9 @@ --------------- The file specification for the SQLite database is taken as the "database" -portion of the URL. Note that the format of a SQLAlchemy url is:: +portion of the URL. Note that the format of a SQLAlchemy url is: + +.. sourcecode:: text driver://user:pass@host/database @@ -37,28 +39,28 @@ looks like:: # relative path - e = create_engine('sqlite:///path/to/database.db') + e = create_engine("sqlite:///path/to/database.db") An absolute path, which is denoted by starting with a slash, means you need **four** slashes:: # absolute path - e = create_engine('sqlite:////path/to/database.db') + e = create_engine("sqlite:////path/to/database.db") To use a Windows path, regular drive specifications and backslashes can be used. Double backslashes are probably needed:: # absolute path on Windows - e = create_engine('sqlite:///C:\\path\\to\\database.db') + e = create_engine("sqlite:///C:\\path\\to\\database.db") To use sqlite ``:memory:`` database specify it as the filename using ``sqlite:///:memory:``. It's also the default if no filepath is present, specifying only ``sqlite://`` and nothing else:: # in-memory database (note three slashes) - e = create_engine('sqlite:///:memory:') + e = create_engine("sqlite:///:memory:") # also in-memory database - e2 = create_engine('sqlite://') + e2 = create_engine("sqlite://") .. _pysqlite_uri_connections: @@ -98,7 +100,9 @@ sqlite3.connect( "file:path/to/database?mode=ro&nolock=1", - check_same_thread=True, timeout=10, uri=True + check_same_thread=True, + timeout=10, + uri=True, ) Regarding future parameters added to either the Python or native drivers. new @@ -144,8 +148,11 @@ def regexp(a, b): return re.search(a, b) is not None + sqlite_connection.create_function( - "regexp", 2, regexp, + "regexp", + 2, + regexp, ) There is currently no support for regular expression flags as a separate @@ -186,10 +193,12 @@ def regexp(a, b): nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES can be forced if one configures "native_datetime=True" on create_engine():: - engine = create_engine('sqlite://', - connect_args={'detect_types': - sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES}, - native_datetime=True + engine = create_engine( + "sqlite://", + connect_args={ + "detect_types": sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES + }, + native_datetime=True, ) With this flag enabled, the DATE and TIMESTAMP types (but note - not the @@ -244,6 +253,7 @@ def regexp(a, b): parameter:: from sqlalchemy import NullPool + engine = create_engine("sqlite:///myfile.db", poolclass=NullPool) It's been observed that the :class:`.NullPool` implementation incurs an @@ -263,9 +273,12 @@ def regexp(a, b): as ``False``:: from sqlalchemy.pool import StaticPool - engine = create_engine('sqlite://', - connect_args={'check_same_thread':False}, - poolclass=StaticPool) + + engine = create_engine( + "sqlite://", + connect_args={"check_same_thread": False}, + poolclass=StaticPool, + ) Note that using a ``:memory:`` database in multiple threads requires a recent version of SQLite. 
@@ -284,14 +297,14 @@ def regexp(a, b): # maintain the same connection per thread from sqlalchemy.pool import SingletonThreadPool - engine = create_engine('sqlite:///mydb.db', - poolclass=SingletonThreadPool) + + engine = create_engine("sqlite:///mydb.db", poolclass=SingletonThreadPool) # maintain the same connection across all threads from sqlalchemy.pool import StaticPool - engine = create_engine('sqlite:///mydb.db', - poolclass=StaticPool) + + engine = create_engine("sqlite:///mydb.db", poolclass=StaticPool) Note that :class:`.SingletonThreadPool` should be configured for the number of threads that are to be used; beyond that number, connections will be @@ -320,13 +333,14 @@ def regexp(a, b): from sqlalchemy import String from sqlalchemy import TypeDecorator + class MixedBinary(TypeDecorator): impl = String cache_ok = True def process_result_value(self, value, dialect): if isinstance(value, str): - value = bytes(value, 'utf-8') + value = bytes(value, "utf-8") elif value is not None: value = bytes(value) @@ -367,12 +381,14 @@ def process_result_value(self, value, dialect): engine = create_engine("sqlite:///myfile.db") + @event.listens_for(engine, "connect") def do_connect(dbapi_connection, connection_record): # disable pysqlite's emitting of the BEGIN statement entirely. # also stops it from emitting COMMIT before any DDL. dbapi_connection.isolation_level = None + @event.listens_for(engine, "begin") def do_begin(conn): # emit our own BEGIN @@ -442,7 +458,6 @@ def connect(conn, rec): with engine.connect() as conn: print(conn.scalar(text("SELECT UDF()"))) - """ # noqa import math diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 4fd4e30896e..f9853ee4892 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -800,7 +800,6 @@ def begin(self) -> RootTransaction: with conn.begin() as trans: conn.execute(table.insert(), {"username": "sandy"}) - The returned object is an instance of :class:`_engine.RootTransaction`. This object represents the "scope" of the transaction, which completes when either the :meth:`_engine.Transaction.rollback` @@ -906,7 +905,7 @@ def begin_nested(self) -> NestedTransaction: trans.rollback() # rollback to savepoint # outer transaction continues - connection.execute( ... ) + connection.execute(...) If :meth:`_engine.Connection.begin_nested` is called without first calling :meth:`_engine.Connection.begin` or @@ -916,11 +915,11 @@ def begin_nested(self) -> NestedTransaction: with engine.connect() as connection: # begin() wasn't called - with connection.begin_nested(): will auto-"begin()" first - connection.execute( ... ) + with connection.begin_nested(): # will auto-"begin()" first + connection.execute(...) # savepoint is released - connection.execute( ... ) + connection.execute(...) # explicitly commit outer transaction connection.commit() @@ -1737,21 +1736,20 @@ def exec_driver_sql( conn.exec_driver_sql( "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)", - [{"id":1, "value":"v1"}, {"id":2, "value":"v2"}] + [{"id": 1, "value": "v1"}, {"id": 2, "value": "v2"}], ) Single dictionary:: conn.exec_driver_sql( "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)", - dict(id=1, value="v1") + dict(id=1, value="v1"), ) Single tuple:: conn.exec_driver_sql( - "INSERT INTO table (id, value) VALUES (?, ?)", - (1, 'v1') + "INSERT INTO table (id, value) VALUES (?, ?)", (1, "v1") ) .. 
note:: The :meth:`_engine.Connection.exec_driver_sql` method does @@ -2513,6 +2511,7 @@ class Transaction(TransactionalContext): :class:`_engine.Connection`:: from sqlalchemy import create_engine + engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test") connection = engine.connect() trans = connection.begin() @@ -3090,10 +3089,10 @@ def execution_options(self, **opt: Any) -> OptionEngine: shards = {"default": "base", "shard_1": "db1", "shard_2": "db2"} + @event.listens_for(Engine, "before_cursor_execute") - def _switch_shard(conn, cursor, stmt, - params, context, executemany): - shard_id = conn.get_execution_options().get('shard_id', "default") + def _switch_shard(conn, cursor, stmt, params, context, executemany): + shard_id = conn.get_execution_options().get("shard_id", "default") current_shard = conn.info.get("current_shard", None) if current_shard != shard_id: @@ -3219,9 +3218,7 @@ def begin(self) -> Iterator[Connection]: E.g.:: with engine.begin() as conn: - conn.execute( - text("insert into table (x, y, z) values (1, 2, 3)") - ) + conn.execute(text("insert into table (x, y, z) values (1, 2, 3)")) conn.execute(text("my_special_procedure(5)")) Upon successful operation, the :class:`.Transaction` @@ -3237,7 +3234,7 @@ def begin(self) -> Iterator[Connection]: :meth:`_engine.Connection.begin` - start a :class:`.Transaction` for a particular :class:`_engine.Connection`. - """ + """ # noqa: E501 with self.connect() as conn: with conn.begin(): yield conn diff --git a/lib/sqlalchemy/engine/create.py b/lib/sqlalchemy/engine/create.py index 74a3cf801e3..85e0b12474e 100644 --- a/lib/sqlalchemy/engine/create.py +++ b/lib/sqlalchemy/engine/create.py @@ -133,8 +133,11 @@ def create_engine(url: Union[str, _url.URL], **kwargs: Any) -> Engine: and its underlying :class:`.Dialect` and :class:`_pool.Pool` constructs:: - engine = create_engine("mysql+mysqldb://scott:tiger@hostname/dbname", - pool_recycle=3600, echo=True) + engine = create_engine( + "mysql+mysqldb://scott:tiger@hostname/dbname", + pool_recycle=3600, + echo=True, + ) The string form of the URL is ``dialect[+driver]://user:password@host/dbname[?key=value..]``, where diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index b83cb451543..6eb64da839e 100644 --- a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ -1162,7 +1162,7 @@ class BufferedRowCursorFetchStrategy(CursorFetchStrategy): result = conn.execution_options( stream_results=True, max_row_buffer=50 - ).execute(text("select * from table")) + ).execute(text("select * from table")) .. versionadded:: 1.4 ``max_row_buffer`` may now exceed 1000 rows. @@ -1756,11 +1756,9 @@ def splice_horizontally(self, other): r1 = connection.execute( users.insert().returning( - users.c.user_name, - users.c.user_id, - sort_by_parameter_order=True + users.c.user_name, users.c.user_id, sort_by_parameter_order=True ), - user_values + user_values, ) r2 = connection.execute( @@ -1768,19 +1766,16 @@ def splice_horizontally(self, other): addresses.c.address_id, addresses.c.address, addresses.c.user_id, - sort_by_parameter_order=True + sort_by_parameter_order=True, ), - address_values + address_values, ) rows = r1.splice_horizontally(r2).all() - assert ( - rows == - [ - ("john", 1, 1, "foo@bar.com", 1), - ("jack", 2, 2, "bar@bat.com", 2), - ] - ) + assert rows == [ + ("john", 1, 1, "foo@bar.com", 1), + ("jack", 2, 2, "bar@bat.com", 2), + ] .. 
versionadded:: 2.0 @@ -1789,7 +1784,7 @@ def splice_horizontally(self, other): :meth:`.CursorResult.splice_vertically` - """ + """ # noqa: E501 clone = self._generate() total_rows = [ diff --git a/lib/sqlalchemy/engine/events.py b/lib/sqlalchemy/engine/events.py index cbc08063d55..e1e9b7d11a8 100644 --- a/lib/sqlalchemy/engine/events.py +++ b/lib/sqlalchemy/engine/events.py @@ -54,19 +54,24 @@ class or instance, such as an :class:`_engine.Engine`, e.g.:: from sqlalchemy import event, create_engine - def before_cursor_execute(conn, cursor, statement, parameters, context, - executemany): + + def before_cursor_execute( + conn, cursor, statement, parameters, context, executemany + ): log.info("Received statement: %s", statement) - engine = create_engine('postgresql+psycopg2://scott:tiger@localhost/test') + + engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test") event.listen(engine, "before_cursor_execute", before_cursor_execute) or with a specific :class:`_engine.Connection`:: with engine.begin() as conn: - @event.listens_for(conn, 'before_cursor_execute') - def before_cursor_execute(conn, cursor, statement, parameters, - context, executemany): + + @event.listens_for(conn, "before_cursor_execute") + def before_cursor_execute( + conn, cursor, statement, parameters, context, executemany + ): log.info("Received statement: %s", statement) When the methods are called with a `statement` parameter, such as in @@ -84,9 +89,11 @@ def before_cursor_execute(conn, cursor, statement, parameters, from sqlalchemy.engine import Engine from sqlalchemy import event + @event.listens_for(Engine, "before_cursor_execute", retval=True) - def comment_sql_calls(conn, cursor, statement, parameters, - context, executemany): + def comment_sql_calls( + conn, cursor, statement, parameters, context, executemany + ): statement = statement + " -- some comment" return statement, parameters @@ -316,8 +323,9 @@ def before_cursor_execute( returned as a two-tuple in this case:: @event.listens_for(Engine, "before_cursor_execute", retval=True) - def before_cursor_execute(conn, cursor, statement, - parameters, context, executemany): + def before_cursor_execute( + conn, cursor, statement, parameters, context, executemany + ): # do something with statement, parameters return statement, parameters @@ -766,9 +774,9 @@ def handle_error( @event.listens_for(Engine, "handle_error") def handle_exception(context): - if isinstance(context.original_exception, - psycopg2.OperationalError) and \ - "failed" in str(context.original_exception): + if isinstance( + context.original_exception, psycopg2.OperationalError + ) and "failed" in str(context.original_exception): raise MySpecialException("failed operation") .. 
warning:: Because the @@ -791,10 +799,13 @@ def handle_exception(context): @event.listens_for(Engine, "handle_error", retval=True) def handle_exception(context): - if context.chained_exception is not None and \ - "special" in context.chained_exception.message: - return MySpecialException("failed", - cause=context.chained_exception) + if ( + context.chained_exception is not None + and "special" in context.chained_exception.message + ): + return MySpecialException( + "failed", cause=context.chained_exception + ) Handlers that return ``None`` may be used within the chain; when a handler returns ``None``, the previous exception instance, @@ -836,7 +847,8 @@ def do_connect( e = create_engine("postgresql+psycopg2://user@host/dbname") - @event.listens_for(e, 'do_connect') + + @event.listens_for(e, "do_connect") def receive_do_connect(dialect, conn_rec, cargs, cparams): cparams["password"] = "some_password" @@ -845,7 +857,8 @@ def receive_do_connect(dialect, conn_rec, cargs, cparams): e = create_engine("postgresql+psycopg2://user@host/dbname") - @event.listens_for(e, 'do_connect') + + @event.listens_for(e, "do_connect") def receive_do_connect(dialect, conn_rec, cargs, cparams): return psycopg2.connect(*cargs, **cparams) diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 861c5deae3f..a14c0708031 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -1060,11 +1060,7 @@ def loaded_dbapi(self) -> ModuleType: To implement, establish as a series of tuples, as in:: construct_arguments = [ - (schema.Index, { - "using": False, - "where": None, - "ops": None - }) + (schema.Index, {"using": False, "where": None, "ops": None}), ] If the above construct is established on the PostgreSQL dialect, @@ -2685,11 +2681,14 @@ class CreateEnginePlugin: from sqlalchemy.engine import CreateEnginePlugin from sqlalchemy import event + class LogCursorEventsPlugin(CreateEnginePlugin): def __init__(self, url, kwargs): # consume the parameter "log_cursor_logging_name" from the # URL query - logging_name = url.query.get("log_cursor_logging_name", "log_cursor") + logging_name = url.query.get( + "log_cursor_logging_name", "log_cursor" + ) self.log = logging.getLogger(logging_name) @@ -2701,7 +2700,6 @@ def engine_created(self, engine): "attach an event listener after the new Engine is constructed" event.listen(engine, "before_cursor_execute", self._log_event) - def _log_event( self, conn, @@ -2709,19 +2707,19 @@ def _log_event( statement, parameters, context, - executemany): + executemany, + ): self.log.info("Plugin logged cursor event: %s", statement) - - Plugins are registered using entry points in a similar way as that of dialects:: - entry_points={ - 'sqlalchemy.plugins': [ - 'log_cursor_plugin = myapp.plugins:LogCursorEventsPlugin' + entry_points = { + "sqlalchemy.plugins": [ + "log_cursor_plugin = myapp.plugins:LogCursorEventsPlugin" ] + } A plugin that uses the above names would be invoked from a database URL as in:: @@ -2738,15 +2736,16 @@ def _log_event( in the URL:: engine = create_engine( - "mysql+pymysql://scott:tiger@localhost/test?" - "plugin=plugin_one&plugin=plugin_twp&plugin=plugin_three") + "mysql+pymysql://scott:tiger@localhost/test?" 
+ "plugin=plugin_one&plugin=plugin_twp&plugin=plugin_three" + ) The plugin names may also be passed directly to :func:`_sa.create_engine` using the :paramref:`_sa.create_engine.plugins` argument:: engine = create_engine( - "mysql+pymysql://scott:tiger@localhost/test", - plugins=["myplugin"]) + "mysql+pymysql://scott:tiger@localhost/test", plugins=["myplugin"] + ) .. versionadded:: 1.2.3 plugin names can also be specified to :func:`_sa.create_engine` as a list @@ -2768,9 +2767,9 @@ def _log_event( class MyPlugin(CreateEnginePlugin): def __init__(self, url, kwargs): - self.my_argument_one = url.query['my_argument_one'] - self.my_argument_two = url.query['my_argument_two'] - self.my_argument_three = kwargs.pop('my_argument_three', None) + self.my_argument_one = url.query["my_argument_one"] + self.my_argument_two = url.query["my_argument_two"] + self.my_argument_three = kwargs.pop("my_argument_three", None) def update_url(self, url): return url.difference_update_query( @@ -2783,9 +2782,9 @@ def update_url(self, url): from sqlalchemy import create_engine engine = create_engine( - "mysql+pymysql://scott:tiger@localhost/test?" - "plugin=myplugin&my_argument_one=foo&my_argument_two=bar", - my_argument_three='bat' + "mysql+pymysql://scott:tiger@localhost/test?" + "plugin=myplugin&my_argument_one=foo&my_argument_two=bar", + my_argument_three="bat", ) .. versionchanged:: 1.4 @@ -2804,15 +2803,15 @@ class MyPlugin(CreateEnginePlugin): def __init__(self, url, kwargs): if hasattr(CreateEnginePlugin, "update_url"): # detect the 1.4 API - self.my_argument_one = url.query['my_argument_one'] - self.my_argument_two = url.query['my_argument_two'] + self.my_argument_one = url.query["my_argument_one"] + self.my_argument_two = url.query["my_argument_two"] else: # detect the 1.3 and earlier API - mutate the # URL directly - self.my_argument_one = url.query.pop('my_argument_one') - self.my_argument_two = url.query.pop('my_argument_two') + self.my_argument_one = url.query.pop("my_argument_one") + self.my_argument_two = url.query.pop("my_argument_two") - self.my_argument_three = kwargs.pop('my_argument_three', None) + self.my_argument_three = kwargs.pop("my_argument_three", None) def update_url(self, url): # this method is only called in the 1.4 version @@ -3383,11 +3382,14 @@ def run_async(self, fn: Callable[[Any], Awaitable[_T]]) -> _T: engine = create_async_engine(...) + @event.listens_for(engine.sync_engine, "connect") - def register_custom_types(dbapi_connection, ...): + def register_custom_types( + dbapi_connection, # ... + ): dbapi_connection.run_async( lambda connection: connection.set_type_codec( - 'MyCustomType', encoder, decoder, ... + "MyCustomType", encoder, decoder, ... 
) ) diff --git a/lib/sqlalchemy/engine/mock.py b/lib/sqlalchemy/engine/mock.py index c9fa5eb31a7..fc59521cd26 100644 --- a/lib/sqlalchemy/engine/mock.py +++ b/lib/sqlalchemy/engine/mock.py @@ -90,10 +90,12 @@ def create_mock_engine( from sqlalchemy import create_mock_engine + def dump(sql, *multiparams, **params): print(sql.compile(dialect=engine.dialect)) - engine = create_mock_engine('postgresql+psycopg2://', dump) + + engine = create_mock_engine("postgresql+psycopg2://", dump) metadata.create_all(engine, checkfirst=False) :param url: A string URL which typically needs to contain only the diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 7e2586e1e18..1e03b9f2fec 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -193,7 +193,8 @@ class Inspector(inspection.Inspectable["Inspector"]): or a :class:`_engine.Connection`:: from sqlalchemy import inspect, create_engine - engine = create_engine('...') + + engine = create_engine("...") insp = inspect(engine) Where above, the :class:`~sqlalchemy.engine.interfaces.Dialect` associated @@ -1492,9 +1493,9 @@ def reflect_table( from sqlalchemy import create_engine, MetaData, Table from sqlalchemy import inspect - engine = create_engine('...') + engine = create_engine("...") meta = MetaData() - user_table = Table('user', meta) + user_table = Table("user", meta) insp = inspect(engine) insp.reflect_table(user_table, None) diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 5ff41f1cbda..60a8b6446f9 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -1099,17 +1099,15 @@ def columns(self, *col_expressions: _KeyIndexType) -> Self: statement = select(table.c.x, table.c.y, table.c.z) result = connection.execute(statement) - for z, y in result.columns('z', 'y'): - # ... - + for z, y in result.columns("z", "y"): + ... Example of using the column objects from the statement itself:: for z, y in result.columns( - statement.selected_columns.c.z, - statement.selected_columns.c.y + statement.selected_columns.c.z, statement.selected_columns.c.y ): - # ... + ... .. versionadded:: 1.4 diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py index bcaffee44f2..de544712b74 100644 --- a/lib/sqlalchemy/engine/row.py +++ b/lib/sqlalchemy/engine/row.py @@ -340,12 +340,11 @@ class RowMapping(BaseRow, typing.Mapping["_KeyType", Any]): as iteration of keys, values, and items:: for row in result: - if 'a' in row._mapping: - print("Column 'a': %s" % row._mapping['a']) + if "a" in row._mapping: + print("Column 'a': %s" % row._mapping["a"]) print("Column b: %s" % row._mapping[table.c.b]) - .. versionadded:: 1.4 The :class:`.RowMapping` object replaces the mapping-like access previously provided by a database result row, which now seeks to behave mostly like a named tuple. diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index 1eeb73a2368..f87e3671cc4 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -122,7 +122,9 @@ class URL(NamedTuple): for keys and either strings or tuples of strings for values, e.g.:: >>> from sqlalchemy.engine import make_url - >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt") + >>> url = make_url( + ... "postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt" + ... 
) >>> url.query immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': '/path/to/crt'}) @@ -371,7 +373,9 @@ def update_query_string( >>> from sqlalchemy.engine import make_url >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname") - >>> url = url.update_query_string("alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt") + >>> url = url.update_query_string( + ... "alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt" + ... ) >>> str(url) 'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt' @@ -407,7 +411,13 @@ def update_query_pairs( >>> from sqlalchemy.engine import make_url >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname") - >>> url = url.update_query_pairs([("alt_host", "host1"), ("alt_host", "host2"), ("ssl_cipher", "/path/to/crt")]) + >>> url = url.update_query_pairs( + ... [ + ... ("alt_host", "host1"), + ... ("alt_host", "host2"), + ... ("ssl_cipher", "/path/to/crt"), + ... ] + ... ) >>> str(url) 'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt' @@ -489,7 +499,9 @@ def update_query_dict( >>> from sqlalchemy.engine import make_url >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname") - >>> url = url.update_query_dict({"alt_host": ["host1", "host2"], "ssl_cipher": "/path/to/crt"}) + >>> url = url.update_query_dict( + ... {"alt_host": ["host1", "host2"], "ssl_cipher": "/path/to/crt"} + ... ) >>> str(url) 'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt' @@ -527,14 +539,14 @@ def difference_update_query(self, names: Iterable[str]) -> URL: E.g.:: - url = url.difference_update_query(['foo', 'bar']) + url = url.difference_update_query(["foo", "bar"]) Equivalent to using :meth:`_engine.URL.set` as follows:: url = url.set( query={ key: url.query[key] - for key in set(url.query).difference(['foo', 'bar']) + for key in set(url.query).difference(["foo", "bar"]) } ) @@ -583,7 +595,9 @@ def normalized_query(self) -> Mapping[str, Sequence[str]]: >>> from sqlalchemy.engine import make_url - >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt") + >>> url = make_url( + ... "postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt" + ... 
) >>> url.query immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': '/path/to/crt'}) >>> url.normalized_query diff --git a/lib/sqlalchemy/event/api.py b/lib/sqlalchemy/event/api.py index 230ec698667..f528d74f69f 100644 --- a/lib/sqlalchemy/event/api.py +++ b/lib/sqlalchemy/event/api.py @@ -51,15 +51,14 @@ def listen( from sqlalchemy import event from sqlalchemy.schema import UniqueConstraint + def unique_constraint_name(const, table): - const.name = "uq_%s_%s" % ( - table.name, - list(const.columns)[0].name - ) + const.name = "uq_%s_%s" % (table.name, list(const.columns)[0].name) + + event.listen( - UniqueConstraint, - "after_parent_attach", - unique_constraint_name) + UniqueConstraint, "after_parent_attach", unique_constraint_name + ) :param bool insert: The default behavior for event handlers is to append the decorated user defined function to an internal list of registered @@ -139,12 +138,10 @@ def listens_for( from sqlalchemy import event from sqlalchemy.schema import UniqueConstraint + @event.listens_for(UniqueConstraint, "after_parent_attach") def unique_constraint_name(const, table): - const.name = "uq_%s_%s" % ( - table.name, - list(const.columns)[0].name - ) + const.name = "uq_%s_%s" % (table.name, list(const.columns)[0].name) A given function can also be invoked for only the first invocation of the event using the ``once`` argument:: @@ -153,7 +150,6 @@ def unique_constraint_name(const, table): def on_config(): do_config() - .. warning:: The ``once`` argument does not imply automatic de-registration of the listener function after it has been invoked a first time; a listener entry will remain associated with the target object. @@ -189,6 +185,7 @@ def remove(target: Any, identifier: str, fn: Callable[..., Any]) -> None: def my_listener_function(*arg): pass + # ... it's removed like this event.remove(SomeMappedClass, "before_insert", my_listener_function) diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index 7d7eff3606c..ced87df4b2d 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -432,14 +432,16 @@ class DontWrapMixin: from sqlalchemy.exc import DontWrapMixin + class MyCustomException(Exception, DontWrapMixin): pass + class MySpecialType(TypeDecorator): impl = String def process_bind_param(self, value, dialect): - if value == 'invalid': + if value == "invalid": raise MyCustomException("invalid!") """ diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index 99cb266e324..a68c298542e 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -458,7 +458,7 @@ def for_class( class User(Base): # ... - keywords = association_proxy('kws', 'keyword') + keywords = association_proxy("kws", "keyword") If we access this :class:`.AssociationProxy` from :attr:`_orm.Mapper.all_orm_descriptors`, and we want to view the @@ -778,9 +778,9 @@ def attr(self) -> Tuple[SQLORMOperations[Any], SQLORMOperations[_T]]: :attr:`.AssociationProxyInstance.remote_attr` attributes separately:: stmt = ( - select(Parent). - join(Parent.proxied.local_attr). 
- join(Parent.proxied.remote_attr) + select(Parent) + .join(Parent.proxied.local_attr) + .join(Parent.proxied.remote_attr) ) A future release may seek to provide a more succinct join pattern diff --git a/lib/sqlalchemy/ext/asyncio/base.py b/lib/sqlalchemy/ext/asyncio/base.py index 9899364d1ff..e534424c0f4 100644 --- a/lib/sqlalchemy/ext/asyncio/base.py +++ b/lib/sqlalchemy/ext/asyncio/base.py @@ -224,7 +224,9 @@ def asyncstartablecontext( ``@contextlib.asynccontextmanager`` supports, and the usage pattern is different as well. - Typical usage:: + Typical usage: + + .. sourcecode:: text @asyncstartablecontext async def some_async_generator(): diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index 8fc8e96db06..4f476483827 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -198,6 +198,7 @@ class AsyncConnection( method of :class:`_asyncio.AsyncEngine`:: from sqlalchemy.ext.asyncio import create_async_engine + engine = create_async_engine("postgresql+asyncpg://user:pass@host/dbname") async with engine.connect() as conn: @@ -544,7 +545,7 @@ async def stream( E.g.:: - result = await conn.stream(stmt): + result = await conn.stream(stmt) async for row in result: print(f"{row}") @@ -821,7 +822,7 @@ async def run_sync( *arg: _P.args, **kw: _P.kwargs, ) -> _T: - """Invoke the given synchronous (i.e. not async) callable, + '''Invoke the given synchronous (i.e. not async) callable, passing a synchronous-style :class:`_engine.Connection` as the first argument. @@ -831,26 +832,26 @@ async def run_sync( E.g.:: def do_something_with_core(conn: Connection, arg1: int, arg2: str) -> str: - '''A synchronous function that does not require awaiting + """A synchronous function that does not require awaiting :param conn: a Core SQLAlchemy Connection, used synchronously :return: an optional return value is supported - ''' - conn.execute( - some_table.insert().values(int_col=arg1, str_col=arg2) - ) + """ + conn.execute(some_table.insert().values(int_col=arg1, str_col=arg2)) return "success" async def do_something_async(async_engine: AsyncEngine) -> None: - '''an async function that uses awaiting''' + """an async function that uses awaiting""" async with async_engine.begin() as async_conn: # run do_something_with_core() with a sync-style # Connection, proxied into an awaitable - return_code = await async_conn.run_sync(do_something_with_core, 5, "strval") + return_code = await async_conn.run_sync( + do_something_with_core, 5, "strval" + ) print(return_code) This method maintains the asyncio event loop all the way through @@ -881,7 +882,7 @@ async def do_something_async(async_engine: AsyncEngine) -> None: :ref:`session_run_sync` - """ # noqa: E501 + ''' # noqa: E501 return await greenlet_spawn( fn, self._proxied, *arg, _require_await=False, **kw @@ -1000,6 +1001,7 @@ class AsyncEngine(ProxyComparable[Engine], AsyncConnectable): :func:`_asyncio.create_async_engine` function:: from sqlalchemy.ext.asyncio import create_async_engine + engine = create_async_engine("postgresql+asyncpg://user:pass@host/dbname") .. 
versionadded:: 1.4 @@ -1056,7 +1058,6 @@ async def begin(self) -> AsyncIterator[AsyncConnection]: ) await conn.execute(text("my_special_procedure(5)")) - """ conn = self.connect() diff --git a/lib/sqlalchemy/ext/asyncio/scoping.py b/lib/sqlalchemy/ext/asyncio/scoping.py index 162f34eabfc..35e68e522f2 100644 --- a/lib/sqlalchemy/ext/asyncio/scoping.py +++ b/lib/sqlalchemy/ext/asyncio/scoping.py @@ -364,7 +364,7 @@ def begin(self) -> AsyncSessionTransaction: object is entered:: async with async_session.begin(): - # .. ORM transaction is begun + ... # ORM transaction is begun Note that database IO will not normally occur when the session-level transaction is begun, as database transactions begin on an @@ -808,28 +808,28 @@ def get_bind( # construct async engines w/ async drivers engines = { - 'leader':create_async_engine("sqlite+aiosqlite:///leader.db"), - 'other':create_async_engine("sqlite+aiosqlite:///other.db"), - 'follower1':create_async_engine("sqlite+aiosqlite:///follower1.db"), - 'follower2':create_async_engine("sqlite+aiosqlite:///follower2.db"), + "leader": create_async_engine("sqlite+aiosqlite:///leader.db"), + "other": create_async_engine("sqlite+aiosqlite:///other.db"), + "follower1": create_async_engine("sqlite+aiosqlite:///follower1.db"), + "follower2": create_async_engine("sqlite+aiosqlite:///follower2.db"), } + class RoutingSession(Session): def get_bind(self, mapper=None, clause=None, **kw): # within get_bind(), return sync engines if mapper and issubclass(mapper.class_, MyOtherClass): - return engines['other'].sync_engine + return engines["other"].sync_engine elif self._flushing or isinstance(clause, (Update, Delete)): - return engines['leader'].sync_engine + return engines["leader"].sync_engine else: return engines[ - random.choice(['follower1','follower2']) + random.choice(["follower1", "follower2"]) ].sync_engine + # apply to AsyncSession using sync_session_class - AsyncSessionMaker = async_sessionmaker( - sync_session_class=RoutingSession - ) + AsyncSessionMaker = async_sessionmaker(sync_session_class=RoutingSession) The :meth:`_orm.Session.get_bind` method is called in a non-asyncio, implicitly non-blocking context in the same manner as ORM event hooks diff --git a/lib/sqlalchemy/ext/asyncio/session.py b/lib/sqlalchemy/ext/asyncio/session.py index c2a47d78025..550c977ab9b 100644 --- a/lib/sqlalchemy/ext/asyncio/session.py +++ b/lib/sqlalchemy/ext/asyncio/session.py @@ -341,7 +341,7 @@ async def run_sync( *arg: _P.args, **kw: _P.kwargs, ) -> _T: - """Invoke the given synchronous (i.e. not async) callable, + '''Invoke the given synchronous (i.e. not async) callable, passing a synchronous-style :class:`_orm.Session` as the first argument. 
@@ -351,25 +351,27 @@ async def run_sync( E.g.:: def some_business_method(session: Session, param: str) -> str: - '''A synchronous function that does not require awaiting + """A synchronous function that does not require awaiting :param session: a SQLAlchemy Session, used synchronously :return: an optional return value is supported - ''' + """ session.add(MyObject(param=param)) session.flush() return "success" async def do_something_async(async_engine: AsyncEngine) -> None: - '''an async function that uses awaiting''' + """an async function that uses awaiting""" with AsyncSession(async_engine) as async_session: # run some_business_method() with a sync-style # Session, proxied into an awaitable - return_code = await async_session.run_sync(some_business_method, param="param1") + return_code = await async_session.run_sync( + some_business_method, param="param1" + ) print(return_code) This method maintains the asyncio event loop all the way through @@ -391,7 +393,7 @@ async def do_something_async(async_engine: AsyncEngine) -> None: :meth:`.AsyncConnection.run_sync` :ref:`session_run_sync` - """ # noqa: E501 + ''' # noqa: E501 return await greenlet_spawn( fn, self.sync_session, *arg, _require_await=False, **kw @@ -877,28 +879,28 @@ def get_bind( # construct async engines w/ async drivers engines = { - 'leader':create_async_engine("sqlite+aiosqlite:///leader.db"), - 'other':create_async_engine("sqlite+aiosqlite:///other.db"), - 'follower1':create_async_engine("sqlite+aiosqlite:///follower1.db"), - 'follower2':create_async_engine("sqlite+aiosqlite:///follower2.db"), + "leader": create_async_engine("sqlite+aiosqlite:///leader.db"), + "other": create_async_engine("sqlite+aiosqlite:///other.db"), + "follower1": create_async_engine("sqlite+aiosqlite:///follower1.db"), + "follower2": create_async_engine("sqlite+aiosqlite:///follower2.db"), } + class RoutingSession(Session): def get_bind(self, mapper=None, clause=None, **kw): # within get_bind(), return sync engines if mapper and issubclass(mapper.class_, MyOtherClass): - return engines['other'].sync_engine + return engines["other"].sync_engine elif self._flushing or isinstance(clause, (Update, Delete)): - return engines['leader'].sync_engine + return engines["leader"].sync_engine else: return engines[ - random.choice(['follower1','follower2']) + random.choice(["follower1", "follower2"]) ].sync_engine + # apply to AsyncSession using sync_session_class - AsyncSessionMaker = async_sessionmaker( - sync_session_class=RoutingSession - ) + AsyncSessionMaker = async_sessionmaker(sync_session_class=RoutingSession) The :meth:`_orm.Session.get_bind` method is called in a non-asyncio, implicitly non-blocking context in the same manner as ORM event hooks @@ -954,7 +956,7 @@ def begin(self) -> AsyncSessionTransaction: object is entered:: async with async_session.begin(): - # .. ORM transaction is begun + ... 
# ORM transaction is begun Note that database IO will not normally occur when the session-level transaction is begun, as database transactions begin on an @@ -1631,16 +1633,22 @@ class async_sessionmaker(Generic[_AS]): from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import async_sessionmaker - async def run_some_sql(async_session: async_sessionmaker[AsyncSession]) -> None: + + async def run_some_sql( + async_session: async_sessionmaker[AsyncSession], + ) -> None: async with async_session() as session: session.add(SomeObject(data="object")) session.add(SomeOtherObject(name="other object")) await session.commit() + async def main() -> None: # an AsyncEngine, which the AsyncSession will use for connection # resources - engine = create_async_engine('postgresql+asyncpg://scott:tiger@localhost/') + engine = create_async_engine( + "postgresql+asyncpg://scott:tiger@localhost/" + ) # create a reusable factory for new AsyncSession instances async_session = async_sessionmaker(engine) @@ -1739,7 +1747,6 @@ async def main(): # commits transaction, closes session - """ session = self() @@ -1772,7 +1779,7 @@ def configure(self, **new_kw: Any) -> None: AsyncSession = async_sessionmaker(some_engine) - AsyncSession.configure(bind=create_async_engine('sqlite+aiosqlite://')) + AsyncSession.configure(bind=create_async_engine("sqlite+aiosqlite://")) """ # noqa E501 self.kw.update(new_kw) diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py index 70b0fe62c11..c9bc8788331 100644 --- a/lib/sqlalchemy/ext/automap.py +++ b/lib/sqlalchemy/ext/automap.py @@ -192,8 +192,12 @@ def module_name_for_table(cls, tablename, table): Base = automap_base() Base.prepare(e, modulename_for_table=module_name_for_table) - Base.prepare(e, schema="test_schema", modulename_for_table=module_name_for_table) - Base.prepare(e, schema="test_schema_2", modulename_for_table=module_name_for_table) + Base.prepare( + e, schema="test_schema", modulename_for_table=module_name_for_table + ) + Base.prepare( + e, schema="test_schema_2", modulename_for_table=module_name_for_table + ) The same named-classes are organized into a hierarchical collection available at :attr:`.AutomapBase.by_module`. This collection is traversed using the @@ -550,7 +554,9 @@ class Engineer(Employee): id = Column(Integer, ForeignKey("employee.id"), primary_key=True) favorite_employee_id = Column(Integer, ForeignKey("employee.id")) - favorite_employee = relationship(Employee, foreign_keys=favorite_employee_id) + favorite_employee = relationship( + Employee, foreign_keys=favorite_employee_id + ) __mapper_args__ = { "polymorphic_identity": "engineer", @@ -587,12 +593,16 @@ class Engineer(Employee): We can resolve this conflict by using an underscore as follows:: - def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): + def name_for_scalar_relationship( + base, local_cls, referred_cls, constraint + ): name = referred_cls.__name__.lower() local_table = local_cls.__table__ if name in local_table.columns: newname = name + "_" - warnings.warn("Already detected name %s present. using %s" % (name, newname)) + warnings.warn( + "Already detected name %s present. 
using %s" % (name, newname) + ) return newname return name diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py index 60f7ae66447..c9dd63a87f8 100644 --- a/lib/sqlalchemy/ext/baked.py +++ b/lib/sqlalchemy/ext/baked.py @@ -258,23 +258,19 @@ def to_query(self, query_or_session): is passed to the lambda:: sub_bq = self.bakery(lambda s: s.query(User.name)) - sub_bq += lambda q: q.filter( - User.id == Address.user_id).correlate(Address) + sub_bq += lambda q: q.filter(User.id == Address.user_id).correlate(Address) main_bq = self.bakery(lambda s: s.query(Address)) - main_bq += lambda q: q.filter( - sub_bq.to_query(q).exists()) + main_bq += lambda q: q.filter(sub_bq.to_query(q).exists()) In the case where the subquery is used in the first callable against a :class:`.Session`, the :class:`.Session` is also accepted:: sub_bq = self.bakery(lambda s: s.query(User.name)) - sub_bq += lambda q: q.filter( - User.id == Address.user_id).correlate(Address) + sub_bq += lambda q: q.filter(User.id == Address.user_id).correlate(Address) main_bq = self.bakery( - lambda s: s.query( - Address.id, sub_bq.to_query(q).scalar_subquery()) + lambda s: s.query(Address.id, sub_bq.to_query(q).scalar_subquery()) ) :param query_or_session: a :class:`_query.Query` object or a class @@ -285,7 +281,7 @@ def to_query(self, query_or_session): .. versionadded:: 1.3 - """ + """ # noqa: E501 if isinstance(query_or_session, Session): session = query_or_session diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py index 9d4be255c0d..199329d5b45 100644 --- a/lib/sqlalchemy/ext/compiler.py +++ b/lib/sqlalchemy/ext/compiler.py @@ -17,9 +17,11 @@ from sqlalchemy.ext.compiler import compiles from sqlalchemy.sql.expression import ColumnClause + class MyColumn(ColumnClause): inherit_cache = True + @compiles(MyColumn) def compile_mycolumn(element, compiler, **kw): return "[%s]" % element.name @@ -31,10 +33,12 @@ def compile_mycolumn(element, compiler, **kw): from sqlalchemy import select - s = select(MyColumn('x'), MyColumn('y')) + s = select(MyColumn("x"), MyColumn("y")) print(str(s)) -Produces:: +Produces: + +.. sourcecode:: sql SELECT [x], [y] @@ -46,6 +50,7 @@ def compile_mycolumn(element, compiler, **kw): from sqlalchemy.schema import DDLElement + class AlterColumn(DDLElement): inherit_cache = False @@ -53,14 +58,18 @@ def __init__(self, column, cmd): self.column = column self.cmd = cmd + @compiles(AlterColumn) def visit_alter_column(element, compiler, **kw): return "ALTER COLUMN %s ..." % element.column.name - @compiles(AlterColumn, 'postgresql') + + @compiles(AlterColumn, "postgresql") def visit_alter_column(element, compiler, **kw): - return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, - element.column.name) + return "ALTER TABLE %s ALTER COLUMN %s ..." % ( + element.table.name, + element.column.name, + ) The second ``visit_alter_table`` will be invoked when any ``postgresql`` dialect is used. 
@@ -80,6 +89,7 @@ def visit_alter_column(element, compiler, **kw): from sqlalchemy.sql.expression import Executable, ClauseElement + class InsertFromSelect(Executable, ClauseElement): inherit_cache = False @@ -87,20 +97,27 @@ def __init__(self, table, select): self.table = table self.select = select + @compiles(InsertFromSelect) def visit_insert_from_select(element, compiler, **kw): return "INSERT INTO %s (%s)" % ( compiler.process(element.table, asfrom=True, **kw), - compiler.process(element.select, **kw) + compiler.process(element.select, **kw), ) - insert = InsertFromSelect(t1, select(t1).where(t1.c.x>5)) + + insert = InsertFromSelect(t1, select(t1).where(t1.c.x > 5)) print(insert) -Produces:: +Produces (formatted for readability): + +.. sourcecode:: sql - "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z - FROM mytable WHERE mytable.x > :x_1)" + INSERT INTO mytable ( + SELECT mytable.x, mytable.y, mytable.z + FROM mytable + WHERE mytable.x > :x_1 + ) .. note:: @@ -120,11 +137,10 @@ def visit_insert_from_select(element, compiler, **kw): @compiles(MyConstraint) def compile_my_constraint(constraint, ddlcompiler, **kw): - kw['literal_binds'] = True + kw["literal_binds"] = True return "CONSTRAINT %s CHECK (%s)" % ( constraint.name, - ddlcompiler.sql_compiler.process( - constraint.expression, **kw) + ddlcompiler.sql_compiler.process(constraint.expression, **kw), ) Above, we add an additional flag to the process step as called by @@ -152,6 +168,7 @@ def compile_my_constraint(constraint, ddlcompiler, **kw): from sqlalchemy.sql.expression import Insert + @compiles(Insert) def prefix_inserts(insert, compiler, **kw): return compiler.visit_insert(insert.prefix_with("some prefix"), **kw) @@ -167,17 +184,16 @@ def prefix_inserts(insert, compiler, **kw): ``compiler`` works for types, too, such as below where we implement the MS-SQL specific 'max' keyword for ``String``/``VARCHAR``:: - @compiles(String, 'mssql') - @compiles(VARCHAR, 'mssql') + @compiles(String, "mssql") + @compiles(VARCHAR, "mssql") def compile_varchar(element, compiler, **kw): - if element.length == 'max': + if element.length == "max": return "VARCHAR('max')" else: return compiler.visit_VARCHAR(element, **kw) - foo = Table('foo', metadata, - Column('data', VARCHAR('max')) - ) + + foo = Table("foo", metadata, Column("data", VARCHAR("max"))) Subclassing Guidelines ====================== @@ -215,19 +231,23 @@ class timestamp(ColumnElement): from sqlalchemy.sql.expression import FunctionElement + class coalesce(FunctionElement): - name = 'coalesce' + name = "coalesce" inherit_cache = True + @compiles(coalesce) def compile(element, compiler, **kw): return "coalesce(%s)" % compiler.process(element.clauses, **kw) - @compiles(coalesce, 'oracle') + + @compiles(coalesce, "oracle") def compile(element, compiler, **kw): if len(element.clauses) > 2: - raise TypeError("coalesce only supports two arguments on " - "Oracle Database") + raise TypeError( + "coalesce only supports two arguments on " "Oracle Database" + ) return "nvl(%s)" % compiler.process(element.clauses, **kw) * :class:`.ExecutableDDLElement` - The root of all DDL expressions, @@ -281,6 +301,7 @@ def compile(element, compiler, **kw): class MyColumn(ColumnClause): inherit_cache = True + @compiles(MyColumn) def compile_mycolumn(element, compiler, **kw): return "[%s]" % element.name @@ -319,11 +340,12 @@ def __init__(self, table, select): self.table = table self.select = select + @compiles(InsertFromSelect) def visit_insert_from_select(element, compiler, **kw): return "INSERT 
INTO %s (%s)" % ( compiler.process(element.table, asfrom=True, **kw), - compiler.process(element.select, **kw) + compiler.process(element.select, **kw), ) While it is also possible that the above ``InsertFromSelect`` could be made to @@ -359,28 +381,32 @@ def visit_insert_from_select(element, compiler, **kw): from sqlalchemy.ext.compiler import compiles from sqlalchemy.types import DateTime + class utcnow(expression.FunctionElement): type = DateTime() inherit_cache = True - @compiles(utcnow, 'postgresql') + + @compiles(utcnow, "postgresql") def pg_utcnow(element, compiler, **kw): return "TIMEZONE('utc', CURRENT_TIMESTAMP)" - @compiles(utcnow, 'mssql') + + @compiles(utcnow, "mssql") def ms_utcnow(element, compiler, **kw): return "GETUTCDATE()" Example usage:: - from sqlalchemy import ( - Table, Column, Integer, String, DateTime, MetaData - ) + from sqlalchemy import Table, Column, Integer, String, DateTime, MetaData + metadata = MetaData() - event = Table("event", metadata, + event = Table( + "event", + metadata, Column("id", Integer, primary_key=True), Column("description", String(50), nullable=False), - Column("timestamp", DateTime, server_default=utcnow()) + Column("timestamp", DateTime, server_default=utcnow()), ) "GREATEST" function @@ -395,30 +421,30 @@ def ms_utcnow(element, compiler, **kw): from sqlalchemy.ext.compiler import compiles from sqlalchemy.types import Numeric + class greatest(expression.FunctionElement): type = Numeric() - name = 'greatest' + name = "greatest" inherit_cache = True + @compiles(greatest) def default_greatest(element, compiler, **kw): return compiler.visit_function(element) - @compiles(greatest, 'sqlite') - @compiles(greatest, 'mssql') - @compiles(greatest, 'oracle') + + @compiles(greatest, "sqlite") + @compiles(greatest, "mssql") + @compiles(greatest, "oracle") def case_greatest(element, compiler, **kw): arg1, arg2 = list(element.clauses) return compiler.process(case((arg1 > arg2, arg1), else_=arg2), **kw) Example usage:: - Session.query(Account).\ - filter( - greatest( - Account.checking_balance, - Account.savings_balance) > 10000 - ) + Session.query(Account).filter( + greatest(Account.checking_balance, Account.savings_balance) > 10000 + ) "false" expression ------------------ @@ -429,16 +455,19 @@ def case_greatest(element, compiler, **kw): from sqlalchemy.sql import expression from sqlalchemy.ext.compiler import compiles + class sql_false(expression.ColumnElement): inherit_cache = True + @compiles(sql_false) def default_false(element, compiler, **kw): return "false" - @compiles(sql_false, 'mssql') - @compiles(sql_false, 'mysql') - @compiles(sql_false, 'oracle') + + @compiles(sql_false, "mssql") + @compiles(sql_false, "mysql") + @compiles(sql_false, "oracle") def int_false(element, compiler, **kw): return "0" @@ -448,7 +477,7 @@ def int_false(element, compiler, **kw): exp = union_all( select(users.c.name, sql_false().label("enrolled")), - select(customers.c.name, customers.c.enrolled) + select(customers.c.name, customers.c.enrolled), ) """ diff --git a/lib/sqlalchemy/ext/declarative/extensions.py b/lib/sqlalchemy/ext/declarative/extensions.py index c0f7e340580..4be4262d0df 100644 --- a/lib/sqlalchemy/ext/declarative/extensions.py +++ b/lib/sqlalchemy/ext/declarative/extensions.py @@ -50,23 +50,26 @@ class ConcreteBase: from sqlalchemy.ext.declarative import ConcreteBase + class Employee(ConcreteBase, Base): - __tablename__ = 'employee' + __tablename__ = "employee" employee_id = Column(Integer, primary_key=True) name = Column(String(50)) __mapper_args__ = 
{ - 'polymorphic_identity':'employee', - 'concrete':True} + "polymorphic_identity": "employee", + "concrete": True, + } + class Manager(Employee): - __tablename__ = 'manager' + __tablename__ = "manager" employee_id = Column(Integer, primary_key=True) name = Column(String(50)) manager_data = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity':'manager', - 'concrete':True} - + "polymorphic_identity": "manager", + "concrete": True, + } The name of the discriminator column used by :func:`.polymorphic_union` defaults to the name ``type``. To suit the use case of a mapping where an @@ -75,7 +78,7 @@ class Manager(Employee): ``_concrete_discriminator_name`` attribute:: class Employee(ConcreteBase, Base): - _concrete_discriminator_name = '_concrete_discriminator' + _concrete_discriminator_name = "_concrete_discriminator" .. versionadded:: 1.3.19 Added the ``_concrete_discriminator_name`` attribute to :class:`_declarative.ConcreteBase` so that the @@ -168,23 +171,27 @@ class AbstractConcreteBase(ConcreteBase): from sqlalchemy.orm import DeclarativeBase from sqlalchemy.ext.declarative import AbstractConcreteBase + class Base(DeclarativeBase): pass + class Employee(AbstractConcreteBase, Base): pass + class Manager(Employee): - __tablename__ = 'manager' + __tablename__ = "manager" employee_id = Column(Integer, primary_key=True) name = Column(String(50)) manager_data = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity':'manager', - 'concrete':True + "polymorphic_identity": "manager", + "concrete": True, } + Base.registry.configure() The abstract base class is handled by declarative in a special way; @@ -200,10 +207,12 @@ class Manager(Employee): from sqlalchemy.ext.declarative import AbstractConcreteBase + class Company(Base): - __tablename__ = 'company' + __tablename__ = "company" id = Column(Integer, primary_key=True) + class Employee(AbstractConcreteBase, Base): strict_attrs = True @@ -211,31 +220,31 @@ class Employee(AbstractConcreteBase, Base): @declared_attr def company_id(cls): - return Column(ForeignKey('company.id')) + return Column(ForeignKey("company.id")) @declared_attr def company(cls): return relationship("Company") + class Manager(Employee): - __tablename__ = 'manager' + __tablename__ = "manager" name = Column(String(50)) manager_data = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity':'manager', - 'concrete':True + "polymorphic_identity": "manager", + "concrete": True, } + Base.registry.configure() When we make use of our mappings however, both ``Manager`` and ``Employee`` will have an independently usable ``.company`` attribute:: - session.execute( - select(Employee).filter(Employee.company.has(id=5)) - ) + session.execute(select(Employee).filter(Employee.company.has(id=5))) :param strict_attrs: when specified on the base class, "strict" attribute mode is enabled which attempts to limit ORM mapped attributes on the @@ -366,10 +375,12 @@ class DeferredReflection: from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.declarative import DeferredReflection + Base = declarative_base() + class MyClass(DeferredReflection, Base): - __tablename__ = 'mytable' + __tablename__ = "mytable" Above, ``MyClass`` is not yet mapped. 
After a series of classes have been defined in the above fashion, all tables @@ -391,17 +402,22 @@ class MyClass(DeferredReflection, Base): class ReflectedOne(DeferredReflection, Base): __abstract__ = True + class ReflectedTwo(DeferredReflection, Base): __abstract__ = True + class MyClass(ReflectedOne): - __tablename__ = 'mytable' + __tablename__ = "mytable" + class MyOtherClass(ReflectedOne): - __tablename__ = 'myothertable' + __tablename__ = "myothertable" + class YetAnotherClass(ReflectedTwo): - __tablename__ = 'yetanothertable' + __tablename__ = "yetanothertable" + # ... etc. diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py index d8ee819f268..1d7b3f3580d 100644 --- a/lib/sqlalchemy/ext/horizontal_shard.py +++ b/lib/sqlalchemy/ext/horizontal_shard.py @@ -125,12 +125,9 @@ def set_shard(self, shard_id: ShardIdentifier) -> Self: The shard_id can be passed for a 2.0 style execution to the bind_arguments dictionary of :meth:`.Session.execute`:: - results = session.execute( - stmt, - bind_arguments={"shard_id": "my_shard"} - ) + results = session.execute(stmt, bind_arguments={"shard_id": "my_shard"}) - """ + """ # noqa: E501 return self.execution_options(_sa_shard_id=shard_id) @@ -382,9 +379,9 @@ class set_shard_id(ORMOption): the :meth:`_sql.Executable.options` method of any executable statement:: stmt = ( - select(MyObject). - where(MyObject.name == 'some name'). - options(set_shard_id("shard1")) + select(MyObject) + .where(MyObject.name == "some name") + .options(set_shard_id("shard1")) ) Above, the statement when invoked will limit to the "shard1" shard diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index ee8d6a78184..471256c32f8 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -34,8 +34,9 @@ class level and at the instance level. class Base(DeclarativeBase): pass + class Interval(Base): - __tablename__ = 'interval' + __tablename__ = "interval" id: Mapped[int] = mapped_column(primary_key=True) start: Mapped[int] @@ -57,7 +58,6 @@ def contains(self, point: int) -> bool: def intersects(self, other: Interval) -> bool: return self.contains(other.start) | self.contains(other.end) - Above, the ``length`` property returns the difference between the ``end`` and ``start`` attributes. With an instance of ``Interval``, this subtraction occurs in Python, using normal Python descriptor @@ -150,6 +150,7 @@ def intersects(self, other: Interval) -> bool: from sqlalchemy import func from sqlalchemy import type_coerce + class Interval(Base): # ... @@ -214,6 +215,7 @@ def _radius_expression(cls) -> ColumnElement[float]: # correct use, however is not accepted by pep-484 tooling + class Interval(Base): # ... @@ -256,6 +258,7 @@ def radius(cls): # correct use which is also accepted by pep-484 tooling + class Interval(Base): # ... @@ -330,6 +333,7 @@ def _length_setter(self, value: int) -> None: ``Interval.start``, this could be substituted directly:: from sqlalchemy import update + stmt = update(Interval).values({Interval.start_point: 10}) However, when using a composite hybrid like ``Interval.length``, this @@ -340,6 +344,7 @@ def _length_setter(self, value: int) -> None: from typing import List, Tuple, Any + class Interval(Base): # ... 
@@ -352,10 +357,10 @@ def _length_setter(self, value: int) -> None: self.end = self.start + value @length.inplace.update_expression - def _length_update_expression(cls, value: Any) -> List[Tuple[Any, Any]]: - return [ - (cls.end, cls.start + value) - ] + def _length_update_expression( + cls, value: Any + ) -> List[Tuple[Any, Any]]: + return [(cls.end, cls.start + value)] Above, if we use ``Interval.length`` in an UPDATE expression, we get a hybrid SET expression: @@ -412,15 +417,16 @@ class Base(DeclarativeBase): class SavingsAccount(Base): - __tablename__ = 'account' + __tablename__ = "account" id: Mapped[int] = mapped_column(primary_key=True) - user_id: Mapped[int] = mapped_column(ForeignKey('user.id')) + user_id: Mapped[int] = mapped_column(ForeignKey("user.id")) balance: Mapped[Decimal] = mapped_column(Numeric(15, 5)) owner: Mapped[User] = relationship(back_populates="accounts") + class User(Base): - __tablename__ = 'user' + __tablename__ = "user" id: Mapped[int] = mapped_column(primary_key=True) name: Mapped[str] = mapped_column(String(100)) @@ -448,7 +454,10 @@ def _balance_setter(self, value: Optional[Decimal]) -> None: @balance.inplace.expression @classmethod def _balance_expression(cls) -> SQLColumnExpression[Optional[Decimal]]: - return cast("SQLColumnExpression[Optional[Decimal]]", SavingsAccount.balance) + return cast( + "SQLColumnExpression[Optional[Decimal]]", + SavingsAccount.balance, + ) The above hybrid property ``balance`` works with the first ``SavingsAccount`` entry in the list of accounts for this user. The @@ -471,8 +480,11 @@ def _balance_expression(cls) -> SQLColumnExpression[Optional[Decimal]]: .. sourcecode:: pycon+sql >>> from sqlalchemy import select - >>> print(select(User, User.balance). - ... join(User.accounts).filter(User.balance > 5000)) + >>> print( + ... select(User, User.balance) + ... .join(User.accounts) + ... .filter(User.balance > 5000) + ... ) {printsql}SELECT "user".id AS user_id, "user".name AS user_name, account.balance AS account_balance FROM "user" JOIN account ON "user".id = account.user_id @@ -487,8 +499,11 @@ def _balance_expression(cls) -> SQLColumnExpression[Optional[Decimal]]: >>> from sqlalchemy import select >>> from sqlalchemy import or_ - >>> print (select(User, User.balance).outerjoin(User.accounts). - ... filter(or_(User.balance < 5000, User.balance == None))) + >>> print( + ... select(User, User.balance) + ... .outerjoin(User.accounts) + ... .filter(or_(User.balance < 5000, User.balance == None)) + ... 
) {printsql}SELECT "user".id AS user_id, "user".name AS user_name, account.balance AS account_balance FROM "user" LEFT OUTER JOIN account ON "user".id = account.user_id @@ -528,15 +543,16 @@ class Base(DeclarativeBase): class SavingsAccount(Base): - __tablename__ = 'account' + __tablename__ = "account" id: Mapped[int] = mapped_column(primary_key=True) - user_id: Mapped[int] = mapped_column(ForeignKey('user.id')) + user_id: Mapped[int] = mapped_column(ForeignKey("user.id")) balance: Mapped[Decimal] = mapped_column(Numeric(15, 5)) owner: Mapped[User] = relationship(back_populates="accounts") + class User(Base): - __tablename__ = 'user' + __tablename__ = "user" id: Mapped[int] = mapped_column(primary_key=True) name: Mapped[str] = mapped_column(String(100)) @@ -546,7 +562,9 @@ class User(Base): @hybrid_property def balance(self) -> Decimal: - return sum((acc.balance for acc in self.accounts), start=Decimal("0")) + return sum( + (acc.balance for acc in self.accounts), start=Decimal("0") + ) @balance.inplace.expression @classmethod @@ -557,7 +575,6 @@ def _balance_expression(cls) -> SQLColumnExpression[Decimal]: .label("total_balance") ) - The above recipe will give us the ``balance`` column which renders a correlated SELECT: @@ -604,6 +621,7 @@ def _balance_expression(cls) -> SQLColumnExpression[Decimal]: from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column + class Base(DeclarativeBase): pass @@ -612,8 +630,9 @@ class CaseInsensitiveComparator(Comparator[str]): def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 return func.lower(self.__clause_element__()) == func.lower(other) + class SearchWord(Base): - __tablename__ = 'searchword' + __tablename__ = "searchword" id: Mapped[int] = mapped_column(primary_key=True) word: Mapped[str] @@ -675,6 +694,7 @@ def name(self) -> str: def _name_setter(self, value: str) -> None: self.first_name = value + class FirstNameLastName(FirstNameOnly): # ... @@ -684,11 +704,11 @@ class FirstNameLastName(FirstNameOnly): # of FirstNameOnly.name that is local to FirstNameLastName @FirstNameOnly.name.getter def name(self) -> str: - return self.first_name + ' ' + self.last_name + return self.first_name + " " + self.last_name @name.inplace.setter def _name_setter(self, value: str) -> None: - self.first_name, self.last_name = value.split(' ', 1) + self.first_name, self.last_name = value.split(" ", 1) Above, the ``FirstNameLastName`` class refers to the hybrid from ``FirstNameOnly.name`` to repurpose its getter and setter for the subclass. 
@@ -709,8 +729,7 @@ class FirstNameLastName(FirstNameOnly): @FirstNameOnly.name.overrides.expression @classmethod def name(cls): - return func.concat(cls.first_name, ' ', cls.last_name) - + return func.concat(cls.first_name, " ", cls.last_name) Hybrid Value Objects -------------------- @@ -751,7 +770,7 @@ def __clause_element__(self): def __str__(self): return self.word - key = 'word' + key = "word" "Label to apply to Query tuple results" Above, the ``CaseInsensitiveWord`` object represents ``self.word``, which may @@ -762,7 +781,7 @@ def __str__(self): ``CaseInsensitiveWord`` object unconditionally from a single hybrid call:: class SearchWord(Base): - __tablename__ = 'searchword' + __tablename__ = "searchword" id: Mapped[int] = mapped_column(primary_key=True) word: Mapped[str] @@ -983,6 +1002,7 @@ def __init__( from sqlalchemy.ext.hybrid import hybrid_method + class SomeClass: @hybrid_method def value(self, x, y): @@ -1080,6 +1100,7 @@ def __init__( from sqlalchemy.ext.hybrid import hybrid_property + class SomeClass: @hybrid_property def value(self): @@ -1158,6 +1179,7 @@ class SuperClass: def foobar(self): return self._foobar + class SubClass(SuperClass): # ... @@ -1367,10 +1389,7 @@ def fullname(self): @fullname.update_expression def fullname(cls, value): fname, lname = value.split(" ", 1) - return [ - (cls.first_name, fname), - (cls.last_name, lname) - ] + return [(cls.first_name, fname), (cls.last_name, lname)] .. versionadded:: 1.2 diff --git a/lib/sqlalchemy/ext/indexable.py b/lib/sqlalchemy/ext/indexable.py index 3c419308a69..e79f613f274 100644 --- a/lib/sqlalchemy/ext/indexable.py +++ b/lib/sqlalchemy/ext/indexable.py @@ -36,19 +36,19 @@ Base = declarative_base() + class Person(Base): - __tablename__ = 'person' + __tablename__ = "person" id = Column(Integer, primary_key=True) data = Column(JSON) - name = index_property('data', 'name') - + name = index_property("data", "name") Above, the ``name`` attribute now behaves like a mapped column. We can compose a new ``Person`` and set the value of ``name``:: - >>> person = Person(name='Alchemist') + >>> person = Person(name="Alchemist") The value is now accessible:: @@ -59,11 +59,11 @@ class Person(Base): and the field was set:: >>> person.data - {"name": "Alchemist'} + {'name': 'Alchemist'} The field is mutable in place:: - >>> person.name = 'Renamed' + >>> person.name = "Renamed" >>> person.name 'Renamed' >>> person.data @@ -87,18 +87,17 @@ class Person(Base): >>> person = Person() >>> person.name - ... AttributeError: 'name' Unless you set a default value:: >>> class Person(Base): - >>> __tablename__ = 'person' - >>> - >>> id = Column(Integer, primary_key=True) - >>> data = Column(JSON) - >>> - >>> name = index_property('data', 'name', default=None) # See default + ... __tablename__ = "person" + ... + ... id = Column(Integer, primary_key=True) + ... data = Column(JSON) + ... + ... 
name = index_property("data", "name", default=None) # See default >>> person = Person() >>> print(person.name) @@ -111,11 +110,11 @@ class Person(Base): >>> from sqlalchemy.orm import Session >>> session = Session() - >>> query = session.query(Person).filter(Person.name == 'Alchemist') + >>> query = session.query(Person).filter(Person.name == "Alchemist") The above query is equivalent to:: - >>> query = session.query(Person).filter(Person.data['name'] == 'Alchemist') + >>> query = session.query(Person).filter(Person.data["name"] == "Alchemist") Multiple :class:`.index_property` objects can be chained to produce multiple levels of indexing:: @@ -126,22 +125,25 @@ class Person(Base): Base = declarative_base() + class Person(Base): - __tablename__ = 'person' + __tablename__ = "person" id = Column(Integer, primary_key=True) data = Column(JSON) - birthday = index_property('data', 'birthday') - year = index_property('birthday', 'year') - month = index_property('birthday', 'month') - day = index_property('birthday', 'day') + birthday = index_property("data", "birthday") + year = index_property("birthday", "year") + month = index_property("birthday", "month") + day = index_property("birthday", "day") Above, a query such as:: - q = session.query(Person).filter(Person.year == '1980') + q = session.query(Person).filter(Person.year == "1980") -On a PostgreSQL backend, the above query will render as:: +On a PostgreSQL backend, the above query will render as: + +.. sourcecode:: sql SELECT person.id, person.data FROM person @@ -198,13 +200,14 @@ def expr(self, model): Base = declarative_base() + class Person(Base): - __tablename__ = 'person' + __tablename__ = "person" id = Column(Integer, primary_key=True) data = Column(JSON) - age = pg_json_property('data', 'age', Integer) + age = pg_json_property("data", "age", Integer) The ``age`` attribute at the instance level works as before; however when rendering SQL, PostgreSQL's ``->>`` operator will be used @@ -212,7 +215,8 @@ class Person(Base): >>> query = session.query(Person).filter(Person.age < 20) -The above query will render:: +The above query will render: +.. sourcecode:: sql SELECT person.id, person.data FROM person diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index e005e4d63f1..941515e87b5 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -21,6 +21,7 @@ from sqlalchemy.types import TypeDecorator, VARCHAR import json + class JSONEncodedDict(TypeDecorator): "Represents an immutable structure as a json-encoded string." 
@@ -48,6 +49,7 @@ def process_result_value(self, value, dialect): from sqlalchemy.ext.mutable import Mutable + class MutableDict(Mutable, dict): @classmethod def coerce(cls, key, value): @@ -101,9 +103,11 @@ class and associates a listener that will detect all future mappings from sqlalchemy import Table, Column, Integer - my_data = Table('my_data', metadata, - Column('id', Integer, primary_key=True), - Column('data', MutableDict.as_mutable(JSONEncodedDict)) + my_data = Table( + "my_data", + metadata, + Column("id", Integer, primary_key=True), + Column("data", MutableDict.as_mutable(JSONEncodedDict)), ) Above, :meth:`~.Mutable.as_mutable` returns an instance of ``JSONEncodedDict`` @@ -115,13 +119,17 @@ class and associates a listener that will detect all future mappings from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column + class Base(DeclarativeBase): pass + class MyDataClass(Base): - __tablename__ = 'my_data' + __tablename__ = "my_data" id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[dict[str, str]] = mapped_column(MutableDict.as_mutable(JSONEncodedDict)) + data: Mapped[dict[str, str]] = mapped_column( + MutableDict.as_mutable(JSONEncodedDict) + ) The ``MyDataClass.data`` member will now be notified of in place changes to its value. @@ -132,11 +140,11 @@ class MyDataClass(Base): >>> from sqlalchemy.orm import Session >>> sess = Session(some_engine) - >>> m1 = MyDataClass(data={'value1':'foo'}) + >>> m1 = MyDataClass(data={"value1": "foo"}) >>> sess.add(m1) >>> sess.commit() - >>> m1.data['value1'] = 'bar' + >>> m1.data["value1"] = "bar" >>> assert m1 in sess.dirty True @@ -153,15 +161,16 @@ class MyDataClass(Base): MutableDict.associate_with(JSONEncodedDict) + class Base(DeclarativeBase): pass + class MyDataClass(Base): - __tablename__ = 'my_data' + __tablename__ = "my_data" id: Mapped[int] = mapped_column(primary_key=True) data: Mapped[dict[str, str]] = mapped_column(JSONEncodedDict) - Supporting Pickling -------------------- @@ -180,7 +189,7 @@ class MyDataClass(Base): class MyMutableType(Mutable): def __getstate__(self): d = self.__dict__.copy() - d.pop('_parents', None) + d.pop("_parents", None) return d With our dictionary example, we need to return the contents of the dict itself @@ -213,13 +222,18 @@ def __setstate__(self, state): from sqlalchemy.orm import mapped_column from sqlalchemy import event + class Base(DeclarativeBase): pass + class MyDataClass(Base): - __tablename__ = 'my_data' + __tablename__ = "my_data" id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[dict[str, str]] = mapped_column(MutableDict.as_mutable(JSONEncodedDict)) + data: Mapped[dict[str, str]] = mapped_column( + MutableDict.as_mutable(JSONEncodedDict) + ) + @event.listens_for(MyDataClass.data, "modified") def modified_json(instance, initiator): @@ -247,6 +261,7 @@ class introduced in :ref:`mapper_composite` to include import dataclasses from sqlalchemy.ext.mutable import MutableComposite + @dataclasses.dataclass class Point(MutableComposite): x: int @@ -261,7 +276,6 @@ def __setattr__(self, key, value): # alert all parents to the change self.changed() - The :class:`.MutableComposite` class makes use of class mapping events to automatically establish listeners for any usage of :func:`_orm.composite` that specifies our ``Point`` type. 
Below, when ``Point`` is mapped to the ``Vertex`` @@ -271,6 +285,7 @@ def __setattr__(self, key, value): from sqlalchemy.orm import DeclarativeBase, Mapped from sqlalchemy.orm import composite, mapped_column + class Base(DeclarativeBase): pass @@ -280,8 +295,12 @@ class Vertex(Base): id: Mapped[int] = mapped_column(primary_key=True) - start: Mapped[Point] = composite(mapped_column("x1"), mapped_column("y1")) - end: Mapped[Point] = composite(mapped_column("x2"), mapped_column("y2")) + start: Mapped[Point] = composite( + mapped_column("x1"), mapped_column("y1") + ) + end: Mapped[Point] = composite( + mapped_column("x2"), mapped_column("y2") + ) def __repr__(self): return f"Vertex(start={self.start}, end={self.end})" @@ -648,9 +667,11 @@ def as_mutable(cls, sqltype: _TypeEngineArgument[_T]) -> TypeEngine[_T]: The type is returned, unconditionally as an instance, so that :meth:`.as_mutable` can be used inline:: - Table('mytable', metadata, - Column('id', Integer, primary_key=True), - Column('data', MyMutableType.as_mutable(PickleType)) + Table( + "mytable", + metadata, + Column("id", Integer, primary_key=True), + Column("data", MyMutableType.as_mutable(PickleType)), ) Note that the returned type is always an instance, even if a class diff --git a/lib/sqlalchemy/ext/mypy/apply.py b/lib/sqlalchemy/ext/mypy/apply.py index eb9019453d5..84eb9772491 100644 --- a/lib/sqlalchemy/ext/mypy/apply.py +++ b/lib/sqlalchemy/ext/mypy/apply.py @@ -199,11 +199,15 @@ class User(Base): To one that describes the final Python behavior to Mypy:: + ... format: off + class User(Base): # ... attrname : Mapped[Optional[int]] = + ... format: on + """ left_node = lvalue.node assert isinstance(left_node, Var) diff --git a/lib/sqlalchemy/ext/mypy/infer.py b/lib/sqlalchemy/ext/mypy/infer.py index 09b3c443ab0..8826672f72e 100644 --- a/lib/sqlalchemy/ext/mypy/infer.py +++ b/lib/sqlalchemy/ext/mypy/infer.py @@ -385,9 +385,9 @@ class MyClass: class MyClass: # ... 
- a : Mapped[int] + a: Mapped[int] - b : Mapped[str] + b: Mapped[str] c: Mapped[int] diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py index 1a12cf38c69..ae904b0fc6c 100644 --- a/lib/sqlalchemy/ext/orderinglist.py +++ b/lib/sqlalchemy/ext/orderinglist.py @@ -26,18 +26,20 @@ Base = declarative_base() + class Slide(Base): - __tablename__ = 'slide' + __tablename__ = "slide" id = Column(Integer, primary_key=True) name = Column(String) bullets = relationship("Bullet", order_by="Bullet.position") + class Bullet(Base): - __tablename__ = 'bullet' + __tablename__ = "bullet" id = Column(Integer, primary_key=True) - slide_id = Column(Integer, ForeignKey('slide.id')) + slide_id = Column(Integer, ForeignKey("slide.id")) position = Column(Integer) text = Column(String) @@ -57,19 +59,24 @@ class Bullet(Base): Base = declarative_base() + class Slide(Base): - __tablename__ = 'slide' + __tablename__ = "slide" id = Column(Integer, primary_key=True) name = Column(String) - bullets = relationship("Bullet", order_by="Bullet.position", - collection_class=ordering_list('position')) + bullets = relationship( + "Bullet", + order_by="Bullet.position", + collection_class=ordering_list("position"), + ) + class Bullet(Base): - __tablename__ = 'bullet' + __tablename__ = "bullet" id = Column(Integer, primary_key=True) - slide_id = Column(Integer, ForeignKey('slide.id')) + slide_id = Column(Integer, ForeignKey("slide.id")) position = Column(Integer) text = Column(String) @@ -151,14 +158,18 @@ def ordering_list( from sqlalchemy.ext.orderinglist import ordering_list + class Slide(Base): - __tablename__ = 'slide' + __tablename__ = "slide" id = Column(Integer, primary_key=True) name = Column(String) - bullets = relationship("Bullet", order_by="Bullet.position", - collection_class=ordering_list('position')) + bullets = relationship( + "Bullet", + order_by="Bullet.position", + collection_class=ordering_list("position"), + ) :param attr: Name of the mapped attribute to use for storage and retrieval of diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py index 130d2537474..9cbc61a1c36 100644 --- a/lib/sqlalchemy/ext/serializer.py +++ b/lib/sqlalchemy/ext/serializer.py @@ -28,13 +28,17 @@ Usage is nearly the same as that of the standard Python pickle module:: from sqlalchemy.ext.serializer import loads, dumps + metadata = MetaData(bind=some_engine) Session = scoped_session(sessionmaker()) # ... define mappers - query = Session.query(MyClass). - filter(MyClass.somedata=='foo').order_by(MyClass.sortkey) + query = ( + Session.query(MyClass) + .filter(MyClass.somedata == "foo") + .order_by(MyClass.sortkey) + ) # pickle the query serialized = dumps(query) @@ -42,7 +46,7 @@ # unpickle. 
Pass in metadata + scoped_session query2 = loads(serialized, metadata, Session) - print query2.all() + print(query2.all()) Similar restrictions as when using raw pickle apply; mapped classes must be themselves be pickleable, meaning they are importable from a module-level diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index 4c777b3b3ac..1b9f9a82159 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -823,7 +823,7 @@ def with_loader_criteria( stmt = select(User).options( selectinload(User.addresses), - with_loader_criteria(Address, Address.email_address != 'foo')) + with_loader_criteria(Address, Address.email_address != "foo"), ) Above, the "selectinload" for ``User.addresses`` will apply the @@ -833,8 +833,10 @@ def with_loader_criteria( ON clause of the join, in this example using :term:`1.x style` queries:: - q = session.query(User).outerjoin(User.addresses).options( - with_loader_criteria(Address, Address.email_address != 'foo')) + q = ( + session.query(User) + .outerjoin(User.addresses) + .options(with_loader_criteria(Address, Address.email_address != "foo")) ) The primary purpose of :func:`_orm.with_loader_criteria` is to use @@ -847,6 +849,7 @@ def with_loader_criteria( session = Session(bind=engine) + @event.listens_for("do_orm_execute", session) def _add_filtering_criteria(execute_state): @@ -858,8 +861,8 @@ def _add_filtering_criteria(execute_state): execute_state.statement = execute_state.statement.options( with_loader_criteria( SecurityRole, - lambda cls: cls.role.in_(['some_role']), - include_aliases=True + lambda cls: cls.role.in_(["some_role"]), + include_aliases=True, ) ) @@ -896,16 +899,19 @@ def _add_filtering_criteria(execute_state): ``A -> A.bs -> B``, the given :func:`_orm.with_loader_criteria` option will affect the way in which the JOIN is rendered:: - stmt = select(A).join(A.bs).options( - contains_eager(A.bs), - with_loader_criteria(B, B.flag == 1) + stmt = ( + select(A) + .join(A.bs) + .options(contains_eager(A.bs), with_loader_criteria(B, B.flag == 1)) ) Above, the given :func:`_orm.with_loader_criteria` option will affect the ON clause of the JOIN that is specified by ``.join(A.bs)``, so is applied as expected. The :func:`_orm.contains_eager` option has the effect that columns from - ``B`` are added to the columns clause:: + ``B`` are added to the columns clause: + + .. sourcecode:: sql SELECT b.id, b.a_id, b.data, b.flag, @@ -971,7 +977,7 @@ class of a particular set of mapped classes, to which the rule .. versionadded:: 1.4.0b2 - """ + """ # noqa: E501 return LoaderCriteriaOption( entity_or_base, where_criteria, @@ -1898,14 +1904,13 @@ def synonym( e.g.:: class MyClass(Base): - __tablename__ = 'my_table' + __tablename__ = "my_table" id = Column(Integer, primary_key=True) job_status = Column(String(50)) status = synonym("job_status") - :param name: the name of the existing mapped property. 
This can refer to the string name ORM-mapped attribute configured on the class, including column-bound attributes @@ -1933,11 +1938,13 @@ class MyClass(Base): :paramref:`.synonym.descriptor` parameter:: my_table = Table( - "my_table", metadata, - Column('id', Integer, primary_key=True), - Column('job_status', String(50)) + "my_table", + metadata, + Column("id", Integer, primary_key=True), + Column("job_status", String(50)), ) + class MyClass: @property def _job_status_descriptor(self): @@ -1945,11 +1952,15 @@ def _job_status_descriptor(self): mapper( - MyClass, my_table, properties={ + MyClass, + my_table, + properties={ "job_status": synonym( - "_job_status", map_column=True, - descriptor=MyClass._job_status_descriptor) - } + "_job_status", + map_column=True, + descriptor=MyClass._job_status_descriptor, + ) + }, ) Above, the attribute named ``_job_status`` is automatically @@ -2099,8 +2110,7 @@ def backref(name: str, **kwargs: Any) -> ORMBackrefArgument: E.g.:: - 'items':relationship( - SomeItem, backref=backref('parent', lazy='subquery')) + "items": relationship(SomeItem, backref=backref("parent", lazy="subquery")) The :paramref:`_orm.relationship.backref` parameter is generally considered to be legacy; for modern applications, using @@ -2112,7 +2122,7 @@ def backref(name: str, **kwargs: Any) -> ORMBackrefArgument: :ref:`relationships_backref` - background on backrefs - """ + """ # noqa: E501 return (name, kwargs) @@ -2373,17 +2383,21 @@ def aliased( aggregate functions:: class UnitPrice(Base): - __tablename__ = 'unit_price' + __tablename__ = "unit_price" ... unit_id = Column(Integer) price = Column(Numeric) - aggregated_unit_price = Session.query( - func.sum(UnitPrice.price).label('price') - ).group_by(UnitPrice.unit_id).subquery() - aggregated_unit_price = aliased(UnitPrice, - alias=aggregated_unit_price, adapt_on_names=True) + aggregated_unit_price = ( + Session.query(func.sum(UnitPrice.price).label("price")) + .group_by(UnitPrice.unit_id) + .subquery() + ) + + aggregated_unit_price = aliased( + UnitPrice, alias=aggregated_unit_price, adapt_on_names=True + ) Above, functions on ``aggregated_unit_price`` which refer to ``.price`` will return the @@ -2529,16 +2543,21 @@ def join( :meth:`_sql.Select.select_from` method, as in:: from sqlalchemy.orm import join - stmt = select(User).\ - select_from(join(User, Address, User.addresses)).\ - filter(Address.email_address=='foo@bar.com') + + stmt = ( + select(User) + .select_from(join(User, Address, User.addresses)) + .filter(Address.email_address == "foo@bar.com") + ) In modern SQLAlchemy the above join can be written more succinctly as:: - stmt = select(User).\ - join(User.addresses).\ - filter(Address.email_address=='foo@bar.com') + stmt = ( + select(User) + .join(User.addresses) + .filter(Address.email_address == "foo@bar.com") + ) .. warning:: using :func:`_orm.join` directly may not work properly with modern ORM options such as :func:`_orm.with_loader_criteria`. diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index 33cca564927..f6114f9db21 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -2663,7 +2663,7 @@ def init_collection(obj: object, key: str) -> CollectionAdapter: This function is used to provide direct access to collection internals for a previously unloaded attribute. 
e.g.:: - collection_adapter = init_collection(someobject, 'elements') + collection_adapter = init_collection(someobject, "elements") for elem in values: collection_adapter.append_without_event(elem) diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py index d713abb0e9d..cb9456f1f3c 100644 --- a/lib/sqlalchemy/orm/collections.py +++ b/lib/sqlalchemy/orm/collections.py @@ -21,6 +21,8 @@ and return values to events:: from sqlalchemy.orm.collections import collection + + class MyClass: # ... @@ -32,7 +34,6 @@ def store(self, item): def pop(self): return self.data.pop() - The second approach is a bundle of targeted decorators that wrap appropriate append and remove notifiers around the mutation methods present in the standard Python ``list``, ``set`` and ``dict`` interfaces. These could be @@ -73,10 +74,11 @@ class InstrumentedList(list): method that's already instrumented. For example:: class QueueIsh(list): - def push(self, item): - self.append(item) - def shift(self): - return self.pop(0) + def push(self, item): + self.append(item) + + def shift(self): + return self.pop(0) There's no need to decorate these methods. ``append`` and ``pop`` are already instrumented as part of the ``list`` interface. Decorating them would fire @@ -195,9 +197,10 @@ def append(self, append): ... The recipe decorators all require parens, even those that take no arguments:: - @collection.adds('entity') + @collection.adds("entity") def insert(self, position, entity): ... + @collection.removes_return() def popitem(self): ... @@ -217,11 +220,13 @@ def appender(fn): @collection.appender def add(self, append): ... + # or, equivalently @collection.appender @collection.adds(1) def add(self, append): ... + # for mapping type, an 'append' may kick out a previous value # that occupies that slot. consider d['a'] = 'foo'- any previous # value in d['a'] is discarded. @@ -261,10 +266,11 @@ def remover(fn): @collection.remover def zap(self, entity): ... + # or, equivalently @collection.remover @collection.removes_return() - def zap(self, ): ... + def zap(self): ... If the value to remove is not present in the collection, you may raise an exception or return None to ignore the error. @@ -364,7 +370,8 @@ def adds(arg): @collection.adds(1) def push(self, item): ... - @collection.adds('entity') + + @collection.adds("entity") def do_stuff(self, thing, entity=None): ... 
""" diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index 91f9539befc..311a9bd4a51 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -207,7 +207,7 @@ def synonym_for( :paramref:`.orm.synonym.descriptor` parameter:: class MyClass(Base): - __tablename__ = 'my_table' + __tablename__ = "my_table" id = Column(Integer, primary_key=True) _job_status = Column("job_status", String(50)) @@ -373,20 +373,21 @@ def __tablename__(cls) -> str: for subclasses:: class Employee(Base): - __tablename__ = 'employee' + __tablename__ = "employee" id: Mapped[int] = mapped_column(primary_key=True) type: Mapped[str] = mapped_column(String(50)) @declared_attr.directive def __mapper_args__(cls) -> Dict[str, Any]: - if cls.__name__ == 'Employee': + if cls.__name__ == "Employee": return { - "polymorphic_on":cls.type, - "polymorphic_identity":"Employee" + "polymorphic_on": cls.type, + "polymorphic_identity": "Employee", } else: - return {"polymorphic_identity":cls.__name__} + return {"polymorphic_identity": cls.__name__} + class Engineer(Employee): pass @@ -485,6 +486,7 @@ def declarative_mixin(cls: Type[_T]) -> Type[_T]: from sqlalchemy.orm import declared_attr from sqlalchemy.orm import declarative_mixin + @declarative_mixin class MyMixin: @@ -492,10 +494,11 @@ class MyMixin: def __tablename__(cls): return cls.__name__.lower() - __table_args__ = {'mysql_engine': 'InnoDB'} - __mapper_args__= {'always_refresh': True} + __table_args__ = {"mysql_engine": "InnoDB"} + __mapper_args__ = {"always_refresh": True} + + id = Column(Integer, primary_key=True) - id = Column(Integer, primary_key=True) class MyModel(MyMixin, Base): name = Column(String(1000)) @@ -638,10 +641,10 @@ class DeclarativeBase( from sqlalchemy.orm import DeclarativeBase + class Base(DeclarativeBase): pass - The above ``Base`` class is now usable as the base for new declarative mappings. The superclass makes use of the ``__init_subclass__()`` method to set up new classes and metaclasses aren't used. @@ -664,11 +667,12 @@ class Base(DeclarativeBase): bigint = Annotated[int, "bigint"] my_metadata = MetaData() + class Base(DeclarativeBase): metadata = my_metadata type_annotation_map = { str: String().with_variant(String(255), "mysql", "mariadb"), - bigint: BigInteger() + bigint: BigInteger(), } Class-level attributes which may be specified include: @@ -1480,6 +1484,7 @@ def generate_base( Base = mapper_registry.generate_base() + class MyClass(Base): __tablename__ = "my_table" id = Column(Integer, primary_key=True) @@ -1492,6 +1497,7 @@ class MyClass(Base): mapper_registry = registry() + class Base(metaclass=DeclarativeMeta): __abstract__ = True registry = mapper_registry @@ -1657,9 +1663,10 @@ def mapped(self, cls: Type[_O]) -> Type[_O]: mapper_registry = registry() + @mapper_registry.mapped class Foo: - __tablename__ = 'some_table' + __tablename__ = "some_table" id = Column(Integer, primary_key=True) name = Column(String) @@ -1699,15 +1706,17 @@ def as_declarative_base(self, **kw: Any) -> Callable[[Type[_T]], Type[_T]]: mapper_registry = registry() + @mapper_registry.as_declarative_base() class Base: @declared_attr def __tablename__(cls): return cls.__name__.lower() + id = Column(Integer, primary_key=True) - class MyMappedClass(Base): - # ... + + class MyMappedClass(Base): ... 
All keyword arguments passed to :meth:`_orm.registry.as_declarative_base` are passed @@ -1737,12 +1746,14 @@ def map_declaratively(self, cls: Type[_O]) -> Mapper[_O]: mapper_registry = registry() + class Foo: - __tablename__ = 'some_table' + __tablename__ = "some_table" id = Column(Integer, primary_key=True) name = Column(String) + mapper = mapper_registry.map_declaratively(Foo) This function is more conveniently invoked indirectly via either the @@ -1795,12 +1806,14 @@ def map_imperatively( my_table = Table( "my_table", mapper_registry.metadata, - Column('id', Integer, primary_key=True) + Column("id", Integer, primary_key=True), ) + class MyClass: pass + mapper_registry.map_imperatively(MyClass, my_table) See the section :ref:`orm_imperative_mapping` for complete background @@ -1847,15 +1860,17 @@ def as_declarative(**kw: Any) -> Callable[[Type[_T]], Type[_T]]: from sqlalchemy.orm import as_declarative + @as_declarative() class Base: @declared_attr def __tablename__(cls): return cls.__name__.lower() + id = Column(Integer, primary_key=True) - class MyMappedClass(Base): - # ... + + class MyMappedClass(Base): ... .. seealso:: diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index f2eae852b3c..959ad01043c 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -207,10 +207,12 @@ class InstanceEvents(event.Events[ClassManager[Any]]): from sqlalchemy import event + def my_load_listener(target, context): print("on load!") - event.listen(SomeClass, 'load', my_load_listener) + + event.listen(SomeClass, "load", my_load_listener) Available targets include: @@ -466,8 +468,7 @@ def load(self, target: _O, context: QueryContext) -> None: the existing loading context is maintained for the object after the event is called:: - @event.listens_for( - SomeClass, "load", restore_load_context=True) + @event.listens_for(SomeClass, "load", restore_load_context=True) def on_load(instance, context): instance.some_unloaded_attribute @@ -502,7 +503,7 @@ def on_load(instance, context): :meth:`.SessionEvents.loaded_as_persistent` - """ + """ # noqa: E501 def refresh( self, target: _O, context: QueryContext, attrs: Optional[Iterable[str]] @@ -749,6 +750,7 @@ class MapperEvents(event.Events[mapperlib.Mapper[Any]]): from sqlalchemy import event + def my_before_insert_listener(mapper, connection, target): # execute a stored procedure upon INSERT, # apply the value to the row to be inserted @@ -756,10 +758,10 @@ def my_before_insert_listener(mapper, connection, target): text("select my_special_function(%d)" % target.special_number) ).scalar() + # associate the listener function with SomeClass, # to execute during the "before_insert" hook - event.listen( - SomeClass, 'before_insert', my_before_insert_listener) + event.listen(SomeClass, "before_insert", my_before_insert_listener) Available targets include: @@ -925,9 +927,10 @@ class overall, or to any un-mapped class which serves as a base Base = declarative_base() + @event.listens_for(Base, "instrument_class", propagate=True) def on_new_class(mapper, cls_): - " ... " + "..." :param mapper: the :class:`_orm.Mapper` which is the target of this event. @@ -1006,13 +1009,16 @@ def before_mapper_configured( DontConfigureBase = declarative_base() + @event.listens_for( DontConfigureBase, - "before_mapper_configured", retval=True, propagate=True) + "before_mapper_configured", + retval=True, + propagate=True, + ) def dont_configure(mapper, cls): return EXT_SKIP - .. 
seealso:: :meth:`.MapperEvents.before_configured` @@ -1094,9 +1100,9 @@ def before_configured(self) -> None: from sqlalchemy.orm import Mapper + @event.listens_for(Mapper, "before_configured") - def go(): - ... + def go(): ... Contrast this event to :meth:`.MapperEvents.after_configured`, which is invoked after the series of mappers has been configured, @@ -1114,10 +1120,9 @@ def go(): from sqlalchemy.orm import mapper - @event.listens_for(mapper, "before_configured", once=True) - def go(): - ... + @event.listens_for(mapper, "before_configured", once=True) + def go(): ... .. seealso:: @@ -1154,9 +1159,9 @@ def after_configured(self) -> None: from sqlalchemy.orm import Mapper + @event.listens_for(Mapper, "after_configured") - def go(): - # ... + def go(): ... Theoretically this event is called once per application, but is actually called any time new mappers @@ -1168,9 +1173,9 @@ def go(): from sqlalchemy.orm import mapper + @event.listens_for(mapper, "after_configured", once=True) - def go(): - # ... + def go(): ... .. seealso:: @@ -1557,9 +1562,11 @@ class SessionEvents(event.Events[Session]): from sqlalchemy import event from sqlalchemy.orm import sessionmaker + def my_before_commit(session): print("before commit!") + Session = sessionmaker() event.listen(Session, "before_commit", my_before_commit) @@ -1779,7 +1786,7 @@ def after_transaction_create( @event.listens_for(session, "after_transaction_create") def after_transaction_create(session, transaction): if transaction.parent is None: - # work with top-level transaction + ... # work with top-level transaction To detect if the :class:`.SessionTransaction` is a SAVEPOINT, use the :attr:`.SessionTransaction.nested` attribute:: @@ -1787,8 +1794,7 @@ def after_transaction_create(session, transaction): @event.listens_for(session, "after_transaction_create") def after_transaction_create(session, transaction): if transaction.nested: - # work with SAVEPOINT transaction - + ... # work with SAVEPOINT transaction .. seealso:: @@ -1820,7 +1826,7 @@ def after_transaction_end( @event.listens_for(session, "after_transaction_create") def after_transaction_end(session, transaction): if transaction.parent is None: - # work with top-level transaction + ... # work with top-level transaction To detect if the :class:`.SessionTransaction` is a SAVEPOINT, use the :attr:`.SessionTransaction.nested` attribute:: @@ -1828,8 +1834,7 @@ def after_transaction_end(session, transaction): @event.listens_for(session, "after_transaction_create") def after_transaction_end(session, transaction): if transaction.nested: - # work with SAVEPOINT transaction - + ... # work with SAVEPOINT transaction .. 
seealso:: @@ -2455,11 +2460,11 @@ class AttributeEvents(event.Events[QueryableAttribute[Any]]): from sqlalchemy import event - @event.listens_for(MyClass.collection, 'append', propagate=True) + + @event.listens_for(MyClass.collection, "append", propagate=True) def my_append_listener(target, value, initiator): print("received append event for target: %s" % target) - Listeners have the option to return a possibly modified version of the value, when the :paramref:`.AttributeEvents.retval` flag is passed to :func:`.event.listen` or :func:`.event.listens_for`, such as below, @@ -2468,11 +2473,12 @@ def my_append_listener(target, value, initiator): def validate_phone(target, value, oldvalue, initiator): "Strip non-numeric characters from a phone number" - return re.sub(r'\D', '', value) + return re.sub(r"\D", "", value) + # setup listener on UserContact.phone attribute, instructing # it to use the return value - listen(UserContact.phone, 'set', validate_phone, retval=True) + listen(UserContact.phone, "set", validate_phone, retval=True) A validation function like the above can also raise an exception such as :exc:`ValueError` to halt the operation. @@ -2482,7 +2488,7 @@ def validate_phone(target, value, oldvalue, initiator): as when using mapper inheritance patterns:: - @event.listens_for(MySuperClass.attr, 'set', propagate=True) + @event.listens_for(MySuperClass.attr, "set", propagate=True) def receive_set(target, value, initiator): print("value set: %s" % target) @@ -2715,10 +2721,12 @@ def bulk_replace( from sqlalchemy.orm.attributes import OP_BULK_REPLACE + @event.listens_for(SomeObject.collection, "bulk_replace") def process_collection(target, values, initiator): values[:] = [_make_value(value) for value in values] + @event.listens_for(SomeObject.collection, "append", retval=True) def process_collection(target, value, initiator): # make sure bulk_replace didn't already do it @@ -2866,16 +2874,18 @@ def init_scalar( SOME_CONSTANT = 3.1415926 + class MyClass(Base): # ... some_attribute = Column(Numeric, default=SOME_CONSTANT) + @event.listens_for( - MyClass.some_attribute, "init_scalar", - retval=True, propagate=True) + MyClass.some_attribute, "init_scalar", retval=True, propagate=True + ) def _init_some_attribute(target, dict_, value): - dict_['some_attribute'] = SOME_CONSTANT + dict_["some_attribute"] = SOME_CONSTANT return SOME_CONSTANT Above, we initialize the attribute ``MyClass.some_attribute`` to the @@ -2911,9 +2921,10 @@ def _init_some_attribute(target, dict_, value): SOME_CONSTANT = 3.1415926 + @event.listens_for( - MyClass.some_attribute, "init_scalar", - retval=True, propagate=True) + MyClass.some_attribute, "init_scalar", retval=True, propagate=True + ) def _init_some_attribute(target, dict_, value): # will also fire off attribute set events target.some_attribute = SOME_CONSTANT @@ -2950,7 +2961,7 @@ def _init_some_attribute(target, dict_, value): :ref:`examples_instrumentation` - see the ``active_column_defaults.py`` example. 
- """ + """ # noqa: E501 def init_collection( self, @@ -3088,8 +3099,8 @@ def before_compile(self, query: Query[Any]) -> None: @event.listens_for(Query, "before_compile", retval=True) def no_deleted(query): for desc in query.column_descriptions: - if desc['type'] is User: - entity = desc['entity'] + if desc["type"] is User: + entity = desc["entity"] query = query.filter(entity.deleted == False) return query @@ -3105,12 +3116,11 @@ def no_deleted(query): re-establish the query being cached, apply the event adding the ``bake_ok`` flag:: - @event.listens_for( - Query, "before_compile", retval=True, bake_ok=True) + @event.listens_for(Query, "before_compile", retval=True, bake_ok=True) def my_event(query): for desc in query.column_descriptions: - if desc['type'] is User: - entity = desc['entity'] + if desc["type"] is User: + entity = desc["entity"] query = query.filter(entity.deleted == False) return query @@ -3131,7 +3141,7 @@ def my_event(query): :ref:`baked_with_before_compile` - """ + """ # noqa: E501 def before_compile_update( self, query: Query[Any], update_context: BulkUpdate @@ -3151,12 +3161,12 @@ def before_compile_update( @event.listens_for(Query, "before_compile_update", retval=True) def no_deleted(query, update_context): for desc in query.column_descriptions: - if desc['type'] is User: - entity = desc['entity'] + if desc["type"] is User: + entity = desc["entity"] query = query.filter(entity.deleted == False) - update_context.values['timestamp'] = ( - datetime.datetime.now(datetime.UTC) + update_context.values["timestamp"] = datetime.datetime.now( + datetime.UTC ) return query @@ -3185,7 +3195,7 @@ def no_deleted(query, update_context): :meth:`.QueryEvents.before_compile_delete` - """ + """ # noqa: E501 def before_compile_delete( self, query: Query[Any], delete_context: BulkDelete @@ -3204,8 +3214,8 @@ def before_compile_delete( @event.listens_for(Query, "before_compile_delete", retval=True) def no_deleted(query, delete_context): for desc in query.column_descriptions: - if desc['type'] is User: - entity = desc['entity'] + if desc["type"] is User: + entity = desc["entity"] query = query.filter(entity.deleted == False) return query diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index 094053fa040..2b0db34fc11 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -685,27 +685,37 @@ class PropComparator(SQLORMOperations[_T_co], Generic[_T_co], ColumnOperators): # definition of custom PropComparator subclasses - from sqlalchemy.orm.properties import \ - ColumnProperty,\ - Composite,\ - Relationship + from sqlalchemy.orm.properties import ( + ColumnProperty, + Composite, + Relationship, + ) + class MyColumnComparator(ColumnProperty.Comparator): def __eq__(self, other): return self.__clause_element__() == other + class MyRelationshipComparator(Relationship.Comparator): def any(self, expression): "define the 'any' operation" # ... 
+ class MyCompositeComparator(Composite.Comparator): def __gt__(self, other): "redefine the 'greater than' operation" - return sql.and_(*[a>b for a, b in - zip(self.__clause_element__().clauses, - other.__composite_values__())]) + return sql.and_( + *[ + a > b + for a, b in zip( + self.__clause_element__().clauses, + other.__composite_values__(), + ) + ] + ) # application of custom PropComparator subclasses @@ -713,17 +723,22 @@ def __gt__(self, other): from sqlalchemy.orm import column_property, relationship, composite from sqlalchemy import Column, String + class SomeMappedClass(Base): - some_column = column_property(Column("some_column", String), - comparator_factory=MyColumnComparator) + some_column = column_property( + Column("some_column", String), + comparator_factory=MyColumnComparator, + ) - some_relationship = relationship(SomeOtherClass, - comparator_factory=MyRelationshipComparator) + some_relationship = relationship( + SomeOtherClass, comparator_factory=MyRelationshipComparator + ) some_composite = composite( - Column("a", String), Column("b", String), - comparator_factory=MyCompositeComparator - ) + Column("a", String), + Column("b", String), + comparator_factory=MyCompositeComparator, + ) Note that for column-level operator redefinition, it's usually simpler to define the operators at the Core level, using the @@ -865,8 +880,9 @@ def of_type(self, class_: _EntityType[Any]) -> PropComparator[_T_co]: e.g.:: - query.join(Company.employees.of_type(Engineer)).\ - filter(Engineer.name=='foo') + query.join(Company.employees.of_type(Engineer)).filter( + Engineer.name == "foo" + ) :param \class_: a class or mapper indicating that criterion will be against this specific subclass. @@ -892,11 +908,11 @@ def and_( stmt = select(User).join( - User.addresses.and_(Address.email_address != 'foo') + User.addresses.and_(Address.email_address != "foo") ) stmt = select(User).options( - joinedload(User.addresses.and_(Address.email_address != 'foo')) + joinedload(User.addresses.and_(Address.email_address != "foo")) ) .. 
versionadded:: 1.4 diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index 29659e6e8e2..579e053b28b 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -329,7 +329,7 @@ class will overwrite all data within object instances that already class User(Base): __table__ = user_table - __mapper_args__ = {'column_prefix':'_'} + __mapper_args__ = {"column_prefix": "_"} The above mapping will assign the ``user_id``, ``user_name``, and ``password`` columns to attributes named ``_user_id``, @@ -545,14 +545,14 @@ class User(Base): base-most mapped :class:`.Table`:: class Employee(Base): - __tablename__ = 'employee' + __tablename__ = "employee" id: Mapped[int] = mapped_column(primary_key=True) discriminator: Mapped[str] = mapped_column(String(50)) __mapper_args__ = { - "polymorphic_on":discriminator, - "polymorphic_identity":"employee" + "polymorphic_on": discriminator, + "polymorphic_identity": "employee", } It may also be specified @@ -561,17 +561,18 @@ class Employee(Base): approach:: class Employee(Base): - __tablename__ = 'employee' + __tablename__ = "employee" id: Mapped[int] = mapped_column(primary_key=True) discriminator: Mapped[str] = mapped_column(String(50)) __mapper_args__ = { - "polymorphic_on":case( + "polymorphic_on": case( (discriminator == "EN", "engineer"), (discriminator == "MA", "manager"), - else_="employee"), - "polymorphic_identity":"employee" + else_="employee", + ), + "polymorphic_identity": "employee", } It may also refer to any attribute using its string name, @@ -579,14 +580,14 @@ class Employee(Base): configurations:: class Employee(Base): - __tablename__ = 'employee' + __tablename__ = "employee" id: Mapped[int] = mapped_column(primary_key=True) discriminator: Mapped[str] __mapper_args__ = { "polymorphic_on": "discriminator", - "polymorphic_identity": "employee" + "polymorphic_identity": "employee", } When setting ``polymorphic_on`` to reference an @@ -603,6 +604,7 @@ class Employee(Base): from sqlalchemy import event from sqlalchemy.orm import object_mapper + @event.listens_for(Employee, "init", propagate=True) def set_identity(instance, *arg, **kw): mapper = object_mapper(instance) @@ -3259,14 +3261,9 @@ def _equivalent_columns(self) -> _EquivalentColumnMap: The resulting structure is a dictionary of columns mapped to lists of equivalent columns, e.g.:: - { - tablea.col1: - {tableb.col1, tablec.col1}, - tablea.col2: - {tabled.col2} - } + {tablea.col1: {tableb.col1, tablec.col1}, tablea.col2: {tabled.col2}} - """ + """ # noqa: E501 result: _EquivalentColumnMap = {} def visit_binary(binary): @@ -3739,14 +3736,15 @@ def _would_selectin_load_only_from_given_mapper(self, super_mapper): given:: - class A: - ... + class A: ... + class B(A): __mapper_args__ = {"polymorphic_load": "selectin"} - class C(B): - ... + + class C(B): ... + class D(B): __mapper_args__ = {"polymorphic_load": "selectin"} diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index 5c49222be15..b6fb3d43e31 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -280,8 +280,8 @@ class File(Base): name = Column(String(64)) extension = Column(String(8)) - filename = column_property(name + '.' + extension) - path = column_property('C:/' + filename.expression) + filename = column_property(name + "." + extension) + path = column_property("C:/" + filename.expression) .. 
seealso:: diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 5c1a45b5ffa..59cf2f54907 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -672,41 +672,38 @@ def cte( from sqlalchemy.orm import aliased + class Part(Base): - __tablename__ = 'part' + __tablename__ = "part" part = Column(String, primary_key=True) sub_part = Column(String, primary_key=True) quantity = Column(Integer) - included_parts = session.query( - Part.sub_part, - Part.part, - Part.quantity).\ - filter(Part.part=="our part").\ - cte(name="included_parts", recursive=True) + + included_parts = ( + session.query(Part.sub_part, Part.part, Part.quantity) + .filter(Part.part == "our part") + .cte(name="included_parts", recursive=True) + ) incl_alias = aliased(included_parts, name="pr") parts_alias = aliased(Part, name="p") included_parts = included_parts.union_all( session.query( - parts_alias.sub_part, - parts_alias.part, - parts_alias.quantity).\ - filter(parts_alias.part==incl_alias.c.sub_part) - ) + parts_alias.sub_part, parts_alias.part, parts_alias.quantity + ).filter(parts_alias.part == incl_alias.c.sub_part) + ) q = session.query( - included_parts.c.sub_part, - func.sum(included_parts.c.quantity). - label('total_quantity') - ).\ - group_by(included_parts.c.sub_part) + included_parts.c.sub_part, + func.sum(included_parts.c.quantity).label("total_quantity"), + ).group_by(included_parts.c.sub_part) .. seealso:: :meth:`_sql.Select.cte` - v2 equivalent method. - """ + """ # noqa: E501 return ( self.enable_eagerloads(False) ._get_select_statement_only() @@ -941,9 +938,7 @@ def set_label_style(self, style: SelectLabelStyle) -> Self: :attr:`_query.Query.statement` using :meth:`.Session.execute`:: result = session.execute( - query - .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) - .statement + query.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).statement ) .. versionadded:: 1.4 @@ -1052,8 +1047,7 @@ def get(self, ident: _PKIdentityArgument) -> Optional[Any]: some_object = session.query(VersionedFoo).get((5, 10)) - some_object = session.query(VersionedFoo).get( - {"id": 5, "version_id": 10}) + some_object = session.query(VersionedFoo).get({"id": 5, "version_id": 10}) :meth:`_query.Query.get` is special in that it provides direct access to the identity map of the owning :class:`.Session`. @@ -1119,7 +1113,7 @@ def get(self, ident: _PKIdentityArgument) -> Optional[Any]: :return: The object instance, or ``None``. - """ + """ # noqa: E501 self._no_criterion_assertion("get", order_by=False, distinct=False) # we still implement _get_impl() so that baked query can override @@ -1563,19 +1557,22 @@ def with_entities( # Users, filtered on some arbitrary criterion # and then ordered by related email address - q = session.query(User).\ - join(User.address).\ - filter(User.name.like('%ed%')).\ - order_by(Address.email) + q = ( + session.query(User) + .join(User.address) + .filter(User.name.like("%ed%")) + .order_by(Address.email) + ) # given *only* User.id==5, Address.email, and 'q', what # would the *next* User in the result be ? - subq = q.with_entities(Address.email).\ - order_by(None).\ - filter(User.id==5).\ - subquery() - q = q.join((subq, subq.c.email < Address.email)).\ - limit(1) + subq = ( + q.with_entities(Address.email) + .order_by(None) + .filter(User.id == 5) + .subquery() + ) + q = q.join((subq, subq.c.email < Address.email)).limit(1) .. 
seealso:: @@ -1671,9 +1668,11 @@ def with_transformation( def filter_something(criterion): def transform(q): return q.filter(criterion) + return transform - q = q.with_transformation(filter_something(x==5)) + + q = q.with_transformation(filter_something(x == 5)) This allows ad-hoc recipes to be created for :class:`_query.Query` objects. @@ -1790,9 +1789,15 @@ def with_for_update( E.g.:: - q = sess.query(User).populate_existing().with_for_update(nowait=True, of=User) + q = ( + sess.query(User) + .populate_existing() + .with_for_update(nowait=True, of=User) + ) + + The above query on a PostgreSQL backend will render like: - The above query on a PostgreSQL backend will render like:: + .. sourcecode:: sql SELECT users.id AS users_id FROM users FOR UPDATE OF users NOWAIT @@ -1870,14 +1875,13 @@ def filter(self, *criterion: _ColumnExpressionArgument[bool]) -> Self: e.g.:: - session.query(MyClass).filter(MyClass.name == 'some name') + session.query(MyClass).filter(MyClass.name == "some name") Multiple criteria may be specified as comma separated; the effect is that they will be joined together using the :func:`.and_` function:: - session.query(MyClass).\ - filter(MyClass.name == 'some name', MyClass.id > 5) + session.query(MyClass).filter(MyClass.name == "some name", MyClass.id > 5) The criterion is any SQL expression object applicable to the WHERE clause of a select. String expressions are coerced @@ -1890,7 +1894,7 @@ def filter(self, *criterion: _ColumnExpressionArgument[bool]) -> Self: :meth:`_sql.Select.where` - v2 equivalent method. - """ + """ # noqa: E501 for crit in list(criterion): crit = coercions.expect( roles.WhereHavingRole, crit, apply_propagate_attrs=self @@ -1958,14 +1962,13 @@ def filter_by(self, **kwargs: Any) -> Self: e.g.:: - session.query(MyClass).filter_by(name = 'some name') + session.query(MyClass).filter_by(name="some name") Multiple criteria may be specified as comma separated; the effect is that they will be joined together using the :func:`.and_` function:: - session.query(MyClass).\ - filter_by(name = 'some name', id = 5) + session.query(MyClass).filter_by(name="some name", id=5) The keyword expressions are extracted from the primary entity of the query, or the last entity that was the @@ -2092,10 +2095,12 @@ def having(self, *having: _ColumnExpressionArgument[bool]) -> Self: HAVING criterion makes it possible to use filters on aggregate functions like COUNT, SUM, AVG, MAX, and MIN, eg.:: - q = session.query(User.id).\ - join(User.addresses).\ - group_by(User.id).\ - having(func.count(Address.id) > 2) + q = ( + session.query(User.id) + .join(User.addresses) + .group_by(User.id) + .having(func.count(Address.id) > 2) + ) .. seealso:: @@ -2119,8 +2124,8 @@ def union(self, *q: Query[Any]) -> Self: e.g.:: - q1 = sess.query(SomeClass).filter(SomeClass.foo=='bar') - q2 = sess.query(SomeClass).filter(SomeClass.bar=='foo') + q1 = sess.query(SomeClass).filter(SomeClass.foo == "bar") + q2 = sess.query(SomeClass).filter(SomeClass.bar == "foo") q3 = q1.union(q2) @@ -2129,7 +2134,9 @@ def union(self, *q: Query[Any]) -> Self: x.union(y).union(z).all() - will nest on each ``union()``, and produces:: + will nest on each ``union()``, and produces: + + .. sourcecode:: sql SELECT * FROM (SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y) UNION SELECT * FROM Z) @@ -2138,7 +2145,9 @@ def union(self, *q: Query[Any]) -> Self: x.union(y, z).all() - produces:: + produces: + + .. 
sourcecode:: sql SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y UNION SELECT * FROM Z) @@ -2250,7 +2259,9 @@ def join( q = session.query(User).join(User.addresses) Where above, the call to :meth:`_query.Query.join` along - ``User.addresses`` will result in SQL approximately equivalent to:: + ``User.addresses`` will result in SQL approximately equivalent to: + + .. sourcecode:: sql SELECT user.id, user.name FROM user JOIN address ON user.id = address.user_id @@ -2263,10 +2274,12 @@ def join( calls may be used. The relationship-bound attribute implies both the left and right side of the join at once:: - q = session.query(User).\ - join(User.orders).\ - join(Order.items).\ - join(Item.keywords) + q = ( + session.query(User) + .join(User.orders) + .join(Order.items) + .join(Item.keywords) + ) .. note:: as seen in the above example, **the order in which each call to the join() method occurs is important**. Query would not, @@ -2305,7 +2318,7 @@ def join( as the ON clause to be passed explicitly. A example that includes a SQL expression as the ON clause is as follows:: - q = session.query(User).join(Address, User.id==Address.user_id) + q = session.query(User).join(Address, User.id == Address.user_id) The above form may also use a relationship-bound attribute as the ON clause as well:: @@ -2320,11 +2333,13 @@ def join( a1 = aliased(Address) a2 = aliased(Address) - q = session.query(User).\ - join(a1, User.addresses).\ - join(a2, User.addresses).\ - filter(a1.email_address=='ed@foo.com').\ - filter(a2.email_address=='ed@bar.com') + q = ( + session.query(User) + .join(a1, User.addresses) + .join(a2, User.addresses) + .filter(a1.email_address == "ed@foo.com") + .filter(a2.email_address == "ed@bar.com") + ) The relationship-bound calling form can also specify a target entity using the :meth:`_orm.PropComparator.of_type` method; a query @@ -2333,11 +2348,13 @@ def join( a1 = aliased(Address) a2 = aliased(Address) - q = session.query(User).\ - join(User.addresses.of_type(a1)).\ - join(User.addresses.of_type(a2)).\ - filter(a1.email_address == 'ed@foo.com').\ - filter(a2.email_address == 'ed@bar.com') + q = ( + session.query(User) + .join(User.addresses.of_type(a1)) + .join(User.addresses.of_type(a2)) + .filter(a1.email_address == "ed@foo.com") + .filter(a2.email_address == "ed@bar.com") + ) **Augmenting Built-in ON Clauses** @@ -2348,7 +2365,7 @@ def join( with the default criteria using AND:: q = session.query(User).join( - User.addresses.and_(Address.email_address != 'foo@bar.com') + User.addresses.and_(Address.email_address != "foo@bar.com") ) .. 
versionadded:: 1.4 @@ -2361,29 +2378,28 @@ def join( appropriate ``.subquery()`` method in order to make a subquery out of a query:: - subq = session.query(Address).\ - filter(Address.email_address == 'ed@foo.com').\ - subquery() + subq = ( + session.query(Address) + .filter(Address.email_address == "ed@foo.com") + .subquery() + ) - q = session.query(User).join( - subq, User.id == subq.c.user_id - ) + q = session.query(User).join(subq, User.id == subq.c.user_id) Joining to a subquery in terms of a specific relationship and/or target entity may be achieved by linking the subquery to the entity using :func:`_orm.aliased`:: - subq = session.query(Address).\ - filter(Address.email_address == 'ed@foo.com').\ - subquery() + subq = ( + session.query(Address) + .filter(Address.email_address == "ed@foo.com") + .subquery() + ) address_subq = aliased(Address, subq) - q = session.query(User).join( - User.addresses.of_type(address_subq) - ) - + q = session.query(User).join(User.addresses.of_type(address_subq)) **Controlling what to Join From** @@ -2391,11 +2407,16 @@ def join( :class:`_query.Query` is not in line with what we want to join from, the :meth:`_query.Query.select_from` method may be used:: - q = session.query(Address).select_from(User).\ - join(User.addresses).\ - filter(User.name == 'ed') + q = ( + session.query(Address) + .select_from(User) + .join(User.addresses) + .filter(User.name == "ed") + ) + + Which will produce SQL similar to: - Which will produce SQL similar to:: + .. sourcecode:: sql SELECT address.* FROM user JOIN address ON user.id=address.user_id @@ -2499,11 +2520,16 @@ def select_from(self, *from_obj: _FromClauseArgument) -> Self: A typical example:: - q = session.query(Address).select_from(User).\ - join(User.addresses).\ - filter(User.name == 'ed') + q = ( + session.query(Address) + .select_from(User) + .join(User.addresses) + .filter(User.name == "ed") + ) - Which produces SQL equivalent to:: + Which produces SQL equivalent to: + + .. sourcecode:: sql SELECT address.* FROM user JOIN address ON user.id=address.user_id @@ -2866,7 +2892,7 @@ def column_descriptions(self) -> List[ORMColumnDescription]: Format is a list of dictionaries:: - user_alias = aliased(User, name='user2') + user_alias = aliased(User, name="user2") q = sess.query(User, User.id, user_alias) # this expression: @@ -2875,26 +2901,26 @@ def column_descriptions(self) -> List[ORMColumnDescription]: # would return: [ { - 'name':'User', - 'type':User, - 'aliased':False, - 'expr':User, - 'entity': User + "name": "User", + "type": User, + "aliased": False, + "expr": User, + "entity": User, }, { - 'name':'id', - 'type':Integer(), - 'aliased':False, - 'expr':User.id, - 'entity': User + "name": "id", + "type": Integer(), + "aliased": False, + "expr": User.id, + "entity": User, }, { - 'name':'user2', - 'type':User, - 'aliased':True, - 'expr':user_alias, - 'entity': user_alias - } + "name": "user2", + "type": User, + "aliased": True, + "expr": user_alias, + "entity": user_alias, + }, ] .. seealso:: @@ -3003,10 +3029,12 @@ def exists(self) -> Exists: e.g.:: - q = session.query(User).filter(User.name == 'fred') + q = session.query(User).filter(User.name == "fred") session.query(q.exists()) - Producing SQL similar to:: + Producing SQL similar to: + + .. sourcecode:: sql SELECT EXISTS ( SELECT 1 FROM users WHERE users.name = :name_1 @@ -3055,7 +3083,9 @@ def count(self) -> int: r"""Return a count of rows this the SQL formed by this :class:`Query` would return. 
- This generates the SQL for this Query as follows:: + This generates the SQL for this Query as follows: + + .. sourcecode:: sql SELECT count(1) AS count_1 FROM ( SELECT @@ -3095,8 +3125,7 @@ def count(self) -> int: # return count of user "id" grouped # by "name" - session.query(func.count(User.id)).\ - group_by(User.name) + session.query(func.count(User.id)).group_by(User.name) from sqlalchemy import distinct @@ -3122,11 +3151,11 @@ def delete( E.g.:: - sess.query(User).filter(User.age == 25).\ - delete(synchronize_session=False) + sess.query(User).filter(User.age == 25).delete(synchronize_session=False) - sess.query(User).filter(User.age == 25).\ - delete(synchronize_session='evaluate') + sess.query(User).filter(User.age == 25).delete( + synchronize_session="evaluate" + ) .. warning:: @@ -3146,7 +3175,7 @@ def delete( :ref:`orm_expression_update_delete` - """ + """ # noqa: E501 bulk_del = BulkDelete(self) if self.dispatch.before_compile_delete: @@ -3184,11 +3213,13 @@ def update( E.g.:: - sess.query(User).filter(User.age == 25).\ - update({User.age: User.age - 10}, synchronize_session=False) + sess.query(User).filter(User.age == 25).update( + {User.age: User.age - 10}, synchronize_session=False + ) - sess.query(User).filter(User.age == 25).\ - update({"age": User.age - 10}, synchronize_session='evaluate') + sess.query(User).filter(User.age == 25).update( + {"age": User.age - 10}, synchronize_session="evaluate" + ) .. warning:: diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index b5e33ffdbb9..3cdaec81110 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -709,12 +709,16 @@ def in_(self, other: Any) -> NoReturn: def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 """Implement the ``==`` operator. - In a many-to-one context, such as:: + In a many-to-one context, such as: + + .. sourcecode:: text MyClass.some_prop == this will typically produce a - clause such as:: + clause such as: + + .. sourcecode:: text mytable.related_id == @@ -877,11 +881,12 @@ def any( An expression like:: session.query(MyClass).filter( - MyClass.somereference.any(SomeRelated.x==2) + MyClass.somereference.any(SomeRelated.x == 2) ) + Will produce a query like: - Will produce a query like:: + .. sourcecode:: sql SELECT * FROM my_table WHERE EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id @@ -895,11 +900,11 @@ def any( :meth:`~.Relationship.Comparator.any` is particularly useful for testing for empty collections:: - session.query(MyClass).filter( - ~MyClass.somereference.any() - ) + session.query(MyClass).filter(~MyClass.somereference.any()) + + will produce: - will produce:: + .. sourcecode:: sql SELECT * FROM my_table WHERE NOT (EXISTS (SELECT 1 FROM related WHERE @@ -930,11 +935,12 @@ def has( An expression like:: session.query(MyClass).filter( - MyClass.somereference.has(SomeRelated.x==2) + MyClass.somereference.has(SomeRelated.x == 2) ) + Will produce a query like: - Will produce a query like:: + .. sourcecode:: sql SELECT * FROM my_table WHERE EXISTS (SELECT 1 FROM related WHERE @@ -973,7 +979,9 @@ def contains( MyClass.contains(other) - Produces a clause like:: + Produces a clause like: + + .. sourcecode:: sql mytable.id == @@ -993,7 +1001,9 @@ def contains( query(MyClass).filter(MyClass.contains(other)) - Produces a query like:: + Produces a query like: + + .. 
sourcecode:: sql SELECT * FROM my_table, my_association_table AS my_association_table_1 WHERE @@ -1089,11 +1099,15 @@ def adapt(col: _CE) -> _CE: def __ne__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501 """Implement the ``!=`` operator. - In a many-to-one context, such as:: + In a many-to-one context, such as: + + .. sourcecode:: text MyClass.some_prop != - This will typically produce a clause such as:: + This will typically produce a clause such as: + + .. sourcecode:: sql mytable.related_id != diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py index 283f4a0221f..88a1aad1e5a 100644 --- a/lib/sqlalchemy/orm/scoping.py +++ b/lib/sqlalchemy/orm/scoping.py @@ -280,11 +280,13 @@ def query_property( Session = scoped_session(sessionmaker()) + class MyClass: query: QueryPropertyDescriptor = Session.query_property() + # after mappers are defined - result = MyClass.query.filter(MyClass.name=='foo').all() + result = MyClass.query.filter(MyClass.name == "foo").all() Produces instances of the session's configured query class by default. To override and use a custom implementation, provide @@ -730,9 +732,8 @@ def execute( E.g.:: from sqlalchemy import select - result = session.execute( - select(User).where(User.id == 5) - ) + + result = session.execute(select(User).where(User.id == 5)) The API contract of :meth:`_orm.Session.execute` is similar to that of :meth:`_engine.Connection.execute`, the :term:`2.0 style` version @@ -962,10 +963,7 @@ def get( some_object = session.get(VersionedFoo, (5, 10)) - some_object = session.get( - VersionedFoo, - {"id": 5, "version_id": 10} - ) + some_object = session.get(VersionedFoo, {"id": 5, "version_id": 10}) .. versionadded:: 1.4 Added :meth:`_orm.Session.get`, which is moved from the now legacy :meth:`_orm.Query.get` method. diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index f18299b3eaf..f8ce45100ed 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -1569,12 +1569,16 @@ def __init__( operation. The complete heuristics for resolution are described at :meth:`.Session.get_bind`. Usage looks like:: - Session = sessionmaker(binds={ - SomeMappedClass: create_engine('postgresql+psycopg2://engine1'), - SomeDeclarativeBase: create_engine('postgresql+psycopg2://engine2'), - some_mapper: create_engine('postgresql+psycopg2://engine3'), - some_table: create_engine('postgresql+psycopg2://engine4'), - }) + Session = sessionmaker( + binds={ + SomeMappedClass: create_engine("postgresql+psycopg2://engine1"), + SomeDeclarativeBase: create_engine( + "postgresql+psycopg2://engine2" + ), + some_mapper: create_engine("postgresql+psycopg2://engine3"), + some_table: create_engine("postgresql+psycopg2://engine4"), + } + ) .. 
seealso:: @@ -2316,9 +2320,8 @@ def execute( E.g.:: from sqlalchemy import select - result = session.execute( - select(User).where(User.id == 5) - ) + + result = session.execute(select(User).where(User.id == 5)) The API contract of :meth:`_orm.Session.execute` is similar to that of :meth:`_engine.Connection.execute`, the :term:`2.0 style` version @@ -2970,7 +2973,7 @@ def _identity_lookup( e.g.:: - obj = session._identity_lookup(inspect(SomeClass), (1, )) + obj = session._identity_lookup(inspect(SomeClass), (1,)) :param mapper: mapper in use :param primary_key_identity: the primary key we are searching for, as @@ -3598,10 +3601,7 @@ def get( some_object = session.get(VersionedFoo, (5, 10)) - some_object = session.get( - VersionedFoo, - {"id": 5, "version_id": 10} - ) + some_object = session.get(VersionedFoo, {"id": 5, "version_id": 10}) .. versionadded:: 1.4 Added :meth:`_orm.Session.get`, which is moved from the now legacy :meth:`_orm.Query.get` method. @@ -3690,7 +3690,7 @@ def get( :return: The object instance, or ``None``. - """ + """ # noqa: E501 return self._get_impl( entity, ident, @@ -4943,7 +4943,7 @@ class sessionmaker(_SessionClassMethods, Generic[_S]): # an Engine, which the Session will use for connection # resources - engine = create_engine('postgresql+psycopg2://scott:tiger@localhost/') + engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/") Session = sessionmaker(engine) @@ -4996,7 +4996,7 @@ class sessionmaker(_SessionClassMethods, Generic[_S]): with engine.connect() as connection: with Session(bind=connection) as session: - # work with session + ... # work with session The class also includes a method :meth:`_orm.sessionmaker.configure`, which can be used to specify additional keyword arguments to the factory, which @@ -5011,7 +5011,7 @@ class sessionmaker(_SessionClassMethods, Generic[_S]): # ... 
later, when an engine URL is read from a configuration # file or other events allow the engine to be created - engine = create_engine('sqlite:///foo.db') + engine = create_engine("sqlite:///foo.db") Session.configure(bind=engine) sess = Session() @@ -5149,7 +5149,7 @@ def configure(self, **new_kw: Any) -> None: Session = sessionmaker() - Session.configure(bind=create_engine('sqlite://')) + Session.configure(bind=create_engine("sqlite://")) """ self.kw.update(new_kw) diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index b4bfea14726..af42f7c9923 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -109,9 +109,7 @@ def contains_eager( The option is used in conjunction with an explicit join that loads the desired rows, i.e.:: - sess.query(Order).join(Order.user).options( - contains_eager(Order.user) - ) + sess.query(Order).join(Order.user).options(contains_eager(Order.user)) The above query would join from the ``Order`` entity to its related ``User`` entity, and the returned ``Order`` objects would have the @@ -257,15 +255,11 @@ def joinedload( select(User).options(joinedload(User.orders)) # joined-load Order.items and then Item.keywords - select(Order).options( - joinedload(Order.items).joinedload(Item.keywords) - ) + select(Order).options(joinedload(Order.items).joinedload(Item.keywords)) # lazily load Order.items, but when Items are loaded, # joined-load the keywords collection - select(Order).options( - lazyload(Order.items).joinedload(Item.keywords) - ) + select(Order).options(lazyload(Order.items).joinedload(Item.keywords)) :param innerjoin: if ``True``, indicates that the joined eager load should use an inner join instead of the default of left outer join:: @@ -276,9 +270,7 @@ def joinedload( OUTER and others INNER, right-nested joins are used to link them:: select(A).options( - joinedload(A.bs, innerjoin=False).joinedload( - B.cs, innerjoin=True - ) + joinedload(A.bs, innerjoin=False).joinedload(B.cs, innerjoin=True) ) The above query, linking A.bs via "outer" join and B.cs via "inner" @@ -293,10 +285,7 @@ def joinedload( will render as LEFT OUTER JOIN. For example, supposing ``A.bs`` is an outerjoin:: - select(A).options( - joinedload(A.bs).joinedload(B.cs, innerjoin="unnested") - ) - + select(A).options(joinedload(A.bs).joinedload(B.cs, innerjoin="unnested")) The above join will render as "a LEFT OUTER JOIN b LEFT OUTER JOIN c", rather than as "a LEFT OUTER JOIN (b JOIN c)". @@ -326,7 +315,7 @@ def joinedload( :ref:`joined_eager_loading` - """ + """ # noqa: E501 loader = self._set_relationship_strategy( attr, {"lazy": "joined"}, @@ -357,10 +346,7 @@ def subqueryload(self, attr: _AttrType) -> Self: # lazily load Order.items, but when Items are loaded, # subquery-load the keywords collection - select(Order).options( - lazyload(Order.items).subqueryload(Item.keywords) - ) - + select(Order).options(lazyload(Order.items).subqueryload(Item.keywords)) .. 
seealso:: @@ -394,9 +380,7 @@ def selectinload( # lazily load Order.items, but when Items are loaded, # selectin-load the keywords collection - select(Order).options( - lazyload(Order.items).selectinload(Item.keywords) - ) + select(Order).options(lazyload(Order.items).selectinload(Item.keywords)) :param recursion_depth: optional int; when set to a positive integer in conjunction with a self-referential relationship, @@ -609,8 +593,7 @@ def defer(self, key: _AttrType, raiseload: bool = False) -> Self: from sqlalchemy.orm import defer session.query(MyClass).options( - defer(MyClass.attribute_one), - defer(MyClass.attribute_two) + defer(MyClass.attribute_one), defer(MyClass.attribute_two) ) To specify a deferred load of an attribute on a related class, @@ -630,7 +613,7 @@ def defer(self, key: _AttrType, raiseload: bool = False) -> Self: defaultload(MyClass.someattr).options( defer(RelatedClass.some_column), defer(RelatedClass.some_other_column), - defer(RelatedClass.another_column) + defer(RelatedClass.another_column), ) ) @@ -676,14 +659,10 @@ def undefer(self, key: _AttrType) -> Self: ) # undefer all columns specific to a single class using Load + * - session.query(MyClass, MyOtherClass).options( - Load(MyClass).undefer("*") - ) + session.query(MyClass, MyOtherClass).options(Load(MyClass).undefer("*")) # undefer a column on a related object - select(MyClass).options( - defaultload(MyClass.items).undefer(MyClass.text) - ) + select(MyClass).options(defaultload(MyClass.items).undefer(MyClass.text)) :param key: Attribute to be undeferred. @@ -696,7 +675,7 @@ def undefer(self, key: _AttrType) -> Self: :func:`_orm.undefer_group` - """ + """ # noqa: E501 return self._set_column_strategy( (key,), {"deferred": False, "instrument": True} ) @@ -1218,13 +1197,11 @@ def options(self, *opts: _AbstractLoad) -> Self: query = session.query(Author) query = query.options( - joinedload(Author.book).options( - load_only(Book.summary, Book.excerpt), - joinedload(Book.citations).options( - joinedload(Citation.author) - ) - ) - ) + joinedload(Author.book).options( + load_only(Book.summary, Book.excerpt), + joinedload(Book.citations).options(joinedload(Citation.author)), + ) + ) :param \*opts: A series of loader option objects (ultimately :class:`_orm.Load` objects) which should be applied to the path @@ -1668,13 +1645,17 @@ def _adjust_effective_path_for_current_path( loads, and adjusts the given path to be relative to the current_path. - E.g. given a loader path and current path:: + E.g. given a loader path and current path: + + .. sourcecode:: text lp: User -> orders -> Order -> items -> Item -> keywords -> Keyword cp: User -> orders -> Order -> items - The adjusted path would be:: + The adjusted path would be: + + .. 
sourcecode:: text Item -> keywords -> Keyword @@ -2155,11 +2136,11 @@ class _TokenStrategyLoad(_LoadElement): e.g.:: - raiseload('*') - Load(User).lazyload('*') - defer('*') + raiseload("*") + Load(User).lazyload("*") + defer("*") load_only(User.name, User.email) # will create a defer('*') - joinedload(User.addresses).raiseload('*') + joinedload(User.addresses).raiseload("*") """ diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 69556751a76..dbfa6d5f1b8 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -475,9 +475,7 @@ def identity_key( E.g.:: - >>> row = engine.execute(\ - text("select * from table where a=1 and b=2")\ - ).first() + >>> row = engine.execute(text("select * from table where a=1 and b=2")).first() >>> identity_key(MyClass, row=row) (, (1, 2), None) @@ -488,7 +486,7 @@ def identity_key( .. versionadded:: 1.2 added identity_token - """ + """ # noqa: E501 if class_ is not None: mapper = class_mapper(class_) if row is None: @@ -666,9 +664,9 @@ class AliasedClass( # find all pairs of users with the same name user_alias = aliased(User) - session.query(User, user_alias).\ - join((user_alias, User.id > user_alias.id)).\ - filter(User.name == user_alias.name) + session.query(User, user_alias).join( + (user_alias, User.id > user_alias.id) + ).filter(User.name == user_alias.name) :class:`.AliasedClass` is also capable of mapping an existing mapped class to an entirely new selectable, provided this selectable is column- @@ -692,6 +690,7 @@ class to an entirely new selectable, provided this selectable is column- using :func:`_sa.inspect`:: from sqlalchemy import inspect + my_alias = aliased(MyClass) insp = inspect(my_alias) @@ -1601,8 +1600,7 @@ def __init__( bn = Bundle("mybundle", MyClass.x, MyClass.y) - for row in session.query(bn).filter( - bn.c.x == 5).filter(bn.c.y == 4): + for row in session.query(bn).filter(bn.c.x == 5).filter(bn.c.y == 4): print(row.mybundle.x, row.mybundle.y) :param name: name of the bundle. @@ -1611,7 +1609,7 @@ def __init__( can be returned as a "single entity" outside of any enclosing tuple in the same manner as a mapped entity. - """ + """ # noqa: E501 self.name = self._label = name coerced_exprs = [ coercions.expect( @@ -1666,19 +1664,19 @@ def entity_namespace( Nesting of bundles is also supported:: - b1 = Bundle("b1", - Bundle('b2', MyClass.a, MyClass.b), - Bundle('b3', MyClass.x, MyClass.y) - ) + b1 = Bundle( + "b1", + Bundle("b2", MyClass.a, MyClass.b), + Bundle("b3", MyClass.x, MyClass.y), + ) - q = sess.query(b1).filter( - b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9) + q = sess.query(b1).filter(b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9) .. 
seealso:: :attr:`.Bundle.c` - """ + """ # noqa: E501 c: ReadOnlyColumnCollection[str, KeyedColumnElement[Any]] """An alias for :attr:`.Bundle.columns`.""" @@ -1744,25 +1742,24 @@ def create_row_processor( from sqlalchemy.orm import Bundle + class DictBundle(Bundle): def create_row_processor(self, query, procs, labels): - 'Override create_row_processor to return values as - dictionaries' + "Override create_row_processor to return values as dictionaries" def proc(row): - return dict( - zip(labels, (proc(row) for proc in procs)) - ) + return dict(zip(labels, (proc(row) for proc in procs))) + return proc A result from the above :class:`_orm.Bundle` will return dictionary values:: - bn = DictBundle('mybundle', MyClass.data1, MyClass.data2) - for row in session.execute(select(bn)).where(bn.c.data1 == 'd1'): - print(row.mybundle['data1'], row.mybundle['data2']) + bn = DictBundle("mybundle", MyClass.data1, MyClass.data2) + for row in session.execute(select(bn)).where(bn.c.data1 == "d1"): + print(row.mybundle["data1"], row.mybundle["data2"]) - """ + """ # noqa: E501 keyed_tuple = result_tuple(labels, [() for l in labels]) def proc(row: Row[Any]) -> Any: @@ -1988,7 +1985,6 @@ def with_parent( stmt = select(Address).where(with_parent(some_user, User.addresses)) - The SQL rendered is the same as that rendered when a lazy loader would fire off from the given parent on that attribute, meaning that the appropriate state is taken from the parent object in @@ -2001,9 +1997,7 @@ def with_parent( a1 = aliased(Address) a2 = aliased(Address) - stmt = select(a1, a2).where( - with_parent(u1, User.addresses.of_type(a2)) - ) + stmt = select(a1, a2).where(with_parent(u1, User.addresses.of_type(a2))) The above use is equivalent to using the :func:`_orm.with_parent.from_entity` argument:: @@ -2028,7 +2022,7 @@ def with_parent( .. 
versionadded:: 1.2 - """ + """ # noqa: E501 prop_t: RelationshipProperty[Any] if isinstance(prop, str): @@ -2122,14 +2116,13 @@ def _entity_corresponds_to_use_path_impl( someoption(A).someoption(C.d) # -> fn(A, C) -> False a1 = aliased(A) - someoption(a1).someoption(A.b) # -> fn(a1, A) -> False - someoption(a1).someoption(a1.b) # -> fn(a1, a1) -> True + someoption(a1).someoption(A.b) # -> fn(a1, A) -> False + someoption(a1).someoption(a1.b) # -> fn(a1, a1) -> True wp = with_polymorphic(A, [A1, A2]) someoption(wp).someoption(A1.foo) # -> fn(wp, A1) -> False someoption(wp).someoption(wp.A1.foo) # -> fn(wp, wp.A1) -> True - """ if insp_is_aliased_class(given): return ( diff --git a/lib/sqlalchemy/pool/events.py b/lib/sqlalchemy/pool/events.py index 4b4f4e47851..b54fad125b1 100644 --- a/lib/sqlalchemy/pool/events.py +++ b/lib/sqlalchemy/pool/events.py @@ -35,10 +35,12 @@ class PoolEvents(event.Events[Pool]): from sqlalchemy import event + def my_on_checkout(dbapi_conn, connection_rec, connection_proxy): "handle an on checkout event" - event.listen(Pool, 'checkout', my_on_checkout) + + event.listen(Pool, "checkout", my_on_checkout) In addition to accepting the :class:`_pool.Pool` class and :class:`_pool.Pool` instances, :class:`_events.PoolEvents` also accepts @@ -49,7 +51,7 @@ def my_on_checkout(dbapi_conn, connection_rec, connection_proxy): engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test") # will associate with engine.pool - event.listen(engine, 'checkout', my_on_checkout) + event.listen(engine, "checkout", my_on_checkout) """ # noqa: E501 diff --git a/lib/sqlalchemy/sql/_dml_constructors.py b/lib/sqlalchemy/sql/_dml_constructors.py index a7ead521f86..3afe70e3afc 100644 --- a/lib/sqlalchemy/sql/_dml_constructors.py +++ b/lib/sqlalchemy/sql/_dml_constructors.py @@ -24,10 +24,7 @@ def insert(table: _DMLTableArgument) -> Insert: from sqlalchemy import insert - stmt = ( - insert(user_table). - values(name='username', fullname='Full Username') - ) + stmt = insert(user_table).values(name="username", fullname="Full Username") Similar functionality is available via the :meth:`_expression.TableClause.insert` method on @@ -78,7 +75,7 @@ def insert(table: _DMLTableArgument) -> Insert: :ref:`tutorial_core_insert` - in the :ref:`unified_tutorial` - """ + """ # noqa: E501 return Insert(table) @@ -90,9 +87,7 @@ def update(table: _DMLTableArgument) -> Update: from sqlalchemy import update stmt = ( - update(user_table). - where(user_table.c.id == 5). - values(name='user #5') + update(user_table).where(user_table.c.id == 5).values(name="user #5") ) Similar functionality is available via the @@ -109,7 +104,7 @@ def update(table: _DMLTableArgument) -> Update: :ref:`tutorial_core_update_delete` - in the :ref:`unified_tutorial` - """ + """ # noqa: E501 return Update(table) @@ -120,10 +115,7 @@ def delete(table: _DMLTableArgument) -> Delete: from sqlalchemy import delete - stmt = ( - delete(user_table). 
- where(user_table.c.id == 5) - ) + stmt = delete(user_table).where(user_table.c.id == 5) Similar functionality is available via the :meth:`_expression.TableClause.delete` method on diff --git a/lib/sqlalchemy/sql/_elements_constructors.py b/lib/sqlalchemy/sql/_elements_constructors.py index 55e92dd0c4f..121386781e9 100644 --- a/lib/sqlalchemy/sql/_elements_constructors.py +++ b/lib/sqlalchemy/sql/_elements_constructors.py @@ -125,11 +125,8 @@ def and_( # type: ignore[empty-body] from sqlalchemy import and_ stmt = select(users_table).where( - and_( - users_table.c.name == 'wendy', - users_table.c.enrolled == True - ) - ) + and_(users_table.c.name == "wendy", users_table.c.enrolled == True) + ) The :func:`.and_` conjunction is also available using the Python ``&`` operator (though note that compound expressions @@ -137,9 +134,8 @@ def and_( # type: ignore[empty-body] operator precedence behavior):: stmt = select(users_table).where( - (users_table.c.name == 'wendy') & - (users_table.c.enrolled == True) - ) + (users_table.c.name == "wendy") & (users_table.c.enrolled == True) + ) The :func:`.and_` operation is also implicit in some cases; the :meth:`_expression.Select.where` @@ -147,9 +143,11 @@ def and_( # type: ignore[empty-body] times against a statement, which will have the effect of each clause being combined using :func:`.and_`:: - stmt = select(users_table).\ - where(users_table.c.name == 'wendy').\ - where(users_table.c.enrolled == True) + stmt = ( + select(users_table) + .where(users_table.c.name == "wendy") + .where(users_table.c.enrolled == True) + ) The :func:`.and_` construct must be given at least one positional argument in order to be valid; a :func:`.and_` construct with no @@ -159,6 +157,7 @@ def and_( # type: ignore[empty-body] specified:: from sqlalchemy import true + criteria = and_(true(), *expressions) The above expression will compile to SQL as the expression ``true`` @@ -190,11 +189,8 @@ def and_(*clauses): # noqa: F811 from sqlalchemy import and_ stmt = select(users_table).where( - and_( - users_table.c.name == 'wendy', - users_table.c.enrolled == True - ) - ) + and_(users_table.c.name == "wendy", users_table.c.enrolled == True) + ) The :func:`.and_` conjunction is also available using the Python ``&`` operator (though note that compound expressions @@ -202,9 +198,8 @@ def and_(*clauses): # noqa: F811 operator precedence behavior):: stmt = select(users_table).where( - (users_table.c.name == 'wendy') & - (users_table.c.enrolled == True) - ) + (users_table.c.name == "wendy") & (users_table.c.enrolled == True) + ) The :func:`.and_` operation is also implicit in some cases; the :meth:`_expression.Select.where` @@ -212,9 +207,11 @@ def and_(*clauses): # noqa: F811 times against a statement, which will have the effect of each clause being combined using :func:`.and_`:: - stmt = select(users_table).\ - where(users_table.c.name == 'wendy').\ - where(users_table.c.enrolled == True) + stmt = ( + select(users_table) + .where(users_table.c.name == "wendy") + .where(users_table.c.enrolled == True) + ) The :func:`.and_` construct must be given at least one positional argument in order to be valid; a :func:`.and_` construct with no @@ -224,6 +221,7 @@ def and_(*clauses): # noqa: F811 specified:: from sqlalchemy import true + criteria = and_(true(), *expressions) The above expression will compile to SQL as the expression ``true`` @@ -241,7 +239,7 @@ def and_(*clauses): # noqa: F811 :func:`.or_` - """ + """ # noqa: E501 return BooleanClauseList.and_(*clauses) @@ -307,9 +305,12 @@ def 
asc( e.g.:: from sqlalchemy import asc + stmt = select(users_table).order_by(asc(users_table.c.name)) - will produce SQL as:: + will produce SQL as: + + .. sourcecode:: sql SELECT id, name FROM user ORDER BY name ASC @@ -346,9 +347,11 @@ def collate( e.g.:: - collate(mycolumn, 'utf8_bin') + collate(mycolumn, "utf8_bin") + + produces: - produces:: + .. sourcecode:: sql mycolumn COLLATE utf8_bin @@ -373,9 +376,12 @@ def between( E.g.:: from sqlalchemy import between + stmt = select(users_table).where(between(users_table.c.id, 5, 7)) - Would produce SQL resembling:: + Would produce SQL resembling: + + .. sourcecode:: sql SELECT id, name FROM user WHERE id BETWEEN :id_1 AND :id_2 @@ -497,7 +503,9 @@ def bindparam( users_table.c.name == bindparam("username") ) - The above statement, when rendered, will produce SQL similar to:: + The above statement, when rendered, will produce SQL similar to: + + .. sourcecode:: sql SELECT id, name FROM user WHERE name = :username @@ -532,7 +540,7 @@ def bindparam( coerced into fixed :func:`.bindparam` constructs. For example, given a comparison operation such as:: - expr = users_table.c.name == 'Wendy' + expr = users_table.c.name == "Wendy" The above expression will produce a :class:`.BinaryExpression` construct, where the left side is the :class:`_schema.Column` object @@ -540,9 +548,11 @@ def bindparam( :class:`.BindParameter` representing the literal value:: print(repr(expr.right)) - BindParameter('%(4327771088 name)s', 'Wendy', type_=String()) + BindParameter("%(4327771088 name)s", "Wendy", type_=String()) - The expression above will render SQL such as:: + The expression above will render SQL such as: + + .. sourcecode:: sql user.name = :name_1 @@ -551,10 +561,12 @@ def bindparam( along where it is later used within statement execution. If we invoke a statement like the following:: - stmt = select(users_table).where(users_table.c.name == 'Wendy') + stmt = select(users_table).where(users_table.c.name == "Wendy") result = connection.execute(stmt) - We would see SQL logging output as:: + We would see SQL logging output as: + + .. sourcecode:: sql SELECT "user".id, "user".name FROM "user" @@ -574,7 +586,9 @@ def bindparam( stmt = users_table.insert() result = connection.execute(stmt, {"name": "Wendy"}) - The above will produce SQL output as:: + The above will produce SQL output as: + + .. sourcecode:: sql INSERT INTO "user" (name) VALUES (%(name)s) {'name': 'Wendy'} @@ -738,16 +752,17 @@ def case( from sqlalchemy import case - stmt = select(users_table).\ - where( - case( - (users_table.c.name == 'wendy', 'W'), - (users_table.c.name == 'jack', 'J'), - else_='E' - ) - ) + stmt = select(users_table).where( + case( + (users_table.c.name == "wendy", "W"), + (users_table.c.name == "jack", "J"), + else_="E", + ) + ) + + The above statement will produce SQL resembling: - The above statement will produce SQL resembling:: + .. sourcecode:: sql SELECT id, name FROM user WHERE CASE @@ -765,14 +780,9 @@ def case( compared against keyed to result expressions. 
The statement below is equivalent to the preceding statement:: - stmt = select(users_table).\ - where( - case( - {"wendy": "W", "jack": "J"}, - value=users_table.c.name, - else_='E' - ) - ) + stmt = select(users_table).where( + case({"wendy": "W", "jack": "J"}, value=users_table.c.name, else_="E") + ) The values which are accepted as result values in :paramref:`.case.whens` as well as with :paramref:`.case.else_` are @@ -787,20 +797,16 @@ def case( from sqlalchemy import case, literal_column case( - ( - orderline.c.qty > 100, - literal_column("'greaterthan100'") - ), - ( - orderline.c.qty > 10, - literal_column("'greaterthan10'") - ), - else_=literal_column("'lessthan10'") + (orderline.c.qty > 100, literal_column("'greaterthan100'")), + (orderline.c.qty > 10, literal_column("'greaterthan10'")), + else_=literal_column("'lessthan10'"), ) The above will render the given constants without using bound parameters for the result values (but still for the comparison - values), as in:: + values), as in: + + .. sourcecode:: sql CASE WHEN (orderline.qty > :qty_1) THEN 'greaterthan100' @@ -821,8 +827,8 @@ def case( resulting value, e.g.:: case( - (users_table.c.name == 'wendy', 'W'), - (users_table.c.name == 'jack', 'J') + (users_table.c.name == "wendy", "W"), + (users_table.c.name == "jack", "J"), ) In the second form, it accepts a Python dictionary of comparison @@ -830,10 +836,7 @@ def case( :paramref:`.case.value` to be present, and values will be compared using the ``==`` operator, e.g.:: - case( - {"wendy": "W", "jack": "J"}, - value=users_table.c.name - ) + case({"wendy": "W", "jack": "J"}, value=users_table.c.name) :param value: An optional SQL expression which will be used as a fixed "comparison point" for candidate values within a dictionary @@ -846,7 +849,7 @@ def case( expressions evaluate to true. - """ + """ # noqa: E501 return Case(*whens, value=value, else_=else_) @@ -864,7 +867,9 @@ def cast( stmt = select(cast(product_table.c.unit_price, Numeric(10, 4))) - The above statement will produce SQL resembling:: + The above statement will produce SQL resembling: + + .. sourcecode:: sql SELECT CAST(unit_price AS NUMERIC(10, 4)) FROM product @@ -933,11 +938,11 @@ def try_cast( from sqlalchemy import select, try_cast, Numeric - stmt = select( - try_cast(product_table.c.unit_price, Numeric(10, 4)) - ) + stmt = select(try_cast(product_table.c.unit_price, Numeric(10, 4))) - The above would render on Microsoft SQL Server as:: + The above would render on Microsoft SQL Server as: + + .. sourcecode:: sql SELECT TRY_CAST (product_table.unit_price AS NUMERIC(10, 4)) FROM product_table @@ -968,7 +973,9 @@ def column( id, name = column("id"), column("name") stmt = select(id, name).select_from("user") - The above statement would produce SQL like:: + The above statement would produce SQL like: + + .. sourcecode:: sql SELECT id, name FROM user @@ -1004,13 +1011,14 @@ def column( from sqlalchemy import table, column, select - user = table("user", - column("id"), - column("name"), - column("description"), + user = table( + "user", + column("id"), + column("name"), + column("description"), ) - stmt = select(user.c.description).where(user.c.name == 'wendy') + stmt = select(user.c.description).where(user.c.name == "wendy") A :func:`_expression.column` / :func:`.table` construct like that illustrated @@ -1057,7 +1065,9 @@ def desc( stmt = select(users_table).order_by(desc(users_table.c.name)) - will produce SQL as:: + will produce SQL as: + + .. 
sourcecode:: sql SELECT id, name FROM user ORDER BY name DESC @@ -1096,9 +1106,12 @@ def distinct(expr: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: an aggregate function, as in:: from sqlalchemy import distinct, func + stmt = select(users_table.c.id, func.count(distinct(users_table.c.name))) - The above would produce an statement resembling:: + The above would produce an statement resembling: + + .. sourcecode:: sql SELECT user.id, count(DISTINCT user.name) FROM user @@ -1170,9 +1183,10 @@ def extract(field: str, expr: _ColumnExpressionArgument[Any]) -> Extract: from sqlalchemy import extract from sqlalchemy import table, column - logged_table = table("user", - column("id"), - column("date_created"), + logged_table = table( + "user", + column("id"), + column("date_created"), ) stmt = select(logged_table.c.id).where( @@ -1184,9 +1198,9 @@ def extract(field: str, expr: _ColumnExpressionArgument[Any]) -> Extract: Similarly, one can also select an extracted component:: - stmt = select( - extract("YEAR", logged_table.c.date_created) - ).where(logged_table.c.id == 1) + stmt = select(extract("YEAR", logged_table.c.date_created)).where( + logged_table.c.id == 1 + ) The implementation of ``EXTRACT`` may vary across database backends. Users are reminded to consult their database documentation. @@ -1245,7 +1259,8 @@ def funcfilter( E.g.:: from sqlalchemy import funcfilter - funcfilter(func.count(1), MyClass.name == 'some name') + + funcfilter(func.count(1), MyClass.name == "some name") Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')". @@ -1302,10 +1317,11 @@ def nulls_first(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: from sqlalchemy import desc, nulls_first - stmt = select(users_table).order_by( - nulls_first(desc(users_table.c.name))) + stmt = select(users_table).order_by(nulls_first(desc(users_table.c.name))) - The SQL expression from the above would resemble:: + The SQL expression from the above would resemble: + + .. sourcecode:: sql SELECT id, name FROM user ORDER BY name DESC NULLS FIRST @@ -1316,7 +1332,8 @@ def nulls_first(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: function version, as in:: stmt = select(users_table).order_by( - users_table.c.name.desc().nulls_first()) + users_table.c.name.desc().nulls_first() + ) .. versionchanged:: 1.4 :func:`.nulls_first` is renamed from :func:`.nullsfirst` in previous releases. @@ -1332,7 +1349,7 @@ def nulls_first(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: :meth:`_expression.Select.order_by` - """ + """ # noqa: E501 return UnaryExpression._create_nulls_first(column) @@ -1346,10 +1363,11 @@ def nulls_last(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: from sqlalchemy import desc, nulls_last - stmt = select(users_table).order_by( - nulls_last(desc(users_table.c.name))) + stmt = select(users_table).order_by(nulls_last(desc(users_table.c.name))) - The SQL expression from the above would resemble:: + The SQL expression from the above would resemble: + + .. sourcecode:: sql SELECT id, name FROM user ORDER BY name DESC NULLS LAST @@ -1359,8 +1377,7 @@ def nulls_last(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: rather than as its standalone function version, as in:: - stmt = select(users_table).order_by( - users_table.c.name.desc().nulls_last()) + stmt = select(users_table).order_by(users_table.c.name.desc().nulls_last()) .. versionchanged:: 1.4 :func:`.nulls_last` is renamed from :func:`.nullslast` in previous releases. 
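As a quick, runnable counterpart to the ordering helpers reformatted in this hunk, the following sketch uses a hypothetical ``account`` table (not part of the patch) to show the standalone :func:`.nulls_last` form next to its method-chained equivalent::

    from sqlalchemy import Integer, String, column, desc, nulls_last, select, table

    # hypothetical table, for illustration only
    account = table("account", column("id", Integer), column("name", String))

    # standalone function form
    stmt = select(account).order_by(nulls_last(desc(account.c.name)))

    # equivalent method-chained spelling
    stmt = select(account).order_by(account.c.name.desc().nulls_last())

    print(stmt)
    # roughly: SELECT account.id, account.name FROM account
    #          ORDER BY account.name DESC NULLS LAST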
@@ -1376,7 +1393,7 @@ def nulls_last(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]: :meth:`_expression.Select.order_by` - """ + """ # noqa: E501 return UnaryExpression._create_nulls_last(column) @@ -1391,11 +1408,8 @@ def or_( # type: ignore[empty-body] from sqlalchemy import or_ stmt = select(users_table).where( - or_( - users_table.c.name == 'wendy', - users_table.c.name == 'jack' - ) - ) + or_(users_table.c.name == "wendy", users_table.c.name == "jack") + ) The :func:`.or_` conjunction is also available using the Python ``|`` operator (though note that compound expressions @@ -1403,9 +1417,8 @@ def or_( # type: ignore[empty-body] operator precedence behavior):: stmt = select(users_table).where( - (users_table.c.name == 'wendy') | - (users_table.c.name == 'jack') - ) + (users_table.c.name == "wendy") | (users_table.c.name == "jack") + ) The :func:`.or_` construct must be given at least one positional argument in order to be valid; a :func:`.or_` construct with no @@ -1415,6 +1428,7 @@ def or_( # type: ignore[empty-body] specified:: from sqlalchemy import false + or_criteria = or_(false(), *expressions) The above expression will compile to SQL as the expression ``false`` @@ -1446,11 +1460,8 @@ def or_(*clauses): # noqa: F811 from sqlalchemy import or_ stmt = select(users_table).where( - or_( - users_table.c.name == 'wendy', - users_table.c.name == 'jack' - ) - ) + or_(users_table.c.name == "wendy", users_table.c.name == "jack") + ) The :func:`.or_` conjunction is also available using the Python ``|`` operator (though note that compound expressions @@ -1458,9 +1469,8 @@ def or_(*clauses): # noqa: F811 operator precedence behavior):: stmt = select(users_table).where( - (users_table.c.name == 'wendy') | - (users_table.c.name == 'jack') - ) + (users_table.c.name == "wendy") | (users_table.c.name == "jack") + ) The :func:`.or_` construct must be given at least one positional argument in order to be valid; a :func:`.or_` construct with no @@ -1470,6 +1480,7 @@ def or_(*clauses): # noqa: F811 specified:: from sqlalchemy import false + or_criteria = or_(false(), *expressions) The above expression will compile to SQL as the expression ``false`` @@ -1487,7 +1498,7 @@ def or_(*clauses): # noqa: F811 :func:`.and_` - """ + """ # noqa: E501 return BooleanClauseList.or_(*clauses) @@ -1508,7 +1519,9 @@ def over( func.row_number().over(order_by=mytable.c.some_column) - Would produce:: + Would produce: + + .. sourcecode:: sql ROW_NUMBER() OVER(ORDER BY some_column) @@ -1517,10 +1530,11 @@ def over( mutually-exclusive parameters each accept a 2-tuple, which contains a combination of integers and None:: - func.row_number().over( - order_by=my_table.c.some_column, range_=(None, 0)) + func.row_number().over(order_by=my_table.c.some_column, range_=(None, 0)) + + The above would produce: - The above would produce:: + .. 
sourcecode:: sql ROW_NUMBER() OVER(ORDER BY some_column RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) @@ -1531,19 +1545,19 @@ def over( * RANGE BETWEEN 5 PRECEDING AND 10 FOLLOWING:: - func.row_number().over(order_by='x', range_=(-5, 10)) + func.row_number().over(order_by="x", range_=(-5, 10)) * ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW:: - func.row_number().over(order_by='x', rows=(None, 0)) + func.row_number().over(order_by="x", rows=(None, 0)) * RANGE BETWEEN 2 PRECEDING AND UNBOUNDED FOLLOWING:: - func.row_number().over(order_by='x', range_=(-2, None)) + func.row_number().over(order_by="x", range_=(-2, None)) * RANGE BETWEEN 1 FOLLOWING AND 3 FOLLOWING:: - func.row_number().over(order_by='x', range_=(1, 3)) + func.row_number().over(order_by="x", range_=(1, 3)) :param element: a :class:`.FunctionElement`, :class:`.WithinGroup`, or other compatible construct. @@ -1572,7 +1586,7 @@ def over( :func:`_expression.within_group` - """ + """ # noqa: E501 return Over(element, partition_by, order_by, range_, rows) @@ -1621,9 +1635,11 @@ def text(text: str) -> TextClause: method allows specification of return columns including names and types:: - t = text("SELECT * FROM users WHERE id=:user_id").\ - bindparams(user_id=7).\ - columns(id=Integer, name=String) + t = ( + text("SELECT * FROM users WHERE id=:user_id") + .bindparams(user_id=7) + .columns(id=Integer, name=String) + ) for id, name in connection.execute(t): print(id, name) @@ -1705,9 +1721,7 @@ def tuple_( from sqlalchemy import tuple_ - tuple_(table.c.col1, table.c.col2).in_( - [(1, 2), (5, 12), (10, 19)] - ) + tuple_(table.c.col1, table.c.col2).in_([(1, 2), (5, 12), (10, 19)]) .. versionchanged:: 1.3.6 Added support for SQLite IN tuples. @@ -1757,10 +1771,9 @@ def type_coerce( :meth:`_expression.ColumnElement.label`:: stmt = select( - type_coerce(log_table.date_string, StringDateTime()).label('date') + type_coerce(log_table.date_string, StringDateTime()).label("date") ) - A type that features bound-value handling will also have that behavior take effect when literal values or :func:`.bindparam` constructs are passed to :func:`.type_coerce` as targets. @@ -1821,11 +1834,10 @@ def within_group( the :meth:`.FunctionElement.within_group` method, e.g.:: from sqlalchemy import within_group + stmt = select( department.c.id, - func.percentile_cont(0.5).within_group( - department.c.salary.desc() - ) + func.percentile_cont(0.5).within_group(department.c.salary.desc()), ) The above statement would produce SQL similar to diff --git a/lib/sqlalchemy/sql/_selectable_constructors.py b/lib/sqlalchemy/sql/_selectable_constructors.py index c2b5008c679..8ada82c8d4a 100644 --- a/lib/sqlalchemy/sql/_selectable_constructors.py +++ b/lib/sqlalchemy/sql/_selectable_constructors.py @@ -155,16 +155,16 @@ def exists( :meth:`_sql.SelectBase.exists` method:: exists_criteria = ( - select(table2.c.col2). - where(table1.c.col1 == table2.c.col2). - exists() + select(table2.c.col2).where(table1.c.col1 == table2.c.col2).exists() ) The EXISTS criteria is then used inside of an enclosing SELECT:: stmt = select(table1.c.col1).where(exists_criteria) - The above statement will then be of the form:: + The above statement will then be of the form: + + .. 
sourcecode:: sql SELECT col1 FROM table1 WHERE EXISTS (SELECT table2.col2 FROM table2 WHERE table2.col2 = table1.col1) @@ -225,11 +225,14 @@ def join( E.g.:: - j = join(user_table, address_table, - user_table.c.id == address_table.c.user_id) + j = join( + user_table, address_table, user_table.c.id == address_table.c.user_id + ) stmt = select(user_table).select_from(j) - would emit SQL along the lines of:: + would emit SQL along the lines of: + + .. sourcecode:: sql SELECT user.id, user.name FROM user JOIN address ON user.id = address.user_id @@ -263,7 +266,7 @@ def join( :class:`_expression.Join` - the type of object produced. - """ + """ # noqa: E501 return Join(left, right, onclause, isouter, full) @@ -529,13 +532,14 @@ class via the from sqlalchemy import func selectable = people.tablesample( - func.bernoulli(1), - name='alias', - seed=func.random()) + func.bernoulli(1), name="alias", seed=func.random() + ) stmt = select(selectable.c.people_id) Assuming ``people`` with a column ``people_id``, the above - statement would render as:: + statement would render as: + + .. sourcecode:: sql SELECT alias.people_id FROM people AS alias TABLESAMPLE bernoulli(:bernoulli_1) @@ -613,12 +617,10 @@ def values( from sqlalchemy import values value_expr = values( - column('id', Integer), - column('name', String), - name="my_values" - ).data( - [(1, 'name1'), (2, 'name2'), (3, 'name3')] - ) + column("id", Integer), + column("name", String), + name="my_values", + ).data([(1, "name1"), (2, "name2"), (3, "name3")]) :param \*columns: column expressions, typically composed using :func:`_expression.column` objects. diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index e4a7256b5d8..23247dee147 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -480,7 +480,7 @@ def argument_for(cls, dialect_name, argument_name, default): Index.argument_for("mydialect", "length", None) - some_index = Index('a', 'b', mydialect_length=5) + some_index = Index("a", "b", mydialect_length=5) The :meth:`.DialectKWArgs.argument_for` method is a per-argument way adding extra arguments to the @@ -569,7 +569,7 @@ def dialect_options(self): and ````. For example, the ``postgresql_where`` argument would be locatable as:: - arg = my_object.dialect_options['postgresql']['where'] + arg = my_object.dialect_options["postgresql"]["where"] .. 
versionadded:: 0.9.2 @@ -917,11 +917,7 @@ def from_execution_options( execution_options, ) = QueryContext.default_load_options.from_execution_options( "_sa_orm_load_options", - { - "populate_existing", - "autoflush", - "yield_per" - }, + {"populate_existing", "autoflush", "yield_per"}, execution_options, statement._execution_options, ) @@ -1224,6 +1220,7 @@ def execution_options(self, **kw: Any) -> Self: from sqlalchemy import event + @event.listens_for(some_engine, "before_execute") def _process_opt(conn, statement, multiparams, params, execution_options): "run a SQL function before invoking a statement" @@ -1475,14 +1472,14 @@ class ColumnCollection(Generic[_COLKEY, _COL_co]): mean either two columns with the same key, in which case the column returned by key access is **arbitrary**:: - >>> x1, x2 = Column('x', Integer), Column('x', Integer) + >>> x1, x2 = Column("x", Integer), Column("x", Integer) >>> cc = ColumnCollection(columns=[(x1.name, x1), (x2.name, x2)]) >>> list(cc) [Column('x', Integer(), table=None), Column('x', Integer(), table=None)] - >>> cc['x'] is x1 + >>> cc["x"] is x1 False - >>> cc['x'] is x2 + >>> cc["x"] is x2 True Or it can also mean the same column multiple times. These cases are @@ -2033,8 +2030,8 @@ def replace( e.g.:: - t = Table('sometable', metadata, Column('col1', Integer)) - t.columns.replace(Column('col1', Integer, key='columnone')) + t = Table("sometable", metadata, Column("col1", Integer)) + t.columns.replace(Column("col1", Integer, key="columnone")) will remove the original 'col1' from the collection, and add the new column under the name 'columnname'. diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index ab717e2b37e..d27b43f1fa1 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -155,8 +155,8 @@ class ExecutableDDLElement(roles.DDLRole, Executable, BaseDDLElement): event.listen( users, - 'after_create', - AddConstraint(constraint).execute_if(dialect='postgresql') + "after_create", + AddConstraint(constraint).execute_if(dialect="postgresql"), ) .. seealso:: @@ -231,20 +231,20 @@ def execute_if( Used to provide a wrapper for event listening:: event.listen( - metadata, - 'before_create', - DDL("my_ddl").execute_if(dialect='postgresql') - ) + metadata, + "before_create", + DDL("my_ddl").execute_if(dialect="postgresql"), + ) :param dialect: May be a string or tuple of strings. 
If a string, it will be compared to the name of the executing database dialect:: - DDL('something').execute_if(dialect='postgresql') + DDL("something").execute_if(dialect="postgresql") If a tuple, specifies multiple dialect names:: - DDL('something').execute_if(dialect=('postgresql', 'mysql')) + DDL("something").execute_if(dialect=("postgresql", "mysql")) :param callable\_: A callable, which will be invoked with three positional arguments as well as optional keyword @@ -342,17 +342,19 @@ class DDL(ExecutableDDLElement): from sqlalchemy import event, DDL - tbl = Table('users', metadata, Column('uid', Integer)) - event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger')) + tbl = Table("users", metadata, Column("uid", Integer)) + event.listen(tbl, "before_create", DDL("DROP TRIGGER users_trigger")) - spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE') - event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb')) + spow = DDL("ALTER TABLE %(table)s SET secretpowers TRUE") + event.listen(tbl, "after_create", spow.execute_if(dialect="somedb")) - drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE') + drop_spow = DDL("ALTER TABLE users SET secretpowers FALSE") connection.execute(drop_spow) When operating on Table events, the following ``statement`` - string substitutions are available:: + string substitutions are available: + + .. sourcecode:: text %(table)s - the Table name, with any required quoting applied %(schema)s - the schema name, with any required quoting applied @@ -568,6 +570,7 @@ class CreateColumn(BaseDDLElement): from sqlalchemy import schema from sqlalchemy.ext.compiler import compiles + @compiles(schema.CreateColumn) def compile(element, compiler, **kw): column = element.element @@ -576,9 +579,9 @@ def compile(element, compiler, **kw): return compiler.visit_create_column(element, **kw) text = "%s SPECIAL DIRECTIVE %s" % ( - column.name, - compiler.type_compiler.process(column.type) - ) + column.name, + compiler.type_compiler.process(column.type), + ) default = compiler.get_column_default_string(column) if default is not None: text += " DEFAULT " + default @@ -588,8 +591,8 @@ def compile(element, compiler, **kw): if column.constraints: text += " ".join( - compiler.process(const) - for const in column.constraints) + compiler.process(const) for const in column.constraints + ) return text The above construct can be applied to a :class:`_schema.Table` @@ -600,17 +603,21 @@ def compile(element, compiler, **kw): metadata = MetaData() - table = Table('mytable', MetaData(), - Column('x', Integer, info={"special":True}, primary_key=True), - Column('y', String(50)), - Column('z', String(20), info={"special":True}) - ) + table = Table( + "mytable", + MetaData(), + Column("x", Integer, info={"special": True}, primary_key=True), + Column("y", String(50)), + Column("z", String(20), info={"special": True}), + ) metadata.create_all(conn) Above, the directives we've added to the :attr:`_schema.Column.info` collection - will be detected by our custom compilation scheme:: + will be detected by our custom compilation scheme: + + .. 
sourcecode:: sql CREATE TABLE mytable ( x SPECIAL DIRECTIVE INTEGER NOT NULL, @@ -635,18 +642,21 @@ def compile(element, compiler, **kw): from sqlalchemy.schema import CreateColumn + @compiles(CreateColumn, "postgresql") def skip_xmin(element, compiler, **kw): - if element.element.name == 'xmin': + if element.element.name == "xmin": return None else: return compiler.visit_create_column(element, **kw) - my_table = Table('mytable', metadata, - Column('id', Integer, primary_key=True), - Column('xmin', Integer) - ) + my_table = Table( + "mytable", + metadata, + Column("id", Integer, primary_key=True), + Column("xmin", Integer), + ) Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE`` which only includes the ``id`` column in the string; the ``xmin`` column diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index 779be1dac12..51e00ca4e26 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -525,11 +525,11 @@ def return_defaults( E.g.:: - stmt = table.insert().values(data='newdata').return_defaults() + stmt = table.insert().values(data="newdata").return_defaults() result = connection.execute(stmt) - server_created_at = result.returned_defaults['created_at'] + server_created_at = result.returned_defaults["created_at"] When used against an UPDATE statement :meth:`.UpdateBase.return_defaults` instead looks for columns that @@ -1032,7 +1032,7 @@ def values( users.insert().values(name="some name") - users.update().where(users.c.id==5).values(name="some name") + users.update().where(users.c.id == 5).values(name="some name") :param \*args: As an alternative to passing key/value parameters, a dictionary, tuple, or list of dictionaries or tuples can be passed @@ -1062,13 +1062,17 @@ def values( this syntax is supported on backends such as SQLite, PostgreSQL, MySQL, but not necessarily others:: - users.insert().values([ - {"name": "some name"}, - {"name": "some other name"}, - {"name": "yet another name"}, - ]) + users.insert().values( + [ + {"name": "some name"}, + {"name": "some other name"}, + {"name": "yet another name"}, + ] + ) + + The above form would render a multiple VALUES statement similar to: - The above form would render a multiple VALUES statement similar to:: + .. sourcecode:: sql INSERT INTO users (name) VALUES (:name_1), @@ -1246,7 +1250,7 @@ def from_select( e.g.:: sel = select(table1.c.a, table1.c.b).where(table1.c.c > 5) - ins = table2.insert().from_select(['a', 'b'], sel) + ins = table2.insert().from_select(["a", "b"], sel) :param names: a sequence of string column names or :class:`_schema.Column` @@ -1535,9 +1539,7 @@ def ordered_values(self, *args: Tuple[_DMLColumnArgument, Any]) -> Self: E.g.:: - stmt = table.update().ordered_values( - ("name", "ed"), ("ident", "foo") - ) + stmt = table.update().ordered_values(("name", "ed"), ("ident", "foo")) .. seealso:: @@ -1550,7 +1552,7 @@ def ordered_values(self, *args: Tuple[_DMLColumnArgument, Any]) -> Self: :paramref:`_expression.update.preserve_parameter_order` parameter, which will be removed in SQLAlchemy 2.0. 
- """ + """ # noqa: E501 if self._values: raise exc.ArgumentError( "This statement already has values present" diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 441974707b9..62d71fbdf9a 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -281,7 +281,7 @@ def compile( from sqlalchemy.sql import table, column, select - t = table('t', column('x')) + t = table("t", column("x")) s = select(t).where(t.c.x == 5) @@ -584,10 +584,10 @@ def params( :func:`_expression.bindparam` elements replaced with values taken from the given dictionary:: - >>> clause = column('x') + bindparam('foo') + >>> clause = column("x") + bindparam("foo") >>> print(clause.compile().params) {'foo':None} - >>> print(clause.params({'foo':7}).compile().params) + >>> print(clause.params({"foo": 7}).compile().params) {'foo':7} """ @@ -1286,9 +1286,9 @@ class ColumnElement( .. sourcecode:: pycon+sql >>> from sqlalchemy.sql import column - >>> column('a') + column('b') + >>> column("a") + column("b") - >>> print(column('a') + column('b')) + >>> print(column("a") + column("b")) {printsql}a + b .. seealso:: @@ -1377,7 +1377,9 @@ def _non_anon_label(self) -> Optional[str]: SQL. Concretely, this is the "name" of a column or a label in a - SELECT statement; ```` and ```` below:: + SELECT statement; ```` and ```` below: + + .. sourcecode:: sql SELECT FROM table @@ -2232,7 +2234,6 @@ class TextClause( t = text("SELECT * FROM users") result = connection.execute(t) - The :class:`_expression.TextClause` construct is produced using the :func:`_expression.text` function; see that function for full documentation. @@ -2309,16 +2310,19 @@ def bindparams( Given a text construct such as:: from sqlalchemy import text - stmt = text("SELECT id, name FROM user WHERE name=:name " - "AND timestamp=:timestamp") + + stmt = text( + "SELECT id, name FROM user WHERE name=:name AND timestamp=:timestamp" + ) the :meth:`_expression.TextClause.bindparams` method can be used to establish the initial value of ``:name`` and ``:timestamp``, using simple keyword arguments:: - stmt = stmt.bindparams(name='jack', - timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)) + stmt = stmt.bindparams( + name="jack", timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5) + ) Where above, new :class:`.BindParameter` objects will be generated with the names ``name`` and ``timestamp``, and @@ -2333,10 +2337,11 @@ def bindparams( argument, then an optional value and type:: from sqlalchemy import bindparam + stmt = stmt.bindparams( - bindparam('name', value='jack', type_=String), - bindparam('timestamp', type_=DateTime) - ) + bindparam("name", value="jack", type_=String), + bindparam("timestamp", type_=DateTime), + ) Above, we specified the type of :class:`.DateTime` for the ``timestamp`` bind, and the type of :class:`.String` for the ``name`` @@ -2346,8 +2351,9 @@ def bindparams( Additional bound parameters can be supplied at statement execution time, e.g.:: - result = connection.execute(stmt, - timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)) + result = connection.execute( + stmt, timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5) + ) The :meth:`_expression.TextClause.bindparams` method can be called repeatedly, @@ -2357,15 +2363,15 @@ def bindparams( first with typing information, and a second time with value information, and it will be combined:: - stmt = text("SELECT id, name FROM user WHERE name=:name " - "AND timestamp=:timestamp") + stmt = text( + "SELECT id, name FROM user WHERE name=:name " + "AND 
timestamp=:timestamp" + ) stmt = stmt.bindparams( - bindparam('name', type_=String), - bindparam('timestamp', type_=DateTime) + bindparam("name", type_=String), bindparam("timestamp", type_=DateTime) ) stmt = stmt.bindparams( - name='jack', - timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5) + name="jack", timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5) ) The :meth:`_expression.TextClause.bindparams` @@ -2379,18 +2385,17 @@ def bindparams( object:: stmt1 = text("select id from table where name=:name").bindparams( - bindparam("name", value='name1', unique=True) + bindparam("name", value="name1", unique=True) ) stmt2 = text("select id from table where name=:name").bindparams( - bindparam("name", value='name2', unique=True) + bindparam("name", value="name2", unique=True) ) - union = union_all( - stmt1.columns(column("id")), - stmt2.columns(column("id")) - ) + union = union_all(stmt1.columns(column("id")), stmt2.columns(column("id"))) + + The above statement will render as: - The above statement will render as:: + .. sourcecode:: sql select id from table where name=:name_1 UNION ALL select id from table where name=:name_2 @@ -2400,7 +2405,7 @@ def bindparams( :func:`_expression.text` constructs. - """ + """ # noqa: E501 self._bindparams = new_params = self._bindparams.copy() for bind in binds: @@ -2454,12 +2459,13 @@ def columns( from sqlalchemy.sql import column, text stmt = text("SELECT id, name FROM some_table") - stmt = stmt.columns(column('id'), column('name')).subquery('st') + stmt = stmt.columns(column("id"), column("name")).subquery("st") - stmt = select(mytable).\ - select_from( - mytable.join(stmt, mytable.c.name == stmt.c.name) - ).where(stmt.c.id > 5) + stmt = ( + select(mytable) + .select_from(mytable.join(stmt, mytable.c.name == stmt.c.name)) + .where(stmt.c.id > 5) + ) Above, we pass a series of :func:`_expression.column` elements to the :meth:`_expression.TextClause.columns` method positionally. 
These @@ -2480,10 +2486,10 @@ def columns( stmt = text("SELECT id, name, timestamp FROM some_table") stmt = stmt.columns( - column('id', Integer), - column('name', Unicode), - column('timestamp', DateTime) - ) + column("id", Integer), + column("name", Unicode), + column("timestamp", DateTime), + ) for id, name, timestamp in connection.execute(stmt): print(id, name, timestamp) @@ -2492,11 +2498,7 @@ def columns( types alone may be used, if only type conversion is needed:: stmt = text("SELECT id, name, timestamp FROM some_table") - stmt = stmt.columns( - id=Integer, - name=Unicode, - timestamp=DateTime - ) + stmt = stmt.columns(id=Integer, name=Unicode, timestamp=DateTime) for id, name, timestamp in connection.execute(stmt): print(id, name, timestamp) @@ -2510,26 +2512,31 @@ def columns( the result set will match to those columns positionally, meaning the name or origin of the column in the textual SQL doesn't matter:: - stmt = text("SELECT users.id, addresses.id, users.id, " - "users.name, addresses.email_address AS email " - "FROM users JOIN addresses ON users.id=addresses.user_id " - "WHERE users.id = 1").columns( - User.id, - Address.id, - Address.user_id, - User.name, - Address.email_address - ) + stmt = text( + "SELECT users.id, addresses.id, users.id, " + "users.name, addresses.email_address AS email " + "FROM users JOIN addresses ON users.id=addresses.user_id " + "WHERE users.id = 1" + ).columns( + User.id, + Address.id, + Address.user_id, + User.name, + Address.email_address, + ) - query = session.query(User).from_statement(stmt).options( - contains_eager(User.addresses)) + query = ( + session.query(User) + .from_statement(stmt) + .options(contains_eager(User.addresses)) + ) The :meth:`_expression.TextClause.columns` method provides a direct route to calling :meth:`_expression.FromClause.subquery` as well as :meth:`_expression.SelectBase.cte` against a textual SELECT statement:: - stmt = stmt.columns(id=Integer, name=String).cte('st') + stmt = stmt.columns(id=Integer, name=String).cte("st") stmt = select(sometable).where(sometable.c.id == stmt.c.id) @@ -3274,14 +3281,13 @@ class Case(ColumnElement[_T]): from sqlalchemy import case - stmt = select(users_table).\ - where( - case( - (users_table.c.name == 'wendy', 'W'), - (users_table.c.name == 'jack', 'J'), - else_='E' - ) - ) + stmt = select(users_table).where( + case( + (users_table.c.name == "wendy", "W"), + (users_table.c.name == "jack", "J"), + else_="E", + ) + ) Details on :class:`.Case` usage is at :func:`.case`. @@ -3819,9 +3825,9 @@ class BinaryExpression(OperatorExpression[_T]): .. sourcecode:: pycon+sql >>> from sqlalchemy.sql import column - >>> column('a') + column('b') + >>> column("a") + column("b") - >>> print(column('a') + column('b')) + >>> print(column("a") + column("b")) {printsql}a + b """ @@ -3910,7 +3916,7 @@ def __bool__(self): The rationale here is so that ColumnElement objects can be hashable. What? Well, suppose you do this:: - c1, c2 = column('x'), column('y') + c1, c2 = column("x"), column("y") s1 = set([c1, c2]) We do that **a lot**, columns inside of sets is an extremely basic @@ -4475,12 +4481,13 @@ def over( The expression:: - func.rank().filter(MyClass.y > 5).over(order_by='x') + func.rank().filter(MyClass.y > 5).over(order_by="x") is shorthand for:: from sqlalchemy import over, funcfilter - over(funcfilter(func.rank(), MyClass.y > 5), order_by='x') + + over(funcfilter(func.rank(), MyClass.y > 5), order_by="x") See :func:`_expression.over` for a full description. 
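To see the reflowed :class:`_expression.TextClause` examples above in one self-contained form, here is a sketch assuming a hypothetical ``user`` table and values (not part of the patch), combining :meth:`_expression.TextClause.bindparams` with :meth:`_expression.TextClause.columns`::

    from sqlalchemy import DateTime, Integer, String, bindparam, text

    stmt = text(
        "SELECT id, name FROM user WHERE name=:name AND timestamp=:timestamp"
    ).bindparams(
        bindparam("name", value="jack", type_=String),
        bindparam("timestamp", type_=DateTime),
    )

    # typing the result columns turns the TextClause into a TextualSelect
    stmt = stmt.columns(id=Integer, name=String)

    # the remaining value may be supplied at execution time, e.g.:
    # connection.execute(stmt, {"timestamp": some_datetime_value})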
@@ -4842,7 +4849,9 @@ class ColumnClause( id, name = column("id"), column("name") stmt = select(id, name).select_from("user") - The above statement would produce SQL like:: + The above statement would produce SQL like: + + .. sourcecode:: sql SELECT id, name FROM user @@ -5395,11 +5404,12 @@ class conv(_truncated_label): E.g. when we create a :class:`.Constraint` using a naming convention as follows:: - m = MetaData(naming_convention={ - "ck": "ck_%(table_name)s_%(constraint_name)s" - }) - t = Table('t', m, Column('x', Integer), - CheckConstraint('x > 5', name='x5')) + m = MetaData( + naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"} + ) + t = Table( + "t", m, Column("x", Integer), CheckConstraint("x > 5", name="x5") + ) The name of the above constraint will be rendered as ``"ck_t_x5"``. That is, the existing name ``x5`` is used in the naming convention as the @@ -5412,11 +5422,15 @@ class conv(_truncated_label): use this explicitly as follows:: - m = MetaData(naming_convention={ - "ck": "ck_%(table_name)s_%(constraint_name)s" - }) - t = Table('t', m, Column('x', Integer), - CheckConstraint('x > 5', name=conv('ck_t_x5'))) + m = MetaData( + naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"} + ) + t = Table( + "t", + m, + Column("x", Integer), + CheckConstraint("x > 5", name=conv("ck_t_x5")), + ) Where above, the :func:`_schema.conv` marker indicates that the constraint name here is final, and the name will render as ``"ck_t_x5"`` and not diff --git a/lib/sqlalchemy/sql/events.py b/lib/sqlalchemy/sql/events.py index 1a6a9a6a7d0..e9d19f337d0 100644 --- a/lib/sqlalchemy/sql/events.py +++ b/lib/sqlalchemy/sql/events.py @@ -63,13 +63,14 @@ class DDLEvents(event.Events[SchemaEventTarget]): from sqlalchemy import Table, Column, Metadata, Integer m = MetaData() - some_table = Table('some_table', m, Column('data', Integer)) + some_table = Table("some_table", m, Column("data", Integer)) + @event.listens_for(some_table, "after_create") def after_create(target, connection, **kw): - connection.execute(text( - "ALTER TABLE %s SET name=foo_%s" % (target.name, target.name) - )) + connection.execute( + text("ALTER TABLE %s SET name=foo_%s" % (target.name, target.name)) + ) some_engine = create_engine("postgresql://scott:tiger@host/test") @@ -127,10 +128,11 @@ def after_create(target, connection, **kw): as listener callables:: from sqlalchemy import DDL + event.listen( some_table, "after_create", - DDL("ALTER TABLE %(table)s SET name=foo_%(table)s") + DDL("ALTER TABLE %(table)s SET name=foo_%(table)s"), ) **Event Propagation to MetaData Copies** @@ -149,7 +151,7 @@ def after_create(target, connection, **kw): some_table, "after_create", DDL("ALTER TABLE %(table)s SET name=foo_%(table)s"), - propagate=True + propagate=True, ) new_metadata = MetaData() @@ -169,7 +171,7 @@ def after_create(target, connection, **kw): :ref:`schema_ddl_sequences` - """ + """ # noqa: E501 _target_class_doc = "SomeSchemaClassOrObject" _dispatch_target = SchemaEventTarget @@ -358,16 +360,17 @@ def column_reflect( metadata = MetaData() - @event.listens_for(metadata, 'column_reflect') + + @event.listens_for(metadata, "column_reflect") def receive_column_reflect(inspector, table, column_info): # receives for all Table objects that are reflected # under this MetaData + ... # will use the above event hook my_table = Table("my_table", metadata, autoload_with=some_engine) - .. 
versionadded:: 1.4.0b2 The :meth:`_events.DDLEvents.column_reflect` hook may now be applied to a :class:`_schema.MetaData` object as well as the :class:`_schema.MetaData` class itself where it will @@ -379,9 +382,11 @@ def receive_column_reflect(inspector, table, column_info): from sqlalchemy import Table - @event.listens_for(Table, 'column_reflect') + + @event.listens_for(Table, "column_reflect") def receive_column_reflect(inspector, table, column_info): # receives for all Table objects that are reflected + ... It can also be applied to a specific :class:`_schema.Table` at the point that one is being reflected using the @@ -390,9 +395,7 @@ def receive_column_reflect(inspector, table, column_info): t1 = Table( "my_table", autoload_with=some_engine, - listeners=[ - ('column_reflect', receive_column_reflect) - ] + listeners=[("column_reflect", receive_column_reflect)], ) The dictionary of column information as returned by the diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index 8ef7f75bc21..2e86baf4985 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -246,9 +246,8 @@ def table_valued( .. sourcecode:: pycon+sql - >>> fn = ( - ... func.generate_series(1, 5). - ... table_valued("value", "start", "stop", "step") + >>> fn = func.generate_series(1, 5).table_valued( + ... "value", "start", "stop", "step" ... ) >>> print(select(fn)) @@ -265,7 +264,9 @@ def table_valued( .. sourcecode:: pycon+sql - >>> fn = func.generate_series(4, 1, -1).table_valued("gen", with_ordinality="ordinality") + >>> fn = func.generate_series(4, 1, -1).table_valued( + ... "gen", with_ordinality="ordinality" + ... ) >>> print(select(fn)) {printsql}SELECT anon_1.gen, anon_1.ordinality FROM generate_series(:generate_series_1, :generate_series_2, :generate_series_3) WITH ORDINALITY AS anon_1 @@ -377,7 +378,7 @@ def columns(self) -> ColumnCollection[str, KeyedColumnElement[Any]]: # type: ig .. sourcecode:: pycon+sql >>> from sqlalchemy import column, select, func - >>> stmt = select(column('x'), column('y')).select_from(func.myfunction()) + >>> stmt = select(column("x"), column("y")).select_from(func.myfunction()) >>> print(stmt) {printsql}SELECT x, y FROM myfunction() @@ -442,12 +443,13 @@ def over( The expression:: - func.row_number().over(order_by='x') + func.row_number().over(order_by="x") is shorthand for:: from sqlalchemy import over - over(func.row_number(), order_by='x') + + over(func.row_number(), order_by="x") See :func:`_expression.over` for a full description. @@ -511,6 +513,7 @@ def filter( is shorthand for:: from sqlalchemy import funcfilter + funcfilter(func.count(1), True) .. seealso:: @@ -567,7 +570,7 @@ def as_comparison( An ORM example is as follows:: class Venue(Base): - __tablename__ = 'venue' + __tablename__ = "venue" id = Column(Integer, primary_key=True) name = Column(String) @@ -575,9 +578,10 @@ class Venue(Base): "Venue", primaryjoin=func.instr( remote(foreign(name)), name + "/" - ).as_comparison(1, 2) == 1, + ).as_comparison(1, 2) + == 1, viewonly=True, - order_by=name + order_by=name, ) Above, the "Venue" class can load descendant "Venue" objects by @@ -881,8 +885,11 @@ class _FunctionGenerator: .. sourcecode:: pycon+sql - >>> print(func.my_string(u'hi', type_=Unicode) + ' ' + - ... func.my_string(u'there', type_=Unicode)) + >>> print( + ... func.my_string("hi", type_=Unicode) + ... + " " + ... + func.my_string("there", type_=Unicode) + ... 
) {printsql}my_string(:my_string_1) || :my_string_2 || my_string(:my_string_3) The object returned by a :data:`.func` call is usually an instance of @@ -1367,10 +1374,12 @@ class that is instantiated automatically when called from sqlalchemy.sql.functions import GenericFunction from sqlalchemy.types import DateTime + class as_utc(GenericFunction): type = DateTime() inherit_cache = True + print(select(func.as_utc())) User-defined generic functions can be organized into @@ -1418,6 +1427,7 @@ class GeoBuffer(GenericFunction): from sqlalchemy.sql import quoted_name + class GeoBuffer(GenericFunction): type = Geometry() package = "geo" @@ -1657,7 +1667,7 @@ class concat(GenericFunction[str]): .. sourcecode:: pycon+sql - >>> print(select(func.concat('a', 'b'))) + >>> print(select(func.concat("a", "b"))) {printsql}SELECT concat(:concat_2, :concat_3) AS concat_1 String concatenation in SQLAlchemy is more commonly available using the @@ -1705,11 +1715,13 @@ class count(GenericFunction[int]): from sqlalchemy import select from sqlalchemy import table, column - my_table = table('some_table', column('id')) + my_table = table("some_table", column("id")) stmt = select(func.count()).select_from(my_table) - Executing ``stmt`` would emit:: + Executing ``stmt`` would emit: + + .. sourcecode:: sql SELECT count(*) AS count_1 FROM some_table @@ -2009,9 +2021,7 @@ class grouping_sets(GenericFunction[_T]): from sqlalchemy import tuple_ stmt = select( - func.sum(table.c.value), - table.c.col_1, table.c.col_2, - table.c.col_3 + func.sum(table.c.value), table.c.col_1, table.c.col_2, table.c.col_3 ).group_by( func.grouping_sets( tuple_(table.c.col_1, table.c.col_2), @@ -2019,10 +2029,9 @@ class grouping_sets(GenericFunction[_T]): ) ) - .. versionadded:: 1.2 - """ + """ # noqa: E501 _has_args = True inherit_cache = True diff --git a/lib/sqlalchemy/sql/lambdas.py b/lib/sqlalchemy/sql/lambdas.py index 2657b2c243d..061da29707c 100644 --- a/lib/sqlalchemy/sql/lambdas.py +++ b/lib/sqlalchemy/sql/lambdas.py @@ -518,7 +518,6 @@ class StatementLambdaElement( stmt += lambda s: s.where(table.c.col == parameter) - .. versionadded:: 1.4 .. seealso:: @@ -558,9 +557,7 @@ def add_criteria( ... stmt = lambda_stmt( ... lambda: select(table.c.x, table.c.y), ... ) - ... stmt = stmt.add_criteria( - ... lambda: table.c.x > parameter - ... ) + ... stmt = stmt.add_criteria(lambda: table.c.x > parameter) ... return stmt The :meth:`_sql.StatementLambdaElement.add_criteria` method is @@ -571,18 +568,15 @@ def add_criteria( >>> def my_stmt(self, foo): ... stmt = lambda_stmt( ... lambda: select(func.max(foo.x, foo.y)), - ... track_closure_variables=False - ... ) - ... stmt = stmt.add_criteria( - ... lambda: self.where_criteria, - ... track_on=[self] + ... track_closure_variables=False, ... ) + ... stmt = stmt.add_criteria(lambda: self.where_criteria, track_on=[self]) ... return stmt See :func:`_sql.lambda_stmt` for a description of the parameters accepted. 
- """ + """ # noqa: E501 opts = self.opts + dict( enable_tracking=enable_tracking, diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index c611004b97e..33733d03fc9 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -148,6 +148,7 @@ def __and__(self, other: Any) -> Operators: is equivalent to:: from sqlalchemy import and_ + and_(a, b) Care should be taken when using ``&`` regarding @@ -172,6 +173,7 @@ def __or__(self, other: Any) -> Operators: is equivalent to:: from sqlalchemy import or_ + or_(a, b) Care should be taken when using ``|`` regarding @@ -196,6 +198,7 @@ def __invert__(self) -> Operators: is equivalent to:: from sqlalchemy import not_ + not_(a) """ @@ -224,7 +227,7 @@ def op( This function can also be used to make bitwise operators explicit. For example:: - somecolumn.op('&')(0xff) + somecolumn.op("&")(0xFF) is a bitwise AND of the value in ``somecolumn``. @@ -275,7 +278,7 @@ def op( e.g.:: - >>> expr = column('x').op('+', python_impl=lambda a, b: a + b)('y') + >>> expr = column("x").op("+", python_impl=lambda a, b: a + b)("y") The operator for the above expression will also work for non-SQL left and right objects:: @@ -389,10 +392,9 @@ class custom_op(OperatorType, Generic[_T]): from sqlalchemy.sql import operators from sqlalchemy import Numeric - unary = UnaryExpression(table.c.somecolumn, - modifier=operators.custom_op("!"), - type_=Numeric) - + unary = UnaryExpression( + table.c.somecolumn, modifier=operators.custom_op("!"), type_=Numeric + ) .. seealso:: @@ -400,7 +402,7 @@ class custom_op(OperatorType, Generic[_T]): :meth:`.Operators.bool_op` - """ + """ # noqa: E501 __name__ = "custom_op" @@ -698,14 +700,15 @@ def like( ) -> ColumnOperators: r"""Implement the ``like`` operator. - In a column context, produces the expression:: + In a column context, produces the expression: + + .. sourcecode:: sql a LIKE other E.g.:: - stmt = select(sometable).\ - where(sometable.c.column.like("%foobar%")) + stmt = select(sometable).where(sometable.c.column.like("%foobar%")) :param other: expression to be compared :param escape: optional escape character, renders the ``ESCAPE`` @@ -725,18 +728,21 @@ def ilike( ) -> ColumnOperators: r"""Implement the ``ilike`` operator, e.g. case insensitive LIKE. - In a column context, produces an expression either of the form:: + In a column context, produces an expression either of the form: + + .. sourcecode:: sql lower(a) LIKE lower(other) - Or on backends that support the ILIKE operator:: + Or on backends that support the ILIKE operator: + + .. sourcecode:: sql a ILIKE other E.g.:: - stmt = select(sometable).\ - where(sometable.c.column.ilike("%foobar%")) + stmt = select(sometable).where(sometable.c.column.ilike("%foobar%")) :param other: expression to be compared :param escape: optional escape character, renders the ``ESCAPE`` @@ -748,7 +754,7 @@ def ilike( :meth:`.ColumnOperators.like` - """ + """ # noqa: E501 return self.operate(ilike_op, other, escape=escape) def bitwise_xor(self, other: Any) -> ColumnOperators: @@ -842,12 +848,15 @@ def in_(self, other: Any) -> ColumnOperators: The given parameter ``other`` may be: - * A list of literal values, e.g.:: + * A list of literal values, + e.g.:: stmt.where(column.in_([1, 2, 3])) In this calling form, the list of items is converted to a set of - bound parameters the same length as the list given:: + bound parameters the same length as the list given: + + .. sourcecode:: sql WHERE COL IN (?, ?, ?) 
@@ -855,16 +864,20 @@ def in_(self, other: Any) -> ColumnOperators: :func:`.tuple_` containing multiple expressions:: from sqlalchemy import tuple_ + stmt.where(tuple_(col1, col2).in_([(1, 10), (2, 20), (3, 30)])) - * An empty list, e.g.:: + * An empty list, + e.g.:: stmt.where(column.in_([])) In this calling form, the expression renders an "empty set" expression. These expressions are tailored to individual backends and are generally trying to get an empty SELECT statement as a - subquery. Such as on SQLite, the expression is:: + subquery. Such as on SQLite, the expression is: + + .. sourcecode:: sql WHERE col IN (SELECT 1 FROM (SELECT 1) WHERE 1!=1) @@ -874,10 +887,12 @@ def in_(self, other: Any) -> ColumnOperators: * A bound parameter, e.g. :func:`.bindparam`, may be used if it includes the :paramref:`.bindparam.expanding` flag:: - stmt.where(column.in_(bindparam('value', expanding=True))) + stmt.where(column.in_(bindparam("value", expanding=True))) In this calling form, the expression renders a special non-SQL - placeholder expression that looks like:: + placeholder expression that looks like: + + .. sourcecode:: sql WHERE COL IN ([EXPANDING_value]) @@ -887,7 +902,9 @@ def in_(self, other: Any) -> ColumnOperators: connection.execute(stmt, {"value": [1, 2, 3]}) - The database would be passed a bound parameter for each value:: + The database would be passed a bound parameter for each value: + + .. sourcecode:: sql WHERE COL IN (?, ?, ?) @@ -895,7 +912,9 @@ def in_(self, other: Any) -> ColumnOperators: If an empty list is passed, a special "empty list" expression, which is specific to the database in use, is rendered. On - SQLite this would be:: + SQLite this would be: + + .. sourcecode:: sql WHERE COL IN (SELECT 1 FROM (SELECT 1) WHERE 1!=1) @@ -906,13 +925,12 @@ def in_(self, other: Any) -> ColumnOperators: correlated scalar select:: stmt.where( - column.in_( - select(othertable.c.y). - where(table.c.x == othertable.c.x) - ) + column.in_(select(othertable.c.y).where(table.c.x == othertable.c.x)) ) - In this calling form, :meth:`.ColumnOperators.in_` renders as given:: + In this calling form, :meth:`.ColumnOperators.in_` renders as given: + + .. sourcecode:: sql WHERE COL IN (SELECT othertable.y FROM othertable WHERE othertable.x = table.x) @@ -921,7 +939,7 @@ def in_(self, other: Any) -> ColumnOperators: construct, or a :func:`.bindparam` construct that includes the :paramref:`.bindparam.expanding` flag set to True. - """ + """ # noqa: E501 return self.operate(in_op, other) def not_in(self, other: Any) -> ColumnOperators: @@ -1065,14 +1083,15 @@ def startswith( r"""Implement the ``startswith`` operator. Produces a LIKE expression that tests against a match for the start - of a string value:: + of a string value: + + .. sourcecode:: sql column LIKE || '%' E.g.:: - stmt = select(sometable).\ - where(sometable.c.column.startswith("foobar")) + stmt = select(sometable).where(sometable.c.column.startswith("foobar")) Since the operator uses ``LIKE``, wildcard characters ``"%"`` and ``"_"`` that are present inside the expression @@ -1101,7 +1120,9 @@ def startswith( somecolumn.startswith("foo%bar", autoescape=True) - Will render as:: + Will render as: + + .. sourcecode:: sql somecolumn LIKE :param || '%' ESCAPE '/' @@ -1117,7 +1138,9 @@ def startswith( somecolumn.startswith("foo/%bar", escape="^") - Will render as:: + Will render as: + + .. 
sourcecode:: sql somecolumn LIKE :param || '%' ESCAPE '^' @@ -1137,7 +1160,7 @@ def startswith( :meth:`.ColumnOperators.like` - """ + """ # noqa: E501 return self.operate( startswith_op, other, escape=escape, autoescape=autoescape ) @@ -1152,14 +1175,15 @@ def istartswith( version of :meth:`.ColumnOperators.startswith`. Produces a LIKE expression that tests against an insensitive - match for the start of a string value:: + match for the start of a string value: + + .. sourcecode:: sql lower(column) LIKE lower() || '%' E.g.:: - stmt = select(sometable).\ - where(sometable.c.column.istartswith("foobar")) + stmt = select(sometable).where(sometable.c.column.istartswith("foobar")) Since the operator uses ``LIKE``, wildcard characters ``"%"`` and ``"_"`` that are present inside the expression @@ -1188,7 +1212,9 @@ def istartswith( somecolumn.istartswith("foo%bar", autoescape=True) - Will render as:: + Will render as: + + .. sourcecode:: sql lower(somecolumn) LIKE lower(:param) || '%' ESCAPE '/' @@ -1204,7 +1230,9 @@ def istartswith( somecolumn.istartswith("foo/%bar", escape="^") - Will render as:: + Will render as: + + .. sourcecode:: sql lower(somecolumn) LIKE lower(:param) || '%' ESCAPE '^' @@ -1219,7 +1247,7 @@ def istartswith( .. seealso:: :meth:`.ColumnOperators.startswith` - """ + """ # noqa: E501 return self.operate( istartswith_op, other, escape=escape, autoescape=autoescape ) @@ -1233,14 +1261,15 @@ def endswith( r"""Implement the 'endswith' operator. Produces a LIKE expression that tests against a match for the end - of a string value:: + of a string value: + + .. sourcecode:: sql column LIKE '%' || E.g.:: - stmt = select(sometable).\ - where(sometable.c.column.endswith("foobar")) + stmt = select(sometable).where(sometable.c.column.endswith("foobar")) Since the operator uses ``LIKE``, wildcard characters ``"%"`` and ``"_"`` that are present inside the expression @@ -1269,7 +1298,9 @@ def endswith( somecolumn.endswith("foo%bar", autoescape=True) - Will render as:: + Will render as: + + .. sourcecode:: sql somecolumn LIKE '%' || :param ESCAPE '/' @@ -1285,7 +1316,9 @@ def endswith( somecolumn.endswith("foo/%bar", escape="^") - Will render as:: + Will render as: + + .. sourcecode:: sql somecolumn LIKE '%' || :param ESCAPE '^' @@ -1305,7 +1338,7 @@ def endswith( :meth:`.ColumnOperators.like` - """ + """ # noqa: E501 return self.operate( endswith_op, other, escape=escape, autoescape=autoescape ) @@ -1320,14 +1353,15 @@ def iendswith( version of :meth:`.ColumnOperators.endswith`. Produces a LIKE expression that tests against an insensitive match - for the end of a string value:: + for the end of a string value: + + .. sourcecode:: sql lower(column) LIKE '%' || lower() E.g.:: - stmt = select(sometable).\ - where(sometable.c.column.iendswith("foobar")) + stmt = select(sometable).where(sometable.c.column.iendswith("foobar")) Since the operator uses ``LIKE``, wildcard characters ``"%"`` and ``"_"`` that are present inside the expression @@ -1356,7 +1390,9 @@ def iendswith( somecolumn.iendswith("foo%bar", autoescape=True) - Will render as:: + Will render as: + + .. sourcecode:: sql lower(somecolumn) LIKE '%' || lower(:param) ESCAPE '/' @@ -1372,7 +1408,9 @@ def iendswith( somecolumn.iendswith("foo/%bar", escape="^") - Will render as:: + Will render as: + + .. sourcecode:: sql lower(somecolumn) LIKE '%' || lower(:param) ESCAPE '^' @@ -1387,7 +1425,7 @@ def iendswith( .. 
seealso:: :meth:`.ColumnOperators.endswith` - """ + """ # noqa: E501 return self.operate( iendswith_op, other, escape=escape, autoescape=autoescape ) @@ -1396,14 +1434,15 @@ def contains(self, other: Any, **kw: Any) -> ColumnOperators: r"""Implement the 'contains' operator. Produces a LIKE expression that tests against a match for the middle - of a string value:: + of a string value: + + .. sourcecode:: sql column LIKE '%' || || '%' E.g.:: - stmt = select(sometable).\ - where(sometable.c.column.contains("foobar")) + stmt = select(sometable).where(sometable.c.column.contains("foobar")) Since the operator uses ``LIKE``, wildcard characters ``"%"`` and ``"_"`` that are present inside the expression @@ -1432,7 +1471,9 @@ def contains(self, other: Any, **kw: Any) -> ColumnOperators: somecolumn.contains("foo%bar", autoescape=True) - Will render as:: + Will render as: + + .. sourcecode:: sql somecolumn LIKE '%' || :param || '%' ESCAPE '/' @@ -1448,7 +1489,9 @@ def contains(self, other: Any, **kw: Any) -> ColumnOperators: somecolumn.contains("foo/%bar", escape="^") - Will render as:: + Will render as: + + .. sourcecode:: sql somecolumn LIKE '%' || :param || '%' ESCAPE '^' @@ -1469,7 +1512,7 @@ def contains(self, other: Any, **kw: Any) -> ColumnOperators: :meth:`.ColumnOperators.like` - """ + """ # noqa: E501 return self.operate(contains_op, other, **kw) def icontains(self, other: Any, **kw: Any) -> ColumnOperators: @@ -1477,14 +1520,15 @@ def icontains(self, other: Any, **kw: Any) -> ColumnOperators: version of :meth:`.ColumnOperators.contains`. Produces a LIKE expression that tests against an insensitive match - for the middle of a string value:: + for the middle of a string value: + + .. sourcecode:: sql lower(column) LIKE '%' || lower() || '%' E.g.:: - stmt = select(sometable).\ - where(sometable.c.column.icontains("foobar")) + stmt = select(sometable).where(sometable.c.column.icontains("foobar")) Since the operator uses ``LIKE``, wildcard characters ``"%"`` and ``"_"`` that are present inside the expression @@ -1513,7 +1557,9 @@ def icontains(self, other: Any, **kw: Any) -> ColumnOperators: somecolumn.icontains("foo%bar", autoescape=True) - Will render as:: + Will render as: + + .. sourcecode:: sql lower(somecolumn) LIKE '%' || lower(:param) || '%' ESCAPE '/' @@ -1529,7 +1575,9 @@ def icontains(self, other: Any, **kw: Any) -> ColumnOperators: somecolumn.icontains("foo/%bar", escape="^") - Will render as:: + Will render as: + + .. 
sourcecode:: sql lower(somecolumn) LIKE '%' || lower(:param) || '%' ESCAPE '^' @@ -1545,7 +1593,7 @@ def icontains(self, other: Any, **kw: Any) -> ColumnOperators: :meth:`.ColumnOperators.contains` - """ + """ # noqa: E501 return self.operate(icontains_op, other, **kw) def match(self, other: Any, **kwargs: Any) -> ColumnOperators: @@ -1586,7 +1634,7 @@ def regexp_match( E.g.:: stmt = select(table.c.some_column).where( - table.c.some_column.regexp_match('^(b|c)') + table.c.some_column.regexp_match("^(b|c)") ) :meth:`_sql.ColumnOperators.regexp_match` attempts to resolve to @@ -1644,11 +1692,7 @@ def regexp_replace( E.g.:: stmt = select( - table.c.some_column.regexp_replace( - 'b(..)', - 'X\1Y', - flags='g' - ) + table.c.some_column.regexp_replace("b(..)", "X\1Y", flags="g") ) :meth:`_sql.ColumnOperators.regexp_replace` attempts to resolve to diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index c4b7f0d3132..463c8c31e07 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -320,9 +320,10 @@ class Table( e.g.:: mytable = Table( - "mytable", metadata, - Column('mytable_id', Integer, primary_key=True), - Column('value', String(50)) + "mytable", + metadata, + Column("mytable_id", Integer, primary_key=True), + Column("value", String(50)), ) The :class:`_schema.Table` @@ -632,11 +633,13 @@ def __init__( :class:`_schema.Column` named "y":: - Table("mytable", metadata, - Column('y', Integer), - extend_existing=True, - autoload_with=engine - ) + Table( + "mytable", + metadata, + Column("y", Integer), + extend_existing=True, + autoload_with=engine, + ) .. seealso:: @@ -733,12 +736,12 @@ def listen_for_reflect(table, column_info): "handle the column reflection event" # ... + t = Table( - 'sometable', + "sometable", autoload_with=engine, - listeners=[ - ('column_reflect', listen_for_reflect) - ]) + listeners=[("column_reflect", listen_for_reflect)], + ) .. seealso:: @@ -1345,7 +1348,7 @@ def to_metadata( m1 = MetaData() - user = Table('user', m1, Column('id', Integer, primary_key=True)) + user = Table("user", m1, Column("id", Integer, primary_key=True)) m2 = MetaData() user_copy = user.to_metadata(m2) @@ -1369,7 +1372,7 @@ def to_metadata( unless set explicitly:: - m2 = MetaData(schema='newschema') + m2 = MetaData(schema="newschema") # user_copy_one will have "newschema" as the schema name user_copy_one = user.to_metadata(m2, schema=None) @@ -1396,15 +1399,16 @@ def to_metadata( E.g.:: - def referred_schema_fn(table, to_schema, - constraint, referred_schema): - if referred_schema == 'base_tables': + def referred_schema_fn(table, to_schema, constraint, referred_schema): + if referred_schema == "base_tables": return referred_schema else: return to_schema - new_table = table.to_metadata(m2, schema="alt_schema", - referred_schema_fn=referred_schema_fn) + + new_table = table.to_metadata( + m2, schema="alt_schema", referred_schema_fn=referred_schema_fn + ) :param name: optional string name indicating the target table name. If not specified or None, the table name is retained. This allows @@ -1412,7 +1416,7 @@ def referred_schema_fn(table, to_schema, :class:`_schema.MetaData` target with a new name. - """ + """ # noqa: E501 if name is None: name = self.name @@ -1557,10 +1561,10 @@ def __init__( as well, e.g.:: # use a type with arguments - Column('data', String(50)) + Column("data", String(50)) # use no arguments - Column('level', Integer) + Column("level", Integer) The ``type`` argument may be the second positional argument or specified by keyword. 
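As a minimal sketch of the two spellings just described (not part of the patch; the column and type names are illustrative), the type may be passed positionally or via the ``type_`` keyword::

    from sqlalchemy import Column, String

    # positional form: name, then type
    c1 = Column("data", String(50))

    # keyword form: the parameter is spelled ``type_`` to avoid
    # shadowing the builtin ``type``
    c2 = Column("data", type_=String(50))

    # both produce a column named "data" of type String(50)
    assert isinstance(c1.type, String) and isinstance(c2.type, String)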
@@ -1662,8 +1666,12 @@ def __init__( # turn on autoincrement for this column despite # the ForeignKey() - Column('id', ForeignKey('other.id'), - primary_key=True, autoincrement='ignore_fk') + Column( + "id", + ForeignKey("other.id"), + primary_key=True, + autoincrement="ignore_fk", + ) It is typically not desirable to have "autoincrement" enabled on a column that refers to another via foreign key, as such a column is @@ -1780,7 +1788,7 @@ def __init__( "some_table", metadata, Column("x", Integer), - Index("ix_some_table_x", "x") + Index("ix_some_table_x", "x"), ) To add the :paramref:`_schema.Index.unique` flag to the @@ -1862,14 +1870,22 @@ def __init__( String types will be emitted as-is, surrounded by single quotes:: - Column('x', Text, server_default="val") + Column("x", Text, server_default="val") + + will render: + + .. sourcecode:: sql x TEXT DEFAULT 'val' A :func:`~sqlalchemy.sql.expression.text` expression will be rendered as-is, without quotes:: - Column('y', DateTime, server_default=text('NOW()')) + Column("y", DateTime, server_default=text("NOW()")) + + will render: + + .. sourcecode:: sql y DATETIME DEFAULT NOW() @@ -1884,20 +1900,21 @@ def __init__( from sqlalchemy.dialects.postgresql import array engine = create_engine( - 'postgresql+psycopg2://scott:tiger@localhost/mydatabase' + "postgresql+psycopg2://scott:tiger@localhost/mydatabase" ) metadata_obj = MetaData() tbl = Table( - "foo", - metadata_obj, - Column("bar", - ARRAY(Text), - server_default=array(["biz", "bang", "bash"]) - ) + "foo", + metadata_obj, + Column( + "bar", ARRAY(Text), server_default=array(["biz", "bang", "bash"]) + ), ) metadata_obj.create_all(engine) - The above results in a table created with the following SQL:: + The above results in a table created with the following SQL: + + .. 
sourcecode:: sql CREATE TABLE foo ( bar TEXT[] DEFAULT ARRAY['biz', 'bang', 'bash'] @@ -1962,12 +1979,7 @@ def __init__( :class:`_schema.UniqueConstraint` construct explicitly at the level of the :class:`_schema.Table` construct itself:: - Table( - "some_table", - metadata, - Column("x", Integer), - UniqueConstraint("x") - ) + Table("some_table", metadata, Column("x", Integer), UniqueConstraint("x")) The :paramref:`_schema.UniqueConstraint.name` parameter of the unique constraint object is left at its default value @@ -2738,8 +2750,10 @@ class ForeignKey(DialectKWArgs, SchemaItem): object, e.g.:: - t = Table("remote_table", metadata, - Column("remote_id", ForeignKey("main_table.id")) + t = Table( + "remote_table", + metadata, + Column("remote_id", ForeignKey("main_table.id")), ) Note that ``ForeignKey`` is only a marker object that defines @@ -3388,12 +3402,11 @@ class ColumnDefault(DefaultGenerator, ABC): For example, the following:: - Column('foo', Integer, default=50) + Column("foo", Integer, default=50) Is equivalent to:: - Column('foo', Integer, ColumnDefault(50)) - + Column("foo", Integer, ColumnDefault(50)) """ @@ -3680,9 +3693,14 @@ class Sequence(HasSchemaAttr, IdentityOptions, DefaultGenerator): The :class:`.Sequence` is typically associated with a primary key column:: some_table = Table( - 'some_table', metadata, - Column('id', Integer, Sequence('some_table_seq', start=1), - primary_key=True) + "some_table", + metadata, + Column( + "id", + Integer, + Sequence("some_table_seq", start=1), + primary_key=True, + ), ) When CREATE TABLE is emitted for the above :class:`_schema.Table`, if the @@ -3958,7 +3976,7 @@ class FetchedValue(SchemaEventTarget): E.g.:: - Column('foo', Integer, FetchedValue()) + Column("foo", Integer, FetchedValue()) Would indicate that some trigger or default generator will create a new value for the ``foo`` column during an @@ -4024,11 +4042,11 @@ class DefaultClause(FetchedValue): For example, the following:: - Column('foo', Integer, server_default="50") + Column("foo", Integer, server_default="50") Is equivalent to:: - Column('foo', Integer, DefaultClause("50")) + Column("foo", Integer, DefaultClause("50")) """ @@ -4857,11 +4875,13 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint): :class:`_schema.Column` objects corresponding to those marked with the :paramref:`_schema.Column.primary_key` flag:: - >>> my_table = Table('mytable', metadata, - ... Column('id', Integer, primary_key=True), - ... Column('version_id', Integer, primary_key=True), - ... Column('data', String(50)) - ... ) + >>> my_table = Table( + ... "mytable", + ... metadata, + ... Column("id", Integer, primary_key=True), + ... Column("version_id", Integer, primary_key=True), + ... Column("data", String(50)), + ... ) >>> my_table.primary_key PrimaryKeyConstraint( Column('id', Integer(), table=, @@ -4875,13 +4895,14 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint): the "name" of the constraint can also be specified, as well as other options which may be recognized by dialects:: - my_table = Table('mytable', metadata, - Column('id', Integer), - Column('version_id', Integer), - Column('data', String(50)), - PrimaryKeyConstraint('id', 'version_id', - name='mytable_pk') - ) + my_table = Table( + "mytable", + metadata, + Column("id", Integer), + Column("version_id", Integer), + Column("data", String(50)), + PrimaryKeyConstraint("id", "version_id", name="mytable_pk"), + ) The two styles of column-specification should generally not be mixed. 
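A brief, hedged sketch of the flag-based style on its own (table and column names are illustrative, not taken from the patch); the composite constraint that results can be inspected afterwards via ``Table.primary_key``::

    from sqlalchemy import Column, Integer, MetaData, String, Table

    metadata = MetaData()

    # declare the composite key via per-column flags only, without
    # mixing in an explicit PrimaryKeyConstraint
    my_table = Table(
        "mytable",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("version_id", Integer, primary_key=True),
        Column("data", String(50)),
    )

    # the resulting constraint is available on the Table object
    print(my_table.primary_key.columns.keys())  # ['id', 'version_id']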
An warning is emitted if the columns present in the @@ -4899,13 +4920,14 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint): primary key column collection from the :class:`_schema.Table` based on the flags:: - my_table = Table('mytable', metadata, - Column('id', Integer, primary_key=True), - Column('version_id', Integer, primary_key=True), - Column('data', String(50)), - PrimaryKeyConstraint(name='mytable_pk', - mssql_clustered=True) - ) + my_table = Table( + "mytable", + metadata, + Column("id", Integer, primary_key=True), + Column("version_id", Integer, primary_key=True), + Column("data", String(50)), + PrimaryKeyConstraint(name="mytable_pk", mssql_clustered=True), + ) """ @@ -5102,19 +5124,21 @@ class Index( E.g.:: - sometable = Table("sometable", metadata, - Column("name", String(50)), - Column("address", String(100)) - ) + sometable = Table( + "sometable", + metadata, + Column("name", String(50)), + Column("address", String(100)), + ) Index("some_index", sometable.c.name) For a no-frills, single column index, adding :class:`_schema.Column` also supports ``index=True``:: - sometable = Table("sometable", metadata, - Column("name", String(50), index=True) - ) + sometable = Table( + "sometable", metadata, Column("name", String(50), index=True) + ) For a composite index, multiple columns can be specified:: @@ -5133,22 +5157,26 @@ class Index( the names of the indexed columns can be specified as strings:: - Table("sometable", metadata, - Column("name", String(50)), - Column("address", String(100)), - Index("some_index", "name", "address") - ) + Table( + "sometable", + metadata, + Column("name", String(50)), + Column("address", String(100)), + Index("some_index", "name", "address"), + ) To support functional or expression-based indexes in this form, the :func:`_expression.text` construct may be used:: from sqlalchemy import text - Table("sometable", metadata, - Column("name", String(50)), - Column("address", String(100)), - Index("some_index", text("lower(name)")) - ) + Table( + "sometable", + metadata, + Column("name", String(50)), + Column("address", String(100)), + Index("some_index", text("lower(name)")), + ) .. seealso:: @@ -5906,9 +5934,11 @@ class Computed(FetchedValue, SchemaItem): from sqlalchemy import Computed - Table('square', metadata_obj, - Column('side', Float, nullable=False), - Column('area', Float, Computed('side * side')) + Table( + "square", + metadata_obj, + Column("side", Float, nullable=False), + Column("area", Float, Computed("side * side")), ) See the linked documentation below for complete details. @@ -6013,9 +6043,11 @@ class Identity(IdentityOptions, FetchedValue, SchemaItem): from sqlalchemy import Identity - Table('foo', metadata_obj, - Column('id', Integer, Identity()) - Column('description', Text), + Table( + "foo", + metadata_obj, + Column("id", Integer, Identity()), + Column("description", Text), ) See the linked documentation below for complete details. diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 922a8e4a682..e6be0ae5513 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -392,8 +392,7 @@ def prefix_with( stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql") # MySQL 5.7 optimizer hints - stmt = select(table).prefix_with( - "/*+ BKA(t1) */", dialect="mysql") + stmt = select(table).prefix_with("/*+ BKA(t1) */", dialect="mysql") Multiple prefixes can be specified by multiple calls to :meth:`_expression.HasPrefixes.prefix_with`. 
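As a small sketch of chaining multiple calls (the table and the particular MySQL hint strings are illustrative assumptions, not from the patch), each call appends another prefix and ``dialect="mysql"`` limits rendering to that backend::

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.dialects import mysql

    metadata = MetaData()
    t = Table("t", metadata, Column("x", Integer))

    # prefixes render after the SELECT keyword, in the order added
    stmt = (
        select(t)
        .prefix_with("HIGH_PRIORITY", dialect="mysql")
        .prefix_with("SQL_NO_CACHE", dialect="mysql")
    )

    print(stmt.compile(dialect=mysql.dialect()))
    # approximately: SELECT HIGH_PRIORITY SQL_NO_CACHE t.x FROM t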
@@ -440,8 +439,13 @@ def suffix_with( E.g.:: - stmt = select(col1, col2).cte().suffix_with( - "cycle empno set y_cycle to 1 default 0", dialect="oracle") + stmt = ( + select(col1, col2) + .cte() + .suffix_with( + "cycle empno set y_cycle to 1 default 0", dialect="oracle" + ) + ) Multiple suffixes can be specified by multiple calls to :meth:`_expression.HasSuffixes.suffix_with`. @@ -540,20 +544,21 @@ def with_hint( the table or alias. E.g. when using Oracle Database, the following:: - select(mytable).\ - with_hint(mytable, "index(%(name)s ix_mytable)") + select(mytable).with_hint(mytable, "index(%(name)s ix_mytable)") - Would render SQL as:: + Would render SQL as: + + .. sourcecode:: sql select /*+ index(mytable ix_mytable) */ ... from mytable The ``dialect_name`` option will limit the rendering of a particular hint to a particular backend. Such as, to add hints for both Oracle - Database and Sybase simultaneously:: + Database and MSSql simultaneously:: - select(mytable).\ - with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\ - with_hint(mytable, "WITH INDEX ix_mytable", 'mssql') + select(mytable).with_hint( + mytable, "index(%(name)s ix_mytable)", "oracle" + ).with_hint(mytable, "WITH INDEX ix_mytable", "mssql") .. seealso:: @@ -665,11 +670,14 @@ def join( from sqlalchemy import join - j = user_table.join(address_table, - user_table.c.id == address_table.c.user_id) + j = user_table.join( + address_table, user_table.c.id == address_table.c.user_id + ) stmt = select(user_table).select_from(j) - would emit SQL along the lines of:: + would emit SQL along the lines of: + + .. sourcecode:: sql SELECT user.id, user.name FROM user JOIN address ON user.id = address.user_id @@ -715,15 +723,15 @@ def outerjoin( from sqlalchemy import outerjoin - j = user_table.outerjoin(address_table, - user_table.c.id == address_table.c.user_id) + j = user_table.outerjoin( + address_table, user_table.c.id == address_table.c.user_id + ) The above is equivalent to:: j = user_table.join( - address_table, - user_table.c.id == address_table.c.user_id, - isouter=True) + address_table, user_table.c.id == address_table.c.user_id, isouter=True + ) :param right: the right side of the join; this is any :class:`_expression.FromClause` object such as a @@ -745,7 +753,7 @@ def outerjoin( :class:`_expression.Join` - """ + """ # noqa: E501 return Join(self, right, onclause, True, full) @@ -756,7 +764,7 @@ def alias( E.g.:: - a2 = some_table.alias('a2') + a2 = some_table.alias("a2") The above code creates an :class:`_expression.Alias` object which can be used @@ -893,7 +901,7 @@ def entity_namespace(self) -> _EntityNamespace: This is the namespace that is used to resolve "filter_by()" type expressions, such as:: - stmt.filter_by(address='some address') + stmt.filter_by(address="some address") It defaults to the ``.c`` collection, however internally it can be overridden using the "entity_namespace" annotation to deliver @@ -1076,7 +1084,11 @@ class SelectLabelStyle(Enum): >>> from sqlalchemy import table, column, select, true, LABEL_STYLE_NONE >>> table1 = table("table1", column("columna"), column("columnb")) >>> table2 = table("table2", column("columna"), column("columnc")) - >>> print(select(table1, table2).join(table2, true()).set_label_style(LABEL_STYLE_NONE)) + >>> print( + ... select(table1, table2) + ... .join(table2, true()) + ... .set_label_style(LABEL_STYLE_NONE) + ... 
) {printsql}SELECT table1.columna, table1.columnb, table2.columna, table2.columnc FROM table1 JOIN table2 ON true @@ -1098,10 +1110,20 @@ class SelectLabelStyle(Enum): .. sourcecode:: pycon+sql - >>> from sqlalchemy import table, column, select, true, LABEL_STYLE_TABLENAME_PLUS_COL + >>> from sqlalchemy import ( + ... table, + ... column, + ... select, + ... true, + ... LABEL_STYLE_TABLENAME_PLUS_COL, + ... ) >>> table1 = table("table1", column("columna"), column("columnb")) >>> table2 = table("table2", column("columna"), column("columnc")) - >>> print(select(table1, table2).join(table2, true()).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)) + >>> print( + ... select(table1, table2) + ... .join(table2, true()) + ... .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) + ... ) {printsql}SELECT table1.columna AS table1_columna, table1.columnb AS table1_columnb, table2.columna AS table2_columna, table2.columnc AS table2_columnc FROM table1 JOIN table2 ON true @@ -1127,10 +1149,20 @@ class SelectLabelStyle(Enum): .. sourcecode:: pycon+sql - >>> from sqlalchemy import table, column, select, true, LABEL_STYLE_DISAMBIGUATE_ONLY + >>> from sqlalchemy import ( + ... table, + ... column, + ... select, + ... true, + ... LABEL_STYLE_DISAMBIGUATE_ONLY, + ... ) >>> table1 = table("table1", column("columna"), column("columnb")) >>> table2 = table("table2", column("columna"), column("columnc")) - >>> print(select(table1, table2).join(table2, true()).set_label_style(LABEL_STYLE_DISAMBIGUATE_ONLY)) + >>> print( + ... select(table1, table2) + ... .join(table2, true()) + ... .set_label_style(LABEL_STYLE_DISAMBIGUATE_ONLY) + ... ) {printsql}SELECT table1.columna, table1.columnb, table2.columna AS columna_1, table2.columnc FROM table1 JOIN table2 ON true @@ -1528,7 +1560,9 @@ def select(self) -> Select[Any]: stmt = stmt.select() - The above will produce a SQL string resembling:: + The above will produce a SQL string resembling: + + .. sourcecode:: sql SELECT table_a.id, table_a.col, table_b.id, table_b.a_id FROM table_a JOIN table_b ON table_a.id = table_b.a_id @@ -1762,7 +1796,9 @@ class TableValuedAlias(LateralFromClause, Alias): .. sourcecode:: pycon+sql >>> from sqlalchemy import select, func - >>> fn = func.json_array_elements_text('["one", "two", "three"]').table_valued("value") + >>> fn = func.json_array_elements_text('["one", "two", "three"]').table_valued( + ... "value" + ... ) >>> print(select(fn.c.value)) {printsql}SELECT anon_1.value FROM json_array_elements_text(:json_array_elements_text_1) AS anon_1 @@ -1881,8 +1917,9 @@ def render_derived( >>> print( ... select( - ... func.unnest(array(["one", "two", "three"])). - table_valued("x", with_ordinality="o").render_derived() + ... func.unnest(array(["one", "two", "three"])) + ... .table_valued("x", with_ordinality="o") + ... .render_derived() ... ) ... ) {printsql}SELECT anon_1.x, anon_1.o @@ -1896,9 +1933,7 @@ def render_derived( >>> print( ... select( - ... func.json_to_recordset( - ... '[{"a":1,"b":"foo"},{"a":"2","c":"bar"}]' - ... ) + ... func.json_to_recordset('[{"a":1,"b":"foo"},{"a":"2","c":"bar"}]') ... .table_valued(column("a", Integer), column("b", String)) ... .render_derived(with_types=True) ... 
) @@ -2455,16 +2490,20 @@ def add_cte(self, *ctes: CTE, nest_here: bool = False) -> Self: E.g.:: from sqlalchemy import table, column, select - t = table('t', column('c1'), column('c2')) + + t = table("t", column("c1"), column("c2")) ins = t.insert().values({"c1": "x", "c2": "y"}).cte() stmt = select(t).add_cte(ins) - Would render:: + Would render: + + .. sourcecode:: sql - WITH anon_1 AS - (INSERT INTO t (c1, c2) VALUES (:param_1, :param_2)) + WITH anon_1 AS ( + INSERT INTO t (c1, c2) VALUES (:param_1, :param_2) + ) SELECT t.c1, t.c2 FROM t @@ -2480,9 +2519,7 @@ def add_cte(self, *ctes: CTE, nest_here: bool = False) -> Self: t = table("t", column("c1"), column("c2")) - delete_statement_cte = ( - t.delete().where(t.c.c1 < 1).cte("deletions") - ) + delete_statement_cte = t.delete().where(t.c.c1 < 1).cte("deletions") insert_stmt = insert(t).values({"c1": 1, "c2": 2}) update_statement = insert_stmt.on_conflict_do_update( @@ -2495,10 +2532,13 @@ def add_cte(self, *ctes: CTE, nest_here: bool = False) -> Self: print(update_statement) - The above statement renders as:: + The above statement renders as: + + .. sourcecode:: sql - WITH deletions AS - (DELETE FROM t WHERE t.c1 < %(c1_1)s) + WITH deletions AS ( + DELETE FROM t WHERE t.c1 < %(c1_1)s + ) INSERT INTO t (c1, c2) VALUES (%(c1)s, %(c2)s) ON CONFLICT (c1) DO UPDATE SET c1 = excluded.c1, c2 = excluded.c2 @@ -2522,10 +2562,8 @@ def add_cte(self, *ctes: CTE, nest_here: bool = False) -> Self: :paramref:`.HasCTE.cte.nesting` - """ - opt = _CTEOpts( - nest_here, - ) + """ # noqa: E501 + opt = _CTEOpts(nest_here) for cte in ctes: cte = coercions.expect(roles.IsCTERole, cte) self._independent_ctes += (cte,) @@ -2593,95 +2631,123 @@ def cte( Example 1, non recursive:: - from sqlalchemy import (Table, Column, String, Integer, - MetaData, select, func) + from sqlalchemy import ( + Table, + Column, + String, + Integer, + MetaData, + select, + func, + ) metadata = MetaData() - orders = Table('orders', metadata, - Column('region', String), - Column('amount', Integer), - Column('product', String), - Column('quantity', Integer) + orders = Table( + "orders", + metadata, + Column("region", String), + Column("amount", Integer), + Column("product", String), + Column("quantity", Integer), ) - regional_sales = select( - orders.c.region, - func.sum(orders.c.amount).label('total_sales') - ).group_by(orders.c.region).cte("regional_sales") + regional_sales = ( + select(orders.c.region, func.sum(orders.c.amount).label("total_sales")) + .group_by(orders.c.region) + .cte("regional_sales") + ) - top_regions = select(regional_sales.c.region).\ - where( - regional_sales.c.total_sales > - select( - func.sum(regional_sales.c.total_sales) / 10 - ) - ).cte("top_regions") + top_regions = ( + select(regional_sales.c.region) + .where( + regional_sales.c.total_sales + > select(func.sum(regional_sales.c.total_sales) / 10) + ) + .cte("top_regions") + ) - statement = select( - orders.c.region, - orders.c.product, - func.sum(orders.c.quantity).label("product_units"), - func.sum(orders.c.amount).label("product_sales") - ).where(orders.c.region.in_( - select(top_regions.c.region) - )).group_by(orders.c.region, orders.c.product) + statement = ( + select( + orders.c.region, + orders.c.product, + func.sum(orders.c.quantity).label("product_units"), + func.sum(orders.c.amount).label("product_sales"), + ) + .where(orders.c.region.in_(select(top_regions.c.region))) + .group_by(orders.c.region, orders.c.product) + ) result = conn.execute(statement).fetchall() Example 2, WITH RECURSIVE:: - from 
sqlalchemy import (Table, Column, String, Integer, - MetaData, select, func) + from sqlalchemy import ( + Table, + Column, + String, + Integer, + MetaData, + select, + func, + ) metadata = MetaData() - parts = Table('parts', metadata, - Column('part', String), - Column('sub_part', String), - Column('quantity', Integer), + parts = Table( + "parts", + metadata, + Column("part", String), + Column("sub_part", String), + Column("quantity", Integer), ) - included_parts = select(\ - parts.c.sub_part, parts.c.part, parts.c.quantity\ - ).\ - where(parts.c.part=='our part').\ - cte(recursive=True) + included_parts = ( + select(parts.c.sub_part, parts.c.part, parts.c.quantity) + .where(parts.c.part == "our part") + .cte(recursive=True) + ) incl_alias = included_parts.alias() parts_alias = parts.alias() included_parts = included_parts.union_all( select( - parts_alias.c.sub_part, - parts_alias.c.part, - parts_alias.c.quantity - ).\ - where(parts_alias.c.part==incl_alias.c.sub_part) + parts_alias.c.sub_part, parts_alias.c.part, parts_alias.c.quantity + ).where(parts_alias.c.part == incl_alias.c.sub_part) ) statement = select( - included_parts.c.sub_part, - func.sum(included_parts.c.quantity). - label('total_quantity') - ).\ - group_by(included_parts.c.sub_part) + included_parts.c.sub_part, + func.sum(included_parts.c.quantity).label("total_quantity"), + ).group_by(included_parts.c.sub_part) result = conn.execute(statement).fetchall() Example 3, an upsert using UPDATE and INSERT with CTEs:: from datetime import date - from sqlalchemy import (MetaData, Table, Column, Integer, - Date, select, literal, and_, exists) + from sqlalchemy import ( + MetaData, + Table, + Column, + Integer, + Date, + select, + literal, + and_, + exists, + ) metadata = MetaData() - visitors = Table('visitors', metadata, - Column('product_id', Integer, primary_key=True), - Column('date', Date, primary_key=True), - Column('count', Integer), + visitors = Table( + "visitors", + metadata, + Column("product_id", Integer, primary_key=True), + Column("date", Date, primary_key=True), + Column("count", Integer), ) # add 5 visitors for the product_id == 1 @@ -2691,31 +2757,31 @@ def cte( update_cte = ( visitors.update() - .where(and_(visitors.c.product_id == product_id, - visitors.c.date == day)) + .where( + and_(visitors.c.product_id == product_id, visitors.c.date == day) + ) .values(count=visitors.c.count + count) .returning(literal(1)) - .cte('update_cte') + .cte("update_cte") ) upsert = visitors.insert().from_select( [visitors.c.product_id, visitors.c.date, visitors.c.count], - select(literal(product_id), literal(day), literal(count)) - .where(~exists(update_cte.select())) + select(literal(product_id), literal(day), literal(count)).where( + ~exists(update_cte.select()) + ), ) connection.execute(upsert) Example 4, Nesting CTE (SQLAlchemy 1.4.24 and above):: - value_a = select( - literal("root").label("n") - ).cte("value_a") + value_a = select(literal("root").label("n")).cte("value_a") # A nested CTE with the same name as the root one - value_a_nested = select( - literal("nesting").label("n") - ).cte("value_a", nesting=True) + value_a_nested = select(literal("nesting").label("n")).cte( + "value_a", nesting=True + ) # Nesting CTEs takes ascendency locally # over the CTEs at a higher level @@ -2724,7 +2790,9 @@ def cte( value_ab = select(value_a.c.n.label("a"), value_b.c.n.label("b")) The above query will render the second CTE nested inside the first, - shown with inline parameters below as:: + shown with inline parameters below as: + + .. 
sourcecode:: sql WITH value_a AS @@ -2739,21 +2807,17 @@ def cte( The same CTE can be set up using the :meth:`.HasCTE.add_cte` method as follows (SQLAlchemy 2.0 and above):: - value_a = select( - literal("root").label("n") - ).cte("value_a") + value_a = select(literal("root").label("n")).cte("value_a") # A nested CTE with the same name as the root one - value_a_nested = select( - literal("nesting").label("n") - ).cte("value_a") + value_a_nested = select(literal("nesting").label("n")).cte("value_a") # Nesting CTEs takes ascendency locally # over the CTEs at a higher level value_b = ( - select(value_a_nested.c.n). - add_cte(value_a_nested, nest_here=True). - cte("value_b") + select(value_a_nested.c.n) + .add_cte(value_a_nested, nest_here=True) + .cte("value_b") ) value_ab = select(value_a.c.n.label("a"), value_b.c.n.label("b")) @@ -2768,9 +2832,7 @@ def cte( Column("right", Integer), ) - root_node = select(literal(1).label("node")).cte( - "nodes", recursive=True - ) + root_node = select(literal(1).label("node")).cte("nodes", recursive=True) left_edge = select(edge.c.left).join( root_node, edge.c.right == root_node.c.node @@ -2783,7 +2845,9 @@ def cte( subgraph = select(subgraph_cte) - The above query will render 2 UNIONs inside the recursive CTE:: + The above query will render 2 UNIONs inside the recursive CTE: + + .. sourcecode:: sql WITH RECURSIVE nodes(node) AS ( SELECT 1 AS node @@ -2801,7 +2865,7 @@ def cte( :meth:`_orm.Query.cte` - ORM version of :meth:`_expression.HasCTE.cte`. - """ + """ # noqa: E501 return CTE._construct( self, name=name, recursive=recursive, nesting=nesting ) @@ -2958,10 +3022,11 @@ class TableClause(roles.DMLTableRole, Immutable, NamedFromClause): from sqlalchemy import table, column - user = table("user", - column("id"), - column("name"), - column("description"), + user = table( + "user", + column("id"), + column("name"), + column("description"), ) The :class:`_expression.TableClause` construct serves as the base for @@ -3067,7 +3132,7 @@ def insert(self) -> util.preloaded.sql_dml.Insert: E.g.:: - table.insert().values(name='foo') + table.insert().values(name="foo") See :func:`_expression.insert` for argument and usage information. @@ -3082,7 +3147,7 @@ def update(self) -> Update: E.g.:: - table.update().where(table.c.id==7).values(name='foo') + table.update().where(table.c.id == 7).values(name="foo") See :func:`_expression.update` for argument and usage information. @@ -3098,7 +3163,7 @@ def delete(self) -> Delete: E.g.:: - table.delete().where(table.c.id==7) + table.delete().where(table.c.id == 7) See :func:`_expression.delete` for argument and usage information. @@ -3286,7 +3351,7 @@ def data(self, values: Sequence[Tuple[Any, ...]]) -> Self: E.g.:: - my_values = my_values.data([(1, 'value 1'), (2, 'value2')]) + my_values = my_values.data([(1, "value 1"), (2, "value2")]) :param values: a sequence (i.e. list) of tuples that map to the column expressions given in the :class:`_expression.Values` @@ -3631,7 +3696,9 @@ def subquery(self, name: Optional[str] = None) -> Subquery: stmt = select(table.c.id, table.c.name) - The above statement might look like:: + The above statement might look like: + + .. sourcecode:: sql SELECT table.id, table.name FROM table @@ -3642,7 +3709,9 @@ def subquery(self, name: Optional[str] = None) -> Subquery: subq = stmt.subquery() new_stmt = select(subq) - The above renders as:: + The above renders as: + + .. 
sourcecode:: sql SELECT anon_1.id, anon_1.name FROM (SELECT table.id, table.name FROM table) AS anon_1 @@ -3837,12 +3906,16 @@ def with_for_update( stmt = select(table).with_for_update(nowait=True) On a database like PostgreSQL or Oracle Database, the above would - render a statement like:: + render a statement like: + + .. sourcecode:: sql SELECT table.a, table.b FROM table FOR UPDATE NOWAIT on other backends, the ``nowait`` option is ignored and instead - would produce:: + would produce: + + .. sourcecode:: sql SELECT table.a, table.b FROM table FOR UPDATE @@ -4259,8 +4332,7 @@ def group_by( e.g.:: - stmt = select(table.c.name, func.max(table.c.stat)).\ - group_by(table.c.name) + stmt = select(table.c.name, func.max(table.c.stat)).group_by(table.c.name) :param \*clauses: a series of :class:`_expression.ColumnElement` constructs @@ -4273,7 +4345,7 @@ def group_by( :ref:`tutorial_order_by_label` - in the :ref:`unified_tutorial` - """ + """ # noqa: E501 if not clauses and __first is None: self._group_by_clauses = () @@ -5346,11 +5418,17 @@ def join( E.g.:: - stmt = select(user_table).join(address_table, user_table.c.id == address_table.c.user_id) + stmt = select(user_table).join( + address_table, user_table.c.id == address_table.c.user_id + ) - The above statement generates SQL similar to:: + The above statement generates SQL similar to: - SELECT user.id, user.name FROM user JOIN address ON user.id = address.user_id + .. sourcecode:: sql + + SELECT user.id, user.name + FROM user + JOIN address ON user.id = address.user_id .. versionchanged:: 1.4 :meth:`_expression.Select.join` now creates a :class:`_sql.Join` object between a :class:`_sql.FromClause` @@ -5454,7 +5532,9 @@ def join_from( user_table, address_table, user_table.c.id == address_table.c.user_id ) - The above statement generates SQL similar to:: + The above statement generates SQL similar to: + + .. sourcecode:: sql SELECT user.id, user.name, address.id, address.email, address.user_id FROM user JOIN address ON user.id = address.user_id @@ -6062,9 +6142,12 @@ def distinct(self, *expr: _ColumnExpressionArgument[Any]) -> Self: E.g.:: from sqlalchemy import select + stmt = select(users_table.c.id, users_table.c.name).distinct() - The above would produce an statement resembling:: + The above would produce an statement resembling: + + .. 
sourcecode:: sql SELECT DISTINCT user.id, user.name FROM user @@ -6100,12 +6183,11 @@ def select_from(self, *froms: _FromClauseArgument) -> Self: E.g.:: - table1 = table('t1', column('a')) - table2 = table('t2', column('b')) - s = select(table1.c.a).\ - select_from( - table1.join(table2, table1.c.a==table2.c.b) - ) + table1 = table("t1", column("a")) + table2 = table("t2", column("b")) + s = select(table1.c.a).select_from( + table1.join(table2, table1.c.a == table2.c.b) + ) The "from" list is a unique set on the identity of each element, so adding an already present :class:`_schema.Table` @@ -6124,7 +6206,7 @@ def select_from(self, *froms: _FromClauseArgument) -> Self: if desired, in the case that the FROM clause cannot be fully derived from the columns clause:: - select(func.count('*')).select_from(table1) + select(func.count("*")).select_from(table1) """ @@ -6277,8 +6359,8 @@ def selected_columns( :class:`_expression.ColumnElement` objects are directly present as they were given, e.g.:: - col1 = column('q', Integer) - col2 = column('p', Integer) + col1 = column("q", Integer) + col2 = column("p", Integer) stmt = select(col1, col2) Above, ``stmt.selected_columns`` would be a collection that contains @@ -6293,7 +6375,8 @@ def selected_columns( criteria, e.g.:: def filter_on_id(my_select, id): - return my_select.where(my_select.selected_columns['id'] == id) + return my_select.where(my_select.selected_columns["id"] == id) + stmt = select(MyModel) @@ -6742,7 +6825,9 @@ def select(self) -> Select[Tuple[bool]]: stmt = exists(some_table.c.id).where(some_table.c.id == 5).select() - This will produce a statement resembling:: + This will produce a statement resembling: + + .. sourcecode:: sql SELECT EXISTS (SELECT id FROM some_table WHERE some_table = :param) AS anon_1 diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index c181f24d91b..f16db640664 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -202,7 +202,7 @@ def __init__( .. sourcecode:: pycon+sql >>> from sqlalchemy import cast, select, String - >>> print(select(cast('some string', String(collation='utf8')))) + >>> print(select(cast("some string", String(collation="utf8")))) {printsql}SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1 .. 
note:: @@ -651,7 +651,7 @@ def __init__( Column( "float_data", - Float(5).with_variant(oracle.FLOAT(binary_precision=16), "oracle") + Float(5).with_variant(oracle.FLOAT(binary_precision=16), "oracle"), ) :param asdecimal: the same flag as that of :class:`.Numeric`, but @@ -1226,15 +1226,14 @@ class Enum(String, SchemaType, Emulated, TypeEngine[Union[str, enum.Enum]]): import enum from sqlalchemy import Enum + class MyEnum(enum.Enum): one = 1 two = 2 three = 3 - t = Table( - 'data', MetaData(), - Column('value', Enum(MyEnum)) - ) + + t = Table("data", MetaData(), Column("value", Enum(MyEnum))) connection.execute(t.insert(), {"value": MyEnum.two}) assert connection.scalar(t.select()) is MyEnum.two @@ -2175,15 +2174,16 @@ class JSON(Indexable, TypeEngine[Any]): The :class:`_types.JSON` type stores arbitrary JSON format data, e.g.:: - data_table = Table('data_table', metadata, - Column('id', Integer, primary_key=True), - Column('data', JSON) + data_table = Table( + "data_table", + metadata, + Column("id", Integer, primary_key=True), + Column("data", JSON), ) with engine.connect() as conn: conn.execute( - data_table.insert(), - {"data": {"key1": "value1", "key2": "value2"}} + data_table.insert(), {"data": {"key1": "value1", "key2": "value2"}} ) **JSON-Specific Expression Operators** @@ -2193,7 +2193,7 @@ class JSON(Indexable, TypeEngine[Any]): * Keyed index operations:: - data_table.c.data['some key'] + data_table.c.data["some key"] * Integer index operations:: @@ -2201,7 +2201,7 @@ class JSON(Indexable, TypeEngine[Any]): * Path index operations:: - data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')] + data_table.c.data[("key_1", "key_2", 5, ..., "key_n")] * Data casters for specific JSON element types, subsequent to an index or path operation being invoked:: @@ -2256,13 +2256,12 @@ class JSON(Indexable, TypeEngine[Any]): from sqlalchemy import cast, type_coerce from sqlalchemy import String, JSON - cast( - data_table.c.data['some_key'], String - ) == type_coerce(55, JSON) + + cast(data_table.c.data["some_key"], String) == type_coerce(55, JSON) The above case now works directly as:: - data_table.c.data['some_key'].as_integer() == 5 + data_table.c.data["some_key"].as_integer() == 5 For details on the previous comparison approach within the 1.3.x series, see the documentation for SQLAlchemy 1.2 or the included HTML @@ -2293,6 +2292,7 @@ class JSON(Indexable, TypeEngine[Any]): should be SQL NULL as opposed to JSON ``"null"``:: from sqlalchemy import null + conn.execute(table.insert(), {"json_value": null()}) To insert or select against a value that is JSON ``"null"``, use the @@ -2325,7 +2325,8 @@ class JSON(Indexable, TypeEngine[Any]): engine = create_engine( "sqlite://", - json_serializer=lambda obj: json.dumps(obj, ensure_ascii=False)) + json_serializer=lambda obj: json.dumps(obj, ensure_ascii=False), + ) .. versionchanged:: 1.3.7 @@ -2343,7 +2344,7 @@ class JSON(Indexable, TypeEngine[Any]): :class:`sqlalchemy.dialects.sqlite.JSON` - """ + """ # noqa: E501 __visit_name__ = "JSON" @@ -2377,8 +2378,7 @@ class JSON(Indexable, TypeEngine[Any]): transparent method is to use :func:`_expression.text`:: Table( - 'my_table', metadata, - Column('json_data', JSON, default=text("'null'")) + "my_table", metadata, Column("json_data", JSON, default=text("'null'")) ) While it is possible to use :attr:`_types.JSON.NULL` in this context, the @@ -2390,7 +2390,7 @@ class JSON(Indexable, TypeEngine[Any]): generated defaults. 
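A minimal, self-contained round-trip sketch of the storage pattern described above (assuming in-memory SQLite, which supports the generic ``JSON`` type; table and key names are illustrative)::

    from sqlalchemy import JSON, Column, Integer, MetaData, Table, create_engine, select

    engine = create_engine("sqlite://")
    metadata = MetaData()
    data_table = Table(
        "data_table",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", JSON),
    )
    metadata.create_all(engine)

    with engine.begin() as conn:
        # the dict is serialized to JSON text on the way in
        conn.execute(
            data_table.insert(), {"data": {"key1": "value1", "key2": [1, 2, 3]}}
        )

    with engine.connect() as conn:
        # and deserialized back into Python structures on the way out
        value = conn.execute(select(data_table.c.data)).scalar_one()
        assert value["key2"] == [1, 2, 3]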
- """ + """ # noqa: E501 def __init__(self, none_as_null: bool = False): """Construct a :class:`_types.JSON` type. @@ -2403,6 +2403,7 @@ def __init__(self, none_as_null: bool = False): as SQL NULL:: from sqlalchemy import null + conn.execute(table.insert(), {"data": null()}) .. note:: @@ -2544,15 +2545,13 @@ def as_boolean(self): e.g.:: - stmt = select( - mytable.c.json_column['some_data'].as_boolean() - ).where( - mytable.c.json_column['some_data'].as_boolean() == True + stmt = select(mytable.c.json_column["some_data"].as_boolean()).where( + mytable.c.json_column["some_data"].as_boolean() == True ) .. versionadded:: 1.3.11 - """ + """ # noqa: E501 return self._binary_w_type(Boolean(), "as_boolean") def as_string(self): @@ -2563,16 +2562,13 @@ def as_string(self): e.g.:: - stmt = select( - mytable.c.json_column['some_data'].as_string() - ).where( - mytable.c.json_column['some_data'].as_string() == - 'some string' + stmt = select(mytable.c.json_column["some_data"].as_string()).where( + mytable.c.json_column["some_data"].as_string() == "some string" ) .. versionadded:: 1.3.11 - """ + """ # noqa: E501 return self._binary_w_type(Unicode(), "as_string") def as_integer(self): @@ -2583,15 +2579,13 @@ def as_integer(self): e.g.:: - stmt = select( - mytable.c.json_column['some_data'].as_integer() - ).where( - mytable.c.json_column['some_data'].as_integer() == 5 + stmt = select(mytable.c.json_column["some_data"].as_integer()).where( + mytable.c.json_column["some_data"].as_integer() == 5 ) .. versionadded:: 1.3.11 - """ + """ # noqa: E501 return self._binary_w_type(Integer(), "as_integer") def as_float(self): @@ -2602,15 +2596,13 @@ def as_float(self): e.g.:: - stmt = select( - mytable.c.json_column['some_data'].as_float() - ).where( - mytable.c.json_column['some_data'].as_float() == 29.75 + stmt = select(mytable.c.json_column["some_data"].as_float()).where( + mytable.c.json_column["some_data"].as_float() == 29.75 ) .. versionadded:: 1.3.11 - """ + """ # noqa: E501 return self._binary_w_type(Float(), "as_float") def as_numeric(self, precision, scale, asdecimal=True): @@ -2621,16 +2613,13 @@ def as_numeric(self, precision, scale, asdecimal=True): e.g.:: - stmt = select( - mytable.c.json_column['some_data'].as_numeric(10, 6) - ).where( - mytable.c. - json_column['some_data'].as_numeric(10, 6) == 29.75 + stmt = select(mytable.c.json_column["some_data"].as_numeric(10, 6)).where( + mytable.c.json_column["some_data"].as_numeric(10, 6) == 29.75 ) .. versionadded:: 1.4.0b2 - """ + """ # noqa: E501 return self._binary_w_type( Numeric(precision, scale, asdecimal=asdecimal), "as_numeric" ) @@ -2643,7 +2632,7 @@ def as_json(self): e.g.:: - stmt = select(mytable.c.json_column['some_data'].as_json()) + stmt = select(mytable.c.json_column["some_data"].as_json()) This is typically the default behavior of indexed elements in any case. @@ -2761,26 +2750,21 @@ class ARRAY( An :class:`_types.ARRAY` type is constructed given the "type" of element:: - mytable = Table("mytable", metadata, - Column("data", ARRAY(Integer)) - ) + mytable = Table("mytable", metadata, Column("data", ARRAY(Integer))) The above type represents an N-dimensional array, meaning a supporting backend such as PostgreSQL will interpret values with any number of dimensions automatically. 
To produce an INSERT construct that passes in a 1-dimensional array of integers:: - connection.execute( - mytable.insert(), - {"data": [1,2,3]} - ) + connection.execute(mytable.insert(), {"data": [1, 2, 3]}) The :class:`_types.ARRAY` type can be constructed given a fixed number of dimensions:: - mytable = Table("mytable", metadata, - Column("data", ARRAY(Integer, dimensions=2)) - ) + mytable = Table( + "mytable", metadata, Column("data", ARRAY(Integer, dimensions=2)) + ) Sending a number of dimensions is optional, but recommended if the datatype is to represent arrays of more than one dimension. This number @@ -2814,10 +2798,9 @@ class ARRAY( as well as UPDATE statements when the :meth:`_expression.Update.values` method is used:: - mytable.update().values({ - mytable.c.data[5]: 7, - mytable.c.data[2:7]: [1, 2, 3] - }) + mytable.update().values( + {mytable.c.data[5]: 7, mytable.c.data[2:7]: [1, 2, 3]} + ) Indexed access is one-based by default; for zero-based index conversion, set :paramref:`_types.ARRAY.zero_indexes`. @@ -2839,6 +2822,7 @@ class ARRAY( from sqlalchemy import ARRAY from sqlalchemy.ext.mutable import MutableList + class SomeOrmClass(Base): # ... @@ -2877,7 +2861,7 @@ def __init__( E.g.:: - Column('myarray', ARRAY(Integer)) + Column("myarray", ARRAY(Integer)) Arguments are: @@ -2986,9 +2970,7 @@ def any(self, other, operator=None): from sqlalchemy.sql import operators conn.execute( - select(table.c.data).where( - table.c.data.any(7, operator=operators.lt) - ) + select(table.c.data).where(table.c.data.any(7, operator=operators.lt)) ) :param other: expression to be compared @@ -3002,7 +2984,7 @@ def any(self, other, operator=None): :meth:`.types.ARRAY.Comparator.all` - """ + """ # noqa: E501 elements = util.preloaded.sql_elements operator = operator if operator else operators.eq @@ -3035,9 +3017,7 @@ def all(self, other, operator=None): from sqlalchemy.sql import operators conn.execute( - select(table.c.data).where( - table.c.data.all(7, operator=operators.lt) - ) + select(table.c.data).where(table.c.data.all(7, operator=operators.lt)) ) :param other: expression to be compared @@ -3051,7 +3031,7 @@ def all(self, other, operator=None): :meth:`.types.ARRAY.Comparator.any` - """ + """ # noqa: E501 elements = util.preloaded.sql_elements operator = operator if operator else operators.eq @@ -3540,14 +3520,13 @@ class Uuid(Emulated, TypeEngine[_UUID_RETURN]): t = Table( "t", metadata_obj, - Column('uuid_data', Uuid, primary_key=True), - Column("other_data", String) + Column("uuid_data", Uuid, primary_key=True), + Column("other_data", String), ) with engine.begin() as conn: conn.execute( - t.insert(), - {"uuid_data": uuid.uuid4(), "other_data", "some data"} + t.insert(), {"uuid_data": uuid.uuid4(), "other_data": "some data"} ) To have the :class:`_sqltypes.Uuid` datatype work with string-based @@ -3561,7 +3540,7 @@ class Uuid(Emulated, TypeEngine[_UUID_RETURN]): :class:`_sqltypes.UUID` - represents exactly the ``UUID`` datatype without any backend-agnostic behaviors. 
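As a hedged sketch of the string-based handling mentioned above (assuming the ``Uuid.as_uuid`` flag, set to ``False`` here so plain strings are passed and returned; the in-memory SQLite engine and table name are illustrative)::

    import uuid

    from sqlalchemy import Column, MetaData, String, Table, Uuid, create_engine, select

    metadata = MetaData()
    t = Table(
        "t",
        metadata,
        Column("uuid_data", Uuid(as_uuid=False), primary_key=True),
        Column("other_data", String),
    )

    engine = create_engine("sqlite://")
    metadata.create_all(engine)

    with engine.begin() as conn:
        # with as_uuid=False, string values are accepted directly
        conn.execute(
            t.insert(),
            {"uuid_data": str(uuid.uuid4()), "other_data": "some data"},
        )
        # and string values are returned on the way out
        value = conn.execute(select(t.c.uuid_data)).scalar_one()
        assert isinstance(value, str)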
- """ + """ # noqa: E501 __visit_name__ = "uuid" diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 633763aaa51..971acf30e3d 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -311,11 +311,13 @@ def evaluates_none(self) -> Self: E.g.:: Table( - 'some_table', metadata, + "some_table", + metadata, Column( String(50).evaluates_none(), nullable=True, - server_default='no value') + server_default="no value", + ), ) The ORM uses this flag to indicate that a positive value of ``None`` @@ -641,7 +643,7 @@ def with_variant( string_type = String() string_type = string_type.with_variant( - mysql.VARCHAR(collation='foo'), 'mysql', 'mariadb' + mysql.VARCHAR(collation="foo"), "mysql", "mariadb" ) The variant mapping indicates that when this type is @@ -1128,7 +1130,7 @@ class ExternalType(TypeEngineMixin): """ cache_ok: Optional[bool] = None - """Indicate if statements using this :class:`.ExternalType` are "safe to + '''Indicate if statements using this :class:`.ExternalType` are "safe to cache". The default value ``None`` will emit a warning and then not allow caching @@ -1169,12 +1171,12 @@ def __init__(self, choices): series of tuples. Given a previously un-cacheable type as:: class LookupType(UserDefinedType): - '''a custom type that accepts a dictionary as a parameter. + """a custom type that accepts a dictionary as a parameter. this is the non-cacheable version, as "self.lookup" is not hashable. - ''' + """ def __init__(self, lookup): self.lookup = lookup @@ -1182,8 +1184,7 @@ def __init__(self, lookup): def get_col_spec(self, **kw): return "VARCHAR(255)" - def bind_processor(self, dialect): - # ... works with "self.lookup" ... + def bind_processor(self, dialect): ... # works with "self.lookup" ... Where "lookup" is a dictionary. The type will not be able to generate a cache key:: @@ -1219,7 +1220,7 @@ def bind_processor(self, dialect): to the ".lookup" attribute:: class LookupType(UserDefinedType): - '''a custom type that accepts a dictionary as a parameter. + """a custom type that accepts a dictionary as a parameter. The dictionary is stored both as itself in a private variable, and published in a public variable as a sorted tuple of tuples, @@ -1227,7 +1228,7 @@ class LookupType(UserDefinedType): two equivalent dictionaries. Note it assumes the keys and values of the dictionary are themselves hashable. - ''' + """ cache_ok = True @@ -1236,15 +1237,12 @@ def __init__(self, lookup): # assume keys/values of "lookup" are hashable; otherwise # they would also need to be converted in some way here - self.lookup = tuple( - (key, lookup[key]) for key in sorted(lookup) - ) + self.lookup = tuple((key, lookup[key]) for key in sorted(lookup)) def get_col_spec(self, **kw): return "VARCHAR(255)" - def bind_processor(self, dialect): - # ... works with "self._lookup" ... + def bind_processor(self, dialect): ... # works with "self._lookup" ... 
Where above, the cache key for ``LookupType({"a": 10, "b": 20})`` will be:: @@ -1262,7 +1260,7 @@ def bind_processor(self, dialect): :ref:`sql_caching` - """ # noqa: E501 + ''' # noqa: E501 @util.non_memoized_property def _static_cache_key( @@ -1304,10 +1302,11 @@ class UserDefinedType( import sqlalchemy.types as types + class MyType(types.UserDefinedType): cache_ok = True - def __init__(self, precision = 8): + def __init__(self, precision=8): self.precision = precision def get_col_spec(self, **kw): @@ -1316,19 +1315,23 @@ def get_col_spec(self, **kw): def bind_processor(self, dialect): def process(value): return value + return process def result_processor(self, dialect, coltype): def process(value): return value + return process Once the type is made, it's immediately usable:: - table = Table('foo', metadata_obj, - Column('id', Integer, primary_key=True), - Column('data', MyType(16)) - ) + table = Table( + "foo", + metadata_obj, + Column("id", Integer, primary_key=True), + Column("data", MyType(16)), + ) The ``get_col_spec()`` method will in most cases receive a keyword argument ``type_expression`` which refers to the owning expression @@ -1493,7 +1496,7 @@ def adapt_emulated_to_native( class TypeDecorator(SchemaEventTarget, ExternalType, TypeEngine[_T]): - """Allows the creation of types which add additional functionality + '''Allows the creation of types which add additional functionality to an existing type. This method is preferred to direct subclassing of SQLAlchemy's @@ -1504,10 +1507,11 @@ class TypeDecorator(SchemaEventTarget, ExternalType, TypeEngine[_T]): import sqlalchemy.types as types + class MyType(types.TypeDecorator): - '''Prefixes Unicode values with "PREFIX:" on the way in and + """Prefixes Unicode values with "PREFIX:" on the way in and strips it off on the way out. - ''' + """ impl = types.Unicode @@ -1599,6 +1603,7 @@ def coerce_compared_value(self, op, value): from sqlalchemy import JSON from sqlalchemy import TypeDecorator + class MyJsonType(TypeDecorator): impl = JSON @@ -1619,6 +1624,7 @@ def coerce_compared_value(self, op, value): from sqlalchemy import ARRAY from sqlalchemy import TypeDecorator + class MyArrayType(TypeDecorator): impl = ARRAY @@ -1627,8 +1633,7 @@ class MyArrayType(TypeDecorator): def coerce_compared_value(self, op, value): return self.impl.coerce_compared_value(op, value) - - """ + ''' __visit_name__ = "type_decorator" diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index 4a35bb217ea..e7ca7b4bc2b 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -106,7 +106,7 @@ def join_condition( would produce an expression along the lines of:: - tablea.c.id==tableb.c.tablea_id + tablea.c.id == tableb.c.tablea_id The join is determined based on the foreign key relationships between the two selectables. If there are multiple ways @@ -268,7 +268,7 @@ def visit_binary_product( The function is of the form:: - def my_fn(binary, left, right) + def my_fn(binary, left, right): ... For each binary expression located which has a comparison operator, the product of "left" and @@ -277,12 +277,11 @@ def my_fn(binary, left, right) Hence an expression like:: - and_( - (a + b) == q + func.sum(e + f), - j == r - ) + and_((a + b) == q + func.sum(e + f), j == r) + + would have the traversal: - would have the traversal:: + .. sourcecode:: text a q a e @@ -528,9 +527,7 @@ def bind_values(clause): E.g.:: - >>> expr = and_( - ... table.c.foo==5, table.c.foo==7 - ... 
) + >>> expr = and_(table.c.foo == 5, table.c.foo == 7) >>> bind_values(expr) [5, 7] """ @@ -1041,20 +1038,24 @@ class ClauseAdapter(visitors.ReplacingExternalTraversal): E.g.:: - table1 = Table('sometable', metadata, - Column('col1', Integer), - Column('col2', Integer) - ) - table2 = Table('someothertable', metadata, - Column('col1', Integer), - Column('col2', Integer) - ) + table1 = Table( + "sometable", + metadata, + Column("col1", Integer), + Column("col2", Integer), + ) + table2 = Table( + "someothertable", + metadata, + Column("col1", Integer), + Column("col2", Integer), + ) condition = table1.c.col1 == table2.c.col1 make an alias of table1:: - s = table1.alias('foo') + s = table1.alias("foo") calling ``ClauseAdapter(s).traverse(condition)`` converts condition to read:: diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py index d1cd7a939f6..286daae266d 100644 --- a/lib/sqlalchemy/sql/visitors.py +++ b/lib/sqlalchemy/sql/visitors.py @@ -934,11 +934,13 @@ def traverse( from sqlalchemy.sql import visitors - stmt = select(some_table).where(some_table.c.foo == 'bar') + stmt = select(some_table).where(some_table.c.foo == "bar") + def visit_bindparam(bind_param): print("found bound value: %s" % bind_param.value) + visitors.traverse(stmt, {}, {"bindparam": visit_bindparam}) The iteration of objects uses the :func:`.visitors.iterate` function, diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py index e2623ead58d..9e88d9dd893 100644 --- a/lib/sqlalchemy/testing/config.py +++ b/lib/sqlalchemy/testing/config.py @@ -122,7 +122,9 @@ def combinations( passed, each argument combination is turned into a pytest.param() object, mapping the elements of the argument tuple to produce an id based on a character value in the same position within the string template using the - following scheme:: + following scheme: + + .. sourcecode:: text i - the given argument is a string that is part of the id only, don't pass it as an argument @@ -146,7 +148,7 @@ def combinations( (operator.ne, "ne"), (operator.gt, "gt"), (operator.lt, "lt"), - id_="na" + id_="na", ) def test_operator(self, opfunc, name): pass @@ -228,14 +230,9 @@ def variation(argname_or_fn, cases=None): @testing.variation("querytyp", ["select", "subquery", "legacy_query"]) @testing.variation("lazy", ["select", "raise", "raise_on_sql"]) - def test_thing( - self, - querytyp, - lazy, - decl_base - ): + def test_thing(self, querytyp, lazy, decl_base): class Thing(decl_base): - __tablename__ = 'thing' + __tablename__ = "thing" # use name directly rel = relationship("Rel", lazy=lazy.name) @@ -250,7 +247,6 @@ class Thing(decl_base): else: querytyp.fail() - The variable provided is a slots object of boolean variables, as well as the name of the case itself under the attribute ".name" diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py index c6dc43e5383..a36575935f0 100644 --- a/lib/sqlalchemy/testing/provision.py +++ b/lib/sqlalchemy/testing/provision.py @@ -108,7 +108,9 @@ def generate_db_urls(db_urls, extra_drivers): """Generate a set of URLs to test given configured URLs plus additional driver names. - Given:: + Given: + + .. sourcecode:: text --dburi postgresql://db1 \ --dburi postgresql://db2 \ @@ -116,7 +118,9 @@ def generate_db_urls(db_urls, extra_drivers): --dbdriver=psycopg2 --dbdriver=asyncpg?async_fallback=true Noting that the default postgresql driver is psycopg2, the output - would be:: + would be: + + .. 
sourcecode:: text postgresql+psycopg2://db1 postgresql+asyncpg://db1 @@ -133,6 +137,8 @@ def generate_db_urls(db_urls, extra_drivers): driver name. For example, to enable the async fallback option for asyncpg:: + .. sourcecode:: text + --dburi postgresql://db1 \ --dbdriver=asyncpg?async_fallback=true diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index a11e23631b3..0554fcf38d5 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -91,7 +91,9 @@ def unique_constraints_reflect_as_index(self): @property def table_value_constructor(self): - """Database / dialect supports a query like:: + """Database / dialect supports a query like: + + .. sourcecode:: sql SELECT * FROM VALUES ( (c1, c2), (c1, c2), ...) AS some_table(col1, col2) @@ -992,7 +994,9 @@ def binary_comparisons(self): @property def binary_literals(self): """target backend supports simple binary literals, e.g. an - expression like:: + expression like: + + .. sourcecode:: sql SELECT CAST('foo' AS BINARY) @@ -1173,9 +1177,7 @@ def implicit_decimal_binds(self): expr = decimal.Decimal("15.7563") - value = e.scalar( - select(literal(expr)) - ) + value = e.scalar(select(literal(expr))) assert value == expr @@ -1343,7 +1345,9 @@ def update_where_target_in_subquery(self): present in a subquery in the WHERE clause. This is an ANSI-standard syntax that apparently MySQL can't handle, - such as:: + such as: + + .. sourcecode:: sql UPDATE documents SET flag=1 WHERE documents.title IN (SELECT max(documents.title) AS title @@ -1376,7 +1380,11 @@ def order_by_col_from_union(self): """target database supports ordering by a column from a SELECT inside of a UNION - E.g. (SELECT id, ...) UNION (SELECT id, ...) ORDER BY id + E.g.: + + .. sourcecode:: sql + + (SELECT id, ...) UNION (SELECT id, ...) ORDER BY id """ return exclusions.open() @@ -1386,7 +1394,9 @@ def order_by_label_with_expression(self): """target backend supports ORDER BY a column label within an expression. - Basically this:: + Basically this: + + .. sourcecode:: sql select data as foo from test order by foo || 'bar' diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py index f6fad11d0e2..d2f8f5b6184 100644 --- a/lib/sqlalchemy/testing/util.py +++ b/lib/sqlalchemy/testing/util.py @@ -254,18 +254,19 @@ def flag_combinations(*combinations): dict(lazy=False, passive=True), dict(lazy=False, passive=True, raiseload=True), ) - + def test_fn(lazy, passive, raiseload): ... would result in:: @testing.combinations( - ('', False, False, False), - ('lazy', True, False, False), - ('lazy_passive', True, True, False), - ('lazy_passive', True, True, True), - id_='iaaa', - argnames='lazy,passive,raiseload' + ("", False, False, False), + ("lazy", True, False, False), + ("lazy_passive", True, True, False), + ("lazy_passive", True, True, True), + id_="iaaa", + argnames="lazy,passive,raiseload", ) + def test_fn(lazy, passive, raiseload): ... 
""" diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index 9755f8d99e1..8a7e8ea9d07 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -80,8 +80,8 @@ def merge_lists_w_ordering(a: List[Any], b: List[Any]) -> List[Any]: Example:: - >>> a = ['__tablename__', 'id', 'x', 'created_at'] - >>> b = ['id', 'name', 'data', 'y', 'created_at'] + >>> a = ["__tablename__", "id", "x", "created_at"] + >>> b = ["id", "name", "data", "y", "created_at"] >>> merge_lists_w_ordering(a, b) ['__tablename__', 'id', 'name', 'data', 'y', 'x', 'created_at'] diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py index 3034715b5e6..3a59a8a4bcd 100644 --- a/lib/sqlalchemy/util/deprecations.py +++ b/lib/sqlalchemy/util/deprecations.py @@ -205,10 +205,10 @@ def deprecated_params(**specs: Tuple[str, str]) -> Callable[[_F], _F]: weak_identity_map=( "0.7", "the :paramref:`.Session.weak_identity_map parameter " - "is deprecated." + "is deprecated.", ) - ) + def some_function(**kwargs): ... """ diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 9312976e71f..00ee0deb3ff 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -662,7 +662,9 @@ def format_argspec_init(method, grouped=True): """format_argspec_plus with considerations for typical __init__ methods Wraps format_argspec_plus with error handling strategies for typical - __init__ cases:: + __init__ cases: + + .. sourcecode:: text object.__init__ -> (self) other unreflectable (usually C) -> (self, *args, **kwargs) @@ -717,7 +719,9 @@ def decorate(cls): def getargspec_init(method): """inspect.getargspec with considerations for typical __init__ methods - Wraps inspect.getargspec with error handling for typical __init__ cases:: + Wraps inspect.getargspec with error handling for typical __init__ cases: + + .. sourcecode:: text object.__init__ -> (self) other unreflectable (usually C) -> (self, *args, **kwargs) @@ -1591,9 +1595,9 @@ def classlevel(self, func: Callable[..., Any]) -> hybridmethod[_T]: class symbol(int): """A constant symbol. - >>> symbol('foo') is symbol('foo') + >>> symbol("foo") is symbol("foo") True - >>> symbol('foo') + >>> symbol("foo") A slight refinement of the MAGICCOOKIE=object() pattern. The primary diff --git a/reap_dbs.py b/reap_dbs.py index 11a09ab67fb..c6d2616e6da 100644 --- a/reap_dbs.py +++ b/reap_dbs.py @@ -10,6 +10,7 @@ database in process. """ + import logging import sys diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index d1ddcc55037..37a8da6abeb 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -2844,8 +2844,7 @@ def _only_on_py38_w_sqlite_39(): """in python 3.9 and above you can actually do:: @(testing.requires.python38 + testing.only_on("sqlite > 3.9")) - def test_determinsitic_parameter(self): - ... + def test_determinsitic_parameter(self): ... that'll be cool. until then... 
diff --git a/test/orm/declarative/test_tm_future_annotations.py b/test/orm/declarative/test_tm_future_annotations.py index e3b5df0ad48..c34d54169e8 100644 --- a/test/orm/declarative/test_tm_future_annotations.py +++ b/test/orm/declarative/test_tm_future_annotations.py @@ -96,11 +96,11 @@ def make_class() -> None: ll = list + def make_class() -> None: x: ll[int] = [1, 2, 3] - """ # noqa: E501 class Foo(decl_base): diff --git a/test/orm/inheritance/_poly_fixtures.py b/test/orm/inheritance/_poly_fixtures.py index 5b5989c9205..d0f8e680d0d 100644 --- a/test/orm/inheritance/_poly_fixtures.py +++ b/test/orm/inheritance/_poly_fixtures.py @@ -469,19 +469,20 @@ class GeometryFixtureBase(fixtures.DeclarativeMappedTest): e.g.:: self._fixture_from_geometry( - "a": { - "subclasses": { - "b": {"polymorphic_load": "selectin"}, - "c": { - "subclasses": { - "d": { - "polymorphic_load": "inlne", "single": True - }, - "e": { - "polymorphic_load": "inline", "single": True + { + "a": { + "subclasses": { + "b": {"polymorphic_load": "selectin"}, + "c": { + "subclasses": { + "d": {"polymorphic_load": "inlne", "single": True}, + "e": { + "polymorphic_load": "inline", + "single": True, + }, }, + "polymorphic_load": "selectin", }, - "polymorphic_load": "selectin", } } } @@ -490,42 +491,41 @@ class GeometryFixtureBase(fixtures.DeclarativeMappedTest): would provide the equivalent of:: class a(Base): - __tablename__ = 'a' + __tablename__ = "a" id = Column(Integer, primary_key=True) a_data = Column(String(50)) type = Column(String(50)) - __mapper_args__ = { - "polymorphic_on": type, - "polymorphic_identity": "a" - } + __mapper_args__ = {"polymorphic_on": type, "polymorphic_identity": "a"} + class b(a): - __tablename__ = 'b' + __tablename__ = "b" - id = Column(ForeignKey('a.id'), primary_key=True) + id = Column(ForeignKey("a.id"), primary_key=True) b_data = Column(String(50)) __mapper_args__ = { "polymorphic_identity": "b", - "polymorphic_load": "selectin" + "polymorphic_load": "selectin", } # ... + class c(a): - __tablename__ = 'c' + __tablename__ = "c" - class d(c): - # ... - class e(c): - # ... + class d(c): ... + + + class e(c): ... Declarative is used so that we get extra behaviors of declarative, such as single-inheritance column masking. - """ + """ # noqa: E501 run_create_tables = "each" run_define_tables = "each" diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py index c42ec112ebc..104d67f4075 100644 --- a/test/orm/test_relationships.py +++ b/test/orm/test_relationships.py @@ -433,7 +433,9 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL): that points to itself, e.g. within a SQL function or similar. The test is against a materialized path setup. - this is an **extremely** unusual case:: + this is an **extremely** unusual case: + + .. sourcecode:: text Entity ------ @@ -1024,7 +1026,9 @@ class CompositeSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL): the relationship(), one col points to itself in the same table. - this is a very unusual case:: + this is a very unusual case: + + .. sourcecode:: text company employee ---------- ---------- diff --git a/test/requirements.py b/test/requirements.py index ec65436e0d0..ebfe9272bc4 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -301,7 +301,9 @@ def binary_comparisons(self): @property def binary_literals(self): """target backend supports simple binary literals, e.g. an - expression like:: + expression like: + + .. 
sourcecode:: sql SELECT CAST('foo' AS BINARY) @@ -522,7 +524,9 @@ def update_where_target_in_subquery(self): present in a subquery in the WHERE clause. This is an ANSI-standard syntax that apparently MySQL can't handle, - such as:: + such as: + + .. sourcecode:: sql UPDATE documents SET flag=1 WHERE documents.title IN (SELECT max(documents.title) AS title @@ -1472,9 +1476,7 @@ def implicit_decimal_binds(self): expr = decimal.Decimal("15.7563") - value = e.scalar( - select(literal(expr)) - ) + value = e.scalar(select(literal(expr))) assert value == expr diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py index ef7eac51e3d..383f2adaabd 100644 --- a/test/sql/test_cte.py +++ b/test/sql/test_cte.py @@ -296,7 +296,9 @@ def test_recursive_union_alias_one(self): def test_recursive_union_no_alias_two(self): """ - pg's example:: + pg's example: + + .. sourcecode:: sql WITH RECURSIVE t(n) AS ( VALUES (1) diff --git a/test/sql/test_from_linter.py b/test/sql/test_from_linter.py index 139499d941e..6608c51073b 100644 --- a/test/sql/test_from_linter.py +++ b/test/sql/test_from_linter.py @@ -97,7 +97,7 @@ def test_plain_cartesian(self): @testing.combinations(("lateral",), ("cartesian",), ("join",)) def test_lateral_subqueries(self, control): """ - :: + .. sourcecode:: sql test=> create table a (id integer); CREATE TABLE diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py index b7e82391c16..163df0a0d71 100644 --- a/test/sql/test_functions.py +++ b/test/sql/test_functions.py @@ -1626,8 +1626,7 @@ def test_json_object_keys_with_ordinality(self): def test_alias_column(self): """ - - :: + .. sourcecode:: sql SELECT x, y FROM @@ -1658,8 +1657,7 @@ def test_column_valued_one(self): def test_column_valued_two(self): """ - - :: + .. sourcecode:: sql SELECT x, y FROM @@ -1774,7 +1772,7 @@ def test_render_derived_with_lateral(self, apply_alias_after_lateral): def test_function_alias(self): """ - :: + .. sourcecode:: sql SELECT result_elem -> 'Field' as field FROM "check" AS check_, json_array_elements( diff --git a/test/sql/test_quote.py b/test/sql/test_quote.py index f3bc8e49481..58a64e5c381 100644 --- a/test/sql/test_quote.py +++ b/test/sql/test_quote.py @@ -195,7 +195,9 @@ def test_labels(self): """test the quoting of labels. If labels aren't quoted, a query in postgresql in particular will - fail since it produces:: + fail since it produces: + + .. sourcecode:: sql SELECT LaLa.lowercase, LaLa."UPPERCASE", LaLa."MixedCase", LaLa."ASC" diff --git a/tools/format_docs_code.py b/tools/format_docs_code.py index 8d24a9163af..3a06ac9f273 100644 --- a/tools/format_docs_code.py +++ b/tools/format_docs_code.py @@ -13,6 +13,7 @@ from argparse import RawDescriptionHelpFormatter from collections.abc import Iterator from functools import partial +from itertools import chain from pathlib import Path import re from typing import NamedTuple @@ -25,7 +26,12 @@ home = Path(__file__).parent.parent -ignore_paths = (re.compile(r"changelog/unreleased_\d{2}"),) +ignore_paths = ( + re.compile(r"changelog/unreleased_\d{2}"), + re.compile(r"README\.unittests\.rst"), + re.compile(r"\.tox"), + re.compile(r"build"), +) class BlockLine(NamedTuple): @@ -45,6 +51,7 @@ def _format_block( errors: list[tuple[int, str, Exception]], is_doctest: bool, file: str, + is_python_file: bool, ) -> list[str]: if not is_doctest: # The first line may have additional padding. 
Remove then restore later @@ -58,8 +65,9 @@ def _format_block( add_padding = None code = "\n".join(l.code for l in input_block) + mode = PYTHON_BLACK_MODE if is_python_file else RST_BLACK_MODE try: - formatted = format_str(code, mode=BLACK_MODE) + formatted = format_str(code, mode=mode) except Exception as e: start_line = input_block[0].line_no first_error = not errors @@ -119,6 +127,7 @@ def _format_block( r"^(((?!\.\.).+::)|(\.\.\s*sourcecode::(.*py.*)?)|(::))$" ) start_space = re.compile(r"^(\s*)[^ ]?") +not_python_line = re.compile(r"^\s+[$:]") def format_file( @@ -131,6 +140,8 @@ def format_file( doctest_block: _Block | None = None plain_block: _Block | None = None + is_python_file = file.suffix == ".py" + plain_code_section = False plain_padding = None plain_padding_len = None @@ -144,6 +155,7 @@ def format_file( errors=errors, is_doctest=True, file=str(file), + is_python_file=is_python_file, ) def doctest_format(): @@ -158,6 +170,7 @@ def doctest_format(): errors=errors, is_doctest=False, file=str(file), + is_python_file=is_python_file, ) def plain_format(): @@ -246,6 +259,14 @@ def plain_format(): ] continue buffer.append(line) + elif ( + is_python_file + and not plain_block + and not_python_line.match(line) + ): + # not a python block. ignore it + plain_code_section = False + buffer.append(line) else: # start of a plain block assert not doctest_block @@ -288,9 +309,12 @@ def plain_format(): def iter_files(directory: str) -> Iterator[Path]: + dir_path = home / directory yield from ( file - for file in (home / directory).glob("./**/*.rst") + for file in chain( + dir_path.glob("./**/*.rst"), dir_path.glob("./**/*.py") + ) if not any(pattern.search(file.as_posix()) for pattern in ignore_paths) ) @@ -352,7 +376,7 @@ def main( "-d", "--directory", help="Find documents in this directory and its sub dirs", - default="doc/build", + default=".", ) parser.add_argument( "-c", @@ -372,7 +396,8 @@ def main( "-l", "--project-line-length", help="Configure the line length to the project value instead " - "of using the black default of 88", + "of using the black default of 88. 
Python files always use the" + "project line length", action="store_true", ) parser.add_argument( @@ -385,18 +410,25 @@ def main( args = parser.parse_args() config = parse_pyproject_toml(home / "pyproject.toml") - BLACK_MODE = Mode( - target_versions={ - TargetVersion[val.upper()] - for val in config.get("target_version", []) - if val != "py27" - }, + target_versions = { + TargetVersion[val.upper()] + for val in config.get("target_version", []) + if val != "py27" + } + + RST_BLACK_MODE = Mode( + target_versions=target_versions, line_length=( config.get("line_length", DEFAULT_LINE_LENGTH) if args.project_line_length else DEFAULT_LINE_LENGTH ), ) + PYTHON_BLACK_MODE = Mode( + target_versions=target_versions, + # Remove a few char to account for normal indent + line_length=(config.get("line_length", 4) - 4 or DEFAULT_LINE_LENGTH), + ) REPORT_ONLY_DOCTEST = args.report_doctest main(args.file, args.directory, args.exit_on_error, args.check) diff --git a/tools/generate_proxy_methods.py b/tools/generate_proxy_methods.py index 31832ae8bfa..b9f9d572b00 100644 --- a/tools/generate_proxy_methods.py +++ b/tools/generate_proxy_methods.py @@ -370,11 +370,14 @@ def process_module(modname: str, filename: str, cmd: code_writer_cmd) -> str: # use tempfile in same path as the module, or at least in the # current working directory, so that black / zimports use # local pyproject.toml - with NamedTemporaryFile( - mode="w", - delete=False, - suffix=".py", - ) as buf, open(filename) as orig_py: + with ( + NamedTemporaryFile( + mode="w", + delete=False, + suffix=".py", + ) as buf, + open(filename) as orig_py, + ): in_block = False current_clsname = None for line in orig_py: diff --git a/tools/generate_sql_functions.py b/tools/generate_sql_functions.py index fc62486f6c3..0e5104352f5 100644 --- a/tools/generate_sql_functions.py +++ b/tools/generate_sql_functions.py @@ -27,11 +27,14 @@ def _fns_in_deterministic_order(): def process_functions(filename: str, cmd: code_writer_cmd) -> str: - with NamedTemporaryFile( - mode="w", - delete=False, - suffix=".py", - ) as buf, open(filename) as orig_py: + with ( + NamedTemporaryFile( + mode="w", + delete=False, + suffix=".py", + ) as buf, + open(filename) as orig_py, + ): indent = "" in_block = False diff --git a/tools/generate_tuple_map_overloads.py b/tools/generate_tuple_map_overloads.py index 098b356e026..8884095b1fa 100644 --- a/tools/generate_tuple_map_overloads.py +++ b/tools/generate_tuple_map_overloads.py @@ -44,11 +44,14 @@ def process_module( # current working directory, so that black / zimports use # local pyproject.toml found = 0 - with NamedTemporaryFile( - mode="w", - delete=False, - suffix=".py", - ) as buf, open(filename) as orig_py: + with ( + NamedTemporaryFile( + mode="w", + delete=False, + suffix=".py", + ) as buf, + open(filename) as orig_py, + ): indent = "" in_block = False current_fnname = given_fnname = None diff --git a/tools/trace_orm_adapter.py b/tools/trace_orm_adapter.py index 966705690de..72bb08cc484 100644 --- a/tools/trace_orm_adapter.py +++ b/tools/trace_orm_adapter.py @@ -3,22 +3,22 @@ Demos:: - python tools/trace_orm_adapter.py -m pytest \ + $ python tools/trace_orm_adapter.py -m pytest \ test/orm/inheritance/test_polymorphic_rel.py::PolymorphicAliasedJoinsTest::test_primary_eager_aliasing_joinedload - python tools/trace_orm_adapter.py -m pytest \ + $ python tools/trace_orm_adapter.py -m pytest \ test/orm/test_eager_relations.py::LazyLoadOptSpecificityTest::test_pathed_joinedload_aliased_abs_bcs - python tools/trace_orm_adapter.py 
my_test_script.py + $ python tools/trace_orm_adapter.py my_test_script.py The above two tests should spit out a ton of debug output. If a test or program has no debug output at all, that's a good thing! it means ORMAdapter isn't used for that case. -You can then set a breakpoint at the end of any adapt step: +You can then set a breakpoint at the end of any adapt step:: - python tools/trace_orm_adapter.py -d 10 -m pytest -s \ + $ python tools/trace_orm_adapter.py -d 10 -m pytest -s \ test/orm/test_eager_relations.py::LazyLoadOptSpecificityTest::test_pathed_joinedload_aliased_abs_bcs From 26351cee22df9433a2accae8576ee71d959ba6d9 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 1 Dec 2024 13:35:36 -0500 Subject: [PATCH 385/544] enable py3.13 for greenlet keep py3.14 disabled Fixes: #12145 Change-Id: I16c9bdda082fb2749085897faf3f8bc3140576c3 --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 4767e674539..bf9aedd8b6b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -39,7 +39,7 @@ package_dir = install_requires = importlib-metadata;python_version<"3.8" - greenlet != 0.4.17;(python_version<"3.13" and (platform_machine=='aarch64' or (platform_machine=='ppc64le' or (platform_machine=='x86_64' or (platform_machine=='amd64' or (platform_machine=='AMD64' or (platform_machine=='win32' or platform_machine=='WIN32'))))))) + greenlet != 0.4.17;(python_version<"3.14" and (platform_machine=='aarch64' or (platform_machine=='ppc64le' or (platform_machine=='x86_64' or (platform_machine=='amd64' or (platform_machine=='AMD64' or (platform_machine=='win32' or platform_machine=='WIN32'))))))) typing-extensions >= 4.6.0 [options.extras_require] From 32652c1980bb10c46ec09971a8c0d7786c117889 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 2 Dec 2024 18:59:19 -0500 Subject: [PATCH 386/544] use VARCHAR for CLOB outputtypehandler Fixed issue in oracledb / cx_oracle dialects where output type handlers for ``CLOB`` were being routed to ``NVARCHAR`` rather than ``VARCHAR``, causing a double conversion to take place. Fixes: #12150 Change-Id: I9f55e9bc595997b873c831b0422f5af10dcc15ef (cherry picked from commit 5ded16fae8abfc31d43430cb25757fb434c37ba2) --- doc/build/changelog/unreleased_20/12150.rst | 8 ++++++++ lib/sqlalchemy/dialects/oracle/cx_oracle.py | 7 ++++++- 2 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/12150.rst diff --git a/doc/build/changelog/unreleased_20/12150.rst b/doc/build/changelog/unreleased_20/12150.rst new file mode 100644 index 00000000000..a40e4623f21 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12150.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, oracle + :tickets: 12150 + + Fixed issue in oracledb / cx_oracle dialects where output type handlers for + ``CLOB`` were being routed to ``NVARCHAR`` rather than ``VARCHAR``, causing + a double conversion to take place. 
+ diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index 9b66d7ea783..6a2588883b6 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -1353,8 +1353,13 @@ def output_type_handler( cx_Oracle.CLOB, cx_Oracle.NCLOB, ): + typ = ( + cx_Oracle.DB_TYPE_VARCHAR + if default_type is cx_Oracle.CLOB + else cx_Oracle.DB_TYPE_NVARCHAR + ) return cursor.var( - cx_Oracle.DB_TYPE_NVARCHAR, + typ, _CX_ORACLE_MAGIC_LOB_SIZE, cursor.arraysize, **dialect._cursor_var_unicode_kwargs, From 6ae4241f574f6f43fdf1e545e2e67821e51c46c3 Mon Sep 17 00:00:00 2001 From: Nick Wilkinson Date: Fri, 6 Dec 2024 01:59:22 -0500 Subject: [PATCH 387/544] Fixes: #11724 - PGDialect `get_multi_indexes` PGVecto.rs Bug When attempting to generate an auto-revision using Alembic, the `get_multi_indexes` method fails with the error: ```python dialect_options["postgresql_with"] = dict( ValueError: dictionary update sequence element #0 has length 4; 2 is required ``` ### Description The cause of this error is that when creating a vector index in PGVecto.rs, the index is: ```sql CREATE INDEX vector_embedding_idx ON public.vector_embeddings USING vectors (embedding vector_cos_ops) WITH (options=' [indexing.hnsw] m = 16 ef_construction = 64 ') ``` However, in PostgreSQL the index seems to be generated as: ```sql CREATE INDEX vector_embedding_idx ON public.vector_embeddings USING hnsw (embedding vector_cos_ops) WITH (m='16', ef_construction='64'); ``` To fix this, we need to modify: ```diff if row["reloptions"]: - dialect_options["postgresql_with"] = dict([option.split("=") for option in row["reloptions"]]) + dialect_options["postgresql_with"] = dict([option.split("=", 1) for option in row["reloptions"]]) ``` For more details on this error and a reproducible example, refer to #11724 ### Testing I couldn't really think of an easy way to add the potential test suggested in the issue thread [here](https://github.com/sqlalchemy/sqlalchemy/issues/11724#issuecomment-2518501318). However, this code is already tested in [`test_get_multi_view_indexes`](https://github.com/sqlalchemy/sqlalchemy/blob/5ded16fae8abfc31d43430cb25757fb434c37ba2/test/dialect/postgresql/test_reflection.py#L378), so assuming that test still passes and nothing breaks I believe we should be fine. ### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [x] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. 
Closes: #12162 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12162 Pull-request-sha: 7d996fd92dd24a7d79bccab090d22bd76564dc76 Change-Id: Id6ad86133f3221eefcf0aa799c7f79a754e9c1bf (cherry picked from commit 9eacf3408d1deeb42fc1ecc6002437b898ecc397) --- doc/build/changelog/unreleased_20/11724.rst | 7 +++++++ lib/sqlalchemy/dialects/postgresql/base.py | 5 ++++- 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/11724.rst diff --git a/doc/build/changelog/unreleased_20/11724.rst b/doc/build/changelog/unreleased_20/11724.rst new file mode 100644 index 00000000000..3e8c436ebbc --- /dev/null +++ b/doc/build/changelog/unreleased_20/11724.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, postgresql + :ticket: 11724 + + Fixes issue in `get_multi_indexes` in postgresql dialect, where an error + would be thrown when attempting to use alembic with a vector index from + the pgvecto.rs extension. diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index e3920857a87..cd3ebfd5972 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -4603,7 +4603,10 @@ def get_multi_indexes( dialect_options = {} if row["reloptions"]: dialect_options["postgresql_with"] = dict( - [option.split("=") for option in row["reloptions"]] + [ + option.split("=", 1) + for option in row["reloptions"] + ] ) # it *might* be nice to include that this is 'btree' in the # reflection info. But we don't want an Index object From 5d784ac92c67ba79d18430ee51b9ddc66b26a7c5 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 9 Dec 2024 12:36:56 -0500 Subject: [PATCH 388/544] ensure db_opts are honored for provision.py that adds db_opts these need to be used in all cases for testing_engine() that is using that same database driver References: #12159 Change-Id: I15c46a375ab05ef94c9a7d19000a3d8641de43bf (cherry picked from commit 3226f99dce77bb75698a7c9366f6fd07ab4d29ee) --- lib/sqlalchemy/testing/engines.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py index 7cae807eb43..08fbe248e15 100644 --- a/lib/sqlalchemy/testing/engines.py +++ b/lib/sqlalchemy/testing/engines.py @@ -330,16 +330,18 @@ def testing_engine( url = url or config.db.url url = make_url(url) - if options is None: - if config.db is None or url.drivername == config.db.url.drivername: - options = config.db_opts - else: - options = {} - elif config.db is not None and url.drivername == config.db.url.drivername: - default_opt = config.db_opts.copy() - default_opt.update(options) - engine = create_engine(url, **options) + if ( + config.db is None or url.drivername == config.db.url.drivername + ) and config.db_opts: + use_options = config.db_opts.copy() + else: + use_options = {} + + if options is not None: + use_options.update(options) + + engine = create_engine(url, **use_options) if sqlite_savepoint and engine.name == "sqlite": # apply SQLite savepoint workaround @@ -370,9 +372,9 @@ def do_begin(conn): if ( isinstance(engine.pool, pool.QueuePool) - and "pool" not in options - and "pool_timeout" not in options - and "max_overflow" not in options + and "pool" not in use_options + and "pool_timeout" not in use_options + and "max_overflow" not in use_options ): engine.pool._timeout = 0 engine.pool._max_overflow = 0 From 8a45f13ef66628c5d5ff30bed30c3a62874f041e Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 
3 Oct 2024 18:21:12 -0400 Subject: [PATCH 389/544] dont match partial types in type_annotation_map Fixed issue regarding ``Union`` types that would be present in the :paramref:`_orm.registry.type_annotation_map` of a :class:`_orm.registry` or declarative base class, where a ``Mapped[]`` element that included one of the subtypes present in that ``Union`` would be matched to that entry, potentially ignoring other entries that matched exactly. The correct behavior now takes place such that an entry should only match in ``type_annotation_map`` exactly, as a ``Union`` type is a self-contained type. For example, an attribute with ``Mapped[float]`` would previously match to a ``type_annotation_map`` entry ``Union[float, Decimal]``; this will no longer match and will now only match to an entry that states ``float``. Pull request courtesy Frazer McLean. Fixes #11370 Closes: #11942 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/11942 Pull-request-sha: 21a3d1971a04e117a557f6e6bac77bce9f6bb0a9 Change-Id: I3467be00f8fa8bd011dd4805a77a3b80ff74a215 (cherry picked from commit 40c30ec44616223216737327f97bac66a13eedee) --- doc/build/changelog/unreleased_20/11370.rst | 15 +++ lib/sqlalchemy/orm/decl_api.py | 6 +- lib/sqlalchemy/util/typing.py | 25 +---- test/base/test_utils.py | 12 +++ .../test_tm_future_annotations_sync.py | 99 ++++++++++++++++++- test/orm/declarative/test_typed_mapping.py | 99 ++++++++++++++++++- 6 files changed, 223 insertions(+), 33 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11370.rst diff --git a/doc/build/changelog/unreleased_20/11370.rst b/doc/build/changelog/unreleased_20/11370.rst new file mode 100644 index 00000000000..56e85531fc9 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11370.rst @@ -0,0 +1,15 @@ +.. change:: + :tags: bug, orm + :tickets: 11370 + + Fixed issue regarding ``Union`` types that would be present in the + :paramref:`_orm.registry.type_annotation_map` of a :class:`_orm.registry` + or declarative base class, where a ``Mapped[]`` element that included one + of the subtypes present in that ``Union`` would be matched to that entry, + potentially ignoring other entries that matched exactly. The correct + behavior now takes place such that an entry should only match in + ``type_annotation_map`` exactly, as a ``Union`` type is a self-contained + type. For example, an attribute with ``Mapped[float]`` would previously + match to a ``type_annotation_map`` entry ``Union[float, Decimal]``; this + will no longer match and will now only match to an entry that states + ``float``. Pull request courtesy Frazer McLean. 
diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index 311a9bd4a51..718cf72516b 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -73,6 +73,7 @@ from ..util import hybridproperty from ..util import typing as compat_typing from ..util.typing import CallableReference +from ..util.typing import de_optionalize_union_types from ..util.typing import flatten_newtype from ..util.typing import is_generic from ..util.typing import is_literal @@ -1225,11 +1226,8 @@ def update_type_annotation_map( self.type_annotation_map.update( { - sub_type: sqltype + de_optionalize_union_types(typ): sqltype for typ, sqltype in type_annotation_map.items() - for sub_type in compat_typing.expand_unions( - typ, include_union=True, discard_none=True - ) } ) diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index a3df9777054..bd1ebd4c013 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -422,6 +422,9 @@ def de_optionalize_union_types( """ + while is_pep695(type_): + type_ = type_.__value__ + if is_fwd_ref(type_): return de_optionalize_fwd_ref_union_types(type_) @@ -478,26 +481,6 @@ def make_union_type(*types: _AnnotationScanType) -> Type[Any]: return cast(Any, Union).__getitem__(types) # type: ignore -def expand_unions( - type_: Type[Any], include_union: bool = False, discard_none: bool = False -) -> Tuple[Type[Any], ...]: - """Return a type as a tuple of individual types, expanding for - ``Union`` types.""" - - if is_union(type_): - typ = set(type_.__args__) - - if discard_none: - typ.discard(NoneType) - - if include_union: - return (type_,) + tuple(typ) # type: ignore - else: - return tuple(typ) # type: ignore - else: - return (type_,) - - def is_optional(type_: Any) -> TypeGuard[ArgsTypeProcotol]: return is_origin_of( type_, @@ -512,7 +495,7 @@ def is_optional_union(type_: Any) -> bool: def is_union(type_: Any) -> TypeGuard[ArgsTypeProcotol]: - return is_origin_of(type_, "Union") + return is_origin_of(type_, "Union", "UnionType") def is_origin_of_cls( diff --git a/test/base/test_utils.py b/test/base/test_utils.py index de8712c8523..85c419e94e8 100644 --- a/test/base/test_utils.py +++ b/test/base/test_utils.py @@ -4,6 +4,9 @@ from pathlib import Path import pickle import sys +import typing + +import typing_extensions from sqlalchemy import exc from sqlalchemy import sql @@ -39,6 +42,7 @@ from sqlalchemy.util._collections import merge_lists_w_ordering from sqlalchemy.util._has_cy import _import_cy_extensions from sqlalchemy.util._has_cy import HAS_CYEXTENSION +from sqlalchemy.util.typing import is_union class WeakSequenceTest(fixtures.TestBase): @@ -3630,3 +3634,11 @@ def test_all_cyext_imported(self): for f in cython_files } eq_({m.__name__ for m in ext}, set(names)) + + +class TypingTest(fixtures.TestBase): + def test_is_union(self): + assert is_union(typing.Union[str, int]) + assert is_union(typing_extensions.Union[str, int]) + if compat.py310: + assert is_union(str | int) diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index 2aa8f0f0b0f..6bf7d02c56c 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -33,6 +33,7 @@ from typing_extensions import get_args as get_args from typing_extensions import Literal as Literal from typing_extensions import TypeAlias as TypeAlias +from typing_extensions import TypeAliasType from typing_extensions import 
TypedDict from sqlalchemy import BIGINT @@ -41,6 +42,7 @@ from sqlalchemy import DateTime from sqlalchemy import exc from sqlalchemy import exc as sa_exc +from sqlalchemy import Float from sqlalchemy import ForeignKey from sqlalchemy import func from sqlalchemy import Identity @@ -94,6 +96,7 @@ from sqlalchemy.testing import is_false from sqlalchemy.testing import is_not from sqlalchemy.testing import is_true +from sqlalchemy.testing import skip_test from sqlalchemy.testing import Variation from sqlalchemy.testing.fixtures import fixture_session from sqlalchemy.util import compat @@ -124,6 +127,19 @@ class _SomeDict2(TypedDict): _TypingLiteral = typing.Literal["a", "b"] _TypingExtensionsLiteral = typing_extensions.Literal["a", "b"] +_JsonPrimitive: TypeAlias = Union[str, int, float, bool, None] +_JsonObject: TypeAlias = Dict[str, "_Json"] +_JsonArray: TypeAlias = List["_Json"] +_Json: TypeAlias = Union[_JsonObject, _JsonArray, _JsonPrimitive] + +if compat.py310: + _JsonPrimitivePep604: TypeAlias = str | int | float | bool | None + _JsonObjectPep604: TypeAlias = dict[str, "_JsonPep604"] + _JsonArrayPep604: TypeAlias = list["_JsonPep604"] + _JsonPep604: TypeAlias = ( + _JsonObjectPep604 | _JsonArrayPep604 | _JsonPrimitivePep604 + ) + if compat.py312: exec( """ @@ -1708,11 +1724,30 @@ class Element(decl_base): else: is_(getattr(Element.__table__.c.data, paramname), override_value) - def test_unions(self): + @testing.variation("union", ["union", "pep604"]) + @testing.variation("typealias", ["legacy", "pep695"]) + def test_unions(self, union, typealias): our_type = Numeric(10, 2) + if union.union: + UnionType = Union[float, Decimal] + elif union.pep604: + if not compat.py310: + skip_test("Required Python 3.10") + UnionType = float | Decimal + else: + union.fail() + + if typealias.legacy: + UnionTypeAlias = UnionType + elif typealias.pep695: + # same as type UnionTypeAlias = UnionType + UnionTypeAlias = TypeAliasType("UnionTypeAlias", UnionType) + else: + typealias.fail() + class Base(DeclarativeBase): - type_annotation_map = {Union[float, Decimal]: our_type} + type_annotation_map = {UnionTypeAlias: our_type} class User(Base): __tablename__ = "users" @@ -1753,6 +1788,10 @@ class User(Base): mapped_column() ) + if compat.py312: + MyTypeAlias = TypeAliasType("MyTypeAlias", float | Decimal) + pep695_data: Mapped[MyTypeAlias] = mapped_column() + is_(User.__table__.c.data.type, our_type) is_false(User.__table__.c.data.nullable) is_(User.__table__.c.reverse_data.type, our_type) @@ -1764,8 +1803,9 @@ class User(Base): is_true(User.__table__.c.reverse_optional_data.nullable) is_true(User.__table__.c.reverse_u_optional_data.nullable) - is_(User.__table__.c.float_data.type, our_type) - is_(User.__table__.c.decimal_data.type, our_type) + is_true(isinstance(User.__table__.c.float_data.type, Float)) + is_true(isinstance(User.__table__.c.float_data.type, Numeric)) + is_not(User.__table__.c.decimal_data.type, our_type) if compat.py310: for suffix in ("", "_fwd"): @@ -1779,6 +1819,57 @@ class User(Base): is_(optional_col.type, our_type) is_true(optional_col.nullable) + if compat.py312: + is_(User.__table__.c.pep695_data.type, our_type) + + @testing.variation("union", ["union", "pep604"]) + def test_optional_in_annotation_map(self, union): + """SQLAlchemy's behaviour is clear: an optional type means the column + is inferred as nullable. Some types which a user may want to put in the + type annotation map are already optional. 
JSON is a good example + because without any constraint, the type can be None via JSON null or + SQL NULL. + + By permitting optional types in the type annotation map, everything + just works, and mapped_column(nullable=False) is available if desired. + + See issue #11370 + """ + + class Base(DeclarativeBase): + if union.union: + type_annotation_map = { + _Json: JSON, + } + elif union.pep604: + if not compat.py310: + skip_test("Requires Python 3.10+") + type_annotation_map = { + _JsonPep604: JSON, + } + else: + union.fail() + + class A(Base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True) + if union.union: + json1: Mapped[_Json] + json2: Mapped[_Json] = mapped_column(nullable=False) + elif union.pep604: + if not compat.py310: + skip_test("Requires Python 3.10+") + json1: Mapped[_JsonPep604] + json2: Mapped[_JsonPep604] = mapped_column(nullable=False) + else: + union.fail() + + is_(A.__table__.c.json1.type._type_affinity, JSON) + is_(A.__table__.c.json2.type._type_affinity, JSON) + is_true(A.__table__.c.json1.nullable) + is_false(A.__table__.c.json2.nullable) + @testing.combinations( ("not_optional",), ("optional",), diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index b50573fa12f..929041ccfbf 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -24,6 +24,7 @@ from typing_extensions import get_args as get_args from typing_extensions import Literal as Literal from typing_extensions import TypeAlias as TypeAlias +from typing_extensions import TypeAliasType from typing_extensions import TypedDict from sqlalchemy import BIGINT @@ -32,6 +33,7 @@ from sqlalchemy import DateTime from sqlalchemy import exc from sqlalchemy import exc as sa_exc +from sqlalchemy import Float from sqlalchemy import ForeignKey from sqlalchemy import func from sqlalchemy import Identity @@ -85,6 +87,7 @@ from sqlalchemy.testing import is_false from sqlalchemy.testing import is_not from sqlalchemy.testing import is_true +from sqlalchemy.testing import skip_test from sqlalchemy.testing import Variation from sqlalchemy.testing.fixtures import fixture_session from sqlalchemy.util import compat @@ -115,6 +118,19 @@ class _SomeDict2(TypedDict): _TypingLiteral = typing.Literal["a", "b"] _TypingExtensionsLiteral = typing_extensions.Literal["a", "b"] +_JsonPrimitive: TypeAlias = Union[str, int, float, bool, None] +_JsonObject: TypeAlias = Dict[str, "_Json"] +_JsonArray: TypeAlias = List["_Json"] +_Json: TypeAlias = Union[_JsonObject, _JsonArray, _JsonPrimitive] + +if compat.py310: + _JsonPrimitivePep604: TypeAlias = str | int | float | bool | None + _JsonObjectPep604: TypeAlias = dict[str, "_JsonPep604"] + _JsonArrayPep604: TypeAlias = list["_JsonPep604"] + _JsonPep604: TypeAlias = ( + _JsonObjectPep604 | _JsonArrayPep604 | _JsonPrimitivePep604 + ) + if compat.py312: exec( """ @@ -1699,11 +1715,30 @@ class Element(decl_base): else: is_(getattr(Element.__table__.c.data, paramname), override_value) - def test_unions(self): + @testing.variation("union", ["union", "pep604"]) + @testing.variation("typealias", ["legacy", "pep695"]) + def test_unions(self, union, typealias): our_type = Numeric(10, 2) + if union.union: + UnionType = Union[float, Decimal] + elif union.pep604: + if not compat.py310: + skip_test("Required Python 3.10") + UnionType = float | Decimal + else: + union.fail() + + if typealias.legacy: + UnionTypeAlias = UnionType + elif typealias.pep695: + # same as type UnionTypeAlias = 
UnionType + UnionTypeAlias = TypeAliasType("UnionTypeAlias", UnionType) + else: + typealias.fail() + class Base(DeclarativeBase): - type_annotation_map = {Union[float, Decimal]: our_type} + type_annotation_map = {UnionTypeAlias: our_type} class User(Base): __tablename__ = "users" @@ -1744,6 +1779,10 @@ class User(Base): mapped_column() ) + if compat.py312: + MyTypeAlias = TypeAliasType("MyTypeAlias", float | Decimal) + pep695_data: Mapped[MyTypeAlias] = mapped_column() + is_(User.__table__.c.data.type, our_type) is_false(User.__table__.c.data.nullable) is_(User.__table__.c.reverse_data.type, our_type) @@ -1755,8 +1794,9 @@ class User(Base): is_true(User.__table__.c.reverse_optional_data.nullable) is_true(User.__table__.c.reverse_u_optional_data.nullable) - is_(User.__table__.c.float_data.type, our_type) - is_(User.__table__.c.decimal_data.type, our_type) + is_true(isinstance(User.__table__.c.float_data.type, Float)) + is_true(isinstance(User.__table__.c.float_data.type, Numeric)) + is_not(User.__table__.c.decimal_data.type, our_type) if compat.py310: for suffix in ("", "_fwd"): @@ -1770,6 +1810,57 @@ class User(Base): is_(optional_col.type, our_type) is_true(optional_col.nullable) + if compat.py312: + is_(User.__table__.c.pep695_data.type, our_type) + + @testing.variation("union", ["union", "pep604"]) + def test_optional_in_annotation_map(self, union): + """SQLAlchemy's behaviour is clear: an optional type means the column + is inferred as nullable. Some types which a user may want to put in the + type annotation map are already optional. JSON is a good example + because without any constraint, the type can be None via JSON null or + SQL NULL. + + By permitting optional types in the type annotation map, everything + just works, and mapped_column(nullable=False) is available if desired. + + See issue #11370 + """ + + class Base(DeclarativeBase): + if union.union: + type_annotation_map = { + _Json: JSON, + } + elif union.pep604: + if not compat.py310: + skip_test("Requires Python 3.10+") + type_annotation_map = { + _JsonPep604: JSON, + } + else: + union.fail() + + class A(Base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True) + if union.union: + json1: Mapped[_Json] + json2: Mapped[_Json] = mapped_column(nullable=False) + elif union.pep604: + if not compat.py310: + skip_test("Requires Python 3.10+") + json1: Mapped[_JsonPep604] + json2: Mapped[_JsonPep604] = mapped_column(nullable=False) + else: + union.fail() + + is_(A.__table__.c.json1.type._type_affinity, JSON) + is_(A.__table__.c.json2.type._type_affinity, JSON) + is_true(A.__table__.c.json1.nullable) + is_false(A.__table__.c.json2.nullable) + @testing.combinations( ("not_optional",), ("optional",), From 0095ac3e8d164a03fa3b7412a9c89f6cf03c06ee Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 11 Dec 2024 15:54:23 -0500 Subject: [PATCH 390/544] fix test due to merge of 11370 with 5252 Numeric and Float are split out in main so a type cant be both at the same time. Also there's no reason to do isinstance(Float) and isintance(Numeric) even if they are in the same hierarchy. 
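For context, a tiny sketch of the redundant check being dropped; this assumes Float subclasses Numeric on this branch (the message notes the two are only split out in main):

```python
from sqlalchemy import Float, Numeric

coltype = Float()
assert isinstance(coltype, Float)
# where Float subclasses Numeric, the second check follows automatically
assert isinstance(coltype, Numeric)
```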
Change-Id: I2263aaac264673a830b63689d39b6433b32c1d23 (cherry picked from commit b49fcb67afb302d2309efea71cde2a6584c1373c) --- test/orm/declarative/test_tm_future_annotations_sync.py | 2 +- test/orm/declarative/test_typed_mapping.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index 6bf7d02c56c..a2eac4d7f4f 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -1804,7 +1804,7 @@ class User(Base): is_true(User.__table__.c.reverse_u_optional_data.nullable) is_true(isinstance(User.__table__.c.float_data.type, Float)) - is_true(isinstance(User.__table__.c.float_data.type, Numeric)) + is_not(User.__table__.c.decimal_data.type, our_type) if compat.py310: diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 929041ccfbf..5026e676a76 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -1795,7 +1795,7 @@ class User(Base): is_true(User.__table__.c.reverse_u_optional_data.nullable) is_true(isinstance(User.__table__.c.float_data.type, Float)) - is_true(isinstance(User.__table__.c.float_data.type, Numeric)) + is_not(User.__table__.c.decimal_data.type, our_type) if compat.py310: From 889800cce0b6f676fe864b7be5de98d8c9d1164a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 12 Dec 2024 10:57:46 -0500 Subject: [PATCH 391/544] modernize PG domain reflection test and skip for pg17.2 Fixes: #12174 Change-Id: If4b1c29d7ee62b2858f1ef9d75fe1c4c41217706 (cherry picked from commit 42fe1109c62008f2cd509ef402152704efb9ddb1) --- test/dialect/postgresql/test_reflection.py | 292 ++++++++++++++------- test/dialect/postgresql/test_types.py | 1 + test/requirements.py | 10 + 3 files changed, 201 insertions(+), 102 deletions(-) diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 3d29a89de7b..510c8aa33c5 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -35,6 +35,7 @@ from sqlalchemy.sql import ddl as sa_ddl from sqlalchemy.sql.schema import CheckConstraint from sqlalchemy.testing import AssertsCompiledSQL +from sqlalchemy.testing import config from sqlalchemy.testing import fixtures from sqlalchemy.testing import mock from sqlalchemy.testing.assertions import assert_warns @@ -405,90 +406,164 @@ class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults): __only_on__ = "postgresql > 8.3" __backend__ = True - @classmethod - def setup_test_class(cls): - with testing.db.begin() as con: - for ddl in [ - 'CREATE SCHEMA IF NOT EXISTS "SomeSchema"', - "CREATE DOMAIN testdomain INTEGER NOT NULL DEFAULT 42", - "CREATE DOMAIN test_schema.testdomain INTEGER DEFAULT 0", - "CREATE TYPE testtype AS ENUM ('test')", - "CREATE DOMAIN enumdomain AS testtype", - "CREATE DOMAIN arraydomain AS INTEGER[]", - "CREATE DOMAIN arraydomain_2d AS INTEGER[][]", - "CREATE DOMAIN arraydomain_3d AS INTEGER[][][]", - 'CREATE DOMAIN "SomeSchema"."Quoted.Domain" INTEGER DEFAULT 0', - 'CREATE DOMAIN nullable_domain AS TEXT COLLATE "C" CHECK ' - "(VALUE IN('FOO', 'BAR'))", - "CREATE DOMAIN not_nullable_domain AS TEXT NOT NULL", - "CREATE DOMAIN my_int AS int CONSTRAINT b_my_int_one CHECK " - "(VALUE > 1) CONSTRAINT a_my_int_two CHECK (VALUE < 42) " - "CHECK(VALUE != 22)", - ]: - con.exec_driver_sql(ddl) - - 
con.exec_driver_sql( - "CREATE TABLE testtable (question integer, answer " - "testdomain)" - ) - con.exec_driver_sql( - "CREATE TABLE test_schema.testtable(question " - "integer, answer test_schema.testdomain, anything " - "integer)" - ) - con.exec_driver_sql( - "CREATE TABLE crosschema (question integer, answer " - "test_schema.testdomain)" + # these fixtures are all currently using individual test scope, + # on a connection that's in a transaction that's rolled back. + # previously, this test would build up all the domains / tables + # at the class level and commit them. PostgreSQL seems to be extremely + # fast at building up / tearing down domains / schemas etc within an + # uncommitted transaction so it seems OK to keep these at per-test + # scope. + + @testing.fixture() + def broken_nullable_domains(self): + if not testing.requires.postgresql_working_nullable_domains.enabled: + config.skip_test( + "reflection of nullable domains broken on PG 17.0-17.2" ) - con.exec_driver_sql( - "CREATE TABLE enum_test (id integer, data enumdomain)" - ) + @testing.fixture() + def testdomain(self, connection, broken_nullable_domains): + connection.exec_driver_sql( + "CREATE DOMAIN testdomain INTEGER NOT NULL DEFAULT 42" + ) + yield + connection.exec_driver_sql("DROP DOMAIN testdomain") - con.exec_driver_sql( - "CREATE TABLE array_test (" - "id integer, " - "datas arraydomain, " - "datass arraydomain_2d, " - "datasss arraydomain_3d" - ")" - ) + @testing.fixture + def testtable(self, connection, testdomain): + connection.exec_driver_sql( + "CREATE TABLE testtable (question integer, answer " "testdomain)" + ) + yield + connection.exec_driver_sql("DROP TABLE testtable") - con.exec_driver_sql( - "CREATE TABLE quote_test " - '(id integer, data "SomeSchema"."Quoted.Domain")' - ) - con.exec_driver_sql( - "CREATE TABLE nullable_domain_test " - "(not_nullable_domain_col nullable_domain not null," - "nullable_local not_nullable_domain)" - ) + @testing.fixture + def nullable_domains(self, connection, broken_nullable_domains): + connection.exec_driver_sql( + 'CREATE DOMAIN nullable_domain AS TEXT COLLATE "C" CHECK ' + "(VALUE IN('FOO', 'BAR'))" + ) + connection.exec_driver_sql( + "CREATE DOMAIN not_nullable_domain AS TEXT NOT NULL" + ) + yield + connection.exec_driver_sql("DROP DOMAIN nullable_domain") + connection.exec_driver_sql("DROP DOMAIN not_nullable_domain") - @classmethod - def teardown_test_class(cls): - with testing.db.begin() as con: - con.exec_driver_sql("DROP TABLE testtable") - con.exec_driver_sql("DROP TABLE test_schema.testtable") - con.exec_driver_sql("DROP TABLE crosschema") - con.exec_driver_sql("DROP TABLE quote_test") - con.exec_driver_sql("DROP DOMAIN testdomain") - con.exec_driver_sql("DROP DOMAIN test_schema.testdomain") - con.exec_driver_sql("DROP TABLE enum_test") - con.exec_driver_sql("DROP DOMAIN enumdomain") - con.exec_driver_sql("DROP TYPE testtype") - con.exec_driver_sql("DROP TABLE array_test") - con.exec_driver_sql("DROP DOMAIN arraydomain") - con.exec_driver_sql("DROP DOMAIN arraydomain_2d") - con.exec_driver_sql("DROP DOMAIN arraydomain_3d") - con.exec_driver_sql('DROP DOMAIN "SomeSchema"."Quoted.Domain"') - con.exec_driver_sql('DROP SCHEMA "SomeSchema"') - - con.exec_driver_sql("DROP TABLE nullable_domain_test") - con.exec_driver_sql("DROP DOMAIN nullable_domain") - con.exec_driver_sql("DROP DOMAIN not_nullable_domain") - con.exec_driver_sql("DROP DOMAIN my_int") - - def test_table_is_reflected(self, connection): + @testing.fixture + def nullable_domain_table(self, connection, 
nullable_domains): + connection.exec_driver_sql( + "CREATE TABLE nullable_domain_test " + "(not_nullable_domain_col nullable_domain not null," + "nullable_local not_nullable_domain)" + ) + yield + connection.exec_driver_sql("DROP TABLE nullable_domain_test") + + @testing.fixture + def enum_domain(self, connection): + connection.exec_driver_sql("CREATE TYPE testtype AS ENUM ('test')") + connection.exec_driver_sql("CREATE DOMAIN enumdomain AS testtype") + yield + connection.exec_driver_sql("drop domain enumdomain") + connection.exec_driver_sql("drop type testtype") + + @testing.fixture + def enum_table(self, connection, enum_domain): + connection.exec_driver_sql( + "CREATE TABLE enum_test (id integer, data enumdomain)" + ) + yield + connection.exec_driver_sql("DROP TABLE enum_test") + + @testing.fixture + def array_domains(self, connection): + connection.exec_driver_sql("CREATE DOMAIN arraydomain AS INTEGER[]") + connection.exec_driver_sql( + "CREATE DOMAIN arraydomain_2d AS INTEGER[][]" + ) + connection.exec_driver_sql( + "CREATE DOMAIN arraydomain_3d AS INTEGER[][][]" + ) + yield + connection.exec_driver_sql("DROP DOMAIN arraydomain") + connection.exec_driver_sql("DROP DOMAIN arraydomain_2d") + connection.exec_driver_sql("DROP DOMAIN arraydomain_3d") + + @testing.fixture + def array_table(self, connection, array_domains): + connection.exec_driver_sql( + "CREATE TABLE array_test (" + "id integer, " + "datas arraydomain, " + "datass arraydomain_2d, " + "datasss arraydomain_3d" + ")" + ) + yield + connection.exec_driver_sql("DROP TABLE array_test") + + @testing.fixture + def some_schema(self, connection): + connection.exec_driver_sql('CREATE SCHEMA IF NOT EXISTS "SomeSchema"') + yield + connection.exec_driver_sql('DROP SCHEMA IF EXISTS "SomeSchema"') + + @testing.fixture + def quoted_schema_domain(self, connection, some_schema): + connection.exec_driver_sql( + 'CREATE DOMAIN "SomeSchema"."Quoted.Domain" INTEGER DEFAULT 0' + ) + yield + connection.exec_driver_sql('DROP DOMAIN "SomeSchema"."Quoted.Domain"') + + @testing.fixture + def int_domain(self, connection): + connection.exec_driver_sql( + "CREATE DOMAIN my_int AS int CONSTRAINT b_my_int_one CHECK " + "(VALUE > 1) CONSTRAINT a_my_int_two CHECK (VALUE < 42) " + "CHECK(VALUE != 22)" + ) + yield + connection.exec_driver_sql("DROP DOMAIN my_int") + + @testing.fixture + def quote_table(self, connection, quoted_schema_domain): + connection.exec_driver_sql( + "CREATE TABLE quote_test " + '(id integer, data "SomeSchema"."Quoted.Domain")' + ) + yield + connection.exec_driver_sql("drop table quote_test") + + @testing.fixture + def testdomain_schema(self, connection): + connection.exec_driver_sql( + "CREATE DOMAIN test_schema.testdomain INTEGER DEFAULT 0" + ) + yield + connection.exec_driver_sql("DROP DOMAIN test_schema.testdomain") + + @testing.fixture + def testtable_schema(self, connection, testdomain_schema): + connection.exec_driver_sql( + "CREATE TABLE test_schema.testtable(question " + "integer, answer test_schema.testdomain, anything " + "integer)" + ) + yield + connection.exec_driver_sql("drop table test_schema.testtable") + + @testing.fixture + def crosschema_table(self, connection, testdomain_schema): + connection.exec_driver_sql( + "CREATE TABLE crosschema (question integer, answer " + f"{config.test_schema}.testdomain)" + ) + yield + connection.exec_driver_sql("DROP TABLE crosschema") + + def test_table_is_reflected(self, connection, testtable): metadata = MetaData() table = Table("testtable", metadata, autoload_with=connection) eq_( @@ 
-500,7 +575,7 @@ def test_table_is_reflected(self, connection): assert table.c.answer.type.name, "testdomain" assert isinstance(table.c.answer.type.data_type, Integer) - def test_nullable_from_domain(self, connection): + def test_nullable_from_domain(self, connection, nullable_domain_table): metadata = MetaData() table = Table( "nullable_domain_test", metadata, autoload_with=connection @@ -508,7 +583,7 @@ def test_nullable_from_domain(self, connection): is_(table.c.not_nullable_domain_col.nullable, False) is_(table.c.nullable_local.nullable, False) - def test_domain_is_reflected(self, connection): + def test_domain_is_reflected(self, connection, testtable): metadata = MetaData() table = Table("testtable", metadata, autoload_with=connection) eq_( @@ -520,13 +595,13 @@ def test_domain_is_reflected(self, connection): not table.columns.answer.nullable ), "Expected reflected column to not be nullable." - def test_enum_domain_is_reflected(self, connection): + def test_enum_domain_is_reflected(self, connection, enum_table): metadata = MetaData() table = Table("enum_test", metadata, autoload_with=connection) assert isinstance(table.c.data.type, DOMAIN) eq_(table.c.data.type.data_type.enums, ["test"]) - def test_array_domain_is_reflected(self, connection): + def test_array_domain_is_reflected(self, connection, array_table): metadata = MetaData() table = Table("array_test", metadata, autoload_with=connection) @@ -547,20 +622,24 @@ def assert_is_integer_array_domain(domain, name): array_domain_3d = table.c.datasss.type assert_is_integer_array_domain(array_domain_3d, "arraydomain_3d") - def test_quoted_remote_schema_domain_is_reflected(self, connection): + def test_quoted_remote_schema_domain_is_reflected( + self, connection, quote_table + ): metadata = MetaData() table = Table("quote_test", metadata, autoload_with=connection) assert isinstance(table.c.data.type, DOMAIN) assert table.c.data.type.name, "Quoted.Domain" assert isinstance(table.c.data.type.data_type, Integer) - def test_table_is_reflected_test_schema(self, connection): + def test_table_is_reflected_test_schema( + self, connection, testtable_schema + ): metadata = MetaData() table = Table( "testtable", metadata, autoload_with=connection, - schema="test_schema", + schema=config.test_schema, ) eq_( set(table.columns.keys()), @@ -569,13 +648,13 @@ def test_table_is_reflected_test_schema(self, connection): ) assert isinstance(table.c.anything.type, Integer) - def test_schema_domain_is_reflected(self, connection): + def test_schema_domain_is_reflected(self, connection, testtable_schema): metadata = MetaData() table = Table( "testtable", metadata, autoload_with=connection, - schema="test_schema", + schema=config.test_schema, ) eq_( str(table.columns.answer.server_default.arg), @@ -586,7 +665,9 @@ def test_schema_domain_is_reflected(self, connection): table.columns.answer.nullable ), "Expected reflected column to be nullable." - def test_crosschema_domain_is_reflected(self, connection): + def test_crosschema_domain_is_reflected( + self, connection, crosschema_table + ): metadata = MetaData() table = Table("crosschema", metadata, autoload_with=connection) eq_( @@ -598,7 +679,7 @@ def test_crosschema_domain_is_reflected(self, connection): table.columns.answer.nullable ), "Expected reflected column to be nullable." 
- def test_unknown_types(self, connection): + def test_unknown_types(self, connection, testtable): from sqlalchemy.dialects.postgresql import base ischema_names = base.PGDialect.ischema_names @@ -618,8 +699,17 @@ def warns(): finally: base.PGDialect.ischema_names = ischema_names - @property - def all_domains(self): + @testing.fixture + def all_domains( + self, + quoted_schema_domain, + array_domains, + enum_domain, + nullable_domains, + int_domain, + testdomain, + testdomain_schema, + ): return { "public": [ { @@ -741,29 +831,27 @@ def all_domains(self): ], } - def test_inspect_domains(self, connection): + def test_inspect_domains(self, connection, all_domains): inspector = inspect(connection) domains = inspector.get_domains() domain_names = {d["name"] for d in domains} - expect_domain_names = {d["name"] for d in self.all_domains["public"]} + expect_domain_names = {d["name"] for d in all_domains["public"]} eq_(domain_names, expect_domain_names) - eq_(domains, self.all_domains["public"]) + eq_(domains, all_domains["public"]) - def test_inspect_domains_schema(self, connection): + def test_inspect_domains_schema(self, connection, all_domains): inspector = inspect(connection) eq_( inspector.get_domains("test_schema"), - self.all_domains["test_schema"], - ) - eq_( - inspector.get_domains("SomeSchema"), self.all_domains["SomeSchema"] + all_domains["test_schema"], ) + eq_(inspector.get_domains("SomeSchema"), all_domains["SomeSchema"]) - def test_inspect_domains_star(self, connection): + def test_inspect_domains_star(self, connection, all_domains): inspector = inspect(connection) - all_ = [d for dl in self.all_domains.values() for d in dl] + all_ = [d for dl in all_domains.values() for d in dl] all_ += inspector.get_domains("information_schema") exp = sorted(all_, key=lambda d: (d["schema"], d["name"])) domains = inspector.get_domains("*") diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index 2c5bd98fde1..5f39aa608c8 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -1237,6 +1237,7 @@ class DomainTest( __backend__ = True __only_on__ = "postgresql > 8.3" + @testing.requires.postgresql_working_nullable_domains def test_domain_type_reflection(self, metadata, connection): positive_int = DOMAIN( "positive_int", Integer(), check="value > 0", not_null=True diff --git a/test/requirements.py b/test/requirements.py index ebfe9272bc4..67635c6554e 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -1580,6 +1580,16 @@ def postgresql_test_dblink(self): def postgresql_jsonb(self): return only_on("postgresql >= 9.4") + @property + def postgresql_working_nullable_domains(self): + # see https://www.postgresql.org/message-id/flat/a90f53c4-56f3-4b07-aefc-49afdc67dba6%40app.fastmail.com # noqa: E501 + return skip_if( + lambda config: (17, 0) + < config.db.dialect.server_version_info + < (17, 3), + "reflection of nullable domains broken on PG 17.0-17.2", + ) + @property def native_hstore(self): return self.any_psycopg_compatibility From 7daae15bc5c96f5b211a21f180b995c91e704eba Mon Sep 17 00:00:00 2001 From: Guilherme Martins Crocetti <24530683+gmcrocetti@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:11:27 -0500 Subject: [PATCH 392/544] SQLite strict tables Added SQLite table option to enable ``STRICT`` tables. 
Fixes #7398 Closes: #12124 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12124 Pull-request-sha: e77273d0ba5c09d120c2582e94b96b781ebecb90 Change-Id: I0ffe9f6fc2c27627f53a1bc1808077e74617658a (cherry picked from commit 5b0eeaca61972cc75b7d50b11fbc582753518e61) --- doc/build/changelog/unreleased_20/7398.rst | 6 ++++++ lib/sqlalchemy/dialects/sqlite/base.py | 16 +++++++++++++--- test/dialect/test_sqlite.py | 8 ++++++++ 3 files changed, 27 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/7398.rst diff --git a/doc/build/changelog/unreleased_20/7398.rst b/doc/build/changelog/unreleased_20/7398.rst new file mode 100644 index 00000000000..9a27ae99a73 --- /dev/null +++ b/doc/build/changelog/unreleased_20/7398.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: usecase, sqlite + :ticket: 7398 + + Added SQLite table option to enable ``STRICT`` tables. + Pull request courtesy of Guilherme Crocetti. diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 0e4c9694bbf..5ae7ffbf0f3 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -870,12 +870,18 @@ def set_sqlite_pragma(dbapi_connection, connection_record): Table("some_table", metadata, ..., sqlite_with_rowid=False) +* + ``STRICT``:: + + Table("some_table", metadata, ..., sqlite_strict=True) + + .. versionadded:: 2.0.37 + .. seealso:: `SQLite CREATE TABLE options `_ - .. _sqlite_include_internal: Reflecting internal schema tables @@ -1754,9 +1760,12 @@ def visit_create_index( return text def post_create_table(self, table): + text = "" if table.dialect_options["sqlite"]["with_rowid"] is False: - return "\n WITHOUT ROWID" - return "" + text += "\n WITHOUT ROWID" + if table.dialect_options["sqlite"]["strict"] is True: + text += "\n STRICT" + return text class SQLiteTypeCompiler(compiler.GenericTypeCompiler): @@ -1991,6 +2000,7 @@ class SQLiteDialect(default.DefaultDialect): { "autoincrement": False, "with_rowid": True, + "strict": False, }, ), (sa_schema.Index, {"where": None}), diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index 37a8da6abeb..246b9852329 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -1145,6 +1145,14 @@ def test_create_table_without_rowid(self): "CREATE TABLE atable (id INTEGER) WITHOUT ROWID", ) + def test_create_table_strict(self): + m = MetaData() + table = Table("atable", m, Column("id", Integer), sqlite_strict=True) + self.assert_compile( + schema.CreateTable(table), + "CREATE TABLE atable (id INTEGER) STRICT", + ) + class OnConflictDDLTest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = sqlite.dialect() From 43d29b9695eb8229c70fafe87616ccc9ad969b3f Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 19 Nov 2024 23:12:51 +0100 Subject: [PATCH 393/544] General improvement on annotated declarative Fix issue that resulted in inconsistent handing of unions depending on how they were declared Consistently support TypeAliasType. This has required a revision of the implementation added in #11305 to have a consistent behavior. 
References: #11944 References: #11955 References: #11305 Change-Id: Iffc34fd42b9769f73ddb4331bd59b6b37391635d (cherry picked from commit e6b0b421d60ecf660cf3872f3f32dd2b7a739b59) --- doc/build/changelog/unreleased_20/11944.rst | 6 + doc/build/changelog/unreleased_20/11955.rst | 13 + doc/build/orm/declarative_tables.rst | 170 +++++++- lib/sqlalchemy/orm/decl_api.py | 71 +-- lib/sqlalchemy/orm/decl_base.py | 4 +- lib/sqlalchemy/orm/descriptor_props.py | 5 +- lib/sqlalchemy/orm/properties.py | 45 +- lib/sqlalchemy/orm/util.py | 8 +- lib/sqlalchemy/sql/sqltypes.py | 49 ++- lib/sqlalchemy/util/typing.py | 186 +++++--- test/base/test_typing_utils.py | 409 ++++++++++++++++++ test/base/test_utils.py | 12 - .../declarative/test_tm_future_annotations.py | 4 +- .../test_tm_future_annotations_sync.py | 353 +++++++++++---- test/orm/declarative/test_typed_mapping.py | 353 +++++++++++---- tools/format_docs_code.py | 9 + 16 files changed, 1376 insertions(+), 321 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11944.rst create mode 100644 doc/build/changelog/unreleased_20/11955.rst create mode 100644 test/base/test_typing_utils.py diff --git a/doc/build/changelog/unreleased_20/11944.rst b/doc/build/changelog/unreleased_20/11944.rst new file mode 100644 index 00000000000..e7469180ec2 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11944.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, orm + :tickets: 11944 + + Fixed bug in how type unions were handled that made the behavior + of ``a | b`` different from ``Union[a, b]``. diff --git a/doc/build/changelog/unreleased_20/11955.rst b/doc/build/changelog/unreleased_20/11955.rst new file mode 100644 index 00000000000..eeeb2bcbddb --- /dev/null +++ b/doc/build/changelog/unreleased_20/11955.rst @@ -0,0 +1,13 @@ +.. change:: + :tags: bug, orm + :tickets: 11955 + + Consistently handle ``TypeAliasType`` (defined in PEP 695) obtained with the + ``type X = int`` syntax introduced in python 3.12. + Now in all cases one such alias must be explicitly added to the type map for + it to be usable inside ``Mapped[]``. + This change also revises the approach added in :ticket:`11305`, now requiring + the ``TypeAliasType`` to be added to the type map. + Documentation on how unions and type alias types are handled by SQLAlchemy + has been added in the :ref:`orm_declarative_mapped_column_type_map` section + of the documentation. diff --git a/doc/build/orm/declarative_tables.rst b/doc/build/orm/declarative_tables.rst index b2c91981b3e..4bb4237ac17 100644 --- a/doc/build/orm/declarative_tables.rst +++ b/doc/build/orm/declarative_tables.rst @@ -316,9 +316,8 @@ the registry and Declarative base could be configured as:: import datetime - from sqlalchemy import BIGINT, Integer, NVARCHAR, String, TIMESTAMP - from sqlalchemy.orm import DeclarativeBase - from sqlalchemy.orm import Mapped, mapped_column, registry + from sqlalchemy import BIGINT, NVARCHAR, String, TIMESTAMP + from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column class Base(DeclarativeBase): @@ -369,6 +368,59 @@ while still being able to use succinct annotation-only :func:`_orm.mapped_column configurations. There are two more levels of Python-type configurability available beyond this, described in the next two sections. 
+Union types inside the Type Map
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+SQLAlchemy supports mapping union types inside the type map to allow
+mapping database types that can support multiple Python types,
+such as :class:`_types.JSON` or :class:`_postgresql.JSONB`::
+
+    from sqlalchemy import JSON
+    from sqlalchemy.dialects import postgresql
+    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+    from sqlalchemy.schema import CreateTable
+
+    json_list = list[int] | list[str]
+    json_scalar = float | str | bool | None
+
+
+    class Base(DeclarativeBase):
+        type_annotation_map = {
+            json_list: postgresql.JSONB,
+            json_scalar: JSON,
+        }
+
+
+    class SomeClass(Base):
+        __tablename__ = "some_table"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+        list_col: Mapped[list[str] | list[int]]
+        scalar_col: Mapped[json_scalar]
+        scalar_col_not_null: Mapped[str | float | bool]
+
+Using the union directly inside ``Mapped``, or creating a new union with the
+same effective member types, behaves the same way: ``list_col`` will be
+matched to the ``json_list`` union even though it does not reference it
+directly (the order of the types also does not matter).
+If the union added to the type map includes ``None``, it will be ignored
+when matching the ``Mapped`` type, since ``None`` is only used to determine
+the column nullability. It follows that both ``scalar_col`` and
+``scalar_col_not_null`` will match the ``json_scalar`` union.
+
+The CREATE TABLE statement for the table created above is as follows:
+
+.. sourcecode:: pycon+sql
+
+    >>> print(CreateTable(SomeClass.__table__).compile(dialect=postgresql.dialect()))
+    {printsql}CREATE TABLE some_table (
+        id SERIAL NOT NULL,
+        list_col JSONB NOT NULL,
+        scalar_col JSON,
+        scalar_col_not_null JSON NOT NULL,
+        PRIMARY KEY (id)
+    )
+
 .. _orm_declarative_mapped_column_type_map_pep593:
 
 Mapping Multiple Type Configurations to Python Types
@@ -458,6 +510,96 @@ us a wide degree of flexibility, the next section illustrates a second way
 in which ``Annotated`` may be used with Declarative that is even more open
 ended.
 
+Support for Type Alias Types (defined by PEP 695) and NewType
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The typing module allows a user to create "new types" using
+``typing.NewType``::
+
+    from typing import NewType
+
+    nstr30 = NewType("nstr30", str)
+    nstr50 = NewType("nstr50", str)
+
+These are considered distinct types by type checkers and by Python itself::
+
+    >>> print(str == nstr30, nstr50 == nstr30, nstr30 == NewType("nstr30", str))
+    False False False
+
+Another similar feature was added in Python 3.12 to create type aliases,
+using a new syntax that defines ``typing.TypeAliasType``::
+
+    type SmallInt = int
+    type BigInt = int
+    type JsonScalar = str | float | bool | None
+
+Like ``typing.NewType``, these are treated by Python as distinct types,
+meaning that they are not equal to each other even if they represent the
+same Python type. In the example above, ``SmallInt`` and ``BigInt`` are not
+considered equal even though both are aliases of the Python type ``int``::
+
+    >>> print(SmallInt == BigInt)
+    False
+
+SQLAlchemy supports using ``typing.NewType`` and ``typing.TypeAliasType``
+in the ``type_annotation_map``.
They can be used to associate the same Python type
+to different :class:`_types.TypeEngine` types, similarly
+to ``typing.Annotated``::
+
+    from sqlalchemy import SmallInteger, BigInteger, JSON, String
+    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+    from sqlalchemy.schema import CreateTable
+
+
+    class TABase(DeclarativeBase):
+        type_annotation_map = {
+            nstr30: String(30),
+            nstr50: String(50),
+            SmallInt: SmallInteger,
+            BigInt: BigInteger,
+            JsonScalar: JSON,
+        }
+
+
+    class SomeClass(TABase):
+        __tablename__ = "some_table"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+        normal_str: Mapped[str]
+
+        short_str: Mapped[nstr30]
+        long_str: Mapped[nstr50]
+
+        small_int: Mapped[SmallInt]
+        big_int: Mapped[BigInt]
+        scalar_col: Mapped[JsonScalar]
+
+A CREATE TABLE for the above mapping illustrates the different variants of
+integer and string we've configured, and looks like:
+
+.. sourcecode:: pycon+sql
+
+    >>> print(CreateTable(SomeClass.__table__))
+    {printsql}CREATE TABLE some_table (
+        id INTEGER NOT NULL,
+        normal_str VARCHAR NOT NULL,
+        short_str VARCHAR(30) NOT NULL,
+        long_str VARCHAR(50) NOT NULL,
+        small_int SMALLINT NOT NULL,
+        big_int BIGINT NOT NULL,
+        scalar_col JSON,
+        PRIMARY KEY (id)
+    )
+
+Since the ``JsonScalar`` type includes ``None``, the ``scalar_col`` column is
+nullable, while the ``id`` and ``normal_str`` columns use the default mapping
+for their respective Python types.
+
+As mentioned above, since ``typing.NewType`` and ``typing.TypeAliasType`` are
+considered standalone types, they must be referenced directly inside ``Mapped``
+and must be added explicitly to the type map.
+Failing to do so will raise an error, since SQLAlchemy does not know what
+SQL type to use.
+
 .. _orm_declarative_mapped_column_pep593:
 
 Mapping Whole Column Declarations to Python Types
@@ -743,6 +885,28 @@ appropriate settings, including default string length. If a
 ``typing.Literal`` that does not consist of only string values is passed, an
 informative error is raised.
 
+``typing.TypeAliasType`` can also be used to create enums, by aliasing a
+``typing.Literal`` of strings::
+
+    from typing import Literal
+
+    type Status = Literal["on", "off", "unknown"]
+
+Since this is a ``typing.TypeAliasType``, it represents a unique type object,
+so it must be placed in the ``type_annotation_map`` for it to be looked up
+successfully, keyed to the :class:`.Enum` type as follows::
+
+    import enum
+    import sqlalchemy
+
+
+    class Base(DeclarativeBase):
+        type_annotation_map = {Status: sqlalchemy.Enum(enum.Enum)}
+
+Since SQLAlchemy supports mapping different ``typing.TypeAliasType`` objects
+individually, even when they are otherwise structurally equivalent, these must
+be present in the ``type_annotation_map`` to avoid ambiguity.
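
A minimal usage sketch, assuming the ``Status`` alias and the ``Base`` class
defined just above (the ``Task`` class and its table name here are purely
illustrative)::

    from sqlalchemy.orm import Mapped, mapped_column


    class Task(Base):
        __tablename__ = "task"

        id: Mapped[int] = mapped_column(primary_key=True)

        # resolved through type_annotation_map; the column type should be a
        # non-native Enum carrying the values "on", "off", "unknown"
        status: Mapped[Status]

With this mapping, ``Task.__table__.c.status.type`` is expected to be an
:class:`.Enum` with ``native_enum=False`` whose values are the three strings
of the literal.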
+ Native Enums and Naming +++++++++++++++++++++++ diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index 718cf72516b..a3b0ac21f0a 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -14,7 +14,6 @@ import typing from typing import Any from typing import Callable -from typing import cast from typing import ClassVar from typing import Dict from typing import FrozenSet @@ -72,6 +71,7 @@ from ..util import hybridmethod from ..util import hybridproperty from ..util import typing as compat_typing +from ..util import warn_deprecated from ..util.typing import CallableReference from ..util.typing import de_optionalize_union_types from ..util.typing import flatten_newtype @@ -80,6 +80,7 @@ from ..util.typing import is_newtype from ..util.typing import is_pep695 from ..util.typing import Literal +from ..util.typing import LITERAL_TYPES from ..util.typing import Self if TYPE_CHECKING: @@ -1232,40 +1233,27 @@ def update_type_annotation_map( ) def _resolve_type( - self, python_type: _MatchedOnType + self, python_type: _MatchedOnType, _do_fallbacks: bool = True ) -> Optional[sqltypes.TypeEngine[Any]]: - - python_type_to_check = python_type - while is_pep695(python_type_to_check): - python_type_to_check = python_type_to_check.__value__ - - check_is_pt = python_type is python_type_to_check - python_type_type: Type[Any] search: Iterable[Tuple[_MatchedOnType, Type[Any]]] - if is_generic(python_type_to_check): - if is_literal(python_type_to_check): - python_type_type = cast("Type[Any]", python_type_to_check) + if is_generic(python_type): + if is_literal(python_type): + python_type_type = python_type # type: ignore[assignment] - search = ( # type: ignore[assignment] + search = ( (python_type, python_type_type), - (Literal, python_type_type), + *((lt, python_type_type) for lt in LITERAL_TYPES), # type: ignore[arg-type] # noqa: E501 ) else: - python_type_type = python_type_to_check.__origin__ + python_type_type = python_type.__origin__ search = ((python_type, python_type_type),) - elif is_newtype(python_type_to_check): - python_type_type = flatten_newtype(python_type_to_check) - search = ((python_type, python_type_type),) - elif isinstance(python_type_to_check, type): - python_type_type = python_type_to_check - search = ( - (pt if check_is_pt else python_type, pt) - for pt in python_type_type.__mro__ - ) + elif isinstance(python_type, type): + python_type_type = python_type + search = ((pt, pt) for pt in python_type_type.__mro__) else: - python_type_type = python_type_to_check # type: ignore[assignment] + python_type_type = python_type # type: ignore[assignment] search = ((python_type, python_type_type),) for pt, flattened in search: @@ -1290,6 +1278,39 @@ def _resolve_type( if resolved_sql_type is not None: return resolved_sql_type + # 2.0 fallbacks + if _do_fallbacks: + python_type_to_check: Any = None + kind = None + if is_pep695(python_type): + # NOTE: assume there aren't type alias types of new types. 
+ python_type_to_check = python_type + while is_pep695(python_type_to_check): + python_type_to_check = python_type_to_check.__value__ + python_type_to_check = de_optionalize_union_types( + python_type_to_check + ) + kind = "TypeAliasType" + if is_newtype(python_type): + python_type_to_check = flatten_newtype(python_type) + kind = "NewType" + + if python_type_to_check is not None: + res_after_fallback = self._resolve_type( + python_type_to_check, False + ) + if res_after_fallback is not None: + assert kind is not None + warn_deprecated( + f"Matching the provided {kind} '{python_type}' on " + "its resolved value without matching it in the " + "type_annotation_map is deprecated; add this type to " + "the type_annotation_map to allow it to match " + "explicitly.", + "2.0", + ) + return res_after_fallback + return None @property diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index b069d23c0f5..aa64eaa6667 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -65,11 +65,11 @@ from ..sql.schema import Table from ..util import topological from ..util.typing import _AnnotationScanType +from ..util.typing import get_args from ..util.typing import is_fwd_ref from ..util.typing import is_literal from ..util.typing import Protocol from ..util.typing import TypedDict -from ..util.typing import typing_get_args if TYPE_CHECKING: from ._typing import _ClassDict @@ -1319,7 +1319,7 @@ def _collect_annotation( extracted_mapped_annotation, mapped_container = extracted if attr_value is None and not is_literal(extracted_mapped_annotation): - for elem in typing_get_args(extracted_mapped_annotation): + for elem in get_args(extracted_mapped_annotation): if isinstance(elem, str) or is_fwd_ref( elem, check_generic=True ): diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py index faf287cce6c..4e07050a1d6 100644 --- a/lib/sqlalchemy/orm/descriptor_props.py +++ b/lib/sqlalchemy/orm/descriptor_props.py @@ -53,9 +53,10 @@ from ..sql import expression from ..sql import operators from ..sql.elements import BindParameter +from ..util.typing import get_args from ..util.typing import is_fwd_ref from ..util.typing import is_pep593 -from ..util.typing import typing_get_args + if typing.TYPE_CHECKING: from ._typing import _InstanceDict @@ -364,7 +365,7 @@ def declarative_scan( argument = extracted_mapped_annotation if is_pep593(argument): - argument = typing_get_args(argument)[0] + argument = get_args(argument)[0] if argument and self.composite_class is None: if isinstance(argument, str) or is_fwd_ref( diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index b6fb3d43e31..96ae9d7f82a 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -55,13 +55,13 @@ from ..sql.schema import SchemaConst from ..sql.type_api import TypeEngine from ..util.typing import de_optionalize_union_types +from ..util.typing import get_args +from ..util.typing import includes_none from ..util.typing import is_fwd_ref -from ..util.typing import is_optional_union from ..util.typing import is_pep593 from ..util.typing import is_pep695 from ..util.typing import is_union from ..util.typing import Self -from ..util.typing import typing_get_args if TYPE_CHECKING: from ._typing import _IdentityKeyType @@ -752,38 +752,36 @@ def _init_column_for_annotation( cls, argument, originating_module ) - nullable = is_optional_union(argument) + nullable = includes_none(argument) if not self._has_nullable: 
self.column.nullable = nullable our_type = de_optionalize_union_types(argument) - use_args_from = None - - our_original_type = our_type - - if is_pep695(our_type): - our_type = our_type.__value__ + find_mapped_in: Tuple[Any, ...] = () + our_type_is_pep593 = False + raw_pep_593_type = None if is_pep593(our_type): our_type_is_pep593 = True - pep_593_components = typing_get_args(our_type) + pep_593_components = get_args(our_type) raw_pep_593_type = pep_593_components[0] - if is_optional_union(raw_pep_593_type): + if nullable: raw_pep_593_type = de_optionalize_union_types(raw_pep_593_type) - - nullable = True - if not self._has_nullable: - self.column.nullable = nullable - for elem in pep_593_components[1:]: - if isinstance(elem, MappedColumn): - use_args_from = elem - break + find_mapped_in = pep_593_components[1:] + elif is_pep695(argument) and is_pep593(argument.__value__): + # do not support nested annotation inside unions ets + find_mapped_in = get_args(argument.__value__)[1:] + + use_args_from: Optional[MappedColumn[Any]] + for elem in find_mapped_in: + if isinstance(elem, MappedColumn): + use_args_from = elem + break else: - our_type_is_pep593 = False - raw_pep_593_type = None + use_args_from = None if use_args_from is not None: if ( @@ -857,10 +855,11 @@ def _init_column_for_annotation( if sqltype._isnull and not self.column.foreign_keys: new_sqltype = None + checks: List[Any] if our_type_is_pep593: - checks = [our_original_type, raw_pep_593_type] + checks = [our_type, raw_pep_593_type] else: - checks = [our_original_type] + checks = [our_type] for check_type in checks: new_sqltype = registry._resolve_type(check_type) diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index dbfa6d5f1b8..11b6ac2c1ca 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -91,10 +91,10 @@ ) from ..util.typing import eval_name_only as _eval_name_only from ..util.typing import fixup_container_fwd_refs +from ..util.typing import get_origin from ..util.typing import is_origin_of_cls from ..util.typing import Literal from ..util.typing import Protocol -from ..util.typing import typing_get_origin if typing.TYPE_CHECKING: from ._typing import _EntityType @@ -123,7 +123,7 @@ from ..sql.selectable import Selectable from ..sql.visitors import anon_map from ..util.typing import _AnnotationScanType - from ..util.typing import ArgsTypeProcotol + from ..util.typing import ArgsTypeProtocol _T = TypeVar("_T", bound=Any) @@ -177,7 +177,7 @@ class _DeStringifyUnionElements(Protocol): def __call__( self, cls: Type[Any], - annotation: ArgsTypeProcotol, + annotation: ArgsTypeProtocol, originating_module: str, *, str_cleanup_fn: Optional[Callable[[str, str], str]] = None, @@ -1543,7 +1543,7 @@ def _inspect_mc( def _inspect_generic_alias( class_: Type[_O], ) -> Optional[Mapper[_O]]: - origin = cast("Type[_O]", typing_get_origin(class_)) + origin = cast("Type[_O]", get_origin(class_)) return _inspect_mc(origin) diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index f16db640664..a7d140ec6bd 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -59,9 +59,11 @@ from ..engine import processors from ..util import langhelpers from ..util import OrderedDict +from ..util import warn_deprecated +from ..util.typing import get_args from ..util.typing import is_literal +from ..util.typing import is_pep695 from ..util.typing import Literal -from ..util.typing import typing_get_args if TYPE_CHECKING: from ._typing import _ColumnExpressionArgument 
@@ -1511,6 +1513,19 @@ def _resolve_for_python_type( native_enum = None + def process_literal(pt): + # for a literal, where we need to get its contents, parse it out. + enum_args = get_args(pt) + bad_args = [arg for arg in enum_args if not isinstance(arg, str)] + if bad_args: + raise exc.ArgumentError( + f"Can't create string-based Enum datatype from non-string " + f"values: {', '.join(repr(x) for x in bad_args)}. Please " + f"provide an explicit Enum datatype for this Python type" + ) + native_enum = False + return enum_args, native_enum + if not we_are_generic_form and python_type is matched_on: # if we have enumerated values, and the incoming python # type is exactly the one that matched in the type map, @@ -1519,16 +1534,32 @@ def _resolve_for_python_type( enum_args = self._enums_argument elif is_literal(python_type): - # for a literal, where we need to get its contents, parse it out. - enum_args = typing_get_args(python_type) - bad_args = [arg for arg in enum_args if not isinstance(arg, str)] - if bad_args: + enum_args, native_enum = process_literal(python_type) + elif is_pep695(python_type): + value = python_type.__value__ + if is_pep695(value): + new_value = value + while is_pep695(new_value): + new_value = new_value.__value__ + if is_literal(new_value): + value = new_value + warn_deprecated( + f"Mapping recursive TypeAliasType '{python_type}' " + "that resolve to literal to generate an Enum is " + "deprecated. SQLAlchemy 2.1 will not support this " + "use case. Please avoid using recursing " + "TypeAliasType.", + "2.0", + ) + if not is_literal(value): raise exc.ArgumentError( - f"Can't create string-based Enum datatype from non-string " - f"values: {', '.join(repr(x) for x in bad_args)}. Please " - f"provide an explicit Enum datatype for this Python type" + f"Can't associate TypeAliasType '{python_type}' to an " + "Enum since it's not a direct alias of a Literal. Only " + "aliases in this form `type my_alias = Literal['a', " + "'b']` are supported when generating Enums." 
) - native_enum = False + enum_args, native_enum = process_literal(value) + elif isinstance(python_type, type) and issubclass( python_type, enum.Enum ): diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index bd1ebd4c013..645a41a2406 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -9,6 +9,7 @@ from __future__ import annotations import builtins +from collections import deque import collections.abc as collections_abc import re import sys @@ -54,6 +55,7 @@ from typing_extensions import TypeGuard as TypeGuard # 3.10 from typing_extensions import Self as Self # 3.11 from typing_extensions import TypeAliasType as TypeAliasType # 3.12 + from typing_extensions import Never as Never # 3.11 _T = TypeVar("_T", bound=Any) _KT = TypeVar("_KT") @@ -65,9 +67,9 @@ if compat.py38: # typing_extensions.Literal is different from typing.Literal until # Python 3.10.1 - _LITERAL_TYPES = frozenset([typing.Literal, Literal]) + LITERAL_TYPES = frozenset([typing.Literal, Literal]) else: - _LITERAL_TYPES = frozenset([Literal]) + LITERAL_TYPES = frozenset([Literal]) if compat.py310: @@ -79,16 +81,13 @@ NoneFwd = ForwardRef("None") -typing_get_args = get_args -typing_get_origin = get_origin - _AnnotationScanType = Union[ Type[Any], str, ForwardRef, NewType, TypeAliasType, "GenericProtocol[Any]" ] -class ArgsTypeProcotol(Protocol): +class ArgsTypeProtocol(Protocol): """protocol for types that have ``__args__`` there's no public interface for this AFAIK @@ -209,7 +208,7 @@ def fixup_container_fwd_refs( if ( is_generic(type_) - and typing_get_origin(type_) + and get_origin(type_) in ( dict, set, @@ -229,11 +228,11 @@ def fixup_container_fwd_refs( ) ): # compat with py3.10 and earlier - return typing_get_origin(type_).__class_getitem__( # type: ignore + return get_origin(type_).__class_getitem__( # type: ignore tuple( [ ForwardRef(elem) if isinstance(elem, str) else elem - for elem in typing_get_args(type_) + for elem in get_args(type_) ] ) ) @@ -332,7 +331,7 @@ def resolve_name_to_real_class_name(name: str, module_name: str) -> str: def de_stringify_union_elements( cls: Type[Any], - annotation: ArgsTypeProcotol, + annotation: ArgsTypeProtocol, originating_module: str, locals_: Mapping[str, Any], *, @@ -352,8 +351,8 @@ def de_stringify_union_elements( ) -def is_pep593(type_: Optional[_AnnotationScanType]) -> bool: - return type_ is not None and typing_get_origin(type_) is Annotated +def is_pep593(type_: Optional[Any]) -> bool: + return type_ is not None and get_origin(type_) is Annotated def is_non_string_iterable(obj: Any) -> TypeGuard[Iterable[Any]]: @@ -362,8 +361,8 @@ def is_non_string_iterable(obj: Any) -> TypeGuard[Iterable[Any]]: ) -def is_literal(type_: _AnnotationScanType) -> bool: - return get_origin(type_) in _LITERAL_TYPES +def is_literal(type_: Any) -> bool: + return get_origin(type_) in LITERAL_TYPES def is_newtype(type_: Optional[_AnnotationScanType]) -> TypeGuard[NewType]: @@ -389,6 +388,43 @@ def flatten_newtype(type_: NewType) -> Type[Any]: return super_type # type: ignore[return-value] +def pep695_values(type_: _AnnotationScanType) -> Set[Any]: + """Extracts the value from a TypeAliasType, recursively exploring unions + and inner TypeAliasType to flatten them into a single set. + + Forward references are not evaluated, so no recursive exploration happens + into them. + """ + _seen = set() + + def recursive_value(type_): + if type_ in _seen: + # recursion are not supported (at least it's flagged as + # an error by pyright). 
Just avoid infinite loop + return type_ + _seen.add(type_) + if not is_pep695(type_): + return type_ + value = type_.__value__ + if not is_union(value): + return value + return [recursive_value(t) for t in value.__args__] + + res = recursive_value(type_) + if isinstance(res, list): + types = set() + stack = deque(res) + while stack: + t = stack.popleft() + if isinstance(t, list): + stack.extend(t) + else: + types.add(None if t in {NoneType, NoneFwd} else t) + return types + else: + return {res} + + def is_fwd_ref( type_: _AnnotationScanType, check_generic: bool = False ) -> TypeGuard[ForwardRef]: @@ -422,13 +458,10 @@ def de_optionalize_union_types( """ - while is_pep695(type_): - type_ = type_.__value__ - if is_fwd_ref(type_): - return de_optionalize_fwd_ref_union_types(type_) + return _de_optionalize_fwd_ref_union_types(type_, False) - elif is_optional(type_): + elif is_union(type_) and includes_none(type_): typ = set(type_.__args__) typ.discard(NoneType) @@ -440,9 +473,21 @@ def de_optionalize_union_types( return type_ -def de_optionalize_fwd_ref_union_types( - type_: ForwardRef, -) -> _AnnotationScanType: +@overload +def _de_optionalize_fwd_ref_union_types( + type_: ForwardRef, return_has_none: Literal[True] +) -> bool: ... + + +@overload +def _de_optionalize_fwd_ref_union_types( + type_: ForwardRef, return_has_none: Literal[False] +) -> _AnnotationScanType: ... + + +def _de_optionalize_fwd_ref_union_types( + type_: ForwardRef, return_has_none: bool +) -> Union[_AnnotationScanType, bool]: """return the non-optional type for Optional[], Union[None, ...], x|None, etc. without de-stringifying forward refs. @@ -454,47 +499,77 @@ def de_optionalize_fwd_ref_union_types( mm = re.match(r"^(.+?)\[(.+)\]$", annotation) if mm: - if mm.group(1) == "Optional": - return ForwardRef(mm.group(2)) - elif mm.group(1) == "Union": - elements = re.split(r",\s*", mm.group(2)) - return make_union_type( - *[ForwardRef(elem) for elem in elements if elem != "None"] - ) + g1 = mm.group(1).split(".")[-1] + if g1 == "Optional": + return True if return_has_none else ForwardRef(mm.group(2)) + elif g1 == "Union": + if "[" in mm.group(2): + # cases like "Union[Dict[str, int], int, None]" + elements: list[str] = [] + current: list[str] = [] + ignore_comma = 0 + for char in mm.group(2): + if char == "[": + ignore_comma += 1 + elif char == "]": + ignore_comma -= 1 + elif ignore_comma == 0 and char == ",": + elements.append("".join(current).strip()) + current.clear() + continue + current.append(char) + else: + elements = re.split(r",\s*", mm.group(2)) + parts = [ForwardRef(elem) for elem in elements if elem != "None"] + if return_has_none: + return len(elements) != len(parts) + else: + return make_union_type(*parts) if parts else Never # type: ignore[return-value] # noqa: E501 else: - return type_ + return False if return_has_none else type_ pipe_tokens = re.split(r"\s*\|\s*", annotation) - if "None" in pipe_tokens: - return ForwardRef("|".join(p for p in pipe_tokens if p != "None")) + has_none = "None" in pipe_tokens + if return_has_none: + return has_none + if has_none: + anno_str = "|".join(p for p in pipe_tokens if p != "None") + return ForwardRef(anno_str) if anno_str else Never # type: ignore[return-value] # noqa: E501 return type_ def make_union_type(*types: _AnnotationScanType) -> Type[Any]: - """Make a Union type. + """Make a Union type.""" + return Union.__getitem__(types) # type: ignore - This is needed by :func:`.de_optionalize_union_types` which removes - ``NoneType`` from a ``Union``. 
- """ - return cast(Any, Union).__getitem__(types) # type: ignore - - -def is_optional(type_: Any) -> TypeGuard[ArgsTypeProcotol]: - return is_origin_of( - type_, - "Optional", - "Union", - "UnionType", - ) +def includes_none(type_: Any) -> bool: + """Returns if the type annotation ``type_`` allows ``None``. - -def is_optional_union(type_: Any) -> bool: - return is_optional(type_) and NoneType in typing_get_args(type_) - - -def is_union(type_: Any) -> TypeGuard[ArgsTypeProcotol]: + This function supports: + * forward refs + * unions + * pep593 - Annotated + * pep695 - TypeAliasType (does not support looking into + fw reference of other pep695) + * NewType + * plain types like ``int``, ``None``, etc + """ + if is_fwd_ref(type_): + return _de_optionalize_fwd_ref_union_types(type_, True) + if is_union(type_): + return any(includes_none(t) for t in get_args(type_)) + if is_pep593(type_): + return includes_none(get_args(type_)[0]) + if is_pep695(type_): + return any(includes_none(t) for t in pep695_values(type_)) + if is_newtype(type_): + return includes_none(type_.__supertype__) + return type_ in (NoneFwd, NoneType, None) + + +def is_union(type_: Any) -> TypeGuard[ArgsTypeProtocol]: return is_origin_of(type_, "Union", "UnionType") @@ -504,7 +579,7 @@ def is_origin_of_cls( """return True if the given type has an __origin__ that shares a base with the given class""" - origin = typing_get_origin(type_) + origin = get_origin(type_) if origin is None: return False @@ -517,7 +592,7 @@ def is_origin_of( """return True if the given type has an __origin__ with the given name and optional module.""" - origin = typing_get_origin(type_) + origin = get_origin(type_) if origin is None: return False @@ -607,6 +682,3 @@ def __get__(self, instance: object, owner: Any) -> _FN: ... def __set__(self, instance: Any, value: _FN) -> None: ... def __delete__(self, instance: Any) -> None: ... - - -# $def ro_descriptor_reference(fn: Callable[]) diff --git a/test/base/test_typing_utils.py b/test/base/test_typing_utils.py new file mode 100644 index 00000000000..67e7bf41432 --- /dev/null +++ b/test/base/test_typing_utils.py @@ -0,0 +1,409 @@ +# NOTE: typing implementation is full of heuristic so unit test it to avoid +# unexpected breakages. 
+ +import typing + +import typing_extensions + +from sqlalchemy.testing import fixtures +from sqlalchemy.testing import requires +from sqlalchemy.testing.assertions import eq_ +from sqlalchemy.testing.assertions import is_ +from sqlalchemy.util import py310 +from sqlalchemy.util import py311 +from sqlalchemy.util import py312 +from sqlalchemy.util import py38 +from sqlalchemy.util import typing as sa_typing + +TV = typing.TypeVar("TV") + + +def union_types(): + res = [typing.Union[int, str]] + if py310: + res.append(int | str) + return res + + +def null_union_types(): + res = [ + typing.Optional[typing.Union[int, str]], + typing.Union[int, str, None], + typing.Union[int, str, "None"], + ] + if py310: + res.append(int | str | None) + res.append(typing.Optional[int | str]) + res.append(typing.Union[int, str] | None) + res.append(typing.Optional[int] | str) + return res + + +def make_fw_ref(anno: str) -> typing.ForwardRef: + return typing.Union[anno] + + +TA_int = typing_extensions.TypeAliasType("TA_int", int) +TA_union = typing_extensions.TypeAliasType("TA_union", typing.Union[int, str]) +TA_null_union = typing_extensions.TypeAliasType( + "TA_null_union", typing.Union[int, str, None] +) +TA_null_union2 = typing_extensions.TypeAliasType( + "TA_null_union2", typing.Union[int, str, "None"] +) +TA_null_union3 = typing_extensions.TypeAliasType( + "TA_null_union3", typing.Union[int, "typing.Union[None, bool]"] +) +TA_null_union4 = typing_extensions.TypeAliasType( + "TA_null_union4", typing.Union[int, "TA_null_union2"] +) +TA_union_ta = typing_extensions.TypeAliasType( + "TA_union_ta", typing.Union[TA_int, str] +) +TA_null_union_ta = typing_extensions.TypeAliasType( + "TA_null_union_ta", typing.Union[TA_null_union, float] +) +TA_list = typing_extensions.TypeAliasType( + "TA_list", typing.Union[int, str, typing.List["TA_list"]] +) +# these below not valid. Verify that it does not cause exceptions in any case +TA_recursive = typing_extensions.TypeAliasType( + "TA_recursive", typing.Union["TA_recursive", str] +) +TA_null_recursive = typing_extensions.TypeAliasType( + "TA_null_recursive", typing.Union[TA_recursive, None] +) +TA_recursive_a = typing_extensions.TypeAliasType( + "TA_recursive_a", typing.Union["TA_recursive_b", int] +) +TA_recursive_b = typing_extensions.TypeAliasType( + "TA_recursive_b", typing.Union["TA_recursive_a", str] +) + + +def type_aliases(): + return [ + TA_int, + TA_union, + TA_null_union, + TA_null_union2, + TA_null_union3, + TA_null_union4, + TA_union_ta, + TA_null_union_ta, + TA_list, + TA_recursive, + TA_null_recursive, + TA_recursive_a, + TA_recursive_b, + ] + + +NT_str = typing.NewType("NT_str", str) +NT_null = typing.NewType("NT_null", None) +# this below is not valid. 
Verify that it does not cause exceptions in any case +NT_union = typing.NewType("NT_union", typing.Union[str, int]) + + +def new_types(): + return [NT_str, NT_null, NT_union] + + +A_str = typing_extensions.Annotated[str, "meta"] +A_null_str = typing_extensions.Annotated[ + typing.Union[str, None], "other_meta", "null" +] +A_union = typing_extensions.Annotated[typing.Union[str, int], "other_meta"] +A_null_union = typing_extensions.Annotated[ + typing.Union[str, int, None], "other_meta", "null" +] + + +def annotated_l(): + return [A_str, A_null_str, A_union, A_null_union] + + +def all_types(): + return ( + union_types() + + null_union_types() + + type_aliases() + + new_types() + + annotated_l() + ) + + +def exec_code(code: str, *vars: str) -> typing.Any: + assert vars + scope = {} + exec(code, None, scope) + if len(vars) == 1: + return scope[vars[0]] + return [scope[name] for name in vars] + + +class TestTestingThings(fixtures.TestBase): + def test_unions_are_the_same(self): + # no need to test typing_extensions.Union, typing_extensions.Optional + is_(typing.Union, typing_extensions.Union) + is_(typing.Optional, typing_extensions.Optional) + if py312: + is_(typing.TypeAliasType, typing_extensions.TypeAliasType) + + def test_make_union(self): + v = int, str + eq_(typing.Union[int, str], typing.Union.__getitem__(v)) + if py311: + # need eval since it's a syntax error in python < 3.11 + eq_(typing.Union[int, str], eval("typing.Union[*(int, str)]")) + eq_(typing.Union[int, str], eval("typing.Union[*v]")) + + @requires.python312 + def test_make_type_alias_type(self): + # verify that TypeAliasType('foo', int) it the same as 'type foo = int' + x_type = exec_code("type x = int", "x") + x = typing.TypeAliasType("x", int) + + eq_(type(x_type), type(x)) + eq_(x_type.__name__, x.__name__) + eq_(x_type.__value__, x.__value__) + + def test_make_fw_ref(self): + eq_(make_fw_ref("str"), typing.ForwardRef("str")) + eq_(make_fw_ref("str|int"), typing.ForwardRef("str|int")) + eq_( + make_fw_ref("Optional[Union[str, int]]"), + typing.ForwardRef("Optional[Union[str, int]]"), + ) + + +class TestTyping(fixtures.TestBase): + def test_is_pep593(self): + eq_(sa_typing.is_pep593(str), False) + eq_(sa_typing.is_pep593(None), False) + eq_(sa_typing.is_pep593(typing_extensions.Annotated[int, "a"]), True) + if py310: + eq_(sa_typing.is_pep593(typing.Annotated[int, "a"]), True) + + for t in annotated_l(): + eq_(sa_typing.is_pep593(t), True) + for t in ( + union_types() + null_union_types() + type_aliases() + new_types() + ): + eq_(sa_typing.is_pep593(t), False) + + def test_is_literal(self): + if py38: + eq_(sa_typing.is_literal(typing.Literal["a"]), True) + eq_(sa_typing.is_literal(typing_extensions.Literal["a"]), True) + eq_(sa_typing.is_literal(None), False) + for t in all_types(): + eq_(sa_typing.is_literal(t), False) + + def test_is_newtype(self): + eq_(sa_typing.is_newtype(str), False) + + for t in new_types(): + eq_(sa_typing.is_newtype(t), True) + for t in ( + union_types() + null_union_types() + type_aliases() + annotated_l() + ): + eq_(sa_typing.is_newtype(t), False) + + def test_is_generic(self): + class W(typing.Generic[TV]): + pass + + eq_(sa_typing.is_generic(typing.List[int]), True) + eq_(sa_typing.is_generic(W), False) + eq_(sa_typing.is_generic(W[str]), True) + + if py312: + t = exec_code("class W[T]: pass", "W") + eq_(sa_typing.is_generic(t), False) + eq_(sa_typing.is_generic(t[int]), True) + + for t in all_types(): + eq_(sa_typing.is_literal(t), False) + + def test_is_pep695(self): + 
eq_(sa_typing.is_pep695(str), False) + for t in ( + union_types() + null_union_types() + new_types() + annotated_l() + ): + eq_(sa_typing.is_pep695(t), False) + for t in type_aliases(): + eq_(sa_typing.is_pep695(t), True) + + def test_pep695_value(self): + eq_(sa_typing.pep695_values(int), {int}) + eq_( + sa_typing.pep695_values(typing.Union[int, str]), + {typing.Union[int, str]}, + ) + + for t in ( + union_types() + null_union_types() + new_types() + annotated_l() + ): + eq_(sa_typing.pep695_values(t), {t}) + + eq_( + sa_typing.pep695_values(typing.Union[int, TA_int]), + {typing.Union[int, TA_int]}, + ) + + eq_(sa_typing.pep695_values(TA_int), {int}) + eq_(sa_typing.pep695_values(TA_union), {int, str}) + eq_(sa_typing.pep695_values(TA_null_union), {int, str, None}) + eq_(sa_typing.pep695_values(TA_null_union2), {int, str, None}) + eq_( + sa_typing.pep695_values(TA_null_union3), + {int, typing.ForwardRef("typing.Union[None, bool]")}, + ) + eq_( + sa_typing.pep695_values(TA_null_union4), + {int, typing.ForwardRef("TA_null_union2")}, + ) + eq_(sa_typing.pep695_values(TA_union_ta), {int, str}) + eq_(sa_typing.pep695_values(TA_null_union_ta), {int, str, None, float}) + eq_( + sa_typing.pep695_values(TA_list), + {int, str, typing.List[typing.ForwardRef("TA_list")]}, + ) + eq_( + sa_typing.pep695_values(TA_recursive), + {typing.ForwardRef("TA_recursive"), str}, + ) + eq_( + sa_typing.pep695_values(TA_null_recursive), + {typing.ForwardRef("TA_recursive"), str, None}, + ) + eq_( + sa_typing.pep695_values(TA_recursive_a), + {typing.ForwardRef("TA_recursive_b"), int}, + ) + eq_( + sa_typing.pep695_values(TA_recursive_b), + {typing.ForwardRef("TA_recursive_a"), str}, + ) + + def test_is_fwd_ref(self): + eq_(sa_typing.is_fwd_ref(int), False) + eq_(sa_typing.is_fwd_ref(make_fw_ref("str")), True) + eq_(sa_typing.is_fwd_ref(typing.Union[str, int]), False) + eq_(sa_typing.is_fwd_ref(typing.Union["str", int]), False) + eq_(sa_typing.is_fwd_ref(typing.Union["str", int], True), True) + + for t in all_types(): + eq_(sa_typing.is_fwd_ref(t), False) + + def test_de_optionalize_union_types(self): + fn = sa_typing.de_optionalize_union_types + + eq_( + fn(typing.Optional[typing.Union[int, str]]), typing.Union[int, str] + ) + eq_(fn(typing.Union[int, str, None]), typing.Union[int, str]) + eq_(fn(typing.Union[int, str, "None"]), typing.Union[int, str]) + + eq_(fn(make_fw_ref("None")), typing_extensions.Never) + eq_(fn(make_fw_ref("typing.Union[None]")), typing_extensions.Never) + eq_(fn(make_fw_ref("Union[None, str]")), typing.ForwardRef("str")) + eq_( + fn(make_fw_ref("Union[None, str, int]")), + typing.Union["str", "int"], + ) + eq_(fn(make_fw_ref("Optional[int]")), typing.ForwardRef("int")) + eq_( + fn(make_fw_ref("typing.Optional[Union[int | str]]")), + typing.ForwardRef("Union[int | str]"), + ) + + for t in null_union_types(): + res = fn(t) + eq_(sa_typing.is_union(res), True) + eq_(type(None) not in res.__args__, True) + + for t in union_types() + type_aliases() + new_types() + annotated_l(): + eq_(fn(t), t) + + eq_( + fn(make_fw_ref("Union[typing.Dict[str, int], int, None]")), + typing.Union["typing.Dict[str, int]", "int"], + ) + + def test_make_union_type(self): + eq_(sa_typing.make_union_type(int), int) + eq_(sa_typing.make_union_type(None), type(None)) + eq_(sa_typing.make_union_type(int, str), typing.Union[int, str]) + eq_( + sa_typing.make_union_type(int, typing.Optional[str]), + typing.Union[int, str, None], + ) + eq_( + sa_typing.make_union_type(int, typing.Union[str, bool]), + typing.Union[int, str, 
bool], + ) + eq_( + sa_typing.make_union_type(bool, TA_int, NT_str), + typing.Union[bool, TA_int, NT_str], + ) + + def test_includes_none(self): + eq_(sa_typing.includes_none(None), True) + eq_(sa_typing.includes_none(type(None)), True) + eq_(sa_typing.includes_none(typing.ForwardRef("None")), True) + eq_(sa_typing.includes_none(int), False) + for t in union_types(): + eq_(sa_typing.includes_none(t), False) + + for t in null_union_types(): + eq_(sa_typing.includes_none(t), True, str(t)) + + # TODO: these are false negatives + false_negative = { + TA_null_union4, # does not evaluate FW ref + } + for t in type_aliases() + new_types(): + if t in false_negative: + exp = False + else: + exp = "null" in t.__name__ + eq_(sa_typing.includes_none(t), exp, str(t)) + + for t in annotated_l(): + eq_( + sa_typing.includes_none(t), + "null" in sa_typing.get_args(t), + str(t), + ) + # nested things + eq_(sa_typing.includes_none(typing.Union[int, "None"]), True) + eq_(sa_typing.includes_none(typing.Union[bool, TA_null_union]), True) + eq_(sa_typing.includes_none(typing.Union[bool, NT_null]), True) + # nested fw + eq_( + sa_typing.includes_none( + typing.Union[int, "typing.Union[str, None]"] + ), + True, + ) + eq_( + sa_typing.includes_none( + typing.Union[int, "typing.Union[int, str]"] + ), + False, + ) + + # there are not supported. should return True + eq_( + sa_typing.includes_none(typing.Union[bool, "TA_null_union"]), False + ) + eq_(sa_typing.includes_none(typing.Union[bool, "NT_null"]), False) + + def test_is_union(self): + eq_(sa_typing.is_union(str), False) + for t in union_types() + null_union_types(): + eq_(sa_typing.is_union(t), True) + for t in type_aliases() + new_types() + annotated_l(): + eq_(sa_typing.is_union(t), False) diff --git a/test/base/test_utils.py b/test/base/test_utils.py index 85c419e94e8..de8712c8523 100644 --- a/test/base/test_utils.py +++ b/test/base/test_utils.py @@ -4,9 +4,6 @@ from pathlib import Path import pickle import sys -import typing - -import typing_extensions from sqlalchemy import exc from sqlalchemy import sql @@ -42,7 +39,6 @@ from sqlalchemy.util._collections import merge_lists_w_ordering from sqlalchemy.util._has_cy import _import_cy_extensions from sqlalchemy.util._has_cy import HAS_CYEXTENSION -from sqlalchemy.util.typing import is_union class WeakSequenceTest(fixtures.TestBase): @@ -3634,11 +3630,3 @@ def test_all_cyext_imported(self): for f in cython_files } eq_({m.__name__ for m in ext}, set(names)) - - -class TypingTest(fixtures.TestBase): - def test_is_union(self): - assert is_union(typing.Union[str, int]) - assert is_union(typing_extensions.Union[str, int]) - if compat.py310: - assert is_union(str | int) diff --git a/test/orm/declarative/test_tm_future_annotations.py b/test/orm/declarative/test_tm_future_annotations.py index c34d54169e8..165f43b42d3 100644 --- a/test/orm/declarative/test_tm_future_annotations.py +++ b/test/orm/declarative/test_tm_future_annotations.py @@ -1,8 +1,8 @@ """This file includes annotation-sensitive tests while having ``from __future__ import annotations`` in effect. -Only tests that don't have an equivalent in ``test_typed_mappings`` are -specified here. All test from ``test_typed_mappings`` are copied over to +Only tests that don't have an equivalent in ``test_typed_mapping`` are +specified here. All test from ``test_typed_mapping`` are copied over to the ``test_tm_future_annotations_sync`` by the ``sync_test_file`` script. 
""" diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index a2eac4d7f4f..4b379266388 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -96,8 +96,9 @@ from sqlalchemy.testing import is_false from sqlalchemy.testing import is_not from sqlalchemy.testing import is_true -from sqlalchemy.testing import skip_test +from sqlalchemy.testing import requires from sqlalchemy.testing import Variation +from sqlalchemy.testing.assertions import ne_ from sqlalchemy.testing.fixtures import fixture_session from sqlalchemy.util import compat from sqlalchemy.util.typing import Annotated @@ -118,11 +119,6 @@ class _SomeDict2(TypedDict): _StrPep695: TypeAlias = str _UnionPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] -_Literal695: TypeAlias = Literal["to-do", "in-progress", "done"] -_Recursive695_0: TypeAlias = _Literal695 -_Recursive695_1: TypeAlias = _Recursive695_0 -_Recursive695_2: TypeAlias = _Recursive695_1 - if compat.py38: _TypingLiteral = typing.Literal["a", "b"] _TypingExtensionsLiteral = typing_extensions.Literal["a", "b"] @@ -147,16 +143,16 @@ class _SomeDict2(TypedDict): type _StrPep695 = str type strtypalias_keyword = Annotated[str, mapped_column(info={"hi": "there"})] - -strtypalias_tat: typing.TypeAliasType = Annotated[ +type strtypalias_keyword_nested = int | Annotated[ + str, mapped_column(info={"hi": "there"})] +strtypalias_ta: typing.TypeAlias = Annotated[ str, mapped_column(info={"hi": "there"})] - strtypalias_plain = Annotated[str, mapped_column(info={"hi": "there"})] type _Literal695 = Literal["to-do", "in-progress", "done"] -type _Recursive695_0 = _Literal695 -type _Recursive695_1 = _Recursive695_0 -type _Recursive695_2 = _Recursive695_1 +type _RecursiveLiteral695 = _Literal695 + +type _JsonPep695 = _JsonPep604 """, globals(), ) @@ -856,6 +852,84 @@ class Test(decl_base): eq_(Test.__table__.c.data.type.length, 30) is_(Test.__table__.c.structure.type._type_affinity, JSON) + @testing.variation( + "option", + [ + "plain", + "union", + "union_604", + "union_null", + "union_null_604", + "optional", + "optional_union", + "optional_union_604", + ], + ) + @testing.variation("in_map", ["yes", "no", "value"]) + @testing.requires.python312 + def test_pep695_behavior(self, decl_base, in_map, option): + """Issue #11955""" + global tat + + if option.plain: + tat = TypeAliasType("tat", str) + elif option.union: + tat = TypeAliasType("tat", Union[str, int]) + elif option.union_604: + tat = TypeAliasType("tat", str | int) + elif option.union_null: + tat = TypeAliasType("tat", Union[str, int, None]) + elif option.union_null_604: + tat = TypeAliasType("tat", str | int | None) + elif option.optional: + tat = TypeAliasType("tat", Optional[str]) + elif option.optional_union: + tat = TypeAliasType("tat", Optional[Union[str, int]]) + elif option.optional_union_604: + tat = TypeAliasType("tat", Optional[str | int]) + else: + option.fail() + + if in_map.yes: + decl_base.registry.update_type_annotation_map({tat: String(99)}) + elif in_map.value: + decl_base.registry.update_type_annotation_map( + {tat.__value__: String(99)} + ) + + def declare(): + class Test(decl_base): + __tablename__ = "test" + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[tat] + + return Test.__table__.c.data + + if in_map.yes: + col = declare() + length = 99 + elif in_map.value or option.optional or option.plain: + with expect_deprecated( + "Matching the provided 
TypeAliasType 'tat' on its " + "resolved value without matching it in the " + "type_annotation_map is deprecated; add this type to the " + "type_annotation_map to allow it to match explicitly.", + ): + col = declare() + length = 99 if in_map.value else None + else: + with expect_raises_message( + exc.ArgumentError, + "Could not locate SQLAlchemy Core type for Python type", + ): + declare() + return + + is_true(isinstance(col.type, String)) + eq_(col.type.length, length) + nullable = "null" in option.name or "optional" in option.name + eq_(col.nullable, nullable) + @testing.requires.python312 def test_pep695_typealias_as_typemap_keys( self, decl_base: Type[DeclarativeBase] @@ -876,12 +950,23 @@ class Test(decl_base): eq_(Test.__table__.c.data.type.length, 30) is_(Test.__table__.c.structure.type._type_affinity, JSON) - @testing.variation("alias_type", ["none", "typekeyword", "typealiastype"]) + @testing.variation( + "alias_type", + ["none", "typekeyword", "typealias", "typekeyword_nested"], + ) @testing.requires.python312 def test_extract_pep593_from_pep695( self, decl_base: Type[DeclarativeBase], alias_type ): """test #11130""" + if alias_type.typekeyword: + decl_base.registry.update_type_annotation_map( + {strtypalias_keyword: VARCHAR(33)} # noqa: F821 + ) + if alias_type.typekeyword_nested: + decl_base.registry.update_type_annotation_map( + {strtypalias_keyword_nested: VARCHAR(42)} # noqa: F821 + ) class MyClass(decl_base): __tablename__ = "my_table" @@ -890,33 +975,96 @@ class MyClass(decl_base): if alias_type.typekeyword: data_one: Mapped[strtypalias_keyword] # noqa: F821 - elif alias_type.typealiastype: - data_one: Mapped[strtypalias_tat] # noqa: F821 + elif alias_type.typealias: + data_one: Mapped[strtypalias_ta] # noqa: F821 elif alias_type.none: data_one: Mapped[strtypalias_plain] # noqa: F821 + elif alias_type.typekeyword_nested: + data_one: Mapped[strtypalias_keyword_nested] # noqa: F821 else: alias_type.fail() table = MyClass.__table__ assert table is not None - eq_(MyClass.data_one.expression.info, {"hi": "there"}) + if alias_type.typekeyword_nested: + # a nested annotation is not supported + eq_(MyClass.data_one.expression.info, {}) + else: + eq_(MyClass.data_one.expression.info, {"hi": "there"}) + if alias_type.typekeyword: + eq_(MyClass.data_one.type.length, 33) + elif alias_type.typekeyword_nested: + eq_(MyClass.data_one.type.length, 42) + else: + eq_(MyClass.data_one.type.length, None) + + @testing.variation("type_", ["literal", "recursive", "not_literal"]) + @testing.combinations(True, False, argnames="in_map") @testing.requires.python312 - def test_pep695_literal_defaults_to_enum(self, decl_base): + def test_pep695_literal_defaults_to_enum(self, decl_base, type_, in_map): """test #11305.""" - class Foo(decl_base): - __tablename__ = "footable" + def declare(): + class Foo(decl_base): + __tablename__ = "footable" - id: Mapped[int] = mapped_column(primary_key=True) - status: Mapped[_Literal695] - r2: Mapped[_Recursive695_2] + id: Mapped[int] = mapped_column(primary_key=True) + if type_.recursive: + status: Mapped[_RecursiveLiteral695] # noqa: F821 + elif type_.literal: + status: Mapped[_Literal695] # noqa: F821 + elif type_.not_literal: + status: Mapped[_StrPep695] # noqa: F821 + else: + type_.fail() + + return Foo - for col in (Foo.__table__.c.status, Foo.__table__.c.r2): + if in_map: + decl_base.registry.update_type_annotation_map( + { + _Literal695: Enum(enum.Enum), # noqa: F821 + _RecursiveLiteral695: Enum(enum.Enum), # noqa: F821 + _StrPep695: Enum(enum.Enum), # noqa: 
F821 + } + ) + if type_.recursive: + with expect_deprecated( + "Mapping recursive TypeAliasType '.+' that resolve to " + "literal to generate an Enum is deprecated. SQLAlchemy " + "2.1 will not support this use case. Please avoid using " + "recursing TypeAliasType", + ): + Foo = declare() + elif type_.literal: + Foo = declare() + else: + with expect_raises_message( + exc.ArgumentError, + "Can't associate TypeAliasType '.+' to an Enum " + "since it's not a direct alias of a Literal. Only " + "aliases in this form `type my_alias = Literal.'a', " + "'b'.` are supported when generating Enums.", + ): + declare() + return + else: + with expect_deprecated( + "Matching the provided TypeAliasType '.*' on its " + "resolved value without matching it in the " + "type_annotation_map is deprecated; add this type to the " + "type_annotation_map to allow it to match explicitly.", + ): + Foo = declare() + col = Foo.__table__.c.status + if in_map and not type_.not_literal: is_true(isinstance(col.type, Enum)) eq_(col.type.enums, ["to-do", "in-progress", "done"]) is_(col.type.native_enum, False) + else: + is_true(isinstance(col.type, String)) @testing.requires.python38 def test_typing_literal_identity(self, decl_base): @@ -1233,6 +1381,33 @@ class MyClass(decl_base): eq_(MyClass.__table__.c.data_four.type.length, 150) is_false(MyClass.__table__.c.data_four.nullable) + def test_newtype_missing_from_map(self, decl_base): + global str50 + + str50 = NewType("str50", str) + + if compat.py310: + text = ".*str50" + else: + # NewTypes before 3.10 had a very bad repr + # .new_type at 0x...> + text = ".*NewType.*" + + with expect_deprecated( + f"Matching the provided NewType '{text}' on its " + "resolved value without matching it in the " + "type_annotation_map is deprecated; add this type to the " + "type_annotation_map to allow it to match explicitly.", + ): + + class MyClass(decl_base): + __tablename__ = "my_table" + + id: Mapped[int] = mapped_column(primary_key=True) + data_one: Mapped[str50] + + is_true(isinstance(MyClass.data_one.type, String)) + def test_extract_base_type_from_pep593( self, decl_base: Type[DeclarativeBase] ): @@ -1724,39 +1899,40 @@ class Element(decl_base): else: is_(getattr(Element.__table__.c.data, paramname), override_value) - @testing.variation("union", ["union", "pep604"]) - @testing.variation("typealias", ["legacy", "pep695"]) - def test_unions(self, union, typealias): + @testing.variation( + "union", + [ + "union", + ("pep604", requires.python310), + "union_null", + ("pep604_null", requires.python310), + ], + ) + def test_unions(self, union): + global UnionType our_type = Numeric(10, 2) if union.union: UnionType = Union[float, Decimal] + elif union.union_null: + UnionType = Union[float, Decimal, None] elif union.pep604: - if not compat.py310: - skip_test("Required Python 3.10") UnionType = float | Decimal + elif union.pep604_null: + UnionType = float | Decimal | None else: union.fail() - if typealias.legacy: - UnionTypeAlias = UnionType - elif typealias.pep695: - # same as type UnionTypeAlias = UnionType - UnionTypeAlias = TypeAliasType("UnionTypeAlias", UnionType) - else: - typealias.fail() - class Base(DeclarativeBase): - type_annotation_map = {UnionTypeAlias: our_type} + type_annotation_map = {UnionType: our_type} class User(Base): __tablename__ = "users" - __table__: Table id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[Union[float, Decimal]] = mapped_column() - reverse_data: Mapped[Union[Decimal, float]] = mapped_column() + data: Mapped[Union[float, Decimal]] + 
reverse_data: Mapped[Union[Decimal, float]] optional_data: Mapped[Optional[Union[float, Decimal]]] = ( mapped_column() @@ -1773,6 +1949,9 @@ class User(Base): mapped_column() ) + refer_union: Mapped[UnionType] + refer_union_optional: Mapped[Optional[UnionType]] + float_data: Mapped[float] = mapped_column() decimal_data: Mapped[Decimal] = mapped_column() @@ -1788,65 +1967,54 @@ class User(Base): mapped_column() ) - if compat.py312: - MyTypeAlias = TypeAliasType("MyTypeAlias", float | Decimal) - pep695_data: Mapped[MyTypeAlias] = mapped_column() - - is_(User.__table__.c.data.type, our_type) - is_false(User.__table__.c.data.nullable) - is_(User.__table__.c.reverse_data.type, our_type) - is_(User.__table__.c.optional_data.type, our_type) - is_true(User.__table__.c.optional_data.nullable) + info = [ + ("data", False), + ("reverse_data", False), + ("optional_data", True), + ("reverse_optional_data", True), + ("reverse_u_optional_data", True), + ("refer_union", "null" in union.name), + ("refer_union_optional", True), + ] + if compat.py310: + info += [ + ("pep604_data", False), + ("pep604_reverse", False), + ("pep604_optional", True), + ("pep604_data_fwd", False), + ("pep604_reverse_fwd", False), + ("pep604_optional_fwd", True), + ] - is_(User.__table__.c.reverse_optional_data.type, our_type) - is_(User.__table__.c.reverse_u_optional_data.type, our_type) - is_true(User.__table__.c.reverse_optional_data.nullable) - is_true(User.__table__.c.reverse_u_optional_data.nullable) + for name, nullable in info: + col = User.__table__.c[name] + is_(col.type, our_type, name) + is_(col.nullable, nullable, name) is_true(isinstance(User.__table__.c.float_data.type, Float)) + ne_(User.__table__.c.float_data.type, our_type) - is_not(User.__table__.c.decimal_data.type, our_type) + is_true(isinstance(User.__table__.c.decimal_data.type, Numeric)) + ne_(User.__table__.c.decimal_data.type, our_type) - if compat.py310: - for suffix in ("", "_fwd"): - data_col = User.__table__.c[f"pep604_data{suffix}"] - reverse_col = User.__table__.c[f"pep604_reverse{suffix}"] - optional_col = User.__table__.c[f"pep604_optional{suffix}"] - is_(data_col.type, our_type) - is_false(data_col.nullable) - is_(reverse_col.type, our_type) - is_false(reverse_col.nullable) - is_(optional_col.type, our_type) - is_true(optional_col.nullable) - - if compat.py312: - is_(User.__table__.c.pep695_data.type, our_type) - - @testing.variation("union", ["union", "pep604"]) + @testing.variation( + "union", + [ + "union", + ("pep604", requires.python310), + ("pep695", requires.python312), + ], + ) def test_optional_in_annotation_map(self, union): - """SQLAlchemy's behaviour is clear: an optional type means the column - is inferred as nullable. Some types which a user may want to put in the - type annotation map are already optional. JSON is a good example - because without any constraint, the type can be None via JSON null or - SQL NULL. - - By permitting optional types in the type annotation map, everything - just works, and mapped_column(nullable=False) is available if desired. 
- - See issue #11370 - """ + """See issue #11370""" class Base(DeclarativeBase): if union.union: - type_annotation_map = { - _Json: JSON, - } + type_annotation_map = {_Json: JSON} elif union.pep604: - if not compat.py310: - skip_test("Requires Python 3.10+") - type_annotation_map = { - _JsonPep604: JSON, - } + type_annotation_map = {_JsonPep604: JSON} + elif union.pep695: + type_annotation_map = {_JsonPep695: JSON} # noqa: F821 else: union.fail() @@ -1858,10 +2026,13 @@ class A(Base): json1: Mapped[_Json] json2: Mapped[_Json] = mapped_column(nullable=False) elif union.pep604: - if not compat.py310: - skip_test("Requires Python 3.10+") json1: Mapped[_JsonPep604] json2: Mapped[_JsonPep604] = mapped_column(nullable=False) + elif union.pep695: + json1: Mapped[_JsonPep695] # noqa: F821 + json2: Mapped[_JsonPep695] = mapped_column( # noqa: F821 + nullable=False + ) else: union.fail() diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 5026e676a76..f1970f2183b 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -87,8 +87,9 @@ from sqlalchemy.testing import is_false from sqlalchemy.testing import is_not from sqlalchemy.testing import is_true -from sqlalchemy.testing import skip_test +from sqlalchemy.testing import requires from sqlalchemy.testing import Variation +from sqlalchemy.testing.assertions import ne_ from sqlalchemy.testing.fixtures import fixture_session from sqlalchemy.util import compat from sqlalchemy.util.typing import Annotated @@ -109,11 +110,6 @@ class _SomeDict2(TypedDict): _StrPep695: TypeAlias = str _UnionPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] -_Literal695: TypeAlias = Literal["to-do", "in-progress", "done"] -_Recursive695_0: TypeAlias = _Literal695 -_Recursive695_1: TypeAlias = _Recursive695_0 -_Recursive695_2: TypeAlias = _Recursive695_1 - if compat.py38: _TypingLiteral = typing.Literal["a", "b"] _TypingExtensionsLiteral = typing_extensions.Literal["a", "b"] @@ -138,16 +134,16 @@ class _SomeDict2(TypedDict): type _StrPep695 = str type strtypalias_keyword = Annotated[str, mapped_column(info={"hi": "there"})] - -strtypalias_tat: typing.TypeAliasType = Annotated[ +type strtypalias_keyword_nested = int | Annotated[ + str, mapped_column(info={"hi": "there"})] +strtypalias_ta: typing.TypeAlias = Annotated[ str, mapped_column(info={"hi": "there"})] - strtypalias_plain = Annotated[str, mapped_column(info={"hi": "there"})] type _Literal695 = Literal["to-do", "in-progress", "done"] -type _Recursive695_0 = _Literal695 -type _Recursive695_1 = _Recursive695_0 -type _Recursive695_2 = _Recursive695_1 +type _RecursiveLiteral695 = _Literal695 + +type _JsonPep695 = _JsonPep604 """, globals(), ) @@ -847,6 +843,84 @@ class Test(decl_base): eq_(Test.__table__.c.data.type.length, 30) is_(Test.__table__.c.structure.type._type_affinity, JSON) + @testing.variation( + "option", + [ + "plain", + "union", + "union_604", + "union_null", + "union_null_604", + "optional", + "optional_union", + "optional_union_604", + ], + ) + @testing.variation("in_map", ["yes", "no", "value"]) + @testing.requires.python312 + def test_pep695_behavior(self, decl_base, in_map, option): + """Issue #11955""" + # anno only: global tat + + if option.plain: + tat = TypeAliasType("tat", str) + elif option.union: + tat = TypeAliasType("tat", Union[str, int]) + elif option.union_604: + tat = TypeAliasType("tat", str | int) + elif option.union_null: + tat = TypeAliasType("tat", Union[str, int, None]) + elif 
option.union_null_604: + tat = TypeAliasType("tat", str | int | None) + elif option.optional: + tat = TypeAliasType("tat", Optional[str]) + elif option.optional_union: + tat = TypeAliasType("tat", Optional[Union[str, int]]) + elif option.optional_union_604: + tat = TypeAliasType("tat", Optional[str | int]) + else: + option.fail() + + if in_map.yes: + decl_base.registry.update_type_annotation_map({tat: String(99)}) + elif in_map.value: + decl_base.registry.update_type_annotation_map( + {tat.__value__: String(99)} + ) + + def declare(): + class Test(decl_base): + __tablename__ = "test" + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[tat] + + return Test.__table__.c.data + + if in_map.yes: + col = declare() + length = 99 + elif in_map.value or option.optional or option.plain: + with expect_deprecated( + "Matching the provided TypeAliasType 'tat' on its " + "resolved value without matching it in the " + "type_annotation_map is deprecated; add this type to the " + "type_annotation_map to allow it to match explicitly.", + ): + col = declare() + length = 99 if in_map.value else None + else: + with expect_raises_message( + exc.ArgumentError, + "Could not locate SQLAlchemy Core type for Python type", + ): + declare() + return + + is_true(isinstance(col.type, String)) + eq_(col.type.length, length) + nullable = "null" in option.name or "optional" in option.name + eq_(col.nullable, nullable) + @testing.requires.python312 def test_pep695_typealias_as_typemap_keys( self, decl_base: Type[DeclarativeBase] @@ -867,12 +941,23 @@ class Test(decl_base): eq_(Test.__table__.c.data.type.length, 30) is_(Test.__table__.c.structure.type._type_affinity, JSON) - @testing.variation("alias_type", ["none", "typekeyword", "typealiastype"]) + @testing.variation( + "alias_type", + ["none", "typekeyword", "typealias", "typekeyword_nested"], + ) @testing.requires.python312 def test_extract_pep593_from_pep695( self, decl_base: Type[DeclarativeBase], alias_type ): """test #11130""" + if alias_type.typekeyword: + decl_base.registry.update_type_annotation_map( + {strtypalias_keyword: VARCHAR(33)} # noqa: F821 + ) + if alias_type.typekeyword_nested: + decl_base.registry.update_type_annotation_map( + {strtypalias_keyword_nested: VARCHAR(42)} # noqa: F821 + ) class MyClass(decl_base): __tablename__ = "my_table" @@ -881,33 +966,96 @@ class MyClass(decl_base): if alias_type.typekeyword: data_one: Mapped[strtypalias_keyword] # noqa: F821 - elif alias_type.typealiastype: - data_one: Mapped[strtypalias_tat] # noqa: F821 + elif alias_type.typealias: + data_one: Mapped[strtypalias_ta] # noqa: F821 elif alias_type.none: data_one: Mapped[strtypalias_plain] # noqa: F821 + elif alias_type.typekeyword_nested: + data_one: Mapped[strtypalias_keyword_nested] # noqa: F821 else: alias_type.fail() table = MyClass.__table__ assert table is not None - eq_(MyClass.data_one.expression.info, {"hi": "there"}) + if alias_type.typekeyword_nested: + # a nested annotation is not supported + eq_(MyClass.data_one.expression.info, {}) + else: + eq_(MyClass.data_one.expression.info, {"hi": "there"}) + if alias_type.typekeyword: + eq_(MyClass.data_one.type.length, 33) + elif alias_type.typekeyword_nested: + eq_(MyClass.data_one.type.length, 42) + else: + eq_(MyClass.data_one.type.length, None) + + @testing.variation("type_", ["literal", "recursive", "not_literal"]) + @testing.combinations(True, False, argnames="in_map") @testing.requires.python312 - def test_pep695_literal_defaults_to_enum(self, decl_base): + def 
test_pep695_literal_defaults_to_enum(self, decl_base, type_, in_map): """test #11305.""" - class Foo(decl_base): - __tablename__ = "footable" + def declare(): + class Foo(decl_base): + __tablename__ = "footable" - id: Mapped[int] = mapped_column(primary_key=True) - status: Mapped[_Literal695] - r2: Mapped[_Recursive695_2] + id: Mapped[int] = mapped_column(primary_key=True) + if type_.recursive: + status: Mapped[_RecursiveLiteral695] # noqa: F821 + elif type_.literal: + status: Mapped[_Literal695] # noqa: F821 + elif type_.not_literal: + status: Mapped[_StrPep695] # noqa: F821 + else: + type_.fail() + + return Foo - for col in (Foo.__table__.c.status, Foo.__table__.c.r2): + if in_map: + decl_base.registry.update_type_annotation_map( + { + _Literal695: Enum(enum.Enum), # noqa: F821 + _RecursiveLiteral695: Enum(enum.Enum), # noqa: F821 + _StrPep695: Enum(enum.Enum), # noqa: F821 + } + ) + if type_.recursive: + with expect_deprecated( + "Mapping recursive TypeAliasType '.+' that resolve to " + "literal to generate an Enum is deprecated. SQLAlchemy " + "2.1 will not support this use case. Please avoid using " + "recursing TypeAliasType", + ): + Foo = declare() + elif type_.literal: + Foo = declare() + else: + with expect_raises_message( + exc.ArgumentError, + "Can't associate TypeAliasType '.+' to an Enum " + "since it's not a direct alias of a Literal. Only " + "aliases in this form `type my_alias = Literal.'a', " + "'b'.` are supported when generating Enums.", + ): + declare() + return + else: + with expect_deprecated( + "Matching the provided TypeAliasType '.*' on its " + "resolved value without matching it in the " + "type_annotation_map is deprecated; add this type to the " + "type_annotation_map to allow it to match explicitly.", + ): + Foo = declare() + col = Foo.__table__.c.status + if in_map and not type_.not_literal: is_true(isinstance(col.type, Enum)) eq_(col.type.enums, ["to-do", "in-progress", "done"]) is_(col.type.native_enum, False) + else: + is_true(isinstance(col.type, String)) @testing.requires.python38 def test_typing_literal_identity(self, decl_base): @@ -1224,6 +1372,33 @@ class MyClass(decl_base): eq_(MyClass.__table__.c.data_four.type.length, 150) is_false(MyClass.__table__.c.data_four.nullable) + def test_newtype_missing_from_map(self, decl_base): + # anno only: global str50 + + str50 = NewType("str50", str) + + if compat.py310: + text = ".*str50" + else: + # NewTypes before 3.10 had a very bad repr + # .new_type at 0x...> + text = ".*NewType.*" + + with expect_deprecated( + f"Matching the provided NewType '{text}' on its " + "resolved value without matching it in the " + "type_annotation_map is deprecated; add this type to the " + "type_annotation_map to allow it to match explicitly.", + ): + + class MyClass(decl_base): + __tablename__ = "my_table" + + id: Mapped[int] = mapped_column(primary_key=True) + data_one: Mapped[str50] + + is_true(isinstance(MyClass.data_one.type, String)) + def test_extract_base_type_from_pep593( self, decl_base: Type[DeclarativeBase] ): @@ -1715,39 +1890,40 @@ class Element(decl_base): else: is_(getattr(Element.__table__.c.data, paramname), override_value) - @testing.variation("union", ["union", "pep604"]) - @testing.variation("typealias", ["legacy", "pep695"]) - def test_unions(self, union, typealias): + @testing.variation( + "union", + [ + "union", + ("pep604", requires.python310), + "union_null", + ("pep604_null", requires.python310), + ], + ) + def test_unions(self, union): + # anno only: global UnionType our_type = Numeric(10, 2) if 
union.union: UnionType = Union[float, Decimal] + elif union.union_null: + UnionType = Union[float, Decimal, None] elif union.pep604: - if not compat.py310: - skip_test("Required Python 3.10") UnionType = float | Decimal + elif union.pep604_null: + UnionType = float | Decimal | None else: union.fail() - if typealias.legacy: - UnionTypeAlias = UnionType - elif typealias.pep695: - # same as type UnionTypeAlias = UnionType - UnionTypeAlias = TypeAliasType("UnionTypeAlias", UnionType) - else: - typealias.fail() - class Base(DeclarativeBase): - type_annotation_map = {UnionTypeAlias: our_type} + type_annotation_map = {UnionType: our_type} class User(Base): __tablename__ = "users" - __table__: Table id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[Union[float, Decimal]] = mapped_column() - reverse_data: Mapped[Union[Decimal, float]] = mapped_column() + data: Mapped[Union[float, Decimal]] + reverse_data: Mapped[Union[Decimal, float]] optional_data: Mapped[Optional[Union[float, Decimal]]] = ( mapped_column() @@ -1764,6 +1940,9 @@ class User(Base): mapped_column() ) + refer_union: Mapped[UnionType] + refer_union_optional: Mapped[Optional[UnionType]] + float_data: Mapped[float] = mapped_column() decimal_data: Mapped[Decimal] = mapped_column() @@ -1779,65 +1958,54 @@ class User(Base): mapped_column() ) - if compat.py312: - MyTypeAlias = TypeAliasType("MyTypeAlias", float | Decimal) - pep695_data: Mapped[MyTypeAlias] = mapped_column() - - is_(User.__table__.c.data.type, our_type) - is_false(User.__table__.c.data.nullable) - is_(User.__table__.c.reverse_data.type, our_type) - is_(User.__table__.c.optional_data.type, our_type) - is_true(User.__table__.c.optional_data.nullable) + info = [ + ("data", False), + ("reverse_data", False), + ("optional_data", True), + ("reverse_optional_data", True), + ("reverse_u_optional_data", True), + ("refer_union", "null" in union.name), + ("refer_union_optional", True), + ] + if compat.py310: + info += [ + ("pep604_data", False), + ("pep604_reverse", False), + ("pep604_optional", True), + ("pep604_data_fwd", False), + ("pep604_reverse_fwd", False), + ("pep604_optional_fwd", True), + ] - is_(User.__table__.c.reverse_optional_data.type, our_type) - is_(User.__table__.c.reverse_u_optional_data.type, our_type) - is_true(User.__table__.c.reverse_optional_data.nullable) - is_true(User.__table__.c.reverse_u_optional_data.nullable) + for name, nullable in info: + col = User.__table__.c[name] + is_(col.type, our_type, name) + is_(col.nullable, nullable, name) is_true(isinstance(User.__table__.c.float_data.type, Float)) + ne_(User.__table__.c.float_data.type, our_type) - is_not(User.__table__.c.decimal_data.type, our_type) + is_true(isinstance(User.__table__.c.decimal_data.type, Numeric)) + ne_(User.__table__.c.decimal_data.type, our_type) - if compat.py310: - for suffix in ("", "_fwd"): - data_col = User.__table__.c[f"pep604_data{suffix}"] - reverse_col = User.__table__.c[f"pep604_reverse{suffix}"] - optional_col = User.__table__.c[f"pep604_optional{suffix}"] - is_(data_col.type, our_type) - is_false(data_col.nullable) - is_(reverse_col.type, our_type) - is_false(reverse_col.nullable) - is_(optional_col.type, our_type) - is_true(optional_col.nullable) - - if compat.py312: - is_(User.__table__.c.pep695_data.type, our_type) - - @testing.variation("union", ["union", "pep604"]) + @testing.variation( + "union", + [ + "union", + ("pep604", requires.python310), + ("pep695", requires.python312), + ], + ) def test_optional_in_annotation_map(self, union): - """SQLAlchemy's 
behaviour is clear: an optional type means the column - is inferred as nullable. Some types which a user may want to put in the - type annotation map are already optional. JSON is a good example - because without any constraint, the type can be None via JSON null or - SQL NULL. - - By permitting optional types in the type annotation map, everything - just works, and mapped_column(nullable=False) is available if desired. - - See issue #11370 - """ + """See issue #11370""" class Base(DeclarativeBase): if union.union: - type_annotation_map = { - _Json: JSON, - } + type_annotation_map = {_Json: JSON} elif union.pep604: - if not compat.py310: - skip_test("Requires Python 3.10+") - type_annotation_map = { - _JsonPep604: JSON, - } + type_annotation_map = {_JsonPep604: JSON} + elif union.pep695: + type_annotation_map = {_JsonPep695: JSON} # noqa: F821 else: union.fail() @@ -1849,10 +2017,13 @@ class A(Base): json1: Mapped[_Json] json2: Mapped[_Json] = mapped_column(nullable=False) elif union.pep604: - if not compat.py310: - skip_test("Requires Python 3.10+") json1: Mapped[_JsonPep604] json2: Mapped[_JsonPep604] = mapped_column(nullable=False) + elif union.pep695: + json1: Mapped[_JsonPep695] # noqa: F821 + json2: Mapped[_JsonPep695] = mapped_column( # noqa: F821 + nullable=False + ) else: union.fail() diff --git a/tools/format_docs_code.py b/tools/format_docs_code.py index 3a06ac9f273..a3b6965c862 100644 --- a/tools/format_docs_code.py +++ b/tools/format_docs_code.py @@ -12,6 +12,7 @@ from argparse import ArgumentParser from argparse import RawDescriptionHelpFormatter from collections.abc import Iterator +import dataclasses from functools import partial from itertools import chain from pathlib import Path @@ -33,6 +34,8 @@ re.compile(r"build"), ) +CUSTOM_TARGET_VERSIONS = {"declarative_tables.rst": "PY312"} + class BlockLine(NamedTuple): line: str @@ -66,6 +69,12 @@ def _format_block( code = "\n".join(l.code for l in input_block) mode = PYTHON_BLACK_MODE if is_python_file else RST_BLACK_MODE + custom_target = CUSTOM_TARGET_VERSIONS.get(Path(file).name) + if custom_target: + mode = dataclasses.replace( + mode, target_versions={TargetVersion[custom_target]} + ) + try: formatted = format_str(code, mode=mode) except Exception as e: From 581e02c5eed659dc1b5894bbfa31b2ea6e3bdb18 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 12 Dec 2024 22:14:35 +0100 Subject: [PATCH 394/544] Bump pypa/cibuildwheel from 2.21.3 to 2.22.0 (#12129) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.21.3 to 2.22.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.21.3...v2.22.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cherry picked from commit aaff39eede26fde1e297035021fb596cc6fa8df9) --- .github/workflows/create-wheels.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index 677ac32881a..b8e6adffeb1 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -74,7 +74,7 @@ jobs: - name: Build compiled wheels if: ${{ matrix.wheel_mode == 'compiled' }} - uses: pypa/cibuildwheel@v2.21.3 + uses: pypa/cibuildwheel@v2.22.0 env: CIBW_ARCHS_LINUX: ${{ matrix.linux_archs }} CIBW_BUILD: ${{ matrix.python }} From 2a386a248e6bccb9a23cac0933b7a2fd18a7926d Mon Sep 17 00:00:00 2001 From: Jason Pebble <136021304+Pebble94464@users.noreply.github.com> Date: Thu, 12 Dec 2024 21:14:59 +0000 Subject: [PATCH 395/544] Added sqlalchemy-hsqldb to list of external dialects (#12175) (cherry picked from commit 4ed7095154d3b457ef39208c716326dbc007906b) --- doc/build/dialects/index.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index d0710ef346e..9f18cbba22e 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -100,6 +100,8 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Greenplum | sqlalchemy-greenplum_ | +------------------------------------------------+---------------------------------------+ +| HyperSQL (hsqldb) | sqlalchemy-hsqldb_ | ++------------------------------------------------+---------------------------------------+ | IBM DB2 and Informix | ibm-db-sa_ | +------------------------------------------------+---------------------------------------+ | IBM Netezza Performance Server [1]_ | nzalchemy_ | @@ -171,6 +173,7 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _impyla: https://pypi.org/project/impyla/ .. _databend-sqlalchemy: https://github.com/datafuselabs/databend-sqlalchemy .. _sqlalchemy-greenplum: https://github.com/PlaidCloud/sqlalchemy-greenplum +.. _sqlalchemy-hsqldb: https://pypi.org/project/sqlalchemy-hsqldb/ .. _databricks: https://docs.databricks.com/en/dev-tools/sqlalchemy.html .. _clickhouse-sqlalchemy: https://pypi.org/project/clickhouse-sqlalchemy/ .. 
_sqlalchemy-kinetica: https://github.com/kineticadb/sqlalchemy-kinetica/ From 4b0a8495e956fb6cf205f9a20ba699c7154a57ca Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 12 Dec 2024 23:15:17 +0100 Subject: [PATCH 396/544] chore: update black surprisingly no change was done to the code Change-Id: I50af32fc2a172fa4aee52939fcec53d5b142ceed (cherry picked from commit f1335227c0f5da63f9a4f9179bdb8ef6916bc758) --- .pre-commit-config.yaml | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d523c0499af..1d58505b79f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/python/black - rev: 24.1.1 + rev: 24.10.0 hooks: - id: black diff --git a/tox.ini b/tox.ini index d872bbc8fe8..62d29be43a9 100644 --- a/tox.ini +++ b/tox.ini @@ -251,7 +251,7 @@ deps= # in case it requires a version pin pydocstyle pygments - black==24.1.1 + black==24.10.0 slotscheck>=0.17.0 # required by generate_tuple_map_overloads From 5c711ddf478f98dee728068e92520601e7d35401 Mon Sep 17 00:00:00 2001 From: Pablo Nicolas Estevez Date: Mon, 9 Dec 2024 14:44:44 -0500 Subject: [PATCH 397/544] add delete limit to mysql; ensure int for update/delete limits MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Added support for the ``LIMIT`` clause with ``DELETE`` for the MySQL and MariaDB dialects, to complement the already present option for ``UPDATE``. The :meth:`.delete.with_dialect_options` method of the `:func:`.delete` construct accepts parameters for ``mysql_limit`` and ``mariadb_limit``, allowing users to specify a limit on the number of rows deleted. Pull request courtesy of Pablo Nicolás Estevez. Added logic to ensure that the ``mysql_limit`` and ``mariadb_limit`` parameters of :meth:`.update.with_dialect_options` and :meth:`.delete.with_dialect_options` when compiled to string will only compile if the parameter is passed as an integer; a ``ValueError`` is raised otherwise. corrected mysql documentation for update/delete options which must be specified using the ``with_dialect_options()`` method. Fixes: #11764 Closes: #12146 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12146 Pull-request-sha: e34708374c67e016cda88919109fec5e6462eced Change-Id: I8681ddabaa192b672c7a9b9981c4fe9e4bdc8d03 (cherry picked from commit 134ad3bbdc4bcbee13acc043be0a98cc314fcaec) --- doc/build/changelog/unreleased_20/11764.rst | 20 ++++++++++ lib/sqlalchemy/dialects/mysql/base.py | 21 ++++++++-- lib/sqlalchemy/orm/query.py | 30 +++++++++++--- lib/sqlalchemy/sql/compiler.py | 8 ++++ test/dialect/mysql/test_compiler.py | 44 +++++++++++++++++++++ test/orm/dml/test_update_delete_where.py | 23 ++++++++++- 6 files changed, 137 insertions(+), 9 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11764.rst diff --git a/doc/build/changelog/unreleased_20/11764.rst b/doc/build/changelog/unreleased_20/11764.rst new file mode 100644 index 00000000000..499852b6d09 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11764.rst @@ -0,0 +1,20 @@ +.. change:: + :tags: usecase, mysql, mariadb + :tickets: 11764 + + Added support for the ``LIMIT`` clause with ``DELETE`` for the MySQL and + MariaDB dialects, to complement the already present option for + ``UPDATE``. 
The :meth:`.delete.with_dialect_options` method of the + `:func:`.delete` construct accepts parameters for ``mysql_limit`` and + ``mariadb_limit``, allowing users to specify a limit on the number of rows + deleted. Pull request courtesy of Pablo Nicolás Estevez. + + +.. change:: + :tags: bug, mysql, mariadb + + Added logic to ensure that the ``mysql_limit`` and ``mariadb_limit`` + parameters of :meth:`.update.with_dialect_options` and + :meth:`.delete.with_dialect_options` when compiled to string will only + compile if the parameter is passed as an integer; a ``ValueError`` is + raised otherwise. diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 935550fbd09..6becac7545d 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -488,7 +488,14 @@ def connect(dbapi_connection, connection_record): * UPDATE with LIMIT:: - update(..., mysql_limit=10, mariadb_limit=10) + update(...).with_dialect_options(mysql_limit=10, mariadb_limit=10) + +* DELETE + with LIMIT:: + + delete(...).with_dialect_options(mysql_limit=10, mariadb_limit=10) + + .. versionadded:: 2.0.37 Added delete with limit * optimizer hints, use :meth:`_expression.Select.prefix_with` and :meth:`_query.Query.prefix_with`:: @@ -1711,8 +1718,15 @@ def limit_clause(self, select, **kw): def update_limit_clause(self, update_stmt): limit = update_stmt.kwargs.get("%s_limit" % self.dialect.name, None) - if limit: - return "LIMIT %s" % limit + if limit is not None: + return f"LIMIT {int(limit)}" + else: + return None + + def delete_limit_clause(self, delete_stmt): + limit = delete_stmt.kwargs.get("%s_limit" % self.dialect.name, None) + if limit is not None: + return f"LIMIT {int(limit)}" else: return None @@ -2536,6 +2550,7 @@ class MySQLDialect(default.DefaultDialect): construct_arguments = [ (sa_schema.Table, {"*": None}), (sql.Update, {"limit": None}), + (sql.Delete, {"limit": None}), (sa_schema.PrimaryKeyConstraint, {"using": None}), ( sa_schema.Index, diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 59cf2f54907..6a262b43ee3 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -3143,7 +3143,9 @@ def count(self) -> int: ) def delete( - self, synchronize_session: SynchronizeSessionArgument = "auto" + self, + synchronize_session: SynchronizeSessionArgument = "auto", + delete_args: Optional[Dict[Any, Any]] = None, ) -> int: r"""Perform a DELETE with an arbitrary WHERE clause. @@ -3168,6 +3170,13 @@ def delete( :ref:`orm_expression_update_delete` for a discussion of these strategies. + :param delete_args: Optional dictionary, if present will be passed + to the underlying :func:`_expression.delete` construct as the ``**kw`` + for the object. May be used to pass dialect-specific arguments such + as ``mysql_limit``. + + .. versionadded:: 2.0.37 + :return: the count of rows matched as returned by the database's "row count" feature. 
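A minimal usage sketch of the ``delete_args`` parameter documented above. The ``User`` mapping, connection URL and filter criteria are hypothetical; only the ``delete_args={"mysql_limit": 10}`` call itself reflects the behavior added in this patch::

    from sqlalchemy import String, create_engine
    from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


    class Base(DeclarativeBase):
        pass


    class User(Base):
        __tablename__ = "users"
        id: Mapped[int] = mapped_column(primary_key=True)
        name: Mapped[str] = mapped_column(String(50))


    # hypothetical MySQL / MariaDB database URL
    engine = create_engine("mysql+pymysql://scott:tiger@localhost/test")

    with Session(engine) as session:
        # the dictionary is passed through to delete().with_dialect_options(),
        # so MySQL / MariaDB render "DELETE FROM users WHERE ... LIMIT 10"
        rowcount = (
            session.query(User)
            .filter(User.name == "squidward")
            .delete(synchronize_session=False, delete_args={"mysql_limit": 10})
        )
        session.commit()
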
@@ -3177,7 +3186,7 @@ def delete( """ # noqa: E501 - bulk_del = BulkDelete(self) + bulk_del = BulkDelete(self, delete_args) if self.dispatch.before_compile_delete: for fn in self.dispatch.before_compile_delete: new_query = fn(bulk_del.query, bulk_del) @@ -3187,6 +3196,10 @@ def delete( self = bulk_del.query delete_ = sql.delete(*self._raw_columns) # type: ignore + + if delete_args: + delete_ = delete_.with_dialect_options(**delete_args) + delete_._where_criteria = self._where_criteria result: CursorResult[Any] = self.session.execute( delete_, @@ -3242,9 +3255,8 @@ def update( strategies. :param update_args: Optional dictionary, if present will be passed - to the underlying :func:`_expression.update` - construct as the ``**kw`` for - the object. May be used to pass dialect-specific arguments such + to the underlying :func:`_expression.update` construct as the ``**kw`` + for the object. May be used to pass dialect-specific arguments such as ``mysql_limit``, as well as other special arguments such as :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`. @@ -3419,6 +3431,14 @@ def __init__( class BulkDelete(BulkUD): """BulkUD which handles DELETEs.""" + def __init__( + self, + query: Query[Any], + delete_kwargs: Optional[Dict[Any, Any]], + ): + super().__init__(query) + self.delete_kwargs = delete_kwargs + class RowReturningQuery(Query[Row[_TP]]): if TYPE_CHECKING: diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 46110eeae7f..0551c060055 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -6114,6 +6114,10 @@ def update_limit_clause(self, update_stmt): """Provide a hook for MySQL to add LIMIT to the UPDATE""" return None + def delete_limit_clause(self, delete_stmt): + """Provide a hook for MySQL to add LIMIT to the DELETE""" + return None + def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): """Provide a hook to override the initial table clause in an UPDATE statement. 
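At the Core statement level, a compile-only sketch of the same option; the throwaway table and criteria are made up for illustration, while the ``mysql_limit`` option and the integer-only enforcement follow the tests added in this patch::

    from sqlalchemy import Column, Integer, MetaData, String, Table, delete
    from sqlalchemy.dialects import mysql

    metadata = MetaData()
    t = Table("t", metadata, Column("col1", String(20)), Column("col2", Integer))

    stmt = delete(t).where(t.c.col2 == 456).with_dialect_options(mysql_limit=1)
    # renders "DELETE FROM t WHERE t.col2 = %s LIMIT 1"
    print(stmt.compile(dialect=mysql.dialect()))

    # non-integer limits now fail when the statement is compiled
    bad = delete(t).with_dialect_options(mysql_limit="not an int")
    # bad.compile(dialect=mysql.dialect())  # raises ValueError
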
@@ -6406,6 +6410,10 @@ def visit_delete(self, delete_stmt, visiting_cte=None, **kw): if t: text += " WHERE " + t + limit_clause = self.delete_limit_clause(delete_stmt) + if limit_clause: + text += " " + limit_clause + if ( self.implicit_returning or delete_stmt._returning ) and not self.returning_precedes_values: diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py index f0dcb583884..59d604eace1 100644 --- a/test/dialect/mysql/test_compiler.py +++ b/test/dialect/mysql/test_compiler.py @@ -55,13 +55,16 @@ from sqlalchemy.dialects.mysql import insert from sqlalchemy.dialects.mysql import match from sqlalchemy.sql import column +from sqlalchemy.sql import delete from sqlalchemy.sql import table +from sqlalchemy.sql import update from sqlalchemy.sql.expression import bindparam from sqlalchemy.sql.expression import literal_column from sqlalchemy.testing import assert_raises_message from sqlalchemy.testing import AssertsCompiledSQL from sqlalchemy.testing import eq_ from sqlalchemy.testing import eq_ignore_whitespace +from sqlalchemy.testing import expect_raises from sqlalchemy.testing import expect_warnings from sqlalchemy.testing import fixtures from sqlalchemy.testing import mock @@ -724,6 +727,14 @@ def test_update_limit(self): .with_dialect_options(mysql_limit=5), "UPDATE t SET col1=%s LIMIT 5", ) + + # does not make sense but we want this to compile + self.assert_compile( + t.update() + .values({"col1": 123}) + .with_dialect_options(mysql_limit=0), + "UPDATE t SET col1=%s LIMIT 0", + ) self.assert_compile( t.update() .values({"col1": 123}) @@ -738,6 +749,39 @@ def test_update_limit(self): "UPDATE t SET col1=%s WHERE t.col2 = %s LIMIT 1", ) + def test_delete_limit(self): + t = sql.table("t", sql.column("col1"), sql.column("col2")) + + self.assert_compile(t.delete(), "DELETE FROM t") + self.assert_compile( + t.delete().with_dialect_options(mysql_limit=5), + "DELETE FROM t LIMIT 5", + ) + # does not make sense but we want this to compile + self.assert_compile( + t.delete().with_dialect_options(mysql_limit=0), + "DELETE FROM t LIMIT 0", + ) + self.assert_compile( + t.delete().with_dialect_options(mysql_limit=None), + "DELETE FROM t", + ) + self.assert_compile( + t.delete() + .where(t.c.col2 == 456) + .with_dialect_options(mysql_limit=1), + "DELETE FROM t WHERE t.col2 = %s LIMIT 1", + ) + + @testing.combinations((update,), (delete,)) + def test_update_delete_limit_int_only(self, crud_fn): + t = sql.table("t", sql.column("col1"), sql.column("col2")) + + with expect_raises(ValueError): + crud_fn(t).with_dialect_options(mysql_limit="not an int").compile( + dialect=mysql.dialect() + ) + def test_utc_timestamp(self): self.assert_compile(func.utc_timestamp(), "utc_timestamp()") diff --git a/test/orm/dml/test_update_delete_where.py b/test/orm/dml/test_update_delete_where.py index da8efa44fa4..7d06a8618cd 100644 --- a/test/orm/dml/test_update_delete_where.py +++ b/test/orm/dml/test_update_delete_where.py @@ -2586,7 +2586,7 @@ def test_update_from_multitable_same_names(self): ) -class ExpressionUpdateTest(fixtures.MappedTest): +class ExpressionUpdateDeleteTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table( @@ -2652,6 +2652,27 @@ def do_orm_execute(bulk_ud): eq_(update_stmt.dialect_kwargs, update_args) + def test_delete_args(self): + Data = self.classes.Data + session = fixture_session() + delete_args = {"mysql_limit": 1} + + m1 = testing.mock.Mock() + + @event.listens_for(session, "after_bulk_delete") + def do_orm_execute(bulk_ud): + 
delete_stmt = ( + bulk_ud.result.context.compiled.compile_state.statement + ) + m1(delete_stmt) + + q = session.query(Data) + q.delete(delete_args=delete_args) + + delete_stmt = m1.mock_calls[0][1][0] + + eq_(delete_stmt.dialect_kwargs, delete_args) + class InheritTest(fixtures.DeclarativeMappedTest): run_inserts = "each" From 1f726a3a8819f518e760a544076228dbc2dd169d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 16 Dec 2024 10:58:01 -0500 Subject: [PATCH 398/544] harden HSTORE registration * use the driver_connection when we register on the connection * assert targets passed to register_hstore assert as boolean true; psycopg docs say "if None, register globally" but looking in the source it's actually registering globally if any false-evaluating object is passed. Change-Id: Ie1fd7c96714b7fe76ef964501691fa48352be259 (cherry picked from commit 29569ccfde7247a7e0ed2afe43db53494da62fb2) --- lib/sqlalchemy/dialects/postgresql/hstore.py | 6 ++++++ lib/sqlalchemy/dialects/postgresql/psycopg.py | 4 +++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py index 5a2d451316d..291af36c69b 100644 --- a/lib/sqlalchemy/dialects/postgresql/hstore.py +++ b/lib/sqlalchemy/dialects/postgresql/hstore.py @@ -195,6 +195,9 @@ def matrix(self): comparator_factory = Comparator def bind_processor(self, dialect): + # note that dialect-specific types like that of psycopg and + # psycopg2 will override this method to allow driver-level conversion + # instead, see _PsycopgHStore def process(value): if isinstance(value, dict): return _serialize_hstore(value) @@ -204,6 +207,9 @@ def process(value): return process def result_processor(self, dialect, coltype): + # note that dialect-specific types like that of psycopg and + # psycopg2 will override this method to allow driver-level conversion + # instead, see _PsycopgHStore def process(value): if value is not None: return _parse_hstore(value) diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg.py b/lib/sqlalchemy/dialects/postgresql/psycopg.py index 3c8a5e4c598..b880bc7f41b 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg.py @@ -406,10 +406,12 @@ def initialize(self, connection): # register the adapter for connections made subsequent to # this one + assert self._psycopg_adapters_map register_hstore(info, self._psycopg_adapters_map) # register the adapter for this connection - register_hstore(info, connection.connection) + assert connection.connection + register_hstore(info, connection.connection.driver_connection) @classmethod def import_dbapi(cls): From f7e3f811d4d85b6cc755b8ea0afe2eab54f5f8f5 Mon Sep 17 00:00:00 2001 From: FeeeeK <26704473+FeeeeK@users.noreply.github.com> Date: Sat, 14 Dec 2024 03:03:24 -0500 Subject: [PATCH 399/544] Add missing `SmallInteger` column spec for `asyncpg` Adds missing column spec for `SmallInteger` in `asyncpg` driver Fixes: #12170 Closes: #12171 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12171 Pull-request-sha: 82886d8521cb4e78822d685a864a9af438f6ea6b Change-Id: I2cb15f066de756d4e3f21bcac6af2cf03bd25a1c (cherry picked from commit c5abd84a2c3c7a1f4e733dbee387aae939464f3e) --- doc/build/changelog/unreleased_20/12170.rst | 7 ++ lib/sqlalchemy/dialects/postgresql/asyncpg.py | 5 + test/dialect/postgresql/test_dialect.py | 105 +++++++++--------- 3 files changed, 65 insertions(+), 52 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12170.rst diff 
--git a/doc/build/changelog/unreleased_20/12170.rst b/doc/build/changelog/unreleased_20/12170.rst new file mode 100644 index 00000000000..452181efa37 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12170.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, postgresql + :tickets: 12170 + + Fixed issue where creating a table with a primary column of + :class:`_sql.SmallInteger` and using the asyncpg driver would result in + the type being compiled to ``SERIAL`` rather than ``SMALLSERIAL``. diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 0928a0f71ad..a175e77d657 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -265,6 +265,10 @@ class AsyncpgInteger(sqltypes.Integer): render_bind_cast = True +class AsyncpgSmallInteger(sqltypes.SmallInteger): + render_bind_cast = True + + class AsyncpgBigInteger(sqltypes.BigInteger): render_bind_cast = True @@ -1051,6 +1055,7 @@ class PGDialect_asyncpg(PGDialect): INTERVAL: AsyncPgInterval, sqltypes.Boolean: AsyncpgBoolean, sqltypes.Integer: AsyncpgInteger, + sqltypes.SmallInteger: AsyncpgSmallInteger, sqltypes.BigInteger: AsyncpgBigInteger, sqltypes.Numeric: AsyncpgNumeric, sqltypes.Float: AsyncpgFloat, diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py index 3f55c085fb4..892e2abc9be 100644 --- a/test/dialect/postgresql/test_dialect.py +++ b/test/dialect/postgresql/test_dialect.py @@ -1573,61 +1573,62 @@ def test_numeric_raise(self, connection): stmt = text("select cast('hi' as char) as hi").columns(hi=Numeric) assert_raises(exc.InvalidRequestError, connection.execute, stmt) - @testing.only_on("postgresql+psycopg2") - def test_serial_integer(self): - class BITD(TypeDecorator): - impl = Integer - - cache_ok = True - - def load_dialect_impl(self, dialect): - if dialect.name == "postgresql": - return BigInteger() - else: - return Integer() - - for version, type_, expected in [ - (None, Integer, "SERIAL"), - (None, BigInteger, "BIGSERIAL"), - ((9, 1), SmallInteger, "SMALLINT"), - ((9, 2), SmallInteger, "SMALLSERIAL"), - (None, postgresql.INTEGER, "SERIAL"), - (None, postgresql.BIGINT, "BIGSERIAL"), - ( - None, - Integer().with_variant(BigInteger(), "postgresql"), - "BIGSERIAL", - ), - ( - None, - Integer().with_variant(postgresql.BIGINT, "postgresql"), - "BIGSERIAL", - ), - ( - (9, 2), - Integer().with_variant(SmallInteger, "postgresql"), - "SMALLSERIAL", - ), - (None, BITD(), "BIGSERIAL"), - ]: - m = MetaData() + @testing.combinations( + (None, Integer, "SERIAL"), + (None, BigInteger, "BIGSERIAL"), + ((9, 1), SmallInteger, "SMALLINT"), + ((9, 2), SmallInteger, "SMALLSERIAL"), + (None, SmallInteger, "SMALLSERIAL"), + (None, postgresql.INTEGER, "SERIAL"), + (None, postgresql.BIGINT, "BIGSERIAL"), + ( + None, + Integer().with_variant(BigInteger(), "postgresql"), + "BIGSERIAL", + ), + ( + None, + Integer().with_variant(postgresql.BIGINT, "postgresql"), + "BIGSERIAL", + ), + ( + (9, 2), + Integer().with_variant(SmallInteger, "postgresql"), + "SMALLSERIAL", + ), + (None, "BITD()", "BIGSERIAL"), + argnames="version, type_, expected", + ) + def test_serial_integer(self, version, type_, expected, testing_engine): + if type_ == "BITD()": - t = Table("t", m, Column("c", type_, primary_key=True)) + class BITD(TypeDecorator): + impl = Integer - if version: - dialect = testing.db.dialect.__class__() - dialect._get_server_version_info = mock.Mock( - return_value=version - ) - 
dialect.initialize(testing.db.connect()) - else: - dialect = testing.db.dialect + cache_ok = True - ddl_compiler = dialect.ddl_compiler(dialect, schema.CreateTable(t)) - eq_( - ddl_compiler.get_column_specification(t.c.c), - "c %s NOT NULL" % expected, - ) + def load_dialect_impl(self, dialect): + if dialect.name == "postgresql": + return BigInteger() + else: + return Integer() + + type_ = BITD() + t = Table("t", MetaData(), Column("c", type_, primary_key=True)) + + if version: + engine = testing_engine() + dialect = engine.dialect + dialect._get_server_version_info = mock.Mock(return_value=version) + engine.connect().close() # initialize the dialect + else: + dialect = testing.db.dialect + + ddl_compiler = dialect.ddl_compiler(dialect, schema.CreateTable(t)) + eq_( + ddl_compiler.get_column_specification(t.c.c), + "c %s NOT NULL" % expected, + ) @testing.requires.psycopg2_compatibility def test_initial_transaction_state_psycopg2(self): From ca592bb8a096fb197f3ece348273a76dd4d6c9a2 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 18 Dec 2024 11:24:58 -0500 Subject: [PATCH 400/544] harden typing / coercion for on conflict/on duplicate key in 2.1 we want these structures to be cacheable, so start by cleaning up types and adding coercions to enforce those types. these will be more locked down in 2.1 as we will need to move bound parameter coercion outside of compilation, but here do some small starts and introduce in 2.0. in one interest of cachability, a "literal_binds" that found its way into SQLite's compiler is replaced with "literal_execute", the difference being that the latter is cacheable. This literal is apparently necessary to suit SQLite's query planner for the "index criteria" portion of the on conflict clause that otherwise can't work with a real bound parameter. 
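As a sketch of what the tightened coercion means for user code (hypothetical ``users`` table; the behavior shown follows the new compile tests below), ``index_where`` and ``where`` must now be real SQL expression objects, and the inferred index criteria render as a deferred literal-execute parameter instead of an inline literal::

    from sqlalchemy import Column, Integer, MetaData, String, Table, text
    from sqlalchemy.dialects import sqlite
    from sqlalchemy.dialects.sqlite import insert

    metadata = MetaData()
    users = Table(
        "users",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
    )

    stmt = insert(users).values(id=1, name="spongebob")

    # index_where / where must be expression objects such as text() or
    # column expressions; a plain string like index_where="name = 'hi'"
    # now raises ArgumentError
    do_nothing = stmt.on_conflict_do_nothing(
        index_elements=["id"], index_where=text("name = 'hi'")
    )
    print(do_nothing.compile(dialect=sqlite.dialect()))
    # ... ON CONFLICT (id) WHERE name = 'hi' DO NOTHING

    # with a column expression, the SQLite index criteria render as a
    # literal-execute parameter (__[POSTCOMPILE_name_1] in the string form)
    # rather than an inline literal, keeping the construct cacheable
    cacheable = stmt.on_conflict_do_nothing(
        index_elements=[users.c.id], index_where=users.c.name == "hi"
    )
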
Change-Id: I4d66ec1473321616a1707da324a7dfe7a61ec94e (cherry picked from commit 219bcb3a77edd72ef8fc36c8ded921d6fb9a34a5) --- lib/sqlalchemy/dialects/_typing.py | 12 +- lib/sqlalchemy/dialects/mysql/base.py | 2 + lib/sqlalchemy/dialects/mysql/dml.py | 2 + lib/sqlalchemy/dialects/postgresql/base.py | 2 + lib/sqlalchemy/dialects/postgresql/dml.py | 49 ++++- lib/sqlalchemy/dialects/sqlite/base.py | 6 +- lib/sqlalchemy/dialects/sqlite/dml.py | 49 +++-- lib/sqlalchemy/sql/coercions.py | 6 +- lib/sqlalchemy/sql/schema.py | 4 + test/dialect/postgresql/test_compiler.py | 103 ++++++++++ test/dialect/postgresql/test_on_conflict.py | 11 +- test/dialect/test_sqlite.py | 216 ++++++++++++++++---- 12 files changed, 381 insertions(+), 81 deletions(-) diff --git a/lib/sqlalchemy/dialects/_typing.py b/lib/sqlalchemy/dialects/_typing.py index 9ee6e4bca1c..811e125fd5e 100644 --- a/lib/sqlalchemy/dialects/_typing.py +++ b/lib/sqlalchemy/dialects/_typing.py @@ -12,14 +12,16 @@ from typing import Optional from typing import Union -from ..sql._typing import _DDLColumnArgument -from ..sql.elements import DQLDMLClauseElement +from ..sql import roles +from ..sql.schema import Column from ..sql.schema import ColumnCollectionConstraint from ..sql.schema import Index _OnConflictConstraintT = Union[str, ColumnCollectionConstraint, Index, None] -_OnConflictIndexElementsT = Optional[Iterable[_DDLColumnArgument]] -_OnConflictIndexWhereT = Optional[DQLDMLClauseElement] +_OnConflictIndexElementsT = Optional[ + Iterable[Union[Column[Any], str, roles.DDLConstraintColumnRole]] +] +_OnConflictIndexWhereT = Optional[roles.WhereHavingRole] _OnConflictSetT = Optional[Mapping[Any, Any]] -_OnConflictWhereT = Union[DQLDMLClauseElement, str, None] +_OnConflictWhereT = Optional[roles.WhereHavingRole] diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 6becac7545d..ae863f30a64 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1403,6 +1403,8 @@ def visit_on_duplicate_key_update(self, on_duplicate, **kw): for column in (col for col in cols if col.key in on_duplicate.update): val = on_duplicate.update[column.key] + # TODO: this coercion should be up front. we can't cache + # SQL constructs with non-bound literals buried in them if coercions._is_literal(val): val = elements.BindParameter(None, val, type_=column.type) value_text = self.process(val.self_group(), use_schema=False) diff --git a/lib/sqlalchemy/dialects/mysql/dml.py b/lib/sqlalchemy/dialects/mysql/dml.py index d9164317b09..731d1943aa8 100644 --- a/lib/sqlalchemy/dialects/mysql/dml.py +++ b/lib/sqlalchemy/dialects/mysql/dml.py @@ -7,6 +7,7 @@ from __future__ import annotations from typing import Any +from typing import Dict from typing import List from typing import Mapping from typing import Optional @@ -185,6 +186,7 @@ class OnDuplicateClause(ClauseElement): _parameter_ordering: Optional[List[str]] = None + update: Dict[str, Any] stringify_dialect = "mysql" def __init__( diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index cd3ebfd5972..44b8c96a815 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -2085,6 +2085,8 @@ def visit_on_conflict_do_update(self, on_conflict, **kw): else: continue + # TODO: this coercion should be up front. 
we can't cache + # SQL constructs with non-bound literals buried in them if coercions._is_literal(value): value = elements.BindParameter(None, value, type_=c.type) diff --git a/lib/sqlalchemy/dialects/postgresql/dml.py b/lib/sqlalchemy/dialects/postgresql/dml.py index 4404ecd37bf..1615506c0b2 100644 --- a/lib/sqlalchemy/dialects/postgresql/dml.py +++ b/lib/sqlalchemy/dialects/postgresql/dml.py @@ -7,7 +7,10 @@ from __future__ import annotations from typing import Any +from typing import List from typing import Optional +from typing import Tuple +from typing import Union from . import ext from .._typing import _OnConflictConstraintT @@ -26,7 +29,9 @@ from ...sql.base import ReadOnlyColumnCollection from ...sql.dml import Insert as StandardInsert from ...sql.elements import ClauseElement +from ...sql.elements import ColumnElement from ...sql.elements import KeyedColumnElement +from ...sql.elements import TextClause from ...sql.expression import alias from ...util.typing import Self @@ -153,11 +158,10 @@ def on_conflict_do_update( :paramref:`.Insert.on_conflict_do_update.set_` dictionary. :param where: - Optional argument. If present, can be a literal SQL - string or an acceptable expression for a ``WHERE`` clause - that restricts the rows affected by ``DO UPDATE SET``. Rows - not meeting the ``WHERE`` condition will not be updated - (effectively a ``DO NOTHING`` for those rows). + Optional argument. An expression object representing a ``WHERE`` + clause that restricts the rows affected by ``DO UPDATE SET``. Rows not + meeting the ``WHERE`` condition will not be updated (effectively a + ``DO NOTHING`` for those rows). .. seealso:: @@ -212,8 +216,10 @@ class OnConflictClause(ClauseElement): stringify_dialect = "postgresql" constraint_target: Optional[str] - inferred_target_elements: _OnConflictIndexElementsT - inferred_target_whereclause: _OnConflictIndexWhereT + inferred_target_elements: Optional[List[Union[str, schema.Column[Any]]]] + inferred_target_whereclause: Optional[ + Union[ColumnElement[Any], TextClause] + ] def __init__( self, @@ -254,8 +260,24 @@ def __init__( if index_elements is not None: self.constraint_target = None - self.inferred_target_elements = index_elements - self.inferred_target_whereclause = index_where + self.inferred_target_elements = [ + coercions.expect(roles.DDLConstraintColumnRole, column) + for column in index_elements + ] + + self.inferred_target_whereclause = ( + coercions.expect( + ( + roles.StatementOptionRole + if isinstance(constraint, ext.ExcludeConstraint) + else roles.WhereHavingRole + ), + index_where, + ) + if index_where is not None + else None + ) + elif constraint is None: self.constraint_target = self.inferred_target_elements = ( self.inferred_target_whereclause @@ -269,6 +291,9 @@ class OnConflictDoNothing(OnConflictClause): class OnConflictDoUpdate(OnConflictClause): __visit_name__ = "on_conflict_do_update" + update_values_to_set: List[Tuple[Union[schema.Column[Any], str], Any]] + update_whereclause: Optional[ColumnElement[Any]] + def __init__( self, constraint: _OnConflictConstraintT = None, @@ -307,4 +332,8 @@ def __init__( (coercions.expect(roles.DMLColumnRole, key), value) for key, value in set_.items() ] - self.update_whereclause = where + self.update_whereclause = ( + coercions.expect(roles.WhereHavingRole, where) + if where is not None + else None + ) diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 5ae7ffbf0f3..51b957cf9ac 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ 
b/lib/sqlalchemy/dialects/sqlite/base.py @@ -1481,9 +1481,7 @@ def visit_not_regexp_match_op_binary(self, binary, operator, **kw): return self._generate_generic_binary(binary, " NOT REGEXP ", **kw) def _on_conflict_target(self, clause, **kw): - if clause.constraint_target is not None: - target_text = "(%s)" % clause.constraint_target - elif clause.inferred_target_elements is not None: + if clause.inferred_target_elements is not None: target_text = "(%s)" % ", ".join( ( self.preparer.quote(c) @@ -1497,7 +1495,7 @@ def _on_conflict_target(self, clause, **kw): clause.inferred_target_whereclause, include_table=False, use_schema=False, - literal_binds=True, + literal_execute=True, ) else: diff --git a/lib/sqlalchemy/dialects/sqlite/dml.py b/lib/sqlalchemy/dialects/sqlite/dml.py index dcf5e4482ee..163a6ed28b2 100644 --- a/lib/sqlalchemy/dialects/sqlite/dml.py +++ b/lib/sqlalchemy/dialects/sqlite/dml.py @@ -7,6 +7,10 @@ from __future__ import annotations from typing import Any +from typing import List +from typing import Optional +from typing import Tuple +from typing import Union from .._typing import _OnConflictIndexElementsT from .._typing import _OnConflictIndexWhereT @@ -15,6 +19,7 @@ from ... import util from ...sql import coercions from ...sql import roles +from ...sql import schema from ...sql._typing import _DMLTableArgument from ...sql.base import _exclusive_against from ...sql.base import _generative @@ -22,7 +27,9 @@ from ...sql.base import ReadOnlyColumnCollection from ...sql.dml import Insert as StandardInsert from ...sql.elements import ClauseElement +from ...sql.elements import ColumnElement from ...sql.elements import KeyedColumnElement +from ...sql.elements import TextClause from ...sql.expression import alias from ...util.typing import Self @@ -141,11 +148,10 @@ def on_conflict_do_update( :paramref:`.Insert.on_conflict_do_update.set_` dictionary. :param where: - Optional argument. If present, can be a literal SQL - string or an acceptable expression for a ``WHERE`` clause - that restricts the rows affected by ``DO UPDATE SET``. Rows - not meeting the ``WHERE`` condition will not be updated - (effectively a ``DO NOTHING`` for those rows). + Optional argument. An expression object representing a ``WHERE`` + clause that restricts the rows affected by ``DO UPDATE SET``. Rows not + meeting the ``WHERE`` condition will not be updated (effectively a + ``DO NOTHING`` for those rows). 
""" @@ -184,9 +190,10 @@ def on_conflict_do_nothing( class OnConflictClause(ClauseElement): stringify_dialect = "sqlite" - constraint_target: None - inferred_target_elements: _OnConflictIndexElementsT - inferred_target_whereclause: _OnConflictIndexWhereT + inferred_target_elements: Optional[List[Union[str, schema.Column[Any]]]] + inferred_target_whereclause: Optional[ + Union[ColumnElement[Any], TextClause] + ] def __init__( self, @@ -194,11 +201,20 @@ def __init__( index_where: _OnConflictIndexWhereT = None, ): if index_elements is not None: - self.constraint_target = None - self.inferred_target_elements = index_elements - self.inferred_target_whereclause = index_where + self.inferred_target_elements = [ + coercions.expect(roles.DDLConstraintColumnRole, column) + for column in index_elements + ] + self.inferred_target_whereclause = ( + coercions.expect( + roles.WhereHavingRole, + index_where, + ) + if index_where is not None + else None + ) else: - self.constraint_target = self.inferred_target_elements = ( + self.inferred_target_elements = ( self.inferred_target_whereclause ) = None @@ -210,6 +226,9 @@ class OnConflictDoNothing(OnConflictClause): class OnConflictDoUpdate(OnConflictClause): __visit_name__ = "on_conflict_do_update" + update_values_to_set: List[Tuple[Union[schema.Column[Any], str], Any]] + update_whereclause: Optional[ColumnElement[Any]] + def __init__( self, index_elements: _OnConflictIndexElementsT = None, @@ -237,4 +256,8 @@ def __init__( (coercions.expect(roles.DMLColumnRole, key), value) for key, value in set_.items() ] - self.update_whereclause = where + self.update_whereclause = ( + coercions.expect(roles.WhereHavingRole, where) + if where is not None + else None + ) diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index 136fc486463..a5730652055 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -57,9 +57,9 @@ from .elements import ClauseElement from .elements import ColumnClause from .elements import ColumnElement - from .elements import DQLDMLClauseElement from .elements import NamedColumn from .elements import SQLCoreOperations + from .elements import TextClause from .schema import Column from .selectable import _ColumnsClauseElement from .selectable import _JoinTargetProtocol @@ -190,7 +190,7 @@ def expect( role: Type[roles.DDLReferredColumnRole], element: Any, **kw: Any, -) -> Column[Any]: ... +) -> Union[Column[Any], str]: ... @overload @@ -206,7 +206,7 @@ def expect( role: Type[roles.StatementOptionRole], element: Any, **kw: Any, -) -> DQLDMLClauseElement: ... +) -> Union[ColumnElement[Any], TextClause]: ... 
@overload diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 463c8c31e07..c9b57615110 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -4225,6 +4225,10 @@ def __init__( ] = _gather_expressions if processed_expressions is not None: + + # this is expected to be an empty list + assert not processed_expressions + self._pending_colargs = [] for ( expr, diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index bb2dc653f83..f02b42c0b21 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -62,6 +62,7 @@ from sqlalchemy.sql import table from sqlalchemy.sql import util as sql_util from sqlalchemy.sql.functions import GenericFunction +from sqlalchemy.testing import expect_raises from sqlalchemy.testing import expect_raises_message from sqlalchemy.testing import fixtures from sqlalchemy.testing.assertions import assert_raises @@ -2699,6 +2700,11 @@ def define_tables(cls, metadata): (cls.table_with_metadata.c.description, "&&"), where=cls.table_with_metadata.c.description != "foo", ) + cls.excl_constr_anon_str = ExcludeConstraint( + (cls.table_with_metadata.c.name, "="), + (cls.table_with_metadata.c.description, "&&"), + where="description != 'foo'", + ) cls.goofy_index = Index( "goofy_index", table1.c.name, postgresql_where=table1.c.name > "m" ) @@ -2717,6 +2723,69 @@ def define_tables(cls, metadata): Column("name", String(50), key="name_keyed"), ) + @testing.combinations( + ( + lambda users, stmt: stmt.on_conflict_do_nothing( + index_elements=["id"], index_where=text("name = 'hi'") + ), + "ON CONFLICT (id) WHERE name = 'hi' DO NOTHING", + ), + ( + lambda users, stmt: stmt.on_conflict_do_nothing( + index_elements=[users.c.id], index_where=users.c.name == "hi" + ), + "ON CONFLICT (id) WHERE name = %(name_1)s DO NOTHING", + ), + ( + lambda users, stmt: stmt.on_conflict_do_nothing( + index_elements=["id"], index_where="name = 'hi'" + ), + exc.ArgumentError, + ), + ( + lambda users, stmt: stmt.on_conflict_do_update( + index_elements=[users.c.id], + set_={users.c.name: "there"}, + where=users.c.name == "hi", + ), + "ON CONFLICT (id) DO UPDATE SET name = %(param_1)s " + "WHERE users.name = %(name_1)s", + ), + ( + lambda users, stmt: stmt.on_conflict_do_update( + index_elements=[users.c.id], + set_={users.c.name: "there"}, + where=text("name = 'hi'"), + ), + "ON CONFLICT (id) DO UPDATE SET name = %(param_1)s " + "WHERE name = 'hi'", + ), + ( + lambda users, stmt: stmt.on_conflict_do_update( + index_elements=[users.c.id], + set_={users.c.name: "there"}, + where="name = 'hi'", + ), + exc.ArgumentError, + ), + ) + def test_assorted_arg_coercion(self, case, expected): + stmt = insert(self.tables.users) + + if isinstance(expected, type) and issubclass(expected, Exception): + with expect_raises(expected): + testing.resolve_lambda( + case, stmt=stmt, users=self.tables.users + ), + else: + self.assert_compile( + testing.resolve_lambda( + case, stmt=stmt, users=self.tables.users + ), + f"INSERT INTO users (id, name) VALUES (%(id)s, %(name)s) " + f"{expected}", + ) + @testing.combinations("control", "excluded", "dict") def test_set_excluded(self, scenario): """test #8014, sending all of .excluded to set""" @@ -3110,6 +3179,20 @@ def test_do_update_unnamed_exclude_constraint_target(self): "DO UPDATE SET name = excluded.name", ) + def test_do_update_unnamed_exclude_constraint_string_target(self): + i = insert(self.table1).values(dict(name="foo")) + i = 
i.on_conflict_do_update( + constraint=self.excl_constr_anon_str, + set_=dict(name=i.excluded.name), + ) + self.assert_compile( + i, + "INSERT INTO mytable (name) VALUES " + "(%(name)s) ON CONFLICT (name, description) " + "WHERE description != 'foo' " + "DO UPDATE SET name = excluded.name", + ) + def test_do_update_add_whereclause(self): i = insert(self.table1).values(dict(name="foo")) i = i.on_conflict_do_update( @@ -3130,6 +3213,26 @@ def test_do_update_add_whereclause(self): "AND mytable.description != %(description_2)s", ) + def test_do_update_str_index_where(self): + i = insert(self.table1).values(dict(name="foo")) + i = i.on_conflict_do_update( + constraint=self.excl_constr_anon_str, + set_=dict(name=i.excluded.name), + where=( + (self.table1.c.name != "brah") + & (self.table1.c.description != "brah") + ), + ) + self.assert_compile( + i, + "INSERT INTO mytable (name) VALUES " + "(%(name)s) ON CONFLICT (name, description) " + "WHERE description != 'foo' " + "DO UPDATE SET name = excluded.name " + "WHERE mytable.name != %(name_1)s " + "AND mytable.description != %(description_1)s", + ) + def test_do_update_add_whereclause_references_excluded(self): i = insert(self.table1).values(dict(name="foo")) i = i.on_conflict_do_update( diff --git a/test/dialect/postgresql/test_on_conflict.py b/test/dialect/postgresql/test_on_conflict.py index a9320f2c503..691f6c39620 100644 --- a/test/dialect/postgresql/test_on_conflict.py +++ b/test/dialect/postgresql/test_on_conflict.py @@ -583,7 +583,10 @@ def test_on_conflict_do_update_exotic_targets_four(self, connection): [(43, "nameunique2", "name2@gmail.com", "not")], ) - def test_on_conflict_do_update_exotic_targets_four_no_pk(self, connection): + @testing.variation("string_index_elements", [True, False]) + def test_on_conflict_do_update_exotic_targets_four_no_pk( + self, connection, string_index_elements + ): users = self.tables.users_xtra self._exotic_targets_fixture(connection) @@ -591,7 +594,11 @@ def test_on_conflict_do_update_exotic_targets_four_no_pk(self, connection): # upsert on target login_email, not id i = insert(users) i = i.on_conflict_do_update( - index_elements=[users.c.login_email], + index_elements=( + ["login_email"] + if string_index_elements + else [users.c.login_email] + ), set_=dict( id=i.excluded.id, name=i.excluded.name, diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index 246b9852329..53565504938 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -2949,7 +2949,176 @@ def test_regexp_replace(self): ) -class OnConflictTest(AssertsCompiledSQL, fixtures.TablesTest): +class OnConflictCompileTest(AssertsCompiledSQL): + __dialect__ = "sqlite" + + @testing.combinations( + ( + lambda users, stmt: stmt.on_conflict_do_nothing( + index_elements=["id"], index_where=text("name = 'hi'") + ), + "ON CONFLICT (id) WHERE name = 'hi' DO NOTHING", + ), + ( + lambda users, stmt: stmt.on_conflict_do_nothing( + index_elements=["id"], index_where="name = 'hi'" + ), + exc.ArgumentError, + ), + ( + lambda users, stmt: stmt.on_conflict_do_nothing( + index_elements=[users.c.id], index_where=users.c.name == "hi" + ), + "ON CONFLICT (id) WHERE name = __[POSTCOMPILE_name_1] DO NOTHING", + ), + ( + lambda users, stmt: stmt.on_conflict_do_update( + index_elements=[users.c.id], + set_={users.c.name: "there"}, + where=users.c.name == "hi", + ), + "ON CONFLICT (id) DO UPDATE SET name = ? 
" "WHERE users.name = ?", + ), + ( + lambda users, stmt: stmt.on_conflict_do_update( + index_elements=[users.c.id], + set_={users.c.name: "there"}, + where=text("name = 'hi'"), + ), + "ON CONFLICT (id) DO UPDATE SET name = ? " "WHERE name = 'hi'", + ), + ( + lambda users, stmt: stmt.on_conflict_do_update( + index_elements=[users.c.id], + set_={users.c.name: "there"}, + where="name = 'hi'", + ), + exc.ArgumentError, + ), + argnames="case,expected", + ) + def test_assorted_arg_coercion(self, users, case, expected): + stmt = insert(users) + + if isinstance(expected, type) and issubclass(expected, Exception): + with expect_raises(expected): + testing.resolve_lambda(case, stmt=stmt, users=users), + else: + self.assert_compile( + testing.resolve_lambda(case, stmt=stmt, users=users), + f"INSERT INTO users (id, name) VALUES (?, ?) {expected}", + ) + + @testing.combinations("control", "excluded", "dict") + def test_set_excluded(self, scenario, users): + """test #8014, sending all of .excluded to set""" + + if scenario == "control": + + stmt = insert(users) + self.assert_compile( + stmt.on_conflict_do_update(set_=stmt.excluded), + "INSERT INTO users (id, name) VALUES (?, ?) ON CONFLICT " + "DO UPDATE SET id = excluded.id, name = excluded.name", + ) + else: + users_w_key = self.tables.users_w_key + + stmt = insert(users_w_key) + + if scenario == "excluded": + self.assert_compile( + stmt.on_conflict_do_update(set_=stmt.excluded), + "INSERT INTO users_w_key (id, name) VALUES (?, ?) " + "ON CONFLICT " + "DO UPDATE SET id = excluded.id, name = excluded.name", + ) + else: + self.assert_compile( + stmt.on_conflict_do_update( + set_={ + "id": stmt.excluded.id, + "name_keyed": stmt.excluded.name_keyed, + } + ), + "INSERT INTO users_w_key (id, name) VALUES (?, ?) " + "ON CONFLICT " + "DO UPDATE SET id = excluded.id, name = excluded.name", + ) + + def test_on_conflict_do_update_exotic_targets_six( + self, connection, users_xtra + ): + users = users_xtra + + unique_partial_index = schema.Index( + "idx_unique_partial_name", + users_xtra.c.name, + users_xtra.c.lets_index_this, + unique=True, + sqlite_where=users_xtra.c.lets_index_this == "unique_name", + ) + + conn = connection + conn.execute( + insert(users), + dict( + id=1, + name="name1", + login_email="mail1@gmail.com", + lets_index_this="unique_name", + ), + ) + i = insert(users) + i = i.on_conflict_do_update( + index_elements=unique_partial_index.columns, + index_where=unique_partial_index.dialect_options["sqlite"][ + "where" + ], + set_=dict( + name=i.excluded.name, login_email=i.excluded.login_email + ), + ) + + # this test illustrates that the index_where clause can't use + # bound parameters, where we see below a literal_execute parameter is + # used (will be sent as literal to the DBAPI). SQLite otherwise + # fails here with "(sqlite3.OperationalError) ON CONFLICT clause does + # not match any PRIMARY KEY or UNIQUE constraint" if sent as a real + # bind parameter. + self.assert_compile( + i, + "INSERT INTO users_xtra (id, name, login_email, lets_index_this) " + "VALUES (?, ?, ?, ?) 
ON CONFLICT (name, lets_index_this) " + "WHERE lets_index_this = __[POSTCOMPILE_lets_index_this_1] " + "DO UPDATE " + "SET name = excluded.name, login_email = excluded.login_email", + ) + + @testing.fixture + def users(self): + metadata = MetaData() + return Table( + "users", + metadata, + Column("id", Integer, primary_key=True), + Column("name", String(50)), + ) + + @testing.fixture + def users_xtra(self): + metadata = MetaData() + return Table( + "users_xtra", + metadata, + Column("id", Integer, primary_key=True), + Column("name", String(50)), + Column("login_email", String(50)), + Column("lets_index_this", String(50)), + ) + + +class OnConflictTest(fixtures.TablesTest): __only_on__ = ("sqlite >= 3.24.0",) __backend__ = True @@ -3009,49 +3178,8 @@ def process_bind_param(self, value, dialect): ) def test_bad_args(self): - assert_raises( - ValueError, insert(self.tables.users).on_conflict_do_update - ) - - @testing.combinations("control", "excluded", "dict") - @testing.skip_if("+pysqlite_numeric") - @testing.skip_if("+pysqlite_dollar") - def test_set_excluded(self, scenario): - """test #8014, sending all of .excluded to set""" - - if scenario == "control": - users = self.tables.users - - stmt = insert(users) - self.assert_compile( - stmt.on_conflict_do_update(set_=stmt.excluded), - "INSERT INTO users (id, name) VALUES (?, ?) ON CONFLICT " - "DO UPDATE SET id = excluded.id, name = excluded.name", - ) - else: - users_w_key = self.tables.users_w_key - - stmt = insert(users_w_key) - - if scenario == "excluded": - self.assert_compile( - stmt.on_conflict_do_update(set_=stmt.excluded), - "INSERT INTO users_w_key (id, name) VALUES (?, ?) " - "ON CONFLICT " - "DO UPDATE SET id = excluded.id, name = excluded.name", - ) - else: - self.assert_compile( - stmt.on_conflict_do_update( - set_={ - "id": stmt.excluded.id, - "name_keyed": stmt.excluded.name_keyed, - } - ), - "INSERT INTO users_w_key (id, name) VALUES (?, ?) 
" - "ON CONFLICT " - "DO UPDATE SET id = excluded.id, name = excluded.name", - ) + with expect_raises(ValueError): + insert(self.tables.users).on_conflict_do_update() def test_on_conflict_do_no_call_twice(self): users = self.tables.users From 02bd039796264268de38f7f293b95dbb13ca99f1 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 18 Dec 2024 17:19:56 -0500 Subject: [PATCH 401/544] typing fix: allow stmt.excluded for set_ Change-Id: I6f0af23fba8f5868282505438e6ca0a5af7e1bbe (cherry picked from commit 5c79e5ce2dd9db491e9177e7f5af0a83058ebe06) --- lib/sqlalchemy/dialects/_typing.py | 5 ++++- test/typing/plain_files/dialects/postgresql/pg_stuff.py | 3 +++ test/typing/plain_files/dialects/sqlite/sqlite_stuff.py | 3 +++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/dialects/_typing.py b/lib/sqlalchemy/dialects/_typing.py index 811e125fd5e..8e04f3b3764 100644 --- a/lib/sqlalchemy/dialects/_typing.py +++ b/lib/sqlalchemy/dialects/_typing.py @@ -13,6 +13,7 @@ from typing import Union from ..sql import roles +from ..sql.base import ColumnCollection from ..sql.schema import Column from ..sql.schema import ColumnCollectionConstraint from ..sql.schema import Index @@ -23,5 +24,7 @@ Iterable[Union[Column[Any], str, roles.DDLConstraintColumnRole]] ] _OnConflictIndexWhereT = Optional[roles.WhereHavingRole] -_OnConflictSetT = Optional[Mapping[Any, Any]] +_OnConflictSetT = Optional[ + Union[Mapping[Any, Any], ColumnCollection[Any, Any]] +] _OnConflictWhereT = Optional[roles.WhereHavingRole] diff --git a/test/typing/plain_files/dialects/postgresql/pg_stuff.py b/test/typing/plain_files/dialects/postgresql/pg_stuff.py index 678d22b71f9..5e56efba98c 100644 --- a/test/typing/plain_files/dialects/postgresql/pg_stuff.py +++ b/test/typing/plain_files/dialects/postgresql/pg_stuff.py @@ -81,6 +81,9 @@ class Test(Base): unique, ["foo"], Test.id > 0, {"id": 42, Test.ident: 99}, Test.id == 22 ).excluded.foo.desc() +s1 = insert(Test) +s1.on_conflict_do_update(set_=s1.excluded) + # EXPECTED_TYPE: Column[Range[int]] reveal_type(Column(INT4RANGE())) diff --git a/test/typing/plain_files/dialects/sqlite/sqlite_stuff.py b/test/typing/plain_files/dialects/sqlite/sqlite_stuff.py index 00debda5096..456f402937a 100644 --- a/test/typing/plain_files/dialects/sqlite/sqlite_stuff.py +++ b/test/typing/plain_files/dialects/sqlite/sqlite_stuff.py @@ -21,3 +21,6 @@ class Test(Base): insert(Test).on_conflict_do_nothing("foo", Test.id > 0).on_conflict_do_update( unique, Test.id > 0, {"id": 42, Test.data: 99}, Test.id == 22 ).excluded.foo.desc() + +s1 = insert(Test) +s1.on_conflict_do_update(set_=s1.excluded) From 2b453cc4577ae35381a283afccdf68929dcd4e3d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 19 Dec 2024 17:50:21 -0500 Subject: [PATCH 402/544] fix SQLite on conflict tests in I4d66ec1473321616a1707da324a7dfe7a61ec94e we added new tests in the sqlite suite but we forgot to extend from fixtures.TestBase, so these tests did not get run at all. 
repair tests Change-Id: Iaec17a754e0ab1d4b43f063706b512ed335a7465 (cherry picked from commit 6a87d619d9dfe82a8135f0ab3268bf526ac42383) --- test/dialect/test_sqlite.py | 31 ++++++++++++++----------------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index 53565504938..f73ba4025a1 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -2949,7 +2949,7 @@ def test_regexp_replace(self): ) -class OnConflictCompileTest(AssertsCompiledSQL): +class OnConflictCompileTest(AssertsCompiledSQL, fixtures.TestBase): __dialect__ = "sqlite" @testing.combinations( @@ -3009,8 +3009,8 @@ def test_assorted_arg_coercion(self, users, case, expected): f"INSERT INTO users (id, name) VALUES (?, ?) {expected}", ) - @testing.combinations("control", "excluded", "dict") - def test_set_excluded(self, scenario, users): + @testing.combinations("control", "excluded", "dict", argnames="scenario") + def test_set_excluded(self, scenario, users, users_w_key): """test #8014, sending all of .excluded to set""" if scenario == "control": @@ -3022,7 +3022,6 @@ def test_set_excluded(self, scenario, users): "DO UPDATE SET id = excluded.id, name = excluded.name", ) else: - users_w_key = self.tables.users_w_key stmt = insert(users_w_key) @@ -3046,9 +3045,7 @@ def test_set_excluded(self, scenario, users): "DO UPDATE SET id = excluded.id, name = excluded.name", ) - def test_on_conflict_do_update_exotic_targets_six( - self, connection, users_xtra - ): + def test_on_conflict_do_update_exotic_targets_six(self, users_xtra): users = users_xtra unique_partial_index = schema.Index( @@ -3059,16 +3056,6 @@ def test_on_conflict_do_update_exotic_targets_six( sqlite_where=users_xtra.c.lets_index_this == "unique_name", ) - conn = connection - conn.execute( - insert(users), - dict( - id=1, - name="name1", - login_email="mail1@gmail.com", - lets_index_this="unique_name", - ), - ) i = insert(users) i = i.on_conflict_do_update( index_elements=unique_partial_index.columns, @@ -3105,6 +3092,16 @@ def users(self): Column("name", String(50)), ) + @testing.fixture + def users_w_key(self): + metadata = MetaData() + return Table( + "users_w_key", + metadata, + Column("id", Integer, primary_key=True), + Column("name", String(50), key="name_keyed"), + ) + @testing.fixture def users_xtra(self): metadata = MetaData() From 6e660ae98dbaa7ceba349ffdb69f60b2181a2dc5 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 20 Dec 2024 18:02:37 -0500 Subject: [PATCH 403/544] update for mypy 1.14 Change-Id: I7315c06314ed25c2c00f56b2883f97f4489e433c (cherry picked from commit b39afd5008bef95a8c2c30eada1e22ef6a286670) --- lib/sqlalchemy/ext/horizontal_shard.py | 2 +- lib/sqlalchemy/orm/session.py | 2 +- tox.ini | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py index 1d7b3f3580d..56242c478f8 100644 --- a/lib/sqlalchemy/ext/horizontal_shard.py +++ b/lib/sqlalchemy/ext/horizontal_shard.py @@ -318,7 +318,7 @@ def _choose_shard_and_assign( state.identity_token = shard_id return shard_id - def connection_callable( # type: ignore [override] + def connection_callable( self, mapper: Optional[Mapper[_T]] = None, instance: Optional[Any] = None, diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index f8ce45100ed..abaa22e4488 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -1773,7 +1773,7 @@ def __init__( # the idea is that at some point 
NO_ARG will warn that in the future # the default will switch to close_resets_only=False. - if close_resets_only or close_resets_only is _NoArg.NO_ARG: + if close_resets_only in (True, _NoArg.NO_ARG): self._close_state = _SessionCloseState.CLOSE_IS_RESET else: self._close_state = _SessionCloseState.ACTIVE diff --git a/tox.ini b/tox.ini index 62d29be43a9..e8229efde61 100644 --- a/tox.ini +++ b/tox.ini @@ -191,7 +191,7 @@ commands= deps= greenlet != 0.4.17 importlib_metadata; python_version < '3.8' - mypy >= 1.6.0 + mypy >= 1.14.0 types-greenlet commands = mypy {env:MYPY_COLOR} ./lib/sqlalchemy From 81f9a7446d5a3e72e9f5df13dafcfd9c3670f242 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 23 Dec 2024 11:46:57 -0500 Subject: [PATCH 404/544] document Oracle FLOAT/DOUBLE and binary variants Fixes: #9704 Change-Id: Id11722d32eeb2a8582348aa5846eefb19d7c83c7 (cherry picked from commit e182255e24500f4c0a101af5fee6b73e98149104) --- lib/sqlalchemy/dialects/oracle/base.py | 43 +++++++++++++++++++++++++ lib/sqlalchemy/dialects/oracle/types.py | 26 +++++++++++++++ 2 files changed, 69 insertions(+) diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index b0b9032c00d..e012fccdf0b 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -462,6 +462,49 @@ exclude_tablespaces=["SYSAUX", "SOME_TABLESPACE"], ) +.. _oracle_float_support: + +FLOAT / DOUBLE Support and Behaviors +------------------------------------ + +The SQLAlchemy :class:`.Float` and :class:`.Double` datatypes are generic +datatypes that resolve to the "least surprising" datatype for a given backend. +For Oracle Database, this means they resolve to the ``FLOAT`` and ``DOUBLE`` +types:: + + >>> from sqlalchemy import cast, literal, Float + >>> from sqlalchemy.dialects import oracle + >>> float_datatype = Float() + >>> print(cast(literal(5.0), float_datatype).compile(dialect=oracle.dialect())) + CAST(:param_1 AS FLOAT) + +Oracle's ``FLOAT`` / ``DOUBLE`` datatypes are aliases for ``NUMBER``. Oracle +Database stores ``NUMBER`` values with full precision, not floating point +precision, which means that ``FLOAT`` / ``DOUBLE`` do not actually behave like +native FP values. Oracle Database instead offers special datatypes +``BINARY_FLOAT`` and ``BINARY_DOUBLE`` to deliver real 4- and 8- byte FP +values. + +SQLAlchemy supports these datatypes directly using :class:`.BINARY_FLOAT` and +:class:`.BINARY_DOUBLE`. To use the :class:`.Float` or :class:`.Double` +datatypes in a database agnostic way, while allowing Oracle backends to utilize +one of these types, use the :meth:`.TypeEngine.with_variant` method to set up a +variant:: + + >>> from sqlalchemy import cast, literal, Float + >>> from sqlalchemy.dialects import oracle + >>> float_datatype = Float().with_variant(oracle.BINARY_FLOAT(), "oracle") + >>> print(cast(literal(5.0), float_datatype).compile(dialect=oracle.dialect())) + CAST(:param_1 AS BINARY_FLOAT) + +E.g. 
to use this datatype in a :class:`.Table` definition:: + + my_table = Table( + "my_table", metadata, + Column("fp_data", Float().with_variant(oracle.BINARY_FLOAT(), "oracle")) + ) + + DateTime Compatibility ---------------------- diff --git a/lib/sqlalchemy/dialects/oracle/types.py b/lib/sqlalchemy/dialects/oracle/types.py index 2f84415ea8f..539b2107076 100644 --- a/lib/sqlalchemy/dialects/oracle/types.py +++ b/lib/sqlalchemy/dialects/oracle/types.py @@ -111,10 +111,36 @@ def __init__( class BINARY_DOUBLE(sqltypes.Double): + """Implement the Oracle ``BINARY_DOUBLE`` datatype. + + This datatype differs from the Oracle ``DOUBLE`` datatype in that it + delivers a true 4-byte FP value. The datatype may be combined with a + generic :class:`.Double` datatype using :meth:`.TypeEngine.with_variant`. + + .. seealso:: + + :ref:`oracle_float_support` + + + """ + __visit_name__ = "BINARY_DOUBLE" class BINARY_FLOAT(sqltypes.Float): + """Implement the Oracle ``BINARY_FLOAT`` datatype. + + This datatype differs from the Oracle ``FLOAT`` datatype in that it + delivers a true 4-byte FP value. The datatype may be combined with a + generic :class:`.Float` datatype using :meth:`.TypeEngine.with_variant`. + + .. seealso:: + + :ref:`oracle_float_support` + + + """ + __visit_name__ = "BINARY_FLOAT" From c6f7e01a57aebcdc43b2da158606c554fe1e4637 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 23 Dec 2024 22:47:47 +0100 Subject: [PATCH 405/544] fix typo on double doc Change-Id: Ia8cffddae5c5c14954d7fa6b2e11c2b78c6e5f59 (cherry picked from commit 83492ae3b446dbc6ffc36da720417731d975d69c) --- lib/sqlalchemy/dialects/oracle/types.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/dialects/oracle/types.py b/lib/sqlalchemy/dialects/oracle/types.py index 539b2107076..9318b45619a 100644 --- a/lib/sqlalchemy/dialects/oracle/types.py +++ b/lib/sqlalchemy/dialects/oracle/types.py @@ -114,7 +114,7 @@ class BINARY_DOUBLE(sqltypes.Double): """Implement the Oracle ``BINARY_DOUBLE`` datatype. This datatype differs from the Oracle ``DOUBLE`` datatype in that it - delivers a true 4-byte FP value. The datatype may be combined with a + delivers a true 8-byte FP value. The datatype may be combined with a generic :class:`.Double` datatype using :meth:`.TypeEngine.with_variant`. .. seealso:: From f5909cce1a8c1a1011c2459ae505bea326bfbaa3 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 24 Dec 2024 15:15:36 -0500 Subject: [PATCH 406/544] fix lint job Change-Id: Ib3d5230212eb56f8f0454bb51b23a64d93de1e43 (cherry picked from commit 7f138cc7fc4cb192627e5d0d34fd894f7101cc6d) --- lib/sqlalchemy/dialects/oracle/base.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index e012fccdf0b..358dd9d8a6d 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -500,11 +500,13 @@ E.g. to use this datatype in a :class:`.Table` definition:: my_table = Table( - "my_table", metadata, - Column("fp_data", Float().with_variant(oracle.BINARY_FLOAT(), "oracle")) + "my_table", + metadata, + Column( + "fp_data", Float().with_variant(oracle.BINARY_FLOAT(), "oracle") + ), ) - DateTime Compatibility ---------------------- From 2908a2943f688786e4a26f87f5165bd99041ff0b Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 24 Dec 2024 22:39:58 +0100 Subject: [PATCH 407/544] pin ubuntu 22 since 24 seems broken. 
Runners have 4 cores now Change-Id: Ie62b072446e8052d5465cfe2c01b2ccb05482aba (cherry picked from commit 8d73205f352e68c6603e90494494ef21027ec68f) --- .github/workflows/run-on-pr.yaml | 6 +++--- .github/workflows/run-test.yaml | 16 ++++++++-------- pyproject.toml | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/run-on-pr.yaml b/.github/workflows/run-on-pr.yaml index aa67872e325..0d1313bf39c 100644 --- a/.github/workflows/run-on-pr.yaml +++ b/.github/workflows/run-on-pr.yaml @@ -10,7 +10,7 @@ on: env: # global env to all steps - TOX_WORKERS: -n2 + TOX_WORKERS: -n4 permissions: contents: read @@ -23,7 +23,7 @@ jobs: # run this job using this matrix, excluding some combinations below. matrix: os: - - "ubuntu-latest" + - "ubuntu-22.04" python-version: - "3.12" build-type: @@ -60,7 +60,7 @@ jobs: strategy: matrix: os: - - "ubuntu-latest" + - "ubuntu-22.04" python-version: - "3.12" tox-env: diff --git a/.github/workflows/run-test.yaml b/.github/workflows/run-test.yaml index 85272a21a6f..c037b91237f 100644 --- a/.github/workflows/run-test.yaml +++ b/.github/workflows/run-test.yaml @@ -13,7 +13,7 @@ on: env: # global env to all steps - TOX_WORKERS: -n2 + TOX_WORKERS: -n4 permissions: contents: read @@ -26,7 +26,7 @@ jobs: # run this job using this matrix, excluding some combinations below. matrix: os: - - "ubuntu-latest" + - "ubuntu-22.04" - "windows-latest" - "macos-latest" - "macos-13" @@ -51,15 +51,15 @@ jobs: # autocommit tests fail on the ci for some reason - python-version: "pypy-3.10" pytest-args: "-k 'not test_autocommit_on and not test_turn_autocommit_off_via_default_iso_level and not test_autocommit_isolation_level'" - - os: "ubuntu-latest" + - os: "ubuntu-22.04" pytest-args: "--dbdriver pysqlite --dbdriver aiosqlite" exclude: # linux do not have x86 / arm64 python - - os: "ubuntu-latest" + - os: "ubuntu-22.04" architecture: x86 - - os: "ubuntu-latest" + - os: "ubuntu-22.04" architecture: arm64 # windows des not have arm64 python - os: "windows-latest" @@ -119,7 +119,7 @@ jobs: run-test-arm64: # Hopefully something native can be used at some point https://github.blog/changelog/2023-10-30-accelerate-your-ci-cd-with-arm-based-hosted-runners-in-github-actions/ name: test-arm64-${{ matrix.python-version }}-${{ matrix.build-type }}-${{ matrix.os }} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 strategy: matrix: python-version: @@ -164,7 +164,7 @@ jobs: # run this job using this matrix, excluding some combinations below. 
matrix: os: - - "ubuntu-latest" + - "ubuntu-22.04" python-version: - "3.8" - "3.9" @@ -180,7 +180,7 @@ jobs: # run lint only on 3.12 - tox-env: lint python-version: "3.12" - os: "ubuntu-latest" + os: "ubuntu-22.04" exclude: # run pep484 only on 3.10+ - tox-env: pep484 diff --git a/pyproject.toml b/pyproject.toml index f75bf319243..5f3bd34e532 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,7 @@ strict = true [tool.cibuildwheel] test-requires = "pytest pytest-xdist" # remove user site, otherwise the local checkout has precedence, disabling cyextensions -test-command = "python -s -m pytest -c {project}/pyproject.toml -n2 -q --nomemory --notimingintensive --nomypy {project}/test" +test-command = "python -s -m pytest -c {project}/pyproject.toml -n4 -q --nomemory --notimingintensive --nomypy {project}/test" build = "*" # python 3.6 is no longer supported by sqlalchemy From 638d763b861e7d530d16edf21373c331e06185ba Mon Sep 17 00:00:00 2001 From: Lele Gaifax Date: Thu, 26 Dec 2024 12:09:40 +0100 Subject: [PATCH 408/544] before_mapper_configured event doc fixes (#12200) * Remove spurious word in before_mapper_configured event doc * Fix reST markup in before_mapper_configured event doc (cherry picked from commit c6be0c1f205cf83467d68eb40fb650b9274519f9) --- lib/sqlalchemy/orm/events.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index 959ad01043c..d2ae616371d 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -986,7 +986,7 @@ def before_mapper_configured( symbol which indicates to the :func:`.configure_mappers` call that this particular mapper (or hierarchy of mappers, if ``propagate=True`` is used) should be skipped in the current configuration run. When one or - more mappers are skipped, the he "new mappers" flag will remain set, + more mappers are skipped, the "new mappers" flag will remain set, meaning the :func:`.configure_mappers` function will continue to be called when mappers are used, to continue to try to configure all available mappers. @@ -995,7 +995,7 @@ def before_mapper_configured( :meth:`.MapperEvents.before_configured`, :meth:`.MapperEvents.after_configured`, and :meth:`.MapperEvents.mapper_configured`, the - :meth;`.MapperEvents.before_mapper_configured` event provides for a + :meth:`.MapperEvents.before_mapper_configured` event provides for a meaningful return value when it is registered with the ``retval=True`` parameter. From 98596bd20a9e8ac81cee1eaaf6f42444a6725982 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 30 Dec 2024 13:17:29 -0500 Subject: [PATCH 409/544] further fixes for _cleanup_mapped_str_annotation Fixed issues in type handling within the ``type_annotation_map`` feature which prevented the use of unions, using either pep-604 or ``Union`` syntaxes under future annotations mode, which contained multiple generic types as elements from being correctly resolvable. also adds some further tests to assert that None added into the type map for pep695, typing.NewType etc. 
sets up nullability on the column Fixes: #12207 Change-Id: I4057694cf35868972db2942721049d79301b19c4 (cherry picked from commit fd3d17a30b15cc45ba18efaeb24ecc29b0ea1087) --- doc/build/changelog/unreleased_20/12207.rst | 8 + lib/sqlalchemy/orm/decl_base.py | 8 +- lib/sqlalchemy/orm/properties.py | 12 +- lib/sqlalchemy/orm/util.py | 37 ++--- lib/sqlalchemy/util/typing.py | 57 ++++--- .../declarative/test_tm_future_annotations.py | 85 ++++++++++ .../test_tm_future_annotations_sync.py | 156 ++++++++++++++---- test/orm/declarative/test_typed_mapping.py | 156 ++++++++++++++---- 8 files changed, 392 insertions(+), 127 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12207.rst diff --git a/doc/build/changelog/unreleased_20/12207.rst b/doc/build/changelog/unreleased_20/12207.rst new file mode 100644 index 00000000000..a6457b90ba7 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12207.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, orm + :tickets: 12207 + + Fixed issues in type handling within the ``type_annotation_map`` feature + which prevented the use of unions, using either pep-604 or ``Union`` + syntaxes under future annotations mode, which contained multiple generic + types as elements from being correctly resolvable. diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index aa64eaa6667..073aa16c350 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -1308,10 +1308,8 @@ def _collect_annotation( type(attr_value), required=False, is_dataclass_field=is_dataclass_field, - expect_mapped=expect_mapped - and not is_dataclass, # self.allow_dataclass_fields, + expect_mapped=expect_mapped and not is_dataclass, ) - if extracted is None: # ClassVar can come out here return None @@ -1320,8 +1318,8 @@ def _collect_annotation( if attr_value is None and not is_literal(extracted_mapped_annotation): for elem in get_args(extracted_mapped_annotation): - if isinstance(elem, str) or is_fwd_ref( - elem, check_generic=True + if is_fwd_ref( + elem, check_generic=True, check_for_plain_string=True ): elem = de_stringify_annotation( self.cls, diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index 96ae9d7f82a..48528ef2765 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -43,7 +43,6 @@ from .interfaces import StrategizedProperty from .relationships import RelationshipProperty from .util import de_stringify_annotation -from .util import de_stringify_union_elements from .. import exc as sa_exc from .. import ForeignKey from .. 
import log @@ -60,7 +59,6 @@ from ..util.typing import is_fwd_ref from ..util.typing import is_pep593 from ..util.typing import is_pep695 -from ..util.typing import is_union from ..util.typing import Self if TYPE_CHECKING: @@ -738,20 +736,14 @@ def _init_column_for_annotation( ) -> None: sqltype = self.column.type - if isinstance(argument, str) or is_fwd_ref( - argument, check_generic=True + if is_fwd_ref( + argument, check_generic=True, check_for_plain_string=True ): assert originating_module is not None argument = de_stringify_annotation( cls, argument, originating_module, include_generic=True ) - if is_union(argument): - assert originating_module is not None - argument = de_stringify_union_elements( - cls, argument, originating_module - ) - nullable = includes_none(argument) if not self._has_nullable: diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 11b6ac2c1ca..e1cc16bd98b 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -86,9 +86,6 @@ from ..sql.selectable import FromClause from ..util.langhelpers import MemoizedSlots from ..util.typing import de_stringify_annotation as _de_stringify_annotation -from ..util.typing import ( - de_stringify_union_elements as _de_stringify_union_elements, -) from ..util.typing import eval_name_only as _eval_name_only from ..util.typing import fixup_container_fwd_refs from ..util.typing import get_origin @@ -123,7 +120,6 @@ from ..sql.selectable import Selectable from ..sql.visitors import anon_map from ..util.typing import _AnnotationScanType - from ..util.typing import ArgsTypeProtocol _T = TypeVar("_T", bound=Any) @@ -140,7 +136,6 @@ ) ) - _de_stringify_partial = functools.partial( functools.partial, locals_=util.immutabledict( @@ -173,23 +168,6 @@ def __call__( ) -class _DeStringifyUnionElements(Protocol): - def __call__( - self, - cls: Type[Any], - annotation: ArgsTypeProtocol, - originating_module: str, - *, - str_cleanup_fn: Optional[Callable[[str, str], str]] = None, - ) -> Type[Any]: ... - - -de_stringify_union_elements = cast( - _DeStringifyUnionElements, - _de_stringify_partial(_de_stringify_union_elements), -) - - class _EvalNameOnly(Protocol): def __call__(self, name: str, module_name: str) -> Any: ... @@ -2231,7 +2209,7 @@ def _cleanup_mapped_str_annotation( inner: Optional[Match[str]] - mm = re.match(r"^(.+?)\[(.+)\]$", annotation) + mm = re.match(r"^([^ \|]+?)\[(.+)\]$", annotation) if not mm: return annotation @@ -2271,7 +2249,7 @@ def _cleanup_mapped_str_annotation( while True: stack.append(real_symbol if mm is inner else inner.group(1)) g2 = inner.group(2) - inner = re.match(r"^(.+?)\[(.+)\]$", g2) + inner = re.match(r"^([^ \|]+?)\[(.+)\]$", g2) if inner is None: stack.append(g2) break @@ -2293,8 +2271,10 @@ def _cleanup_mapped_str_annotation( # ['Mapped', "'Optional[Dict[str, str]]'"] not re.match(r"""^["'].*["']$""", stack[-1]) # avoid further generics like Dict[] such as - # ['Mapped', 'dict[str, str] | None'] - and not re.match(r".*\[.*\]", stack[-1]) + # ['Mapped', 'dict[str, str] | None'], + # ['Mapped', 'list[int] | list[str]'], + # ['Mapped', 'Union[list[int], list[str]]'], + and not re.search(r"[\[\]]", stack[-1]) ): stripchars = "\"' " stack[-1] = ", ".join( @@ -2334,6 +2314,11 @@ def _extract_mapped_subtype( return None try: + # destringify the "outside" of the annotation. note we are not + # adding include_generic so it will *not* dig into generic contents, + # which will remain as ForwardRef or plain str under future annotations + # mode. 
The full destringify happens later when mapped_column goes + # to do a full lookup in the registry type_annotations_map. annotated = de_stringify_annotation( cls, raw_annotation, diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index 645a41a2406..03ae44eaad0 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -329,28 +329,6 @@ def resolve_name_to_real_class_name(name: str, module_name: str) -> str: return getattr(obj, "__name__", name) -def de_stringify_union_elements( - cls: Type[Any], - annotation: ArgsTypeProtocol, - originating_module: str, - locals_: Mapping[str, Any], - *, - str_cleanup_fn: Optional[Callable[[str, str], str]] = None, -) -> Type[Any]: - return make_union_type( - *[ - de_stringify_annotation( - cls, - anno, - originating_module, - {}, - str_cleanup_fn=str_cleanup_fn, - ) - for anno in annotation.__args__ - ] - ) - - def is_pep593(type_: Optional[Any]) -> bool: return type_ is not None and get_origin(type_) is Annotated @@ -426,12 +404,21 @@ def recursive_value(type_): def is_fwd_ref( - type_: _AnnotationScanType, check_generic: bool = False + type_: _AnnotationScanType, + check_generic: bool = False, + check_for_plain_string: bool = False, ) -> TypeGuard[ForwardRef]: - if isinstance(type_, ForwardRef): + if check_for_plain_string and isinstance(type_, str): + return True + elif isinstance(type_, ForwardRef): return True elif check_generic and is_generic(type_): - return any(is_fwd_ref(arg, True) for arg in type_.__args__) + return any( + is_fwd_ref( + arg, True, check_for_plain_string=check_for_plain_string + ) + for arg in type_.__args__ + ) else: return False @@ -456,13 +443,31 @@ def de_optionalize_union_types( """Given a type, filter out ``Union`` types that include ``NoneType`` to not include the ``NoneType``. 
+ Contains extra logic to work on non-flattened unions, unions that contain + ``None`` (seen in py38, 37) + """ if is_fwd_ref(type_): return _de_optionalize_fwd_ref_union_types(type_, False) elif is_union(type_) and includes_none(type_): - typ = set(type_.__args__) + if compat.py39: + typ = set(type_.__args__) + else: + # py38, 37 - unions are not automatically flattened, can contain + # None rather than NoneType + stack_of_unions = deque([type_]) + typ = set() + while stack_of_unions: + u_typ = stack_of_unions.popleft() + for elem in u_typ.__args__: + if is_union(elem): + stack_of_unions.append(elem) + else: + typ.add(elem) + + typ.discard(None) # type: ignore typ.discard(NoneType) typ.discard(NoneFwd) diff --git a/test/orm/declarative/test_tm_future_annotations.py b/test/orm/declarative/test_tm_future_annotations.py index 165f43b42d3..9b0d4f334bc 100644 --- a/test/orm/declarative/test_tm_future_annotations.py +++ b/test/orm/declarative/test_tm_future_annotations.py @@ -30,9 +30,11 @@ from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.orm import relationship +from sqlalchemy.orm.util import _cleanup_mapped_str_annotation from sqlalchemy.sql import sqltypes from sqlalchemy.testing import eq_ from sqlalchemy.testing import expect_raises_message +from sqlalchemy.testing import fixtures from sqlalchemy.testing import is_ from sqlalchemy.testing import is_true from .test_typed_mapping import expect_annotation_syntax_error @@ -49,6 +51,89 @@ class M3: pass +class AnnoUtilTest(fixtures.TestBase): + @testing.combinations( + ("Mapped[Address]", 'Mapped["Address"]'), + ('Mapped["Address"]', 'Mapped["Address"]'), + ("Mapped['Address']", "Mapped['Address']"), + ("Mapped[Address | None]", 'Mapped["Address | None"]'), + ("Mapped[None | Address]", 'Mapped["None | Address"]'), + ('Mapped["Address | None"]', 'Mapped["Address | None"]'), + ("Mapped['None | Address']", "Mapped['None | Address']"), + ('Mapped["Address" | "None"]', 'Mapped["Address" | "None"]'), + ('Mapped["None" | "Address"]', 'Mapped["None" | "Address"]'), + ("Mapped[A_]", 'Mapped["A_"]'), + ("Mapped[_TypingLiteral]", 'Mapped["_TypingLiteral"]'), + ("Mapped[datetime.datetime]", 'Mapped["datetime.datetime"]'), + ("Mapped[List[Edge]]", 'Mapped[List["Edge"]]'), + ( + "Mapped[collections.abc.MutableSequence[B]]", + 'Mapped[collections.abc.MutableSequence["B"]]', + ), + ("Mapped[typing.Sequence[B]]", 'Mapped[typing.Sequence["B"]]'), + ("Mapped[dict[str, str]]", 'Mapped[dict["str", "str"]]'), + ("Mapped[Dict[str, str]]", 'Mapped[Dict["str", "str"]]'), + ("Mapped[list[str]]", 'Mapped[list["str"]]'), + ("Mapped[dict[str, str] | None]", "Mapped[dict[str, str] | None]"), + ("Mapped[Optional[anno_str_mc]]", 'Mapped[Optional["anno_str_mc"]]'), + ( + "Mapped[Optional[Dict[str, str]]]", + 'Mapped[Optional[Dict["str", "str"]]]', + ), + ( + "Mapped[Optional[Union[Decimal, float]]]", + 'Mapped[Optional[Union["Decimal", "float"]]]', + ), + ( + "Mapped[Optional[Union[list[int], list[str]]]]", + "Mapped[Optional[Union[list[int], list[str]]]]", + ), + ("Mapped[TestType[str]]", 'Mapped[TestType["str"]]'), + ("Mapped[TestType[str, str]]", 'Mapped[TestType["str", "str"]]'), + ("Mapped[Union[A, None]]", 'Mapped[Union["A", "None"]]'), + ("Mapped[Union[Decimal, float]]", 'Mapped[Union["Decimal", "float"]]'), + ( + "Mapped[Union[Decimal, float, None]]", + 'Mapped[Union["Decimal", "float", "None"]]', + ), + ( + "Mapped[Union[Dict[str, str], None]]", + "Mapped[Union[Dict[str, str], None]]", + ), + ("Mapped[Union[float, Decimal]]", 
'Mapped[Union["float", "Decimal"]]'), + ( + "Mapped[Union[list[int], list[str]]]", + "Mapped[Union[list[int], list[str]]]", + ), + ( + "Mapped[Union[list[int], list[str], None]]", + "Mapped[Union[list[int], list[str], None]]", + ), + ( + "Mapped[Union[None, Dict[str, str]]]", + "Mapped[Union[None, Dict[str, str]]]", + ), + ( + "Mapped[Union[None, list[int], list[str]]]", + "Mapped[Union[None, list[int], list[str]]]", + ), + ("Mapped[A | None]", 'Mapped["A | None"]'), + ("Mapped[Decimal | float]", 'Mapped["Decimal | float"]'), + ("Mapped[Decimal | float | None]", 'Mapped["Decimal | float | None"]'), + ( + "Mapped[list[int] | list[str] | None]", + "Mapped[list[int] | list[str] | None]", + ), + ("Mapped[None | dict[str, str]]", "Mapped[None | dict[str, str]]"), + ( + "Mapped[None | list[int] | list[str]]", + "Mapped[None | list[int] | list[str]]", + ), + ) + def test_cleanup_mapped_str_annotation(self, given, expected): + eq_(_cleanup_mapped_str_annotation(given, __name__), expected) + + class MappedColumnTest(_MappedColumnTest): def test_fully_qualified_mapped_name(self, decl_base): """test #8853, regression caused by #8759 ;) diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index 4b379266388..b2356aef638 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -116,8 +116,9 @@ class _SomeDict2(TypedDict): _StrTypeAlias: TypeAlias = str -_StrPep695: TypeAlias = str -_UnionPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] +if TYPE_CHECKING: + _StrPep695: TypeAlias = str + _UnionPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] if compat.py38: _TypingLiteral = typing.Literal["a", "b"] @@ -158,6 +159,17 @@ class _SomeDict2(TypedDict): ) +def make_pep695_type(name, definition): + lcls = {} + exec( + f""" +type {name} = {definition} +""", + lcls, + ) + return lcls[name] + + def expect_annotation_syntax_error(name): return expect_raises_message( sa_exc.ArgumentError, @@ -863,6 +875,10 @@ class Test(decl_base): "optional", "optional_union", "optional_union_604", + "union_newtype", + "union_null_newtype", + "union_695", + "union_null_695", ], ) @testing.variation("in_map", ["yes", "no", "value"]) @@ -887,12 +903,22 @@ def test_pep695_behavior(self, decl_base, in_map, option): tat = TypeAliasType("tat", Optional[Union[str, int]]) elif option.optional_union_604: tat = TypeAliasType("tat", Optional[str | int]) + elif option.union_newtype: + # this seems to be illegal for typing but "works" + tat = NewType("tat", Union[str, int]) + elif option.union_null_newtype: + # this seems to be illegal for typing but "works" + tat = NewType("tat", Union[str, int, None]) + elif option.union_695: + tat = make_pep695_type("tat", str | int) + elif option.union_null_695: + tat = make_pep695_type("tat", str | int | None) else: option.fail() if in_map.yes: decl_base.registry.update_type_annotation_map({tat: String(99)}) - elif in_map.value: + elif in_map.value and "newtype" not in option.name: decl_base.registry.update_type_annotation_map( {tat.__value__: String(99)} ) @@ -908,7 +934,12 @@ class Test(decl_base): if in_map.yes: col = declare() length = 99 - elif in_map.value or option.optional or option.plain: + elif ( + in_map.value + and "newtype" not in option.name + or option.optional + or option.plain + ): with expect_deprecated( "Matching the provided TypeAliasType 'tat' on its " "resolved value without matching it in the " @@ -1952,6 +1983,13 @@ class User(Base): 
refer_union: Mapped[UnionType] refer_union_optional: Mapped[Optional[UnionType]] + # py38, 37 does not automatically flatten unions, add extra tests + # for this. maintain these in order to catch future regressions + # in the behavior of ``Union`` + unflat_union_optional_data: Mapped[ + Union[Union[Decimal, float, None], None] + ] = mapped_column() + float_data: Mapped[float] = mapped_column() decimal_data: Mapped[Decimal] = mapped_column() @@ -1975,6 +2013,7 @@ class User(Base): ("reverse_u_optional_data", True), ("refer_union", "null" in union.name), ("refer_union_optional", True), + ("unflat_union_optional_data", True), ] if compat.py310: info += [ @@ -2041,36 +2080,47 @@ class A(Base): is_true(A.__table__.c.json1.nullable) is_false(A.__table__.c.json2.nullable) - @testing.combinations( - ("not_optional",), - ("optional",), - ("optional_fwd_ref",), - ("union_none",), - ("pep604", testing.requires.python310), - ("pep604_fwd_ref", testing.requires.python310), - argnames="optional_on_json", + @testing.variation( + "option", + [ + "not_optional", + "optional", + "optional_fwd_ref", + "union_none", + ("pep604", testing.requires.python310), + ("pep604_fwd_ref", testing.requires.python310), + ], ) + @testing.variation("brackets", ["oneset", "twosets"]) @testing.combinations( "include_mc_type", "derive_from_anno", argnames="include_mc_type" ) def test_optional_styles_nested_brackets( - self, optional_on_json, include_mc_type + self, option, brackets, include_mc_type ): + """composed types test, includes tests that were added later for + #12207""" + class Base(DeclarativeBase): if testing.requires.python310.enabled: type_annotation_map = { - Dict[str, str]: JSON, - dict[str, str]: JSON, + Dict[str, Decimal]: JSON, + dict[str, Decimal]: JSON, + Union[List[int], List[str]]: JSON, + list[int] | list[str]: JSON, } else: type_annotation_map = { - Dict[str, str]: JSON, + Dict[str, Decimal]: JSON, + Union[List[int], List[str]]: JSON, } if include_mc_type == "include_mc_type": mc = mapped_column(JSON) + mc2 = mapped_column(JSON) else: mc = mapped_column() + mc2 = mapped_column() class A(Base): __tablename__ = "a" @@ -2078,21 +2128,67 @@ class A(Base): id: Mapped[int] = mapped_column(primary_key=True) data: Mapped[str] = mapped_column() - if optional_on_json == "not_optional": - json: Mapped[Dict[str, str]] = mapped_column() # type: ignore - elif optional_on_json == "optional": - json: Mapped[Optional[Dict[str, str]]] = mc - elif optional_on_json == "optional_fwd_ref": - json: Mapped["Optional[Dict[str, str]]"] = mc - elif optional_on_json == "union_none": - json: Mapped[Union[Dict[str, str], None]] = mc - elif optional_on_json == "pep604": - json: Mapped[dict[str, str] | None] = mc - elif optional_on_json == "pep604_fwd_ref": - json: Mapped["dict[str, str] | None"] = mc + if brackets.oneset: + if option.not_optional: + json: Mapped[Dict[str, Decimal]] = mapped_column() # type: ignore # noqa: E501 + if testing.requires.python310.enabled: + json2: Mapped[dict[str, Decimal]] = mapped_column() # type: ignore # noqa: E501 + elif option.optional: + json: Mapped[Optional[Dict[str, Decimal]]] = mc + if testing.requires.python310.enabled: + json2: Mapped[Optional[dict[str, Decimal]]] = mc2 + elif option.optional_fwd_ref: + json: Mapped["Optional[Dict[str, Decimal]]"] = mc + if testing.requires.python310.enabled: + json2: Mapped["Optional[dict[str, Decimal]]"] = mc2 + elif option.union_none: + json: Mapped[Union[Dict[str, Decimal], None]] = mc + json2: Mapped[Union[None, Dict[str, Decimal]]] = mc2 + elif 
option.pep604: + json: Mapped[dict[str, Decimal] | None] = mc + if testing.requires.python310.enabled: + json2: Mapped[None | dict[str, Decimal]] = mc2 + elif option.pep604_fwd_ref: + json: Mapped["dict[str, Decimal] | None"] = mc + if testing.requires.python310.enabled: + json2: Mapped["None | dict[str, Decimal]"] = mc2 + elif brackets.twosets: + if option.not_optional: + json: Mapped[Union[List[int], List[str]]] = mapped_column() # type: ignore # noqa: E501 + elif option.optional: + json: Mapped[Optional[Union[List[int], List[str]]]] = mc + if testing.requires.python310.enabled: + json2: Mapped[ + Optional[Union[list[int], list[str]]] + ] = mc2 + elif option.optional_fwd_ref: + json: Mapped["Optional[Union[List[int], List[str]]]"] = mc + if testing.requires.python310.enabled: + json2: Mapped[ + "Optional[Union[list[int], list[str]]]" + ] = mc2 + elif option.union_none: + json: Mapped[Union[List[int], List[str], None]] = mc + if testing.requires.python310.enabled: + json2: Mapped[Union[None, list[int], list[str]]] = mc2 + elif option.pep604: + json: Mapped[list[int] | list[str] | None] = mc + json2: Mapped[None | list[int] | list[str]] = mc2 + elif option.pep604_fwd_ref: + json: Mapped["list[int] | list[str] | None"] = mc + json2: Mapped["None | list[int] | list[str]"] = mc2 + else: + brackets.fail() is_(A.__table__.c.json.type._type_affinity, JSON) - if optional_on_json == "not_optional": + if hasattr(A, "json2"): + is_(A.__table__.c.json2.type._type_affinity, JSON) + if option.not_optional: + is_false(A.__table__.c.json2.nullable) + else: + is_true(A.__table__.c.json2.nullable) + + if option.not_optional: is_false(A.__table__.c.json.nullable) else: is_true(A.__table__.c.json.nullable) @@ -3149,7 +3245,7 @@ class B(decl_base): back_populates="bs", primaryjoin=a_id == A.id ) elif optional_on_m2o == "union_none": - a: Mapped["Union[A, None]"] = relationship( + a: Mapped[Union[A, None]] = relationship( back_populates="bs", primaryjoin=a_id == A.id ) elif optional_on_m2o == "pep604": diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index f1970f2183b..10d91b70350 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -107,8 +107,9 @@ class _SomeDict2(TypedDict): _StrTypeAlias: TypeAlias = str -_StrPep695: TypeAlias = str -_UnionPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] +if TYPE_CHECKING: + _StrPep695: TypeAlias = str + _UnionPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] if compat.py38: _TypingLiteral = typing.Literal["a", "b"] @@ -149,6 +150,17 @@ class _SomeDict2(TypedDict): ) +def make_pep695_type(name, definition): + lcls = {} + exec( + f""" +type {name} = {definition} +""", + lcls, + ) + return lcls[name] + + def expect_annotation_syntax_error(name): return expect_raises_message( sa_exc.ArgumentError, @@ -854,6 +866,10 @@ class Test(decl_base): "optional", "optional_union", "optional_union_604", + "union_newtype", + "union_null_newtype", + "union_695", + "union_null_695", ], ) @testing.variation("in_map", ["yes", "no", "value"]) @@ -878,12 +894,22 @@ def test_pep695_behavior(self, decl_base, in_map, option): tat = TypeAliasType("tat", Optional[Union[str, int]]) elif option.optional_union_604: tat = TypeAliasType("tat", Optional[str | int]) + elif option.union_newtype: + # this seems to be illegal for typing but "works" + tat = NewType("tat", Union[str, int]) + elif option.union_null_newtype: + # this seems to be illegal for typing but "works" + tat = NewType("tat", 
Union[str, int, None]) + elif option.union_695: + tat = make_pep695_type("tat", str | int) + elif option.union_null_695: + tat = make_pep695_type("tat", str | int | None) else: option.fail() if in_map.yes: decl_base.registry.update_type_annotation_map({tat: String(99)}) - elif in_map.value: + elif in_map.value and "newtype" not in option.name: decl_base.registry.update_type_annotation_map( {tat.__value__: String(99)} ) @@ -899,7 +925,12 @@ class Test(decl_base): if in_map.yes: col = declare() length = 99 - elif in_map.value or option.optional or option.plain: + elif ( + in_map.value + and "newtype" not in option.name + or option.optional + or option.plain + ): with expect_deprecated( "Matching the provided TypeAliasType 'tat' on its " "resolved value without matching it in the " @@ -1943,6 +1974,13 @@ class User(Base): refer_union: Mapped[UnionType] refer_union_optional: Mapped[Optional[UnionType]] + # py38, 37 does not automatically flatten unions, add extra tests + # for this. maintain these in order to catch future regressions + # in the behavior of ``Union`` + unflat_union_optional_data: Mapped[ + Union[Union[Decimal, float, None], None] + ] = mapped_column() + float_data: Mapped[float] = mapped_column() decimal_data: Mapped[Decimal] = mapped_column() @@ -1966,6 +2004,7 @@ class User(Base): ("reverse_u_optional_data", True), ("refer_union", "null" in union.name), ("refer_union_optional", True), + ("unflat_union_optional_data", True), ] if compat.py310: info += [ @@ -2032,36 +2071,47 @@ class A(Base): is_true(A.__table__.c.json1.nullable) is_false(A.__table__.c.json2.nullable) - @testing.combinations( - ("not_optional",), - ("optional",), - ("optional_fwd_ref",), - ("union_none",), - ("pep604", testing.requires.python310), - ("pep604_fwd_ref", testing.requires.python310), - argnames="optional_on_json", + @testing.variation( + "option", + [ + "not_optional", + "optional", + "optional_fwd_ref", + "union_none", + ("pep604", testing.requires.python310), + ("pep604_fwd_ref", testing.requires.python310), + ], ) + @testing.variation("brackets", ["oneset", "twosets"]) @testing.combinations( "include_mc_type", "derive_from_anno", argnames="include_mc_type" ) def test_optional_styles_nested_brackets( - self, optional_on_json, include_mc_type + self, option, brackets, include_mc_type ): + """composed types test, includes tests that were added later for + #12207""" + class Base(DeclarativeBase): if testing.requires.python310.enabled: type_annotation_map = { - Dict[str, str]: JSON, - dict[str, str]: JSON, + Dict[str, Decimal]: JSON, + dict[str, Decimal]: JSON, + Union[List[int], List[str]]: JSON, + list[int] | list[str]: JSON, } else: type_annotation_map = { - Dict[str, str]: JSON, + Dict[str, Decimal]: JSON, + Union[List[int], List[str]]: JSON, } if include_mc_type == "include_mc_type": mc = mapped_column(JSON) + mc2 = mapped_column(JSON) else: mc = mapped_column() + mc2 = mapped_column() class A(Base): __tablename__ = "a" @@ -2069,21 +2119,67 @@ class A(Base): id: Mapped[int] = mapped_column(primary_key=True) data: Mapped[str] = mapped_column() - if optional_on_json == "not_optional": - json: Mapped[Dict[str, str]] = mapped_column() # type: ignore - elif optional_on_json == "optional": - json: Mapped[Optional[Dict[str, str]]] = mc - elif optional_on_json == "optional_fwd_ref": - json: Mapped["Optional[Dict[str, str]]"] = mc - elif optional_on_json == "union_none": - json: Mapped[Union[Dict[str, str], None]] = mc - elif optional_on_json == "pep604": - json: Mapped[dict[str, str] | None] = mc - elif 
optional_on_json == "pep604_fwd_ref": - json: Mapped["dict[str, str] | None"] = mc + if brackets.oneset: + if option.not_optional: + json: Mapped[Dict[str, Decimal]] = mapped_column() # type: ignore # noqa: E501 + if testing.requires.python310.enabled: + json2: Mapped[dict[str, Decimal]] = mapped_column() # type: ignore # noqa: E501 + elif option.optional: + json: Mapped[Optional[Dict[str, Decimal]]] = mc + if testing.requires.python310.enabled: + json2: Mapped[Optional[dict[str, Decimal]]] = mc2 + elif option.optional_fwd_ref: + json: Mapped["Optional[Dict[str, Decimal]]"] = mc + if testing.requires.python310.enabled: + json2: Mapped["Optional[dict[str, Decimal]]"] = mc2 + elif option.union_none: + json: Mapped[Union[Dict[str, Decimal], None]] = mc + json2: Mapped[Union[None, Dict[str, Decimal]]] = mc2 + elif option.pep604: + json: Mapped[dict[str, Decimal] | None] = mc + if testing.requires.python310.enabled: + json2: Mapped[None | dict[str, Decimal]] = mc2 + elif option.pep604_fwd_ref: + json: Mapped["dict[str, Decimal] | None"] = mc + if testing.requires.python310.enabled: + json2: Mapped["None | dict[str, Decimal]"] = mc2 + elif brackets.twosets: + if option.not_optional: + json: Mapped[Union[List[int], List[str]]] = mapped_column() # type: ignore # noqa: E501 + elif option.optional: + json: Mapped[Optional[Union[List[int], List[str]]]] = mc + if testing.requires.python310.enabled: + json2: Mapped[ + Optional[Union[list[int], list[str]]] + ] = mc2 + elif option.optional_fwd_ref: + json: Mapped["Optional[Union[List[int], List[str]]]"] = mc + if testing.requires.python310.enabled: + json2: Mapped[ + "Optional[Union[list[int], list[str]]]" + ] = mc2 + elif option.union_none: + json: Mapped[Union[List[int], List[str], None]] = mc + if testing.requires.python310.enabled: + json2: Mapped[Union[None, list[int], list[str]]] = mc2 + elif option.pep604: + json: Mapped[list[int] | list[str] | None] = mc + json2: Mapped[None | list[int] | list[str]] = mc2 + elif option.pep604_fwd_ref: + json: Mapped["list[int] | list[str] | None"] = mc + json2: Mapped["None | list[int] | list[str]"] = mc2 + else: + brackets.fail() is_(A.__table__.c.json.type._type_affinity, JSON) - if optional_on_json == "not_optional": + if hasattr(A, "json2"): + is_(A.__table__.c.json2.type._type_affinity, JSON) + if option.not_optional: + is_false(A.__table__.c.json2.nullable) + else: + is_true(A.__table__.c.json2.nullable) + + if option.not_optional: is_false(A.__table__.c.json.nullable) else: is_true(A.__table__.c.json.nullable) @@ -3140,7 +3236,7 @@ class B(decl_base): back_populates="bs", primaryjoin=a_id == A.id ) elif optional_on_m2o == "union_none": - a: Mapped["Union[A, None]"] = relationship( + a: Mapped[Union[A, None]] = relationship( back_populates="bs", primaryjoin=a_id == A.id ) elif optional_on_m2o == "pep604": From b7662ce731f645c091d3d73809df817a664e7757 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 27 Dec 2024 16:59:28 -0500 Subject: [PATCH 410/544] edits and reorganization for union/pep695 typing docs also some new tests References: #11944 References: #11955 References: #11305 Change-Id: Ifaf8ede52a57336fa3875e8d86c6e22b2b8a0e14 (cherry picked from commit 0ac7cd16ea679a9c0ef2f407fa9e22dfc07c7acc) --- doc/build/orm/declarative_tables.rst | 294 +++++++++++------- .../test_tm_future_annotations_sync.py | 53 ++-- test/orm/declarative/test_typed_mapping.py | 53 ++-- 3 files changed, 226 insertions(+), 174 deletions(-) diff --git a/doc/build/orm/declarative_tables.rst b/doc/build/orm/declarative_tables.rst 
index 4bb4237ac17..aba74f57932 100644 --- a/doc/build/orm/declarative_tables.rst +++ b/doc/build/orm/declarative_tables.rst @@ -368,20 +368,33 @@ while still being able to use succinct annotation-only :func:`_orm.mapped_column configurations. There are two more levels of Python-type configurability available beyond this, described in the next two sections. +.. _orm_declarative_type_map_union_types: + Union types inside the Type Map ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -SQLAlchemy supports mapping union types inside the type map to allow -mapping database types that can support multiple Python types, -such as :class:`_types.JSON` or :class:`_postgresql.JSONB`:: +.. versionchanged:: 2.0.37 The features described in this section have been + repaired and enhanced to work consistently. Prior to this change, union + types were supported in ``type_annotation_map``, however the feature + exhibited inconsistent behaviors between union syntaxes as well as in how + ``None`` was handled. Please ensure SQLAlchemy is up to date before + attempting to use the features described in this section. + +SQLAlchemy supports mapping union types inside the ``type_annotation_map`` to +allow mapping database types that can support multiple Python types, such as +:class:`_types.JSON` or :class:`_postgresql.JSONB`:: + from typing import Union from sqlalchemy import JSON from sqlalchemy.dialects import postgresql from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column from sqlalchemy.schema import CreateTable + # new style Union using a pipe operator json_list = list[int] | list[str] - json_scalar = float | str | bool | None + + # old style Union using Union explicitly + json_scalar = Union[float, str, bool] class Base(DeclarativeBase): @@ -396,19 +409,42 @@ such as :class:`_types.JSON` or :class:`_postgresql.JSONB`:: id: Mapped[int] = mapped_column(primary_key=True) list_col: Mapped[list[str] | list[int]] - scalar_col: Mapped[json_scalar] - scalar_col_not_null: Mapped[str | float | bool] -Using the union directly inside ``Mapped`` or creating a new one with the same -effective types has the same behavior: ``list_col`` will be matched to the -``json_list`` union even if it does not reference it directly (the order of the -types also does not matter). -If the union added to the type map includes ``None``, it will be ignored -when matching the ``Mapped`` type since ``None`` is only used to decide -the column nullability. It follows that both ``scalar_col`` and -``scalar_col_not_null`` will match the ``json_scalar`` union. + # uses JSON + scalar_col: Mapped[json_scalar] -The CREATE TABLE statement of the table created above is as follows: + # uses JSON and is also nullable=True + scalar_col_nullable: Mapped[json_scalar | None] + + # these forms all use JSON as well due to the json_scalar entry + scalar_col_newstyle: Mapped[float | str | bool] + scalar_col_oldstyle: Mapped[Union[float, str, bool]] + scalar_col_mixedstyle: Mapped[Optional[float | str | bool]] + +The above example maps the union of ``list[int]`` and ``list[str]`` to the Postgresql +:class:`_postgresql.JSONB` datatype, while naming a union of ``float, +str, bool`` will match to the :class:`.JSON` datatype. An equivalent +union, stated in the :class:`_orm.Mapped` construct, will match into the +corresponding entry in the type map. + +The matching of a union type is based on the contents of the union regardless +of how the individual types are named, and additionally excluding the use of +the ``None`` type. 
That is, ``json_scalar`` will also match to ``str | bool | +float | None``. It will **not** match to a union that is a subset or superset +of this union; that is, ``str | bool`` would not match, nor would ``str | bool +| float | int``. The individual contents of the union excluding ``None`` must +be an exact match. + +The ``None`` value is never significant as far as matching +from ``type_annotation_map`` to :class:`_orm.Mapped`, however it is significant +as an indicator for nullability of the :class:`_schema.Column`. When ``None`` is present in the +union as it is placed in the :class:`_orm.Mapped` construct, it indicates that +the :class:`_schema.Column` would be nullable, in the absence of more specific +indicators. This logic works in the same way as indicating an ``Optional`` type +as described at :ref:`orm_declarative_mapped_column_nullability`. + +The CREATE TABLE statement for the above mapping will look as below: .. sourcecode:: pycon+sql @@ -421,6 +457,145 @@ The CREATE TABLE statement of the table created above is as follows: PRIMARY KEY (id) ) +While union types use a "loose" matching approach that matches on any equivalent +set of subtypes, Python typing also features a way to create "type aliases" +that are treated as distinct types that are non-equivalent to another type that +includes the same composition. Integration of these types with ``type_annotation_map`` +is described in the next section, :ref:`orm_declarative_type_map_pep695_types`. + +.. _orm_declarative_type_map_pep695_types: + +Support for Type Alias Types (defined by PEP 695) and NewType +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In contrast to the typing lookup described in +:ref:`orm_declarative_type_map_union_types`, Python typing also includes two +ways to create a composed type in a more formal way, using ``typing.NewType`` as +well as the ``type`` keyword introduced in :pep:`695`. These types behave +differently from ordinary type aliases (i.e. assigning a type to a variable +name), and this difference is honored in how SQLAlchemy resolves these +types from the type map. + +.. versionchanged:: 2.0.37 The behaviors described in this section for ``typing.NewType`` + as well as :pep:`695` ``type`` have been formalized and corrected. + Deprecation warnings are now emitted for "loose matching" patterns that have + worked in some 2.0 releases, but are to be removed in SQLAlchemy 2.1. + Please ensure SQLAlchemy is up to date before attempting to use the features + described in this section.
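+
+As a condensed preview of how the two lookup styles interact, the following
+sketch combines a plain union entry (matched as described in the previous
+section) with a :pep:`695` alias entry in a single ``type_annotation_map``.
+Python 3.12 is assumed for the ``type`` statement, and the ``JsonDoc`` and
+``Example`` names are illustrative only::
+
+    from typing import Union
+
+    from sqlalchemy import JSON
+    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+
+    # a plain union entry is matched on the composition of its members
+    json_scalar = Union[float, str, bool]
+
+    # a PEP 695 alias entry is matched on the identity of the alias object
+    type JsonDoc = str | float | bool | None
+
+
+    class Base(DeclarativeBase):
+        type_annotation_map = {
+            json_scalar: JSON,
+            JsonDoc: JSON,
+        }
+
+
+    class Example(Base):
+        __tablename__ = "example"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+
+        # equivalent unions resolve to the json_scalar entry regardless of
+        # member order; adding None only makes the column nullable
+        a: Mapped[Union[bool, float, str]]
+        b: Mapped[Union[str, float, bool, None]]
+
+        # the alias itself must be present in the map; writing out an
+        # equivalent union by hand would not match this entry
+        c: Mapped[JsonDoc]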
+ +The typing module allows the creation of "new types" using ``typing.NewType``:: + + from typing import NewType + + nstr30 = NewType("nstr30", str) + nstr50 = NewType("nstr50", str) + +Additionally, in Python 3.12, a new feature defined by :pep:`695` was introduced which +provides the ``type`` keyword to accomplish a similar task; using +``type`` produces an object that is similar in many ways to ``typing.NewType`` +which is internally referred to as ``typing.TypeAliasType``:: + + type SmallInt = int + type BigInt = int + type JsonScalar = str | float | bool | None + +For the purposes of how SQLAlchemy treats these type objects when used +for SQL type lookup inside of :class:`_orm.Mapped`, it's important to note +that Python does not consider two equivalent ``typing.TypeAliasType`` +or ``typing.NewType`` objects to be equal:: + + # two typing.NewType objects are not equal even if they are both str + >>> nstr50 == nstr30 + False + + # two TypeAliasType objects are not equal even if they are both int + >>> SmallInt == BigInt + False + + # an equivalent union is not equal to JsonScalar + >>> JsonScalar == str | float | bool | None + False + +This is the opposite behavior from how ordinary unions are compared, and +informs the correct behavior for SQLAlchemy's ``type_annotation_map``. When +using ``typing.NewType`` or :pep:`695` ``type`` objects, the type object is +expected to be explicit within the ``type_annotation_map`` for it to be matched +from a :class:`_orm.Mapped` type, where the same object must be stated in order +for a match to be made (excluding whether or not the type inside of +:class:`_orm.Mapped` also unions on ``None``). This is distinct from the +behavior described at :ref:`orm_declarative_type_map_union_types`, where a +plain ``Union`` that is referenced directly will match to other ``Unions`` +based on the composition, rather than the object identity, of a particular type +in ``type_annotation_map``. + +In the example below, the composed types for ``nstr30``, ``nstr50``, +``SmallInt``, ``BigInt``, and ``JsonScalar`` have no overlap with each other +and can be named distinctly within each :class:`_orm.Mapped` construct, and +are also all explicit in ``type_annotation_map``. Any of these types may +also be unioned with ``None`` or declared as ``Optional[]`` without affecting +the lookup, only deriving column nullability:: + + from typing import NewType + + from sqlalchemy import SmallInteger, BigInteger, JSON, String + from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column + from sqlalchemy.schema import CreateTable + + nstr30 = NewType("nstr30", str) + nstr50 = NewType("nstr50", str) + type SmallInt = int + type BigInt = int + type JsonScalar = str | float | bool | None + + + class TABase(DeclarativeBase): + type_annotation_map = { + nstr30: String(30), + nstr50: String(50), + SmallInt: SmallInteger, + BigInt: BigInteger, + JsonScalar: JSON, + } + + + class SomeClass(TABase): + __tablename__ = "some_table" + + id: Mapped[int] = mapped_column(primary_key=True) + normal_str: Mapped[str] + + short_str: Mapped[nstr30] + long_str_nullable: Mapped[nstr50 | None] + + small_int: Mapped[SmallInt] + big_int: Mapped[BigInt] + scalar_col: Mapped[JsonScalar] + +A CREATE TABLE for the above mapping will illustrate the different variants +of integer and string we've configured, and looks like: + +..
sourcecode:: pycon+sql + + >>> print(CreateTable(SomeClass.__table__)) + {printsql}CREATE TABLE some_table ( + id INTEGER NOT NULL, + normal_str VARCHAR NOT NULL, + short_str VARCHAR(30) NOT NULL, + long_str_nullable VARCHAR(50), + small_int SMALLINT NOT NULL, + big_int BIGINT NOT NULL, + scalar_col JSON, + PRIMARY KEY (id) + ) + +Regarding nullability, the ``JsonScalar`` type includes ``None`` in its +definition, which indicates a nullable column. Similarly the +``long_str_nullable`` column applies a union of ``None`` to ``nstr50``, +which matches to the ``nstr50`` type in the ``type_annotation_map`` while +also applying nullability to the mapped column. The other columns all remain +NOT NULL as they are not indicated as optional. + + .. _orm_declarative_mapped_column_type_map_pep593: Mapping Multiple Type Configurations to Python Types @@ -510,95 +685,6 @@ us a wide degree of flexibility, the next section illustrates a second way in which ``Annotated`` may be used with Declarative that is even more open ended. -Support for Type Alias Types (defined by PEP 695) and NewType -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The typing module allows an user to create "new types" using ``typing.NewType``:: - - from typing import NewType - - nstr30 = NewType("nstr30", str) - nstr50 = NewType("nstr50", str) - -These are considered as different by the type checkers and by python:: - - >>> print(str == nstr30, nstr50 == nstr30, nstr30 == NewType("nstr30", str)) - False False False - -Another similar feature was added in Python 3.12 to create aliases, -using a new syntax to define ``typing.TypeAliasType``:: - - type SmallInt = int - type BigInt = int - type JsonScalar = str | float | bool | None - -Like ``typing.NewType``, these are treated by python as different, meaning that they are -not equal between each other even if they represent the same Python type. -In the example above, ``SmallInt`` and ``BigInt`` are not considered equal even -if they both are aliases of the python type ``int``:: - - >>> print(SmallInt == BigInt) - False - -SQLAlchemy supports using ``typing.NewType`` and ``typing.TypeAliasType`` -in the ``type_annotation_map``. They can be used to associate the same python type -to different :class:`_types.TypeEngine` types, similarly -to ``typing.Annotated``:: - - from sqlalchemy import SmallInteger, BigInteger, JSON, String - from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column - from sqlalchemy.schema import CreateTable - - - class TABase(DeclarativeBase): - type_annotation_map = { - nstr30: String(30), - nstr50: String(50), - SmallInt: SmallInteger, - BigInteger: BigInteger, - JsonScalar: JSON, - } - - - class SomeClass(TABase): - __tablename__ = "some_table" - - id: Mapped[int] = mapped_column(primary_key=True) - normal_str: Mapped[str] - - short_str: Mapped[nstr30] - long_str: Mapped[nstr50] - - small_int: Mapped[SmallInt] - big_int: Mapped[BigInteger] - scalar_col: Mapped[JsonScalar] - -a CREATE TABLE for the above mapping will illustrate the different variants -of integer and string we've configured, and looks like: - -.. 
sourcecode:: pycon+sql - - >>> print(CreateTable(SomeClass.__table__)) - {printsql}CREATE TABLE some_table ( - id INTEGER NOT NULL, - normal_str VARCHAR NOT NULL, - short_str VARCHAR(30) NOT NULL, - long_str VARCHAR(50) NOT NULL, - small_int SMALLINT NOT NULL, - big_int BIGINT NOT NULL, - scalar_col JSON, - PRIMARY KEY (id) - ) - -Since the ``JsonScalar`` type includes ``None`` the columns is nullable, while -``id`` and ``normal_str`` columns use the default mapping for their respective -Python type. - -As mentioned above, since ``typing.NewType`` and ``typing.TypeAliasType`` are -considered standalone types, they must be referenced directly inside ``Mapped`` -and must be added explicitly to the type map. -Failing to do so will raise an error since SQLAlchemy does not know what -SQL type to use. .. _orm_declarative_mapped_column_pep593: diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index b2356aef638..d0e5e05ac69 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -116,9 +116,6 @@ class _SomeDict2(TypedDict): _StrTypeAlias: TypeAlias = str -if TYPE_CHECKING: - _StrPep695: TypeAlias = str - _UnionPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] if compat.py38: _TypingLiteral = typing.Literal["a", "b"] @@ -136,38 +133,24 @@ class _SomeDict2(TypedDict): _JsonPep604: TypeAlias = ( _JsonObjectPep604 | _JsonArrayPep604 | _JsonPrimitivePep604 ) + _JsonPep695 = TypeAliasType("_JsonPep695", _JsonPep604) -if compat.py312: - exec( - """ -type _UnionPep695 = _SomeDict1 | _SomeDict2 -type _StrPep695 = str - -type strtypalias_keyword = Annotated[str, mapped_column(info={"hi": "there"})] -type strtypalias_keyword_nested = int | Annotated[ - str, mapped_column(info={"hi": "there"})] -strtypalias_ta: typing.TypeAlias = Annotated[ - str, mapped_column(info={"hi": "there"})] -strtypalias_plain = Annotated[str, mapped_column(info={"hi": "there"})] - -type _Literal695 = Literal["to-do", "in-progress", "done"] -type _RecursiveLiteral695 = _Literal695 - -type _JsonPep695 = _JsonPep604 -""", - globals(), - ) - - -def make_pep695_type(name, definition): - lcls = {} - exec( - f""" -type {name} = {definition} -""", - lcls, +_StrPep695 = TypeAliasType("_StrPep695", str) +_UnionPep695 = TypeAliasType("_UnionPep695", Union[_SomeDict1, _SomeDict2]) +strtypalias_keyword = TypeAliasType( + "strtypalias_keyword", Annotated[str, mapped_column(info={"hi": "there"})] +) +if compat.py310: + strtypalias_keyword_nested = TypeAliasType( + "strtypalias_keyword_nested", + int | Annotated[str, mapped_column(info={"hi": "there"})], ) - return lcls[name] +strtypalias_ta: TypeAlias = Annotated[str, mapped_column(info={"hi": "there"})] +strtypalias_plain = Annotated[str, mapped_column(info={"hi": "there"})] +_Literal695 = TypeAliasType( + "_Literal695", Literal["to-do", "in-progress", "done"] +) +_RecursiveLiteral695 = TypeAliasType("_RecursiveLiteral695", _Literal695) def expect_annotation_syntax_error(name): @@ -910,9 +893,9 @@ def test_pep695_behavior(self, decl_base, in_map, option): # this seems to be illegal for typing but "works" tat = NewType("tat", Union[str, int, None]) elif option.union_695: - tat = make_pep695_type("tat", str | int) + tat = TypeAliasType("tat", str | int) elif option.union_null_695: - tat = make_pep695_type("tat", str | int | None) + tat = TypeAliasType("tat", str | int | None) else: option.fail() diff --git 
a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 10d91b70350..f44e5cd63b0 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -107,9 +107,6 @@ class _SomeDict2(TypedDict): _StrTypeAlias: TypeAlias = str -if TYPE_CHECKING: - _StrPep695: TypeAlias = str - _UnionPep695: TypeAlias = Union[_SomeDict1, _SomeDict2] if compat.py38: _TypingLiteral = typing.Literal["a", "b"] @@ -127,38 +124,24 @@ class _SomeDict2(TypedDict): _JsonPep604: TypeAlias = ( _JsonObjectPep604 | _JsonArrayPep604 | _JsonPrimitivePep604 ) + _JsonPep695 = TypeAliasType("_JsonPep695", _JsonPep604) -if compat.py312: - exec( - """ -type _UnionPep695 = _SomeDict1 | _SomeDict2 -type _StrPep695 = str - -type strtypalias_keyword = Annotated[str, mapped_column(info={"hi": "there"})] -type strtypalias_keyword_nested = int | Annotated[ - str, mapped_column(info={"hi": "there"})] -strtypalias_ta: typing.TypeAlias = Annotated[ - str, mapped_column(info={"hi": "there"})] -strtypalias_plain = Annotated[str, mapped_column(info={"hi": "there"})] - -type _Literal695 = Literal["to-do", "in-progress", "done"] -type _RecursiveLiteral695 = _Literal695 - -type _JsonPep695 = _JsonPep604 -""", - globals(), - ) - - -def make_pep695_type(name, definition): - lcls = {} - exec( - f""" -type {name} = {definition} -""", - lcls, +_StrPep695 = TypeAliasType("_StrPep695", str) +_UnionPep695 = TypeAliasType("_UnionPep695", Union[_SomeDict1, _SomeDict2]) +strtypalias_keyword = TypeAliasType( + "strtypalias_keyword", Annotated[str, mapped_column(info={"hi": "there"})] +) +if compat.py310: + strtypalias_keyword_nested = TypeAliasType( + "strtypalias_keyword_nested", + int | Annotated[str, mapped_column(info={"hi": "there"})], ) - return lcls[name] +strtypalias_ta: TypeAlias = Annotated[str, mapped_column(info={"hi": "there"})] +strtypalias_plain = Annotated[str, mapped_column(info={"hi": "there"})] +_Literal695 = TypeAliasType( + "_Literal695", Literal["to-do", "in-progress", "done"] +) +_RecursiveLiteral695 = TypeAliasType("_RecursiveLiteral695", _Literal695) def expect_annotation_syntax_error(name): @@ -901,9 +884,9 @@ def test_pep695_behavior(self, decl_base, in_map, option): # this seems to be illegal for typing but "works" tat = NewType("tat", Union[str, int, None]) elif option.union_695: - tat = make_pep695_type("tat", str | int) + tat = TypeAliasType("tat", str | int) elif option.union_null_695: - tat = make_pep695_type("tat", str | int | None) + tat = TypeAliasType("tat", str | int | None) else: option.fail() From 368d6cb8621d903f161ac87cb2b390e5554bb7be Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 2 Jan 2025 16:39:10 -0500 Subject: [PATCH 411/544] 2025 Change-Id: Ifb33b8df2f838851f329415fa70f494acb4ccde5 --- LICENSE | 2 +- doc/build/conf.py | 2 +- doc/build/copyright.rst | 2 +- lib/sqlalchemy/__init__.py | 2 +- lib/sqlalchemy/connectors/__init__.py | 2 +- lib/sqlalchemy/connectors/aioodbc.py | 2 +- lib/sqlalchemy/connectors/asyncio.py | 2 +- lib/sqlalchemy/connectors/pyodbc.py | 2 +- lib/sqlalchemy/cyextension/__init__.py | 2 +- lib/sqlalchemy/dialects/__init__.py | 2 +- lib/sqlalchemy/dialects/_typing.py | 2 +- lib/sqlalchemy/dialects/mssql/__init__.py | 2 +- lib/sqlalchemy/dialects/mssql/aioodbc.py | 2 +- lib/sqlalchemy/dialects/mssql/base.py | 2 +- lib/sqlalchemy/dialects/mssql/information_schema.py | 2 +- lib/sqlalchemy/dialects/mssql/json.py | 2 +- lib/sqlalchemy/dialects/mssql/provision.py | 2 +- lib/sqlalchemy/dialects/mssql/pymssql.py | 
2 +- lib/sqlalchemy/dialects/mssql/pyodbc.py | 2 +- lib/sqlalchemy/dialects/mysql/__init__.py | 2 +- lib/sqlalchemy/dialects/mysql/aiomysql.py | 2 +- lib/sqlalchemy/dialects/mysql/asyncmy.py | 2 +- lib/sqlalchemy/dialects/mysql/base.py | 2 +- lib/sqlalchemy/dialects/mysql/cymysql.py | 2 +- lib/sqlalchemy/dialects/mysql/dml.py | 2 +- lib/sqlalchemy/dialects/mysql/enumerated.py | 2 +- lib/sqlalchemy/dialects/mysql/expression.py | 2 +- lib/sqlalchemy/dialects/mysql/json.py | 2 +- lib/sqlalchemy/dialects/mysql/mariadb.py | 2 +- lib/sqlalchemy/dialects/mysql/mariadbconnector.py | 2 +- lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 2 +- lib/sqlalchemy/dialects/mysql/mysqldb.py | 2 +- lib/sqlalchemy/dialects/mysql/provision.py | 2 +- lib/sqlalchemy/dialects/mysql/pymysql.py | 2 +- lib/sqlalchemy/dialects/mysql/pyodbc.py | 2 +- lib/sqlalchemy/dialects/mysql/reflection.py | 2 +- lib/sqlalchemy/dialects/mysql/reserved_words.py | 2 +- lib/sqlalchemy/dialects/mysql/types.py | 2 +- lib/sqlalchemy/dialects/oracle/__init__.py | 2 +- lib/sqlalchemy/dialects/oracle/base.py | 2 +- lib/sqlalchemy/dialects/oracle/cx_oracle.py | 2 +- lib/sqlalchemy/dialects/oracle/dictionary.py | 2 +- lib/sqlalchemy/dialects/oracle/oracledb.py | 2 +- lib/sqlalchemy/dialects/oracle/provision.py | 2 +- lib/sqlalchemy/dialects/oracle/types.py | 2 +- lib/sqlalchemy/dialects/postgresql/__init__.py | 2 +- lib/sqlalchemy/dialects/postgresql/_psycopg_common.py | 2 +- lib/sqlalchemy/dialects/postgresql/array.py | 2 +- lib/sqlalchemy/dialects/postgresql/asyncpg.py | 2 +- lib/sqlalchemy/dialects/postgresql/base.py | 2 +- lib/sqlalchemy/dialects/postgresql/dml.py | 2 +- lib/sqlalchemy/dialects/postgresql/ext.py | 2 +- lib/sqlalchemy/dialects/postgresql/hstore.py | 2 +- lib/sqlalchemy/dialects/postgresql/json.py | 2 +- lib/sqlalchemy/dialects/postgresql/named_types.py | 2 +- lib/sqlalchemy/dialects/postgresql/operators.py | 2 +- lib/sqlalchemy/dialects/postgresql/pg8000.py | 2 +- lib/sqlalchemy/dialects/postgresql/pg_catalog.py | 2 +- lib/sqlalchemy/dialects/postgresql/provision.py | 2 +- lib/sqlalchemy/dialects/postgresql/psycopg.py | 2 +- lib/sqlalchemy/dialects/postgresql/psycopg2.py | 2 +- lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py | 2 +- lib/sqlalchemy/dialects/postgresql/ranges.py | 2 +- lib/sqlalchemy/dialects/postgresql/types.py | 2 +- lib/sqlalchemy/dialects/sqlite/__init__.py | 2 +- lib/sqlalchemy/dialects/sqlite/aiosqlite.py | 2 +- lib/sqlalchemy/dialects/sqlite/base.py | 2 +- lib/sqlalchemy/dialects/sqlite/dml.py | 2 +- lib/sqlalchemy/dialects/sqlite/json.py | 2 +- lib/sqlalchemy/dialects/sqlite/provision.py | 2 +- lib/sqlalchemy/dialects/sqlite/pysqlcipher.py | 2 +- lib/sqlalchemy/dialects/sqlite/pysqlite.py | 2 +- lib/sqlalchemy/engine/__init__.py | 2 +- lib/sqlalchemy/engine/_py_processors.py | 2 +- lib/sqlalchemy/engine/_py_row.py | 2 +- lib/sqlalchemy/engine/_py_util.py | 2 +- lib/sqlalchemy/engine/base.py | 2 +- lib/sqlalchemy/engine/characteristics.py | 2 +- lib/sqlalchemy/engine/create.py | 2 +- lib/sqlalchemy/engine/cursor.py | 2 +- lib/sqlalchemy/engine/default.py | 2 +- lib/sqlalchemy/engine/events.py | 2 +- lib/sqlalchemy/engine/interfaces.py | 2 +- lib/sqlalchemy/engine/mock.py | 2 +- lib/sqlalchemy/engine/processors.py | 2 +- lib/sqlalchemy/engine/reflection.py | 2 +- lib/sqlalchemy/engine/result.py | 2 +- lib/sqlalchemy/engine/row.py | 2 +- lib/sqlalchemy/engine/strategies.py | 2 +- lib/sqlalchemy/engine/url.py | 2 +- lib/sqlalchemy/engine/util.py | 2 +- lib/sqlalchemy/event/__init__.py | 2 +- 
lib/sqlalchemy/event/api.py | 2 +- lib/sqlalchemy/event/attr.py | 2 +- lib/sqlalchemy/event/base.py | 2 +- lib/sqlalchemy/event/legacy.py | 2 +- lib/sqlalchemy/event/registry.py | 2 +- lib/sqlalchemy/events.py | 2 +- lib/sqlalchemy/exc.py | 2 +- lib/sqlalchemy/ext/__init__.py | 2 +- lib/sqlalchemy/ext/associationproxy.py | 2 +- lib/sqlalchemy/ext/asyncio/__init__.py | 2 +- lib/sqlalchemy/ext/asyncio/base.py | 2 +- lib/sqlalchemy/ext/asyncio/engine.py | 2 +- lib/sqlalchemy/ext/asyncio/exc.py | 2 +- lib/sqlalchemy/ext/asyncio/result.py | 2 +- lib/sqlalchemy/ext/asyncio/scoping.py | 2 +- lib/sqlalchemy/ext/asyncio/session.py | 2 +- lib/sqlalchemy/ext/automap.py | 2 +- lib/sqlalchemy/ext/baked.py | 2 +- lib/sqlalchemy/ext/compiler.py | 2 +- lib/sqlalchemy/ext/declarative/__init__.py | 2 +- lib/sqlalchemy/ext/declarative/extensions.py | 2 +- lib/sqlalchemy/ext/horizontal_shard.py | 2 +- lib/sqlalchemy/ext/hybrid.py | 2 +- lib/sqlalchemy/ext/indexable.py | 2 +- lib/sqlalchemy/ext/instrumentation.py | 2 +- lib/sqlalchemy/ext/mutable.py | 2 +- lib/sqlalchemy/ext/mypy/__init__.py | 2 +- lib/sqlalchemy/ext/mypy/apply.py | 2 +- lib/sqlalchemy/ext/mypy/decl_class.py | 2 +- lib/sqlalchemy/ext/mypy/infer.py | 2 +- lib/sqlalchemy/ext/mypy/names.py | 2 +- lib/sqlalchemy/ext/mypy/plugin.py | 2 +- lib/sqlalchemy/ext/mypy/util.py | 2 +- lib/sqlalchemy/ext/orderinglist.py | 2 +- lib/sqlalchemy/ext/serializer.py | 2 +- lib/sqlalchemy/future/__init__.py | 2 +- lib/sqlalchemy/future/engine.py | 2 +- lib/sqlalchemy/inspection.py | 2 +- lib/sqlalchemy/log.py | 2 +- lib/sqlalchemy/orm/__init__.py | 2 +- lib/sqlalchemy/orm/_orm_constructors.py | 2 +- lib/sqlalchemy/orm/_typing.py | 2 +- lib/sqlalchemy/orm/attributes.py | 2 +- lib/sqlalchemy/orm/base.py | 2 +- lib/sqlalchemy/orm/bulk_persistence.py | 2 +- lib/sqlalchemy/orm/clsregistry.py | 2 +- lib/sqlalchemy/orm/collections.py | 2 +- lib/sqlalchemy/orm/context.py | 2 +- lib/sqlalchemy/orm/decl_api.py | 2 +- lib/sqlalchemy/orm/decl_base.py | 2 +- lib/sqlalchemy/orm/dependency.py | 2 +- lib/sqlalchemy/orm/descriptor_props.py | 2 +- lib/sqlalchemy/orm/dynamic.py | 2 +- lib/sqlalchemy/orm/evaluator.py | 2 +- lib/sqlalchemy/orm/events.py | 2 +- lib/sqlalchemy/orm/exc.py | 2 +- lib/sqlalchemy/orm/identity.py | 2 +- lib/sqlalchemy/orm/instrumentation.py | 2 +- lib/sqlalchemy/orm/interfaces.py | 2 +- lib/sqlalchemy/orm/loading.py | 2 +- lib/sqlalchemy/orm/mapped_collection.py | 2 +- lib/sqlalchemy/orm/mapper.py | 2 +- lib/sqlalchemy/orm/path_registry.py | 2 +- lib/sqlalchemy/orm/persistence.py | 2 +- lib/sqlalchemy/orm/properties.py | 2 +- lib/sqlalchemy/orm/query.py | 2 +- lib/sqlalchemy/orm/relationships.py | 2 +- lib/sqlalchemy/orm/scoping.py | 2 +- lib/sqlalchemy/orm/session.py | 2 +- lib/sqlalchemy/orm/state.py | 2 +- lib/sqlalchemy/orm/state_changes.py | 2 +- lib/sqlalchemy/orm/strategies.py | 2 +- lib/sqlalchemy/orm/strategy_options.py | 2 +- lib/sqlalchemy/orm/sync.py | 2 +- lib/sqlalchemy/orm/unitofwork.py | 2 +- lib/sqlalchemy/orm/util.py | 2 +- lib/sqlalchemy/orm/writeonly.py | 2 +- lib/sqlalchemy/pool/__init__.py | 2 +- lib/sqlalchemy/pool/base.py | 2 +- lib/sqlalchemy/pool/events.py | 2 +- lib/sqlalchemy/pool/impl.py | 2 +- lib/sqlalchemy/schema.py | 2 +- lib/sqlalchemy/sql/__init__.py | 2 +- lib/sqlalchemy/sql/_dml_constructors.py | 2 +- lib/sqlalchemy/sql/_elements_constructors.py | 2 +- lib/sqlalchemy/sql/_orm_types.py | 2 +- lib/sqlalchemy/sql/_py_util.py | 2 +- lib/sqlalchemy/sql/_selectable_constructors.py | 2 +- lib/sqlalchemy/sql/_typing.py | 2 +- 
lib/sqlalchemy/sql/annotation.py | 2 +- lib/sqlalchemy/sql/base.py | 2 +- lib/sqlalchemy/sql/cache_key.py | 2 +- lib/sqlalchemy/sql/coercions.py | 2 +- lib/sqlalchemy/sql/compiler.py | 2 +- lib/sqlalchemy/sql/crud.py | 2 +- lib/sqlalchemy/sql/ddl.py | 2 +- lib/sqlalchemy/sql/default_comparator.py | 2 +- lib/sqlalchemy/sql/dml.py | 2 +- lib/sqlalchemy/sql/elements.py | 2 +- lib/sqlalchemy/sql/events.py | 2 +- lib/sqlalchemy/sql/expression.py | 2 +- lib/sqlalchemy/sql/functions.py | 2 +- lib/sqlalchemy/sql/lambdas.py | 2 +- lib/sqlalchemy/sql/naming.py | 2 +- lib/sqlalchemy/sql/operators.py | 2 +- lib/sqlalchemy/sql/roles.py | 2 +- lib/sqlalchemy/sql/schema.py | 2 +- lib/sqlalchemy/sql/selectable.py | 2 +- lib/sqlalchemy/sql/sqltypes.py | 2 +- lib/sqlalchemy/sql/traversals.py | 2 +- lib/sqlalchemy/sql/type_api.py | 2 +- lib/sqlalchemy/sql/util.py | 2 +- lib/sqlalchemy/sql/visitors.py | 2 +- lib/sqlalchemy/testing/__init__.py | 2 +- lib/sqlalchemy/testing/assertions.py | 2 +- lib/sqlalchemy/testing/assertsql.py | 2 +- lib/sqlalchemy/testing/asyncio.py | 2 +- lib/sqlalchemy/testing/config.py | 2 +- lib/sqlalchemy/testing/engines.py | 2 +- lib/sqlalchemy/testing/entities.py | 2 +- lib/sqlalchemy/testing/exclusions.py | 2 +- lib/sqlalchemy/testing/fixtures/__init__.py | 2 +- lib/sqlalchemy/testing/fixtures/base.py | 2 +- lib/sqlalchemy/testing/fixtures/mypy.py | 2 +- lib/sqlalchemy/testing/fixtures/orm.py | 2 +- lib/sqlalchemy/testing/fixtures/sql.py | 2 +- lib/sqlalchemy/testing/pickleable.py | 2 +- lib/sqlalchemy/testing/plugin/__init__.py | 2 +- lib/sqlalchemy/testing/plugin/bootstrap.py | 2 +- lib/sqlalchemy/testing/plugin/plugin_base.py | 2 +- lib/sqlalchemy/testing/plugin/pytestplugin.py | 2 +- lib/sqlalchemy/testing/profiling.py | 2 +- lib/sqlalchemy/testing/provision.py | 2 +- lib/sqlalchemy/testing/requirements.py | 2 +- lib/sqlalchemy/testing/schema.py | 2 +- lib/sqlalchemy/testing/suite/__init__.py | 2 +- lib/sqlalchemy/testing/suite/test_cte.py | 2 +- lib/sqlalchemy/testing/suite/test_ddl.py | 2 +- lib/sqlalchemy/testing/suite/test_deprecations.py | 2 +- lib/sqlalchemy/testing/suite/test_dialect.py | 2 +- lib/sqlalchemy/testing/suite/test_insert.py | 2 +- lib/sqlalchemy/testing/suite/test_reflection.py | 2 +- lib/sqlalchemy/testing/suite/test_results.py | 2 +- lib/sqlalchemy/testing/suite/test_rowcount.py | 2 +- lib/sqlalchemy/testing/suite/test_select.py | 2 +- lib/sqlalchemy/testing/suite/test_sequence.py | 2 +- lib/sqlalchemy/testing/suite/test_types.py | 2 +- lib/sqlalchemy/testing/suite/test_unicode_ddl.py | 2 +- lib/sqlalchemy/testing/suite/test_update_delete.py | 2 +- lib/sqlalchemy/testing/util.py | 2 +- lib/sqlalchemy/testing/warnings.py | 2 +- lib/sqlalchemy/types.py | 2 +- lib/sqlalchemy/util/__init__.py | 2 +- lib/sqlalchemy/util/_collections.py | 2 +- lib/sqlalchemy/util/_concurrency_py3k.py | 2 +- lib/sqlalchemy/util/_has_cy.py | 2 +- lib/sqlalchemy/util/_py_collections.py | 2 +- lib/sqlalchemy/util/compat.py | 2 +- lib/sqlalchemy/util/concurrency.py | 2 +- lib/sqlalchemy/util/deprecations.py | 2 +- lib/sqlalchemy/util/langhelpers.py | 2 +- lib/sqlalchemy/util/preloaded.py | 2 +- lib/sqlalchemy/util/queue.py | 2 +- lib/sqlalchemy/util/tool_support.py | 2 +- lib/sqlalchemy/util/topological.py | 2 +- lib/sqlalchemy/util/typing.py | 2 +- 258 files changed, 258 insertions(+), 258 deletions(-) diff --git a/LICENSE b/LICENSE index 967cdc5dc10..dfe1a4d815b 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright 2005-2024 SQLAlchemy authors and contributors . 
+Copyright 2005-2025 SQLAlchemy authors and contributors . Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/doc/build/conf.py b/doc/build/conf.py index ea0585dc49e..9b6bcb14920 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -235,7 +235,7 @@ # General information about the project. project = "SQLAlchemy" -copyright = "2007-2024, the SQLAlchemy authors and contributors" # noqa +copyright = "2007-2025, the SQLAlchemy authors and contributors" # noqa # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/doc/build/copyright.rst b/doc/build/copyright.rst index b3a67ccf469..54535474c42 100644 --- a/doc/build/copyright.rst +++ b/doc/build/copyright.rst @@ -6,7 +6,7 @@ Appendix: Copyright This is the MIT license: ``_ -Copyright (c) 2005-2024 Michael Bayer and contributors. +Copyright (c) 2005-2025 Michael Bayer and contributors. SQLAlchemy is a trademark of Michael Bayer. Permission is hereby granted, free of charge, to any person obtaining a copy of this diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 6360f47a5b9..c74afd9012f 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -1,5 +1,5 @@ # __init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/connectors/__init__.py b/lib/sqlalchemy/connectors/__init__.py index f1cae0b3ceb..43cd1035c62 100644 --- a/lib/sqlalchemy/connectors/__init__.py +++ b/lib/sqlalchemy/connectors/__init__.py @@ -1,5 +1,5 @@ # connectors/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/connectors/aioodbc.py b/lib/sqlalchemy/connectors/aioodbc.py index 3b5c3b4978e..39b2a8a2382 100644 --- a/lib/sqlalchemy/connectors/aioodbc.py +++ b/lib/sqlalchemy/connectors/aioodbc.py @@ -1,5 +1,5 @@ # connectors/aioodbc.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/connectors/asyncio.py b/lib/sqlalchemy/connectors/asyncio.py index 9b19bef78f6..c4f0d715413 100644 --- a/lib/sqlalchemy/connectors/asyncio.py +++ b/lib/sqlalchemy/connectors/asyncio.py @@ -1,5 +1,5 @@ # connectors/asyncio.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py index f204d80a8e9..92b7c3a4138 100644 --- a/lib/sqlalchemy/connectors/pyodbc.py +++ b/lib/sqlalchemy/connectors/pyodbc.py @@ -1,5 +1,5 @@ # connectors/pyodbc.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/cyextension/__init__.py b/lib/sqlalchemy/cyextension/__init__.py index 88a4d903967..cb8dc2c6ec3 100644 --- 
a/lib/sqlalchemy/cyextension/__init__.py +++ b/lib/sqlalchemy/cyextension/__init__.py @@ -1,5 +1,5 @@ # cyextension/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py index 7d5cc1c9c2f..31ce6d64b52 100644 --- a/lib/sqlalchemy/dialects/__init__.py +++ b/lib/sqlalchemy/dialects/__init__.py @@ -1,5 +1,5 @@ # dialects/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/_typing.py b/lib/sqlalchemy/dialects/_typing.py index 8e04f3b3764..4dd40d7220f 100644 --- a/lib/sqlalchemy/dialects/_typing.py +++ b/lib/sqlalchemy/dialects/_typing.py @@ -1,5 +1,5 @@ # dialects/_typing.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/__init__.py b/lib/sqlalchemy/dialects/mssql/__init__.py index 19ab7c422c9..20140fdddb3 100644 --- a/lib/sqlalchemy/dialects/mssql/__init__.py +++ b/lib/sqlalchemy/dialects/mssql/__init__.py @@ -1,5 +1,5 @@ # dialects/mssql/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/aioodbc.py b/lib/sqlalchemy/dialects/mssql/aioodbc.py index 518d7ce0669..522ad1d6b0d 100644 --- a/lib/sqlalchemy/dialects/mssql/aioodbc.py +++ b/lib/sqlalchemy/dialects/mssql/aioodbc.py @@ -1,5 +1,5 @@ # dialects/mssql/aioodbc.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index e304073535b..916809e7684 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -1,5 +1,5 @@ # dialects/mssql/base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py index 0c5f2372de8..b60bb158b46 100644 --- a/lib/sqlalchemy/dialects/mssql/information_schema.py +++ b/lib/sqlalchemy/dialects/mssql/information_schema.py @@ -1,5 +1,5 @@ # dialects/mssql/information_schema.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/json.py b/lib/sqlalchemy/dialects/mssql/json.py index 305aef77d10..a2d3ce81469 100644 --- a/lib/sqlalchemy/dialects/mssql/json.py +++ b/lib/sqlalchemy/dialects/mssql/json.py @@ -1,5 +1,5 @@ # dialects/mssql/json.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of 
SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/provision.py b/lib/sqlalchemy/dialects/mssql/provision.py index 1c684b1dfef..10165856e1a 100644 --- a/lib/sqlalchemy/dialects/mssql/provision.py +++ b/lib/sqlalchemy/dialects/mssql/provision.py @@ -1,5 +1,5 @@ # dialects/mssql/provision.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py index c4207987bcd..301a98eb417 100644 --- a/lib/sqlalchemy/dialects/mssql/pymssql.py +++ b/lib/sqlalchemy/dialects/mssql/pymssql.py @@ -1,5 +1,5 @@ # dialects/mssql/pymssql.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py index 421472c2552..cbf0adbfe08 100644 --- a/lib/sqlalchemy/dialects/mssql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py @@ -1,5 +1,5 @@ # dialects/mssql/pyodbc.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py index 05f41cf3512..9174c54413a 100644 --- a/lib/sqlalchemy/dialects/mysql/__init__.py +++ b/lib/sqlalchemy/dialects/mysql/__init__.py @@ -1,5 +1,5 @@ # dialects/mysql/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/aiomysql.py b/lib/sqlalchemy/dialects/mysql/aiomysql.py index 3fdaabca6a2..bd5e7de6b4f 100644 --- a/lib/sqlalchemy/dialects/mysql/aiomysql.py +++ b/lib/sqlalchemy/dialects/mysql/aiomysql.py @@ -1,5 +1,5 @@ # dialects/mysql/aiomysql.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/asyncmy.py b/lib/sqlalchemy/dialects/mysql/asyncmy.py index 72381697fa0..9ec54e694da 100644 --- a/lib/sqlalchemy/dialects/mysql/asyncmy.py +++ b/lib/sqlalchemy/dialects/mysql/asyncmy.py @@ -1,5 +1,5 @@ # dialects/mysql/asyncmy.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index ae863f30a64..db887269c9a 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1,5 +1,5 @@ # dialects/mysql/base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py index f199aa4e895..5c00ada9f94 100644 --- a/lib/sqlalchemy/dialects/mysql/cymysql.py +++ b/lib/sqlalchemy/dialects/mysql/cymysql.py @@ -1,5 +1,5 @@ # dialects/mysql/cymysql.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the 
SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/dml.py b/lib/sqlalchemy/dialects/mysql/dml.py index 731d1943aa8..cceb0818f9b 100644 --- a/lib/sqlalchemy/dialects/mysql/dml.py +++ b/lib/sqlalchemy/dialects/mysql/dml.py @@ -1,5 +1,5 @@ # dialects/mysql/dml.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/enumerated.py b/lib/sqlalchemy/dialects/mysql/enumerated.py index d3c10c0021b..6745cae55e7 100644 --- a/lib/sqlalchemy/dialects/mysql/enumerated.py +++ b/lib/sqlalchemy/dialects/mysql/enumerated.py @@ -1,5 +1,5 @@ # dialects/mysql/enumerated.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/expression.py b/lib/sqlalchemy/dialects/mysql/expression.py index 8c21c748c96..b60a0888517 100644 --- a/lib/sqlalchemy/dialects/mysql/expression.py +++ b/lib/sqlalchemy/dialects/mysql/expression.py @@ -1,5 +1,5 @@ # dialects/mysql/expression.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/json.py b/lib/sqlalchemy/dialects/mysql/json.py index ebe4a34d212..8912af36631 100644 --- a/lib/sqlalchemy/dialects/mysql/json.py +++ b/lib/sqlalchemy/dialects/mysql/json.py @@ -1,5 +1,5 @@ # dialects/mysql/json.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/mariadb.py b/lib/sqlalchemy/dialects/mysql/mariadb.py index be7aebeaeb4..ac2cfbd1b00 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadb.py +++ b/lib/sqlalchemy/dialects/mysql/mariadb.py @@ -1,5 +1,5 @@ # dialects/mysql/mariadb.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py index 1730c1a6f29..2d2ad199710 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py @@ -1,5 +1,5 @@ # dialects/mysql/mariadbconnector.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index 8f4b4174184..e88f8fd71a6 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -1,5 +1,5 @@ # dialects/mysql/mysqlconnector.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py 
index 6e7ccaa1525..3cf56c1fd09 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py @@ -1,5 +1,5 @@ # dialects/mysql/mysqldb.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/provision.py b/lib/sqlalchemy/dialects/mysql/provision.py index 836ffa1df43..7807af40975 100644 --- a/lib/sqlalchemy/dialects/mysql/provision.py +++ b/lib/sqlalchemy/dialects/mysql/provision.py @@ -1,5 +1,5 @@ # dialects/mysql/provision.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py index ff62e4f0282..67cb4cdd766 100644 --- a/lib/sqlalchemy/dialects/mysql/pymysql.py +++ b/lib/sqlalchemy/dialects/mysql/pymysql.py @@ -1,5 +1,5 @@ # dialects/mysql/pymysql.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py index 9ad360bd995..6d44bd38370 100644 --- a/lib/sqlalchemy/dialects/mysql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py @@ -1,5 +1,5 @@ # dialects/mysql/pyodbc.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/reflection.py b/lib/sqlalchemy/dialects/mysql/reflection.py index d7622c5463d..3998be977d9 100644 --- a/lib/sqlalchemy/dialects/mysql/reflection.py +++ b/lib/sqlalchemy/dialects/mysql/reflection.py @@ -1,5 +1,5 @@ # dialects/mysql/reflection.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/reserved_words.py b/lib/sqlalchemy/dialects/mysql/reserved_words.py index 04764c17e77..34fecf42724 100644 --- a/lib/sqlalchemy/dialects/mysql/reserved_words.py +++ b/lib/sqlalchemy/dialects/mysql/reserved_words.py @@ -1,5 +1,5 @@ # dialects/mysql/reserved_words.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/mysql/types.py b/lib/sqlalchemy/dialects/mysql/types.py index 734f6ae3723..0c05aacb7cd 100644 --- a/lib/sqlalchemy/dialects/mysql/types.py +++ b/lib/sqlalchemy/dialects/mysql/types.py @@ -1,5 +1,5 @@ # dialects/mysql/types.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py index d855122ee0c..7ceb743d616 100644 --- a/lib/sqlalchemy/dialects/oracle/__init__.py +++ b/lib/sqlalchemy/dialects/oracle/__init__.py @@ -1,5 +1,5 @@ # dialects/oracle/__init__.py -# Copyright (C) 2005-2024 the 
SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 358dd9d8a6d..019b76bc0af 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -1,5 +1,5 @@ # dialects/oracle/base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index 6a2588883b6..0514ebbcd41 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -1,5 +1,5 @@ # dialects/oracle/cx_oracle.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/dictionary.py b/lib/sqlalchemy/dialects/oracle/dictionary.py index 63479b9fcc6..f785a66ef71 100644 --- a/lib/sqlalchemy/dialects/oracle/dictionary.py +++ b/lib/sqlalchemy/dialects/oracle/dictionary.py @@ -1,5 +1,5 @@ # dialects/oracle/dictionary.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/oracledb.py b/lib/sqlalchemy/dialects/oracle/oracledb.py index b8dd7a2f682..c09d2bae0df 100644 --- a/lib/sqlalchemy/dialects/oracle/oracledb.py +++ b/lib/sqlalchemy/dialects/oracle/oracledb.py @@ -1,5 +1,5 @@ # dialects/oracle/oracledb.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/provision.py b/lib/sqlalchemy/dialects/oracle/provision.py index 0eb6273a8c6..3587de9d011 100644 --- a/lib/sqlalchemy/dialects/oracle/provision.py +++ b/lib/sqlalchemy/dialects/oracle/provision.py @@ -1,5 +1,5 @@ # dialects/oracle/provision.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/oracle/types.py b/lib/sqlalchemy/dialects/oracle/types.py index 9318b45619a..06aeaace2f5 100644 --- a/lib/sqlalchemy/dialects/oracle/types.py +++ b/lib/sqlalchemy/dialects/oracle/types.py @@ -1,5 +1,5 @@ # dialects/oracle/types.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py index 325ea886990..88935e20245 100644 --- a/lib/sqlalchemy/dialects/postgresql/__init__.py +++ b/lib/sqlalchemy/dialects/postgresql/__init__.py @@ -1,5 +1,5 @@ # dialects/postgresql/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git 
a/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py b/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py index 46858c9f22c..d827e054ccf 100644 --- a/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py +++ b/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py @@ -1,5 +1,5 @@ # dialects/postgresql/_psycopg_common.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index fcb98e65183..7708769cb53 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -1,5 +1,5 @@ # dialects/postgresql/array.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index a175e77d657..1761c8de53c 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -1,5 +1,5 @@ # dialects/postgresql/asyncpg.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 44b8c96a815..9bfaa277c8c 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1,5 +1,5 @@ # dialects/postgresql/base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/dml.py b/lib/sqlalchemy/dialects/postgresql/dml.py index 1615506c0b2..1187b6bf5f0 100644 --- a/lib/sqlalchemy/dialects/postgresql/dml.py +++ b/lib/sqlalchemy/dialects/postgresql/dml.py @@ -1,5 +1,5 @@ # dialects/postgresql/dml.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/ext.py b/lib/sqlalchemy/dialects/postgresql/ext.py index a760773e247..94466ae0a13 100644 --- a/lib/sqlalchemy/dialects/postgresql/ext.py +++ b/lib/sqlalchemy/dialects/postgresql/ext.py @@ -1,5 +1,5 @@ # dialects/postgresql/ext.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py index 291af36c69b..0a915b17dff 100644 --- a/lib/sqlalchemy/dialects/postgresql/hstore.py +++ b/lib/sqlalchemy/dialects/postgresql/hstore.py @@ -1,5 +1,5 @@ # dialects/postgresql/hstore.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index 4e7c15ffe92..2f26b39e31e 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ 
b/lib/sqlalchemy/dialects/postgresql/json.py @@ -1,5 +1,5 @@ # dialects/postgresql/json.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/named_types.py b/lib/sqlalchemy/dialects/postgresql/named_types.py index 320de440f86..e1b8e84ce85 100644 --- a/lib/sqlalchemy/dialects/postgresql/named_types.py +++ b/lib/sqlalchemy/dialects/postgresql/named_types.py @@ -1,5 +1,5 @@ # dialects/postgresql/named_types.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/operators.py b/lib/sqlalchemy/dialects/postgresql/operators.py index 53e175f9c54..ebcafcba991 100644 --- a/lib/sqlalchemy/dialects/postgresql/operators.py +++ b/lib/sqlalchemy/dialects/postgresql/operators.py @@ -1,5 +1,5 @@ # dialects/postgresql/operators.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py index aa878c353e0..bf113230e07 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg8000.py +++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py @@ -1,5 +1,5 @@ # dialects/postgresql/pg8000.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py index 9b5562c13fc..78f390a2118 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py +++ b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py @@ -1,5 +1,5 @@ # dialects/postgresql/pg_catalog.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/provision.py b/lib/sqlalchemy/dialects/postgresql/provision.py index 38573c77ad6..c76f5f51849 100644 --- a/lib/sqlalchemy/dialects/postgresql/provision.py +++ b/lib/sqlalchemy/dialects/postgresql/provision.py @@ -1,5 +1,5 @@ # dialects/postgresql/provision.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg.py b/lib/sqlalchemy/dialects/postgresql/psycopg.py index b880bc7f41b..0554048c2bf 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg.py @@ -1,5 +1,5 @@ # dialects/postgresql/psycopg.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index d7efc2eb974..eeb7604f796 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -1,5 +1,5 @@ # dialects/postgresql/psycopg2.py -# Copyright (C) 
2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py index 3cc3b69fb34..55e17607044 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py @@ -1,5 +1,5 @@ # dialects/postgresql/psycopg2cffi.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py index fa0c0c5df81..93253570c1b 100644 --- a/lib/sqlalchemy/dialects/postgresql/ranges.py +++ b/lib/sqlalchemy/dialects/postgresql/ranges.py @@ -1,5 +1,5 @@ # dialects/postgresql/ranges.py -# Copyright (C) 2013-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2013-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/postgresql/types.py b/lib/sqlalchemy/dialects/postgresql/types.py index 73f9d372ab2..6fe4f576ebd 100644 --- a/lib/sqlalchemy/dialects/postgresql/types.py +++ b/lib/sqlalchemy/dialects/postgresql/types.py @@ -1,5 +1,5 @@ # dialects/postgresql/types.py -# Copyright (C) 2013-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2013-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py index 45f088e2147..7b381fa6f52 100644 --- a/lib/sqlalchemy/dialects/sqlite/__init__.py +++ b/lib/sqlalchemy/dialects/sqlite/__init__.py @@ -1,5 +1,5 @@ # dialects/sqlite/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py index 43ab2f0beef..c777bf445b0 100644 --- a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py @@ -1,5 +1,5 @@ # dialects/sqlite/aiosqlite.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 51b957cf9ac..e0c0f6e8098 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -1,5 +1,5 @@ # dialects/sqlite/base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/dml.py b/lib/sqlalchemy/dialects/sqlite/dml.py index 163a6ed28b2..84cdb8bec23 100644 --- a/lib/sqlalchemy/dialects/sqlite/dml.py +++ b/lib/sqlalchemy/dialects/sqlite/dml.py @@ -1,5 +1,5 @@ # dialects/sqlite/dml.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff 
--git a/lib/sqlalchemy/dialects/sqlite/json.py b/lib/sqlalchemy/dialects/sqlite/json.py index ec2980297e2..02f4ea4c90f 100644 --- a/lib/sqlalchemy/dialects/sqlite/json.py +++ b/lib/sqlalchemy/dialects/sqlite/json.py @@ -1,5 +1,5 @@ # dialects/sqlite/json.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/provision.py b/lib/sqlalchemy/dialects/sqlite/provision.py index f18568b0b33..97f882e7f28 100644 --- a/lib/sqlalchemy/dialects/sqlite/provision.py +++ b/lib/sqlalchemy/dialects/sqlite/provision.py @@ -1,5 +1,5 @@ # dialects/sqlite/provision.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py index 58471ac90ec..7a3dc1bae13 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py @@ -1,5 +1,5 @@ # dialects/sqlite/pysqlcipher.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index 25e6da0f521..9dafda6d9df 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -1,5 +1,5 @@ # dialects/sqlite/pysqlite.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py index af0f7ee8bec..f4205d89260 100644 --- a/lib/sqlalchemy/engine/__init__.py +++ b/lib/sqlalchemy/engine/__init__.py @@ -1,5 +1,5 @@ # engine/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/_py_processors.py b/lib/sqlalchemy/engine/_py_processors.py index 2cc35b501eb..8536d53d779 100644 --- a/lib/sqlalchemy/engine/_py_processors.py +++ b/lib/sqlalchemy/engine/_py_processors.py @@ -1,5 +1,5 @@ # engine/_py_processors.py -# Copyright (C) 2010-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2010-2025 the SQLAlchemy authors and contributors # # Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com # diff --git a/lib/sqlalchemy/engine/_py_row.py b/lib/sqlalchemy/engine/_py_row.py index 4e1dd7d430d..38c60fcd276 100644 --- a/lib/sqlalchemy/engine/_py_row.py +++ b/lib/sqlalchemy/engine/_py_row.py @@ -1,5 +1,5 @@ # engine/_py_row.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/_py_util.py b/lib/sqlalchemy/engine/_py_util.py index 2be4322abbc..50badea2a94 100644 --- a/lib/sqlalchemy/engine/_py_util.py +++ b/lib/sqlalchemy/engine/_py_util.py @@ -1,5 +1,5 @@ # engine/_py_util.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# 
Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index f9853ee4892..cbf11acf5ac 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -1,5 +1,5 @@ # engine/base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/characteristics.py b/lib/sqlalchemy/engine/characteristics.py index 97b17fbdfb6..322c28b5aa7 100644 --- a/lib/sqlalchemy/engine/characteristics.py +++ b/lib/sqlalchemy/engine/characteristics.py @@ -1,5 +1,5 @@ # engine/characteristics.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/create.py b/lib/sqlalchemy/engine/create.py index 85e0b12474e..920f620bd48 100644 --- a/lib/sqlalchemy/engine/create.py +++ b/lib/sqlalchemy/engine/create.py @@ -1,5 +1,5 @@ # engine/create.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index 6eb64da839e..ff14ad8eed4 100644 --- a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ -1,5 +1,5 @@ # engine/cursor.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index dc4c1a61263..dd4250ffc40 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -1,5 +1,5 @@ # engine/default.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/events.py b/lib/sqlalchemy/engine/events.py index e1e9b7d11a8..b759382cb27 100644 --- a/lib/sqlalchemy/engine/events.py +++ b/lib/sqlalchemy/engine/events.py @@ -1,5 +1,5 @@ # engine/events.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index a14c0708031..9fb39db78bd 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -1,5 +1,5 @@ # engine/interfaces.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/mock.py b/lib/sqlalchemy/engine/mock.py index fc59521cd26..08dba5a6456 100644 --- a/lib/sqlalchemy/engine/mock.py +++ b/lib/sqlalchemy/engine/mock.py @@ -1,5 +1,5 @@ # engine/mock.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is 
released under diff --git a/lib/sqlalchemy/engine/processors.py b/lib/sqlalchemy/engine/processors.py index 610e03d5a1c..b3f9330842d 100644 --- a/lib/sqlalchemy/engine/processors.py +++ b/lib/sqlalchemy/engine/processors.py @@ -1,5 +1,5 @@ # engine/processors.py -# Copyright (C) 2010-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2010-2025 the SQLAlchemy authors and contributors # # Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com # diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 1e03b9f2fec..5d754c6703d 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -1,5 +1,5 @@ # engine/reflection.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 60a8b6446f9..7411fd74f6a 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -1,5 +1,5 @@ # engine/result.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py index de544712b74..da7ae9af277 100644 --- a/lib/sqlalchemy/engine/row.py +++ b/lib/sqlalchemy/engine/row.py @@ -1,5 +1,5 @@ # engine/row.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py index 30c331e8d44..5dd7bca9a49 100644 --- a/lib/sqlalchemy/engine/strategies.py +++ b/lib/sqlalchemy/engine/strategies.py @@ -1,5 +1,5 @@ # engine/strategies.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index f87e3671cc4..bb004f11cd4 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -1,5 +1,5 @@ # engine/url.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py index 186ca4c3201..e499efa91aa 100644 --- a/lib/sqlalchemy/engine/util.py +++ b/lib/sqlalchemy/engine/util.py @@ -1,5 +1,5 @@ # engine/util.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/__init__.py b/lib/sqlalchemy/event/__init__.py index 9b54f07fc42..309b7bd33fb 100644 --- a/lib/sqlalchemy/event/__init__.py +++ b/lib/sqlalchemy/event/__init__.py @@ -1,5 +1,5 @@ # event/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/api.py b/lib/sqlalchemy/event/api.py index f528d74f69f..b6ec8f6d32b 100644 --- 
a/lib/sqlalchemy/event/api.py +++ b/lib/sqlalchemy/event/api.py @@ -1,5 +1,5 @@ # event/api.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py index ef2b334d1b0..ec5d5822f1c 100644 --- a/lib/sqlalchemy/event/attr.py +++ b/lib/sqlalchemy/event/attr.py @@ -1,5 +1,5 @@ # event/attr.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py index cddfc982a6c..a73e86bd2a2 100644 --- a/lib/sqlalchemy/event/base.py +++ b/lib/sqlalchemy/event/base.py @@ -1,5 +1,5 @@ # event/base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/legacy.py b/lib/sqlalchemy/event/legacy.py index 57e561c390d..e60fd9a5e17 100644 --- a/lib/sqlalchemy/event/legacy.py +++ b/lib/sqlalchemy/event/legacy.py @@ -1,5 +1,5 @@ # event/legacy.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py index 773620f8bbc..77fea0006f4 100644 --- a/lib/sqlalchemy/event/registry.py +++ b/lib/sqlalchemy/event/registry.py @@ -1,5 +1,5 @@ # event/registry.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py index 8c3bf01cf6a..ce832439516 100644 --- a/lib/sqlalchemy/events.py +++ b/lib/sqlalchemy/events.py @@ -1,5 +1,5 @@ # events.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index ced87df4b2d..71e5dd81e0b 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -1,5 +1,5 @@ # exc.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/__init__.py b/lib/sqlalchemy/ext/__init__.py index f03ed945f35..2751bcf938a 100644 --- a/lib/sqlalchemy/ext/__init__.py +++ b/lib/sqlalchemy/ext/__init__.py @@ -1,5 +1,5 @@ # ext/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index a68c298542e..8f2c19b8764 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -1,5 +1,5 @@ # ext/associationproxy.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module 
is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/__init__.py b/lib/sqlalchemy/ext/asyncio/__init__.py index 78c707b26d8..7d8a04bd789 100644 --- a/lib/sqlalchemy/ext/asyncio/__init__.py +++ b/lib/sqlalchemy/ext/asyncio/__init__.py @@ -1,5 +1,5 @@ # ext/asyncio/__init__.py -# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/base.py b/lib/sqlalchemy/ext/asyncio/base.py index e534424c0f4..b53d53b1a4e 100644 --- a/lib/sqlalchemy/ext/asyncio/base.py +++ b/lib/sqlalchemy/ext/asyncio/base.py @@ -1,5 +1,5 @@ # ext/asyncio/base.py -# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index 4f476483827..2c9b499f534 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -1,5 +1,5 @@ # ext/asyncio/engine.py -# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/exc.py b/lib/sqlalchemy/ext/asyncio/exc.py index 1cf6f363860..558187c0b41 100644 --- a/lib/sqlalchemy/ext/asyncio/exc.py +++ b/lib/sqlalchemy/ext/asyncio/exc.py @@ -1,5 +1,5 @@ # ext/asyncio/exc.py -# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/result.py b/lib/sqlalchemy/ext/asyncio/result.py index 745a51b81b7..c51e166d916 100644 --- a/lib/sqlalchemy/ext/asyncio/result.py +++ b/lib/sqlalchemy/ext/asyncio/result.py @@ -1,5 +1,5 @@ # ext/asyncio/result.py -# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/scoping.py b/lib/sqlalchemy/ext/asyncio/scoping.py index 35e68e522f2..7ecab37b400 100644 --- a/lib/sqlalchemy/ext/asyncio/scoping.py +++ b/lib/sqlalchemy/ext/asyncio/scoping.py @@ -1,5 +1,5 @@ # ext/asyncio/scoping.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/asyncio/session.py b/lib/sqlalchemy/ext/asyncio/session.py index 550c977ab9b..bb276943cdb 100644 --- a/lib/sqlalchemy/ext/asyncio/session.py +++ b/lib/sqlalchemy/ext/asyncio/session.py @@ -1,5 +1,5 @@ # ext/asyncio/session.py -# Copyright (C) 2020-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2020-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py index c9bc8788331..817f91d267b 100644 --- a/lib/sqlalchemy/ext/automap.py +++ b/lib/sqlalchemy/ext/automap.py @@ -1,5 +1,5 @@ # ext/automap.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is 
part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py index c9dd63a87f8..cd3e087931e 100644 --- a/lib/sqlalchemy/ext/baked.py +++ b/lib/sqlalchemy/ext/baked.py @@ -1,5 +1,5 @@ # ext/baked.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py index 199329d5b45..cc64477ed47 100644 --- a/lib/sqlalchemy/ext/compiler.py +++ b/lib/sqlalchemy/ext/compiler.py @@ -1,5 +1,5 @@ # ext/compiler.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/declarative/__init__.py b/lib/sqlalchemy/ext/declarative/__init__.py index 37da40377b6..0383f9d34f8 100644 --- a/lib/sqlalchemy/ext/declarative/__init__.py +++ b/lib/sqlalchemy/ext/declarative/__init__.py @@ -1,5 +1,5 @@ # ext/declarative/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/declarative/extensions.py b/lib/sqlalchemy/ext/declarative/extensions.py index 4be4262d0df..3dc6bf698c4 100644 --- a/lib/sqlalchemy/ext/declarative/extensions.py +++ b/lib/sqlalchemy/ext/declarative/extensions.py @@ -1,5 +1,5 @@ # ext/declarative/extensions.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py index 56242c478f8..3ea3304eb30 100644 --- a/lib/sqlalchemy/ext/horizontal_shard.py +++ b/lib/sqlalchemy/ext/horizontal_shard.py @@ -1,5 +1,5 @@ # ext/horizontal_shard.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index 471256c32f8..c1c46e7c5f5 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -1,5 +1,5 @@ # ext/hybrid.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/indexable.py b/lib/sqlalchemy/ext/indexable.py index e79f613f274..886069ce000 100644 --- a/lib/sqlalchemy/ext/indexable.py +++ b/lib/sqlalchemy/ext/indexable.py @@ -1,5 +1,5 @@ # ext/indexable.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py index 5f3c71282b7..8bb01985ecc 100644 --- a/lib/sqlalchemy/ext/instrumentation.py +++ b/lib/sqlalchemy/ext/instrumentation.py @@ -1,5 +1,5 @@ # ext/instrumentation.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of 
SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index 941515e87b5..ed618c3819d 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -1,5 +1,5 @@ # ext/mutable.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/__init__.py b/lib/sqlalchemy/ext/mypy/__init__.py index de2c02ee9f1..b5827cb8d36 100644 --- a/lib/sqlalchemy/ext/mypy/__init__.py +++ b/lib/sqlalchemy/ext/mypy/__init__.py @@ -1,5 +1,5 @@ # ext/mypy/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/apply.py b/lib/sqlalchemy/ext/mypy/apply.py index 84eb9772491..02908cc14b4 100644 --- a/lib/sqlalchemy/ext/mypy/apply.py +++ b/lib/sqlalchemy/ext/mypy/apply.py @@ -1,5 +1,5 @@ # ext/mypy/apply.py -# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/decl_class.py b/lib/sqlalchemy/ext/mypy/decl_class.py index 3d578b346e9..2ce7ad56ccc 100644 --- a/lib/sqlalchemy/ext/mypy/decl_class.py +++ b/lib/sqlalchemy/ext/mypy/decl_class.py @@ -1,5 +1,5 @@ # ext/mypy/decl_class.py -# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/infer.py b/lib/sqlalchemy/ext/mypy/infer.py index 8826672f72e..26a83cca836 100644 --- a/lib/sqlalchemy/ext/mypy/infer.py +++ b/lib/sqlalchemy/ext/mypy/infer.py @@ -1,5 +1,5 @@ # ext/mypy/infer.py -# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/names.py b/lib/sqlalchemy/ext/mypy/names.py index fc3d708e7dd..319786288fd 100644 --- a/lib/sqlalchemy/ext/mypy/names.py +++ b/lib/sqlalchemy/ext/mypy/names.py @@ -1,5 +1,5 @@ # ext/mypy/names.py -# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/plugin.py b/lib/sqlalchemy/ext/mypy/plugin.py index 00eb4d1cc03..1ec2c02b9cf 100644 --- a/lib/sqlalchemy/ext/mypy/plugin.py +++ b/lib/sqlalchemy/ext/mypy/plugin.py @@ -1,5 +1,5 @@ # ext/mypy/plugin.py -# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/mypy/util.py b/lib/sqlalchemy/ext/mypy/util.py index af0882bc307..16761b9ab39 100644 --- a/lib/sqlalchemy/ext/mypy/util.py +++ b/lib/sqlalchemy/ext/mypy/util.py @@ -1,5 +1,5 @@ # ext/mypy/util.py -# Copyright (C) 2021-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2021-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/orderinglist.py 
b/lib/sqlalchemy/ext/orderinglist.py index ae904b0fc6c..3cc67b18964 100644 --- a/lib/sqlalchemy/ext/orderinglist.py +++ b/lib/sqlalchemy/ext/orderinglist.py @@ -1,5 +1,5 @@ # ext/orderinglist.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py index 9cbc61a1c36..b7032b65959 100644 --- a/lib/sqlalchemy/ext/serializer.py +++ b/lib/sqlalchemy/ext/serializer.py @@ -1,5 +1,5 @@ # ext/serializer.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/future/__init__.py b/lib/sqlalchemy/future/__init__.py index 8ce36ccbc24..ef9afb1a52b 100644 --- a/lib/sqlalchemy/future/__init__.py +++ b/lib/sqlalchemy/future/__init__.py @@ -1,5 +1,5 @@ # future/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/future/engine.py b/lib/sqlalchemy/future/engine.py index b55cda08d94..0449c3d9f31 100644 --- a/lib/sqlalchemy/future/engine.py +++ b/lib/sqlalchemy/future/engine.py @@ -1,5 +1,5 @@ # future/engine.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/inspection.py b/lib/sqlalchemy/inspection.py index 30d531957f8..2e5b2201814 100644 --- a/lib/sqlalchemy/inspection.py +++ b/lib/sqlalchemy/inspection.py @@ -1,5 +1,5 @@ # inspection.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/log.py b/lib/sqlalchemy/log.py index e6922b81af9..849a0bfa078 100644 --- a/lib/sqlalchemy/log.py +++ b/lib/sqlalchemy/log.py @@ -1,5 +1,5 @@ # log.py -# Copyright (C) 2006-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2006-2025 the SQLAlchemy authors and contributors # # Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk # diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py index 70a11294314..7771de47eb2 100644 --- a/lib/sqlalchemy/orm/__init__.py +++ b/lib/sqlalchemy/orm/__init__.py @@ -1,5 +1,5 @@ # orm/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index 1b9f9a82159..d9e3ec37ba2 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -1,5 +1,5 @@ # orm/_orm_constructors.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/_typing.py b/lib/sqlalchemy/orm/_typing.py index f8ac0590a62..ccb8413b524 100644 --- a/lib/sqlalchemy/orm/_typing.py +++ b/lib/sqlalchemy/orm/_typing.py @@ -1,5 +1,5 @@ # 
orm/_typing.py -# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index f6114f9db21..8207b4cace2 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -1,5 +1,5 @@ # orm/attributes.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py index b5f7dbbafb0..c84f3b1b3f8 100644 --- a/lib/sqlalchemy/orm/base.py +++ b/lib/sqlalchemy/orm/base.py @@ -1,5 +1,5 @@ # orm/base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py index 755192384ac..402d7bede6d 100644 --- a/lib/sqlalchemy/orm/bulk_persistence.py +++ b/lib/sqlalchemy/orm/bulk_persistence.py @@ -1,5 +1,5 @@ # orm/bulk_persistence.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/clsregistry.py b/lib/sqlalchemy/orm/clsregistry.py index 382d6aef9be..70307ec7679 100644 --- a/lib/sqlalchemy/orm/clsregistry.py +++ b/lib/sqlalchemy/orm/clsregistry.py @@ -1,5 +1,5 @@ # orm/clsregistry.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py index cb9456f1f3c..336b1133d99 100644 --- a/lib/sqlalchemy/orm/collections.py +++ b/lib/sqlalchemy/orm/collections.py @@ -1,5 +1,5 @@ # orm/collections.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index 4f119e35caf..c09c03b78c2 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -1,5 +1,5 @@ # orm/context.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index a3b0ac21f0a..c32851deab2 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -1,5 +1,5 @@ # orm/decl_api.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index 073aa16c350..c480994d8fd 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -1,5 +1,5 @@ # orm/decl_base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and 
contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py index 71c06fbeb19..5953062459e 100644 --- a/lib/sqlalchemy/orm/dependency.py +++ b/lib/sqlalchemy/orm/dependency.py @@ -1,5 +1,5 @@ # orm/dependency.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py index 4e07050a1d6..f01cc1788b3 100644 --- a/lib/sqlalchemy/orm/descriptor_props.py +++ b/lib/sqlalchemy/orm/descriptor_props.py @@ -1,5 +1,5 @@ # orm/descriptor_props.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py index ad1b239c13c..3c81c396f6e 100644 --- a/lib/sqlalchemy/orm/dynamic.py +++ b/lib/sqlalchemy/orm/dynamic.py @@ -1,5 +1,5 @@ # orm/dynamic.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py index 2c10ec55afa..57aae5a3c49 100644 --- a/lib/sqlalchemy/orm/evaluator.py +++ b/lib/sqlalchemy/orm/evaluator.py @@ -1,5 +1,5 @@ # orm/evaluator.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index d2ae616371d..f161760e6da 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -1,5 +1,5 @@ # orm/events.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py index 39dd5401128..0494edf983a 100644 --- a/lib/sqlalchemy/orm/exc.py +++ b/lib/sqlalchemy/orm/exc.py @@ -1,5 +1,5 @@ # orm/exc.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py index 23682f7ef22..1808b2d5e59 100644 --- a/lib/sqlalchemy/orm/identity.py +++ b/lib/sqlalchemy/orm/identity.py @@ -1,5 +1,5 @@ # orm/identity.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py index e9fe843360c..f87023f1809 100644 --- a/lib/sqlalchemy/orm/instrumentation.py +++ b/lib/sqlalchemy/orm/instrumentation.py @@ -1,5 +1,5 @@ # orm/instrumentation.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/interfaces.py 
b/lib/sqlalchemy/orm/interfaces.py index 2b0db34fc11..b4462e54593 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -1,5 +1,5 @@ # orm/interfaces.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py index 6176d72a67e..679286f5466 100644 --- a/lib/sqlalchemy/orm/loading.py +++ b/lib/sqlalchemy/orm/loading.py @@ -1,5 +1,5 @@ # orm/loading.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/mapped_collection.py b/lib/sqlalchemy/orm/mapped_collection.py index 0d3079fb5ab..ca085c40376 100644 --- a/lib/sqlalchemy/orm/mapped_collection.py +++ b/lib/sqlalchemy/orm/mapped_collection.py @@ -1,5 +1,5 @@ # orm/mapped_collection.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index 579e053b28b..11010efbcd5 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -1,5 +1,5 @@ # orm/mapper.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py index 4ee8ac71b84..a15f7f61b45 100644 --- a/lib/sqlalchemy/orm/path_registry.py +++ b/lib/sqlalchemy/orm/path_registry.py @@ -1,5 +1,5 @@ # orm/path_registry.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index 369fc59986c..cbe8557add9 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -1,5 +1,5 @@ # orm/persistence.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index 48528ef2765..a41c520cdb2 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -1,5 +1,5 @@ # orm/properties.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 6a262b43ee3..c7e1ca8ad7c 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -1,5 +1,5 @@ # orm/query.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index 3cdaec81110..0d0bc708941 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ 
b/lib/sqlalchemy/orm/relationships.py @@ -1,5 +1,5 @@ # orm/relationships.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py index 88a1aad1e5a..a0e9f17e4fa 100644 --- a/lib/sqlalchemy/orm/scoping.py +++ b/lib/sqlalchemy/orm/scoping.py @@ -1,5 +1,5 @@ # orm/scoping.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index abaa22e4488..6cd7cd63390 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -1,5 +1,5 @@ # orm/session.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py index 9dfd7f64fe9..d4bbf920993 100644 --- a/lib/sqlalchemy/orm/state.py +++ b/lib/sqlalchemy/orm/state.py @@ -1,5 +1,5 @@ # orm/state.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/state_changes.py b/lib/sqlalchemy/orm/state_changes.py index 56963c6af1d..10e417e85d1 100644 --- a/lib/sqlalchemy/orm/state_changes.py +++ b/lib/sqlalchemy/orm/state_changes.py @@ -1,5 +1,5 @@ # orm/state_changes.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index c89a12efd66..f2d165145a1 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -1,5 +1,5 @@ # orm/strategies.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index af42f7c9923..f4f292ee7ec 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -1,5 +1,5 @@ # orm/strategy_options.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py index db09a3e9027..8f85a41a2c0 100644 --- a/lib/sqlalchemy/orm/sync.py +++ b/lib/sqlalchemy/orm/sync.py @@ -1,5 +1,5 @@ # orm/sync.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py index 7e2df2b0eff..80897f29262 100644 --- a/lib/sqlalchemy/orm/unitofwork.py +++ b/lib/sqlalchemy/orm/unitofwork.py @@ -1,5 +1,5 @@ # orm/unitofwork.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 
2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index e1cc16bd98b..48282b2d562 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -1,5 +1,5 @@ # orm/util.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/orm/writeonly.py b/lib/sqlalchemy/orm/writeonly.py index 5680cc70ec5..ac034a09e0a 100644 --- a/lib/sqlalchemy/orm/writeonly.py +++ b/lib/sqlalchemy/orm/writeonly.py @@ -1,5 +1,5 @@ # orm/writeonly.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/pool/__init__.py b/lib/sqlalchemy/pool/__init__.py index 29fd652931f..51bf0ec7992 100644 --- a/lib/sqlalchemy/pool/__init__.py +++ b/lib/sqlalchemy/pool/__init__.py @@ -1,5 +1,5 @@ # pool/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/pool/base.py b/lib/sqlalchemy/pool/base.py index 98d202789d6..34d02254392 100644 --- a/lib/sqlalchemy/pool/base.py +++ b/lib/sqlalchemy/pool/base.py @@ -1,5 +1,5 @@ # pool/base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/pool/events.py b/lib/sqlalchemy/pool/events.py index b54fad125b1..4ceb260f79b 100644 --- a/lib/sqlalchemy/pool/events.py +++ b/lib/sqlalchemy/pool/events.py @@ -1,5 +1,5 @@ # pool/events.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/pool/impl.py b/lib/sqlalchemy/pool/impl.py index 157455cbe25..f2b951d8e8d 100644 --- a/lib/sqlalchemy/pool/impl.py +++ b/lib/sqlalchemy/pool/impl.py @@ -1,5 +1,5 @@ # pool/impl.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py index 9edca4e5cce..32adc9bb218 100644 --- a/lib/sqlalchemy/schema.py +++ b/lib/sqlalchemy/schema.py @@ -1,5 +1,5 @@ # schema.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py index 9e0d2ca2a79..188f709d7e4 100644 --- a/lib/sqlalchemy/sql/__init__.py +++ b/lib/sqlalchemy/sql/__init__.py @@ -1,5 +1,5 @@ # sql/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_dml_constructors.py b/lib/sqlalchemy/sql/_dml_constructors.py index 3afe70e3afc..0a6f60115f1 100644 --- 
a/lib/sqlalchemy/sql/_dml_constructors.py +++ b/lib/sqlalchemy/sql/_dml_constructors.py @@ -1,5 +1,5 @@ # sql/_dml_constructors.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_elements_constructors.py b/lib/sqlalchemy/sql/_elements_constructors.py index 121386781e9..b628fcc9b52 100644 --- a/lib/sqlalchemy/sql/_elements_constructors.py +++ b/lib/sqlalchemy/sql/_elements_constructors.py @@ -1,5 +1,5 @@ # sql/_elements_constructors.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_orm_types.py b/lib/sqlalchemy/sql/_orm_types.py index bccb533ca0e..c37d805ef3f 100644 --- a/lib/sqlalchemy/sql/_orm_types.py +++ b/lib/sqlalchemy/sql/_orm_types.py @@ -1,5 +1,5 @@ # sql/_orm_types.py -# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_py_util.py b/lib/sqlalchemy/sql/_py_util.py index df372bf5d54..9e1a084a3f5 100644 --- a/lib/sqlalchemy/sql/_py_util.py +++ b/lib/sqlalchemy/sql/_py_util.py @@ -1,5 +1,5 @@ # sql/_py_util.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_selectable_constructors.py b/lib/sqlalchemy/sql/_selectable_constructors.py index 8ada82c8d4a..1660778c56f 100644 --- a/lib/sqlalchemy/sql/_selectable_constructors.py +++ b/lib/sqlalchemy/sql/_selectable_constructors.py @@ -1,5 +1,5 @@ # sql/_selectable_constructors.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index 73ed88996af..cf9129b479b 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -1,5 +1,5 @@ # sql/_typing.py -# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/annotation.py b/lib/sqlalchemy/sql/annotation.py index db382b874b6..bf445ff330d 100644 --- a/lib/sqlalchemy/sql/annotation.py +++ b/lib/sqlalchemy/sql/annotation.py @@ -1,5 +1,5 @@ # sql/annotation.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 23247dee147..6d409a9fb7e 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -1,5 +1,5 @@ # sql/base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/cache_key.py b/lib/sqlalchemy/sql/cache_key.py index 1172d3c98f4..1f562f2e67b 100644 --- 
a/lib/sqlalchemy/sql/cache_key.py +++ b/lib/sqlalchemy/sql/cache_key.py @@ -1,5 +1,5 @@ # sql/cache_key.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index a5730652055..123b5c556e1 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -1,5 +1,5 @@ # sql/coercions.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 0551c060055..49e8ce500e8 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -1,5 +1,5 @@ # sql/compiler.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py index d1426658239..19af40ff080 100644 --- a/lib/sqlalchemy/sql/crud.py +++ b/lib/sqlalchemy/sql/crud.py @@ -1,5 +1,5 @@ # sql/crud.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index d27b43f1fa1..0950043bcba 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -1,5 +1,5 @@ # sql/ddl.py -# Copyright (C) 2009-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2009-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py index 76131bcaa45..7fa5dafe9ce 100644 --- a/lib/sqlalchemy/sql/default_comparator.py +++ b/lib/sqlalchemy/sql/default_comparator.py @@ -1,5 +1,5 @@ # sql/default_comparator.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index 51e00ca4e26..0b92e38bce1 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -1,5 +1,5 @@ # sql/dml.py -# Copyright (C) 2009-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2009-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 62d71fbdf9a..f7d37677082 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -1,5 +1,5 @@ # sql/elements.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/events.py b/lib/sqlalchemy/sql/events.py index e9d19f337d0..601092fd912 100644 --- a/lib/sqlalchemy/sql/events.py +++ b/lib/sqlalchemy/sql/events.py @@ -1,5 +1,5 @@ # sql/events.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and 
contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py index ba42445d013..f8ac3a9ecad 100644 --- a/lib/sqlalchemy/sql/expression.py +++ b/lib/sqlalchemy/sql/expression.py @@ -1,5 +1,5 @@ # sql/expression.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index 2e86baf4985..ea02279d480 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -1,5 +1,5 @@ # sql/functions.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/lambdas.py b/lib/sqlalchemy/sql/lambdas.py index 061da29707c..8d70f800e74 100644 --- a/lib/sqlalchemy/sql/lambdas.py +++ b/lib/sqlalchemy/sql/lambdas.py @@ -1,5 +1,5 @@ # sql/lambdas.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/naming.py b/lib/sqlalchemy/sql/naming.py index 7213ddb297e..58203e4b9a1 100644 --- a/lib/sqlalchemy/sql/naming.py +++ b/lib/sqlalchemy/sql/naming.py @@ -1,5 +1,5 @@ # sql/naming.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index 33733d03fc9..d5f876cb0d8 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -1,5 +1,5 @@ # sql/operators.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/roles.py b/lib/sqlalchemy/sql/roles.py index ae70ac3a5bc..da69616dc46 100644 --- a/lib/sqlalchemy/sql/roles.py +++ b/lib/sqlalchemy/sql/roles.py @@ -1,5 +1,5 @@ # sql/roles.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index c9b57615110..173b38c5fe5 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -1,5 +1,5 @@ # sql/schema.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index e6be0ae5513..7660a1dbc74 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -1,5 +1,5 @@ # sql/selectable.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index a7d140ec6bd..ee471a6c4ec 100644 --- 
a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -1,5 +1,5 @@ # sql/sqltypes.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/traversals.py b/lib/sqlalchemy/sql/traversals.py index 8bb2939cb31..13ad28996e0 100644 --- a/lib/sqlalchemy/sql/traversals.py +++ b/lib/sqlalchemy/sql/traversals.py @@ -1,5 +1,5 @@ # sql/traversals.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 971acf30e3d..aeb804d3f9b 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -1,5 +1,5 @@ # sql/type_api.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index e7ca7b4bc2b..29cd0e2b005 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -1,5 +1,5 @@ # sql/util.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py index 286daae266d..e758350adf8 100644 --- a/lib/sqlalchemy/sql/visitors.py +++ b/lib/sqlalchemy/sql/visitors.py @@ -1,5 +1,5 @@ # sql/visitors.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py index 7fa361c9b92..4e574bbb24e 100644 --- a/lib/sqlalchemy/testing/__init__.py +++ b/lib/sqlalchemy/testing/__init__.py @@ -1,5 +1,5 @@ # testing/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py index baef79d1817..8364c15f8ff 100644 --- a/lib/sqlalchemy/testing/assertions.py +++ b/lib/sqlalchemy/testing/assertions.py @@ -1,5 +1,5 @@ # testing/assertions.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py index ae4d335a960..81c7138c4b5 100644 --- a/lib/sqlalchemy/testing/assertsql.py +++ b/lib/sqlalchemy/testing/assertsql.py @@ -1,5 +1,5 @@ # testing/assertsql.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/asyncio.py b/lib/sqlalchemy/testing/asyncio.py index f71ca57fe57..28470ba21c3 100644 --- a/lib/sqlalchemy/testing/asyncio.py +++ b/lib/sqlalchemy/testing/asyncio.py @@ -1,5 +1,5 @@ # testing/asyncio.py -# Copyright (C) 
2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py index 9e88d9dd893..2eec642b777 100644 --- a/lib/sqlalchemy/testing/config.py +++ b/lib/sqlalchemy/testing/config.py @@ -1,5 +1,5 @@ # testing/config.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py index 08fbe248e15..51beed98b19 100644 --- a/lib/sqlalchemy/testing/engines.py +++ b/lib/sqlalchemy/testing/engines.py @@ -1,5 +1,5 @@ # testing/engines.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/entities.py b/lib/sqlalchemy/testing/entities.py index 8f0f36bd1c4..5bd4f7de240 100644 --- a/lib/sqlalchemy/testing/entities.py +++ b/lib/sqlalchemy/testing/entities.py @@ -1,5 +1,5 @@ # testing/entities.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py index addc4b75940..8ff9b644384 100644 --- a/lib/sqlalchemy/testing/exclusions.py +++ b/lib/sqlalchemy/testing/exclusions.py @@ -1,5 +1,5 @@ # testing/exclusions.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/fixtures/__init__.py b/lib/sqlalchemy/testing/fixtures/__init__.py index 5981fb583d2..f2948dee8d3 100644 --- a/lib/sqlalchemy/testing/fixtures/__init__.py +++ b/lib/sqlalchemy/testing/fixtures/__init__.py @@ -1,5 +1,5 @@ # testing/fixtures/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/fixtures/base.py b/lib/sqlalchemy/testing/fixtures/base.py index 0697f4902f2..09d45a0a220 100644 --- a/lib/sqlalchemy/testing/fixtures/base.py +++ b/lib/sqlalchemy/testing/fixtures/base.py @@ -1,5 +1,5 @@ # testing/fixtures/base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/fixtures/mypy.py b/lib/sqlalchemy/testing/fixtures/mypy.py index 149df9f7d49..0832d89246f 100644 --- a/lib/sqlalchemy/testing/fixtures/mypy.py +++ b/lib/sqlalchemy/testing/fixtures/mypy.py @@ -1,5 +1,5 @@ # testing/fixtures/mypy.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/fixtures/orm.py b/lib/sqlalchemy/testing/fixtures/orm.py index 5ddd21ec64b..77cb243a808 100644 --- a/lib/sqlalchemy/testing/fixtures/orm.py +++ 
b/lib/sqlalchemy/testing/fixtures/orm.py @@ -1,5 +1,5 @@ # testing/fixtures/orm.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/fixtures/sql.py b/lib/sqlalchemy/testing/fixtures/sql.py index 39e5b084465..44cf21c24fe 100644 --- a/lib/sqlalchemy/testing/fixtures/sql.py +++ b/lib/sqlalchemy/testing/fixtures/sql.py @@ -1,5 +1,5 @@ # testing/fixtures/sql.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/pickleable.py b/lib/sqlalchemy/testing/pickleable.py index 761891ad4ac..9317be63b8f 100644 --- a/lib/sqlalchemy/testing/pickleable.py +++ b/lib/sqlalchemy/testing/pickleable.py @@ -1,5 +1,5 @@ # testing/pickleable.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/plugin/__init__.py b/lib/sqlalchemy/testing/plugin/__init__.py index 0f987773195..ce960be967d 100644 --- a/lib/sqlalchemy/testing/plugin/__init__.py +++ b/lib/sqlalchemy/testing/plugin/__init__.py @@ -1,5 +1,5 @@ # testing/plugin/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/plugin/bootstrap.py b/lib/sqlalchemy/testing/plugin/bootstrap.py index d0d375458ed..2ad4d9915eb 100644 --- a/lib/sqlalchemy/testing/plugin/bootstrap.py +++ b/lib/sqlalchemy/testing/plugin/bootstrap.py @@ -1,5 +1,5 @@ # testing/plugin/bootstrap.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py index a642668be93..2dfa441413d 100644 --- a/lib/sqlalchemy/testing/plugin/plugin_base.py +++ b/lib/sqlalchemy/testing/plugin/plugin_base.py @@ -1,5 +1,5 @@ # testing/plugin/plugin_base.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py index 1a4d4bb30a1..f6d47c631ce 100644 --- a/lib/sqlalchemy/testing/plugin/pytestplugin.py +++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py @@ -1,5 +1,5 @@ # testing/plugin/pytestplugin.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py index b9093c9017a..0d90947e444 100644 --- a/lib/sqlalchemy/testing/profiling.py +++ b/lib/sqlalchemy/testing/profiling.py @@ -1,5 +1,5 @@ # testing/profiling.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is 
released under diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py index a36575935f0..3afcf119b27 100644 --- a/lib/sqlalchemy/testing/provision.py +++ b/lib/sqlalchemy/testing/provision.py @@ -1,5 +1,5 @@ # testing/provision.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index 0554fcf38d5..bbf56a059a1 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -1,5 +1,5 @@ # testing/requirements.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/schema.py b/lib/sqlalchemy/testing/schema.py index 7dfd33d4d09..0dd7de2029d 100644 --- a/lib/sqlalchemy/testing/schema.py +++ b/lib/sqlalchemy/testing/schema.py @@ -1,5 +1,5 @@ # testing/schema.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/__init__.py b/lib/sqlalchemy/testing/suite/__init__.py index a146cb3163c..8435aa004f3 100644 --- a/lib/sqlalchemy/testing/suite/__init__.py +++ b/lib/sqlalchemy/testing/suite/__init__.py @@ -1,5 +1,5 @@ # testing/suite/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_cte.py b/lib/sqlalchemy/testing/suite/test_cte.py index 5d37880e1eb..4e4d420faa1 100644 --- a/lib/sqlalchemy/testing/suite/test_cte.py +++ b/lib/sqlalchemy/testing/suite/test_cte.py @@ -1,5 +1,5 @@ # testing/suite/test_cte.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_ddl.py b/lib/sqlalchemy/testing/suite/test_ddl.py index 3d9b8ec13d0..c7e7d817d8e 100644 --- a/lib/sqlalchemy/testing/suite/test_ddl.py +++ b/lib/sqlalchemy/testing/suite/test_ddl.py @@ -1,5 +1,5 @@ # testing/suite/test_ddl.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_deprecations.py b/lib/sqlalchemy/testing/suite/test_deprecations.py index 07970c03ecb..db0a9fc48db 100644 --- a/lib/sqlalchemy/testing/suite/test_deprecations.py +++ b/lib/sqlalchemy/testing/suite/test_deprecations.py @@ -1,5 +1,5 @@ # testing/suite/test_deprecations.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_dialect.py b/lib/sqlalchemy/testing/suite/test_dialect.py index 696472037d1..ae67cc10adc 100644 --- a/lib/sqlalchemy/testing/suite/test_dialect.py +++ b/lib/sqlalchemy/testing/suite/test_dialect.py @@ -1,5 +1,5 @@ # 
testing/suite/test_dialect.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index 1cff044bc3e..8467c351790 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -1,5 +1,5 @@ # testing/suite/test_insert.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index 91113be9b49..54d0d449a90 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -1,5 +1,5 @@ # testing/suite/test_reflection.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_results.py b/lib/sqlalchemy/testing/suite/test_results.py index 2b91a559dbe..a6179d85598 100644 --- a/lib/sqlalchemy/testing/suite/test_results.py +++ b/lib/sqlalchemy/testing/suite/test_results.py @@ -1,5 +1,5 @@ # testing/suite/test_results.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_rowcount.py b/lib/sqlalchemy/testing/suite/test_rowcount.py index a7dbd364f1b..59953fff59c 100644 --- a/lib/sqlalchemy/testing/suite/test_rowcount.py +++ b/lib/sqlalchemy/testing/suite/test_rowcount.py @@ -1,5 +1,5 @@ # testing/suite/test_rowcount.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py index 8e1ae79b220..7eb5cd0055d 100644 --- a/lib/sqlalchemy/testing/suite/test_select.py +++ b/lib/sqlalchemy/testing/suite/test_select.py @@ -1,5 +1,5 @@ # testing/suite/test_select.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_sequence.py b/lib/sqlalchemy/testing/suite/test_sequence.py index 138616f1399..f0e6575370b 100644 --- a/lib/sqlalchemy/testing/suite/test_sequence.py +++ b/lib/sqlalchemy/testing/suite/test_sequence.py @@ -1,5 +1,5 @@ # testing/suite/test_sequence.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py index d4c5a2250dc..de3cd53e345 100644 --- a/lib/sqlalchemy/testing/suite/test_types.py +++ b/lib/sqlalchemy/testing/suite/test_types.py @@ -1,5 +1,5 @@ # testing/suite/test_types.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 
the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_unicode_ddl.py b/lib/sqlalchemy/testing/suite/test_unicode_ddl.py index 1f15ab5647f..c8dd3350588 100644 --- a/lib/sqlalchemy/testing/suite/test_unicode_ddl.py +++ b/lib/sqlalchemy/testing/suite/test_unicode_ddl.py @@ -1,5 +1,5 @@ # testing/suite/test_unicode_ddl.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/suite/test_update_delete.py b/lib/sqlalchemy/testing/suite/test_update_delete.py index fd4757f9a4a..85a8d393391 100644 --- a/lib/sqlalchemy/testing/suite/test_update_delete.py +++ b/lib/sqlalchemy/testing/suite/test_update_delete.py @@ -1,5 +1,5 @@ # testing/suite/test_update_delete.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py index d2f8f5b6184..42f077108f5 100644 --- a/lib/sqlalchemy/testing/util.py +++ b/lib/sqlalchemy/testing/util.py @@ -1,5 +1,5 @@ # testing/util.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py index baef037f73e..9be0813b584 100644 --- a/lib/sqlalchemy/testing/warnings.py +++ b/lib/sqlalchemy/testing/warnings.py @@ -1,5 +1,5 @@ # testing/warnings.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py index a5bb56cf661..c2b1ab1945f 100644 --- a/lib/sqlalchemy/types.py +++ b/lib/sqlalchemy/types.py @@ -1,5 +1,5 @@ # types.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py index 33b7addb668..a9b4c3b1c0f 100644 --- a/lib/sqlalchemy/util/__init__.py +++ b/lib/sqlalchemy/util/__init__.py @@ -1,5 +1,5 @@ # util/__init__.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index 8a7e8ea9d07..12283eba94b 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -1,5 +1,5 @@ # util/_collections.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/_concurrency_py3k.py b/lib/sqlalchemy/util/_concurrency_py3k.py index a19607cd01c..718c077c0da 100644 --- a/lib/sqlalchemy/util/_concurrency_py3k.py +++ b/lib/sqlalchemy/util/_concurrency_py3k.py @@ -1,5 +1,5 @@ # util/_concurrency_py3k.py -# Copyright (C) 2005-2024 the SQLAlchemy 
authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/_has_cy.py b/lib/sqlalchemy/util/_has_cy.py index 7713e236aca..21faed04e6b 100644 --- a/lib/sqlalchemy/util/_has_cy.py +++ b/lib/sqlalchemy/util/_has_cy.py @@ -1,5 +1,5 @@ # util/_has_cy.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/_py_collections.py b/lib/sqlalchemy/util/_py_collections.py index dfb9af2e13d..f6aefcf67c3 100644 --- a/lib/sqlalchemy/util/_py_collections.py +++ b/lib/sqlalchemy/util/_py_collections.py @@ -1,5 +1,5 @@ # util/_py_collections.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py index 7620a364591..c8b5e7a2203 100644 --- a/lib/sqlalchemy/util/compat.py +++ b/lib/sqlalchemy/util/compat.py @@ -1,5 +1,5 @@ # util/compat.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/concurrency.py b/lib/sqlalchemy/util/concurrency.py index de6195de8f1..006340f5bf3 100644 --- a/lib/sqlalchemy/util/concurrency.py +++ b/lib/sqlalchemy/util/concurrency.py @@ -1,5 +1,5 @@ # util/concurrency.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py index 3a59a8a4bcd..88b68724038 100644 --- a/lib/sqlalchemy/util/deprecations.py +++ b/lib/sqlalchemy/util/deprecations.py @@ -1,5 +1,5 @@ # util/deprecations.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 00ee0deb3ff..b4086f1d579 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -1,5 +1,5 @@ # util/langhelpers.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/preloaded.py b/lib/sqlalchemy/util/preloaded.py index e91ce685450..4ea9aa90f30 100644 --- a/lib/sqlalchemy/util/preloaded.py +++ b/lib/sqlalchemy/util/preloaded.py @@ -1,5 +1,5 @@ # util/preloaded.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/queue.py b/lib/sqlalchemy/util/queue.py index 99a68a3177a..3fb01a9a9f8 100644 --- a/lib/sqlalchemy/util/queue.py +++ b/lib/sqlalchemy/util/queue.py @@ -1,5 +1,5 @@ # util/queue.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This 
module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/tool_support.py b/lib/sqlalchemy/util/tool_support.py index a203a2ab75a..407c2d45075 100644 --- a/lib/sqlalchemy/util/tool_support.py +++ b/lib/sqlalchemy/util/tool_support.py @@ -1,5 +1,5 @@ # util/tool_support.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py index aebbb436cec..393c855abca 100644 --- a/lib/sqlalchemy/util/topological.py +++ b/lib/sqlalchemy/util/topological.py @@ -1,5 +1,5 @@ # util/topological.py -# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index 03ae44eaad0..62fd47c6a33 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -1,5 +1,5 @@ # util/typing.py -# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors +# Copyright (C) 2022-2025 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under From 3304bc58a6d6d79db7c87fe4fe569a455a35a3b0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 3 Jan 2025 12:19:27 -0500 Subject: [PATCH 412/544] guard against KeyError on subclass removal Fixed issue in event system which prevented an event listener from being attached and detached from multiple class-like objects, namely the :class:`.sessionmaker` or :class:`.scoped_session` targets that assign to :class:`.Session` subclasses. Fixes: #12216 Change-Id: I3d8969fe604adbc23add07a13741938c7f4fc8ca (cherry picked from commit e4f0afe06baa5d9b57d5b8cfe2647b943f2145e6) --- doc/build/changelog/unreleased_20/12216.rst | 9 ++ lib/sqlalchemy/event/registry.py | 6 +- test/base/test_events.py | 133 ++++++++++++++++++++ 3 files changed, 147 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/12216.rst diff --git a/doc/build/changelog/unreleased_20/12216.rst b/doc/build/changelog/unreleased_20/12216.rst new file mode 100644 index 00000000000..a4126733356 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12216.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, orm + :tickets: 12216 + + Fixed issue in event system which prevented an event listener from being + attached and detached from multiple class-like objects, namely the + :class:`.sessionmaker` or :class:`.scoped_session` targets that assign to + :class:`.Session` subclasses. + diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py index 77fea0006f4..d7e4b321553 100644 --- a/lib/sqlalchemy/event/registry.py +++ b/lib/sqlalchemy/event/registry.py @@ -154,7 +154,11 @@ def _removed_from_collection( if owner_ref in _collection_to_key: listener_to_key = _collection_to_key[owner_ref] - listener_to_key.pop(listen_ref) + # see #12216 - this guards against a removal that already occurred + # here. 
however, I cannot come up with a test that shows any negative + # side effects occurring from this removal happening, even though an + # event key may still be referenced from a clsleveldispatch here + listener_to_key.pop(listen_ref, None) def _stored_in_collection_multi( diff --git a/test/base/test_events.py b/test/base/test_events.py index 6f8456274f3..7a387e8440d 100644 --- a/test/base/test_events.py +++ b/test/base/test_events.py @@ -1271,6 +1271,107 @@ class Target: return Target + def test_two_subclasses_one_event(self): + """test #12216""" + + Target = self._fixture() + + class TargetSubclassOne(Target): + pass + + class TargetSubclassTwo(Target): + pass + + m1 = Mock() + + def my_event_one(x, y): + m1.my_event_one(x, y) + + event.listen(TargetSubclassOne, "event_one", my_event_one) + event.listen(TargetSubclassTwo, "event_one", my_event_one) + + t1 = TargetSubclassOne() + t2 = TargetSubclassTwo() + + t1.dispatch.event_one("x1a", "y1a") + t2.dispatch.event_one("x2a", "y2a") + + eq_( + m1.mock_calls, + [call.my_event_one("x1a", "y1a"), call.my_event_one("x2a", "y2a")], + ) + + event.remove(TargetSubclassOne, "event_one", my_event_one) + + t1.dispatch.event_one("x1b", "y1b") + t2.dispatch.event_one("x2b", "y2b") + + eq_( + m1.mock_calls, + [ + call.my_event_one("x1a", "y1a"), + call.my_event_one("x2a", "y2a"), + call.my_event_one("x2b", "y2b"), + ], + ) + + event.remove(TargetSubclassTwo, "event_one", my_event_one) + + t1.dispatch.event_one("x1c", "y1c") + t2.dispatch.event_one("x2c", "y2c") + + eq_( + m1.mock_calls, + [ + call.my_event_one("x1a", "y1a"), + call.my_event_one("x2a", "y2a"), + call.my_event_one("x2b", "y2b"), + ], + ) + + def test_two_subclasses_one_event_reg_cleanup(self): + """test #12216""" + + from sqlalchemy.event import registry + + Target = self._fixture() + + class TargetSubclassOne(Target): + pass + + class TargetSubclassTwo(Target): + pass + + m1 = Mock() + + def my_event_one(x, y): + m1.my_event_one(x, y) + + event.listen(TargetSubclassOne, "event_one", my_event_one) + event.listen(TargetSubclassTwo, "event_one", my_event_one) + + key1 = (id(TargetSubclassOne), "event_one", id(my_event_one)) + key2 = (id(TargetSubclassTwo), "event_one", id(my_event_one)) + + assert key1 in registry._key_to_collection + assert key2 in registry._key_to_collection + + del TargetSubclassOne + gc_collect() + + # the key remains because the gc routine would be based on deleting + # Target (I think) + assert key1 in registry._key_to_collection + assert key2 in registry._key_to_collection + + del TargetSubclassTwo + gc_collect() + + assert key1 in registry._key_to_collection + assert key2 in registry._key_to_collection + + # event.remove(TargetSubclassTwo, "event_one", my_event_one) + def test_clslevel(self): Target = self._fixture() @@ -1503,6 +1604,38 @@ def test_listener_collection_removed_cleanup(self): assert key not in registry._key_to_collection assert collection_ref not in registry._collection_to_key + @testing.requires.predictable_gc + def test_listener_collection_removed_cleanup_clslevel(self): + """test related to #12216""" + + from sqlalchemy.event import registry + + Target = self._fixture() + + m1 = Mock() + + event.listen(Target, "event_one", m1) + + key = (id(Target), "event_one", id(m1)) + + assert key in registry._key_to_collection + collection_ref = list(registry._key_to_collection[key])[0] + assert collection_ref in registry._collection_to_key + + t1 = Target() + t1.dispatch.event_one("t1") + + del t1 + + del Target + + gc_collect() + + # gc of a target class does 
not currently cause these collections + # to be cleaned up + assert key in registry._key_to_collection + assert collection_ref in registry._collection_to_key + def test_remove_not_listened(self): Target = self._fixture() From a89b9729650d3cdbce1704e7159d8ac3b32f4f4b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 3 Jan 2025 15:40:26 -0500 Subject: [PATCH 413/544] close unclosed sqlite result this close is hoped to address failures that have been occurring on github actions under python 3.13, although i am able to reproduce the problem on other python versions as well when running test/orm/test_events.py with the --random extension. Change-Id: If0c4110815fd8625b39b2d74de26ac965401de14 (cherry picked from commit b0d9d5a44cdd5632d209149a6a6622073acee3da) --- test/orm/test_events.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/orm/test_events.py b/test/orm/test_events.py index 5e1672b526b..2b24e47469d 100644 --- a/test/orm/test_events.py +++ b/test/orm/test_events.py @@ -782,7 +782,8 @@ def test_update_delete_flags(self, stmt_type, from_stmt): if from_stmt: stmt = select(User).from_statement(stmt.returning(User)) - sess.execute(stmt) + result = sess.execute(stmt) + result.close() eq_( canary.mock_calls, From 0a407f053ad7890906cf4f8e734839c6d2e479a4 Mon Sep 17 00:00:00 2001 From: CommanderKeynes Date: Tue, 7 Jan 2025 10:52:36 -0500 Subject: [PATCH 414/544] Asyncpg null query fix Adjusted the asyncpg dialect so that an empty SQL string, which is valid for PostgreSQL server, may be successfully processed at the dialect level, such as when using :meth:`.Connection.exec_driver_sql`. Pull request courtesy Andrew Jackson. Closes: #12220 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12220 Pull-request-sha: 45c94febee66b567040b1fbfa3a93079a1314f09 Change-Id: I870df9e31f4a229939e76c702724c25073329282 (cherry picked from commit 7bfb829f25c1bfe2139afe7875882298aaf345ba) --- doc/build/changelog/unreleased_20/12220.rst | 9 +++++++++ lib/sqlalchemy/dialects/postgresql/asyncpg.py | 3 ++- test/dialect/postgresql/test_dialect.py | 6 ++++++ 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/12220.rst diff --git a/doc/build/changelog/unreleased_20/12220.rst b/doc/build/changelog/unreleased_20/12220.rst new file mode 100644 index 00000000000..a4b30cca5b1 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12220.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, postgresql + :tickets: 12220 + + Adjusted the asyncpg dialect so that an empty SQL string, which is valid + for PostgreSQL server, may be successfully processed at the dialect level, + such as when using :meth:`.Connection.exec_driver_sql`. Pull request + courtesy Andrew Jackson. 
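As a minimal sketch of the behavior this change enables (not part of the patch; the URL is a placeholder and a reachable PostgreSQL database is assumed), an empty driver-level SQL string should now pass through the asyncpg dialect without error, since the rowcount regular expression no longer chokes on a missing status message::

    import asyncio

    from sqlalchemy.ext.asyncio import create_async_engine


    async def main():
        # placeholder URL; substitute real credentials, host and database
        engine = create_async_engine("postgresql+asyncpg://user:pass@host/db")
        async with engine.connect() as conn:
            # an empty statement is valid for the PostgreSQL server
            await conn.exec_driver_sql("")
        await engine.dispose()


    asyncio.run(main())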
+ diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 1761c8de53c..523c47abcd0 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -546,7 +546,8 @@ async def _prepare_and_execute(self, operation, parameters): status = prepared_stmt.get_statusmsg() reg = re.match( - r"(?:SELECT|UPDATE|DELETE|INSERT \d+) (\d+)", status + r"(?:SELECT|UPDATE|DELETE|INSERT \d+) (\d+)", + status or "", ) if reg: self.rowcount = int(reg.group(1)) diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py index 892e2abc9be..109101011fc 100644 --- a/test/dialect/postgresql/test_dialect.py +++ b/test/dialect/postgresql/test_dialect.py @@ -1040,6 +1040,12 @@ class MiscBackendTest( __only_on__ = "postgresql" __backend__ = True + @testing.fails_on(["+psycopg2"]) + def test_empty_sql_string(self, connection): + + result = connection.exec_driver_sql("") + assert result._soft_closed + @testing.provide_metadata def test_date_reflection(self): metadata = self.metadata From f3c5a1f2f20e5e3ba759d03660da221d9a35cd78 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 9 Jan 2025 17:36:03 -0500 Subject: [PATCH 415/544] fix changelog typos Change-Id: Ib1631c23fe8ad2d4aa4a537526faf970229af56d (cherry picked from commit 84bf309e0223ea06c873a2ddb6f52fa6f5e1169c) --- doc/build/changelog/unreleased_20/10720.rst | 5 +++-- doc/build/changelog/unreleased_20/11370.rst | 19 ++++++++++--------- doc/build/changelog/unreleased_20/11724.rst | 8 ++++---- doc/build/changelog/unreleased_20/11764.rst | 8 ++++---- doc/build/changelog/unreleased_20/11944.rst | 6 ++++-- doc/build/changelog/unreleased_20/11955.rst | 17 ++++++++--------- doc/build/changelog/unreleased_20/12016.rst | 9 +++++---- doc/build/changelog/unreleased_20/12093.rst | 7 ++++--- doc/build/changelog/unreleased_20/12100.rst | 4 ++-- doc/build/changelog/unreleased_20/12207.rst | 9 +++++---- doc/build/changelog/unreleased_20/7398.rst | 6 +++--- doc/build/core/dml.rst | 6 ++++++ 12 files changed, 58 insertions(+), 46 deletions(-) diff --git a/doc/build/changelog/unreleased_20/10720.rst b/doc/build/changelog/unreleased_20/10720.rst index d676a4425d8..98ba0a0dc49 100644 --- a/doc/build/changelog/unreleased_20/10720.rst +++ b/doc/build/changelog/unreleased_20/10720.rst @@ -1,5 +1,6 @@ .. change:: :tags: usecase, mariadb - :ticket: 10720 + :tickets: 10720 - Added sql types ``INET4`` and ``INET6`` in the MariaDB dialect. + Added sql types ``INET4`` and ``INET6`` in the MariaDB dialect. Pull + request courtesy Adam Žurek. diff --git a/doc/build/changelog/unreleased_20/11370.rst b/doc/build/changelog/unreleased_20/11370.rst index 56e85531fc9..a98940f8b6a 100644 --- a/doc/build/changelog/unreleased_20/11370.rst +++ b/doc/build/changelog/unreleased_20/11370.rst @@ -4,12 +4,13 @@ Fixed issue regarding ``Union`` types that would be present in the :paramref:`_orm.registry.type_annotation_map` of a :class:`_orm.registry` - or declarative base class, where a ``Mapped[]`` element that included one - of the subtypes present in that ``Union`` would be matched to that entry, - potentially ignoring other entries that matched exactly. The correct - behavior now takes place such that an entry should only match in - ``type_annotation_map`` exactly, as a ``Union`` type is a self-contained - type. 
For example, an attribute with ``Mapped[float]`` would previously - match to a ``type_annotation_map`` entry ``Union[float, Decimal]``; this - will no longer match and will now only match to an entry that states - ``float``. Pull request courtesy Frazer McLean. + or declarative base class, where a :class:`.Mapped` element that included + one of the subtypes present in that ``Union`` would be matched to that + entry, potentially ignoring other entries that matched exactly. The + correct behavior now takes place such that an entry should only match in + :paramref:`_orm.registry.type_annotation_map` exactly, as a ``Union`` type + is a self-contained type. For example, an attribute with ``Mapped[float]`` + would previously match to a :paramref:`_orm.registry.type_annotation_map` + entry ``Union[float, Decimal]``; this will no longer match and will now + only match to an entry that states ``float``. Pull request courtesy Frazer + McLean. diff --git a/doc/build/changelog/unreleased_20/11724.rst b/doc/build/changelog/unreleased_20/11724.rst index 3e8c436ebbc..70ebd9e3e2f 100644 --- a/doc/build/changelog/unreleased_20/11724.rst +++ b/doc/build/changelog/unreleased_20/11724.rst @@ -1,7 +1,7 @@ .. change:: :tags: bug, postgresql - :ticket: 11724 + :tickets: 11724 - Fixes issue in `get_multi_indexes` in postgresql dialect, where an error - would be thrown when attempting to use alembic with a vector index from - the pgvecto.rs extension. + Fixes issue in :meth:`.Dialect.get_multi_indexes` in the PostgreSQL + dialect, where an error would be thrown when attempting to use alembic with + a vector index from the pgvecto.rs extension. diff --git a/doc/build/changelog/unreleased_20/11764.rst b/doc/build/changelog/unreleased_20/11764.rst index 499852b6d09..6e37f86bf16 100644 --- a/doc/build/changelog/unreleased_20/11764.rst +++ b/doc/build/changelog/unreleased_20/11764.rst @@ -4,8 +4,8 @@ Added support for the ``LIMIT`` clause with ``DELETE`` for the MySQL and MariaDB dialects, to complement the already present option for - ``UPDATE``. The :meth:`.delete.with_dialect_options` method of the - `:func:`.delete` construct accepts parameters for ``mysql_limit`` and + ``UPDATE``. The :meth:`.Delete.with_dialect_options` method of the + :func:`.delete` construct accepts parameters for ``mysql_limit`` and ``mariadb_limit``, allowing users to specify a limit on the number of rows deleted. Pull request courtesy of Pablo Nicolás Estevez. @@ -14,7 +14,7 @@ :tags: bug, mysql, mariadb Added logic to ensure that the ``mysql_limit`` and ``mariadb_limit`` - parameters of :meth:`.update.with_dialect_options` and - :meth:`.delete.with_dialect_options` when compiled to string will only + parameters of :meth:`.Update.with_dialect_options` and + :meth:`.Delete.with_dialect_options` when compiled to string will only compile if the parameter is passed as an integer; a ``ValueError`` is raised otherwise. diff --git a/doc/build/changelog/unreleased_20/11944.rst b/doc/build/changelog/unreleased_20/11944.rst index e7469180ec2..0be3cb926d7 100644 --- a/doc/build/changelog/unreleased_20/11944.rst +++ b/doc/build/changelog/unreleased_20/11944.rst @@ -2,5 +2,7 @@ :tags: bug, orm :tickets: 11944 - Fixed bug in how type unions were handled that made the behavior - of ``a | b`` different from ``Union[a, b]``. + Fixed bug in how type unions were handled within + :paramref:`_orm.registry.type_annotation_map` as well as + :class:`._orm.Mapped` that made the lookup behavior of ``a | b`` different + from that of ``Union[a, b]``. 
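To make the union-handling notes above concrete (a minimal sketch, not part of the patch; the model and column names are made up), a union entry in the type annotation map is matched only by attributes annotated with that same union, whether spelled with ``Union[...]`` or with the ``|`` operator::

    import decimal
    from typing import Union

    from sqlalchemy import Numeric
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


    class Base(DeclarativeBase):
        # the union entry is matched as a whole
        type_annotation_map = {
            Union[float, decimal.Decimal]: Numeric(10, 2),
        }


    class Measurement(Base):
        __tablename__ = "measurement"

        id: Mapped[int] = mapped_column(primary_key=True)

        # matches the union entry above; per the fix, writing the annotation
        # as ``float | decimal.Decimal`` behaves the same way
        value: Mapped[Union[float, decimal.Decimal]] = mapped_column()

        # plain ``float`` no longer matches the union entry and falls back
        # to the default mapping for float
        score: Mapped[float] = mapped_column()

Similarly, the new ``mysql_limit`` / ``mariadb_limit`` option described above might be used as follows (again a sketch; the ``log`` table is hypothetical)::

    from sqlalchemy import column, delete, table

    log = table("log", column("id"))

    # delete at most ten matching rows when compiled for MySQL or MariaDB;
    # a non-integer value raises ValueError when the statement is compiled
    stmt = delete(log).where(log.c.id < 100).with_dialect_options(mysql_limit=10)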
diff --git a/doc/build/changelog/unreleased_20/11955.rst b/doc/build/changelog/unreleased_20/11955.rst index eeeb2bcbddb..bf62530f99d 100644 --- a/doc/build/changelog/unreleased_20/11955.rst +++ b/doc/build/changelog/unreleased_20/11955.rst @@ -2,12 +2,11 @@ :tags: bug, orm :tickets: 11955 - Consistently handle ``TypeAliasType`` (defined in PEP 695) obtained with the - ``type X = int`` syntax introduced in python 3.12. - Now in all cases one such alias must be explicitly added to the type map for - it to be usable inside ``Mapped[]``. - This change also revises the approach added in :ticket:`11305`, now requiring - the ``TypeAliasType`` to be added to the type map. - Documentation on how unions and type alias types are handled by SQLAlchemy - has been added in the :ref:`orm_declarative_mapped_column_type_map` section - of the documentation. + Consistently handle ``TypeAliasType`` (defined in PEP 695) obtained with + the ``type X = int`` syntax introduced in python 3.12. Now in all cases one + such alias must be explicitly added to the type map for it to be usable + inside :class:`.Mapped`. This change also revises the approach added in + :ticket:`11305`, now requiring the ``TypeAliasType`` to be added to the + type map. Documentation on how unions and type alias types are handled by + SQLAlchemy has been added in the + :ref:`orm_declarative_mapped_column_type_map` section of the documentation. diff --git a/doc/build/changelog/unreleased_20/12016.rst b/doc/build/changelog/unreleased_20/12016.rst index 5fa68d03723..e89c25576d6 100644 --- a/doc/build/changelog/unreleased_20/12016.rst +++ b/doc/build/changelog/unreleased_20/12016.rst @@ -1,7 +1,8 @@ .. change:: :tags: feature, oracle - :ticket: 12016 + :tickets: 12016 - Added new table option `oracle_tablespace` to specify the `TABLESPACE` option - when creating a table in Oracle. This allows users to define the tablespace in - which the table should be created. Pull request courtesy of Miguel Grillo. + Added new table option ``oracle_tablespace`` to specify the ``TABLESPACE`` + option when creating a table in Oracle. This allows users to define the + tablespace in which the table should be created. Pull request courtesy of + Miguel Grillo. diff --git a/doc/build/changelog/unreleased_20/12093.rst b/doc/build/changelog/unreleased_20/12093.rst index b9ec3b1f88b..3c6958d9adb 100644 --- a/doc/build/changelog/unreleased_20/12093.rst +++ b/doc/build/changelog/unreleased_20/12093.rst @@ -1,6 +1,7 @@ .. change:: :tags: usecase, postgresql - :ticket: 12093 + :tickets: 12093 - The :class:`_postgresql.Range` type now supports ``__contains__``. - Pull request courtesy of Frazer McLean. + The :class:`_postgresql.Range` type now supports + :meth:`_postgresql.Range.__contains__`. Pull request courtesy of Frazer + McLean. diff --git a/doc/build/changelog/unreleased_20/12100.rst b/doc/build/changelog/unreleased_20/12100.rst index 5fc111ae495..a7526dfd0c4 100644 --- a/doc/build/changelog/unreleased_20/12100.rst +++ b/doc/build/changelog/unreleased_20/12100.rst @@ -2,5 +2,5 @@ :tags: bug, oracle :tickets: 12100 - Fixed compilation of ``TABLE`` function when used in a from clause - in Oracle Database dialect. + Fixed compilation of ``TABLE`` function when used in a ``FROM`` clause in + Oracle Database dialect. 
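For the :class:`_postgresql.Range` change noted above, membership testing now works on the Python-side object (a minimal sketch, not part of the patch)::

    from sqlalchemy.dialects.postgresql import Range

    r = Range(1, 10)  # default bounds are "[)"

    # __contains__ support added by this change
    assert 5 in r
    assert 42 not in r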
diff --git a/doc/build/changelog/unreleased_20/12207.rst b/doc/build/changelog/unreleased_20/12207.rst index a6457b90ba7..a69f8ae93a9 100644 --- a/doc/build/changelog/unreleased_20/12207.rst +++ b/doc/build/changelog/unreleased_20/12207.rst @@ -2,7 +2,8 @@ :tags: bug, orm :tickets: 12207 - Fixed issues in type handling within the ``type_annotation_map`` feature - which prevented the use of unions, using either pep-604 or ``Union`` - syntaxes under future annotations mode, which contained multiple generic - types as elements from being correctly resolvable. + Fixed issues in type handling within the + :paramref:`_orm.registry.type_annotation_map` feature which prevented the + use of unions, using either pep-604 or ``Union`` syntaxes under future + annotations mode, which contained multiple generic types as elements from + being correctly resolvable. diff --git a/doc/build/changelog/unreleased_20/7398.rst b/doc/build/changelog/unreleased_20/7398.rst index 9a27ae99a73..1241d175a3a 100644 --- a/doc/build/changelog/unreleased_20/7398.rst +++ b/doc/build/changelog/unreleased_20/7398.rst @@ -1,6 +1,6 @@ .. change:: :tags: usecase, sqlite - :ticket: 7398 + :tickets: 7398 - Added SQLite table option to enable ``STRICT`` tables. - Pull request courtesy of Guilherme Crocetti. + Added SQLite table option to enable ``STRICT`` tables. Pull request + courtesy of Guilherme Crocetti. diff --git a/doc/build/core/dml.rst b/doc/build/core/dml.rst index 7070277f14f..1724dd6985c 100644 --- a/doc/build/core/dml.rst +++ b/doc/build/core/dml.rst @@ -32,11 +32,15 @@ Class documentation for the constructors listed at .. automethod:: Delete.where + .. automethod:: Delete.with_dialect_options + .. automethod:: Delete.returning .. autoclass:: Insert :members: + .. automethod:: Insert.with_dialect_options + .. automethod:: Insert.values .. automethod:: Insert.returning @@ -48,6 +52,8 @@ Class documentation for the constructors listed at .. automethod:: Update.where + .. automethod:: Update.with_dialect_options + .. automethod:: Update.values .. 
autoclass:: sqlalchemy.sql.expression.UpdateBase From 6fa2689bdb73f3a52dd53a7942a953d42cfc7f94 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 9 Jan 2025 17:37:34 -0500 Subject: [PATCH 416/544] - 2.0.37 --- doc/build/changelog/changelog_20.rst | 182 +++++++++++++++++++- doc/build/changelog/unreleased_20/10720.rst | 6 - doc/build/changelog/unreleased_20/11370.rst | 16 -- doc/build/changelog/unreleased_20/11724.rst | 7 - doc/build/changelog/unreleased_20/11764.rst | 20 --- doc/build/changelog/unreleased_20/11944.rst | 8 - doc/build/changelog/unreleased_20/11955.rst | 12 -- doc/build/changelog/unreleased_20/12016.rst | 8 - doc/build/changelog/unreleased_20/12019.rst | 8 - doc/build/changelog/unreleased_20/12032.rst | 7 - doc/build/changelog/unreleased_20/12084.rst | 9 - doc/build/changelog/unreleased_20/12093.rst | 7 - doc/build/changelog/unreleased_20/12100.rst | 6 - doc/build/changelog/unreleased_20/12150.rst | 8 - doc/build/changelog/unreleased_20/12170.rst | 7 - doc/build/changelog/unreleased_20/12207.rst | 9 - doc/build/changelog/unreleased_20/12216.rst | 9 - doc/build/changelog/unreleased_20/12220.rst | 9 - doc/build/changelog/unreleased_20/7398.rst | 6 - doc/build/conf.py | 4 +- 20 files changed, 183 insertions(+), 165 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/10720.rst delete mode 100644 doc/build/changelog/unreleased_20/11370.rst delete mode 100644 doc/build/changelog/unreleased_20/11724.rst delete mode 100644 doc/build/changelog/unreleased_20/11764.rst delete mode 100644 doc/build/changelog/unreleased_20/11944.rst delete mode 100644 doc/build/changelog/unreleased_20/11955.rst delete mode 100644 doc/build/changelog/unreleased_20/12016.rst delete mode 100644 doc/build/changelog/unreleased_20/12019.rst delete mode 100644 doc/build/changelog/unreleased_20/12032.rst delete mode 100644 doc/build/changelog/unreleased_20/12084.rst delete mode 100644 doc/build/changelog/unreleased_20/12093.rst delete mode 100644 doc/build/changelog/unreleased_20/12100.rst delete mode 100644 doc/build/changelog/unreleased_20/12150.rst delete mode 100644 doc/build/changelog/unreleased_20/12170.rst delete mode 100644 doc/build/changelog/unreleased_20/12207.rst delete mode 100644 doc/build/changelog/unreleased_20/12216.rst delete mode 100644 doc/build/changelog/unreleased_20/12220.rst delete mode 100644 doc/build/changelog/unreleased_20/7398.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 15db3ab9a6a..9f1c449751b 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,187 @@ .. changelog:: :version: 2.0.37 - :include_notes_from: unreleased_20 + :released: January 9, 2025 + + .. change:: + :tags: usecase, mariadb + :tickets: 10720 + + Added sql types ``INET4`` and ``INET6`` in the MariaDB dialect. Pull + request courtesy Adam Žurek. + + .. change:: + :tags: bug, orm + :tickets: 11370 + + Fixed issue regarding ``Union`` types that would be present in the + :paramref:`_orm.registry.type_annotation_map` of a :class:`_orm.registry` + or declarative base class, where a :class:`.Mapped` element that included + one of the subtypes present in that ``Union`` would be matched to that + entry, potentially ignoring other entries that matched exactly. The + correct behavior now takes place such that an entry should only match in + :paramref:`_orm.registry.type_annotation_map` exactly, as a ``Union`` type + is a self-contained type. 
For example, an attribute with ``Mapped[float]`` + would previously match to a :paramref:`_orm.registry.type_annotation_map` + entry ``Union[float, Decimal]``; this will no longer match and will now + only match to an entry that states ``float``. Pull request courtesy Frazer + McLean. + + .. change:: + :tags: bug, postgresql + :tickets: 11724 + + Fixes issue in :meth:`.Dialect.get_multi_indexes` in the PostgreSQL + dialect, where an error would be thrown when attempting to use alembic with + a vector index from the pgvecto.rs extension. + + .. change:: + :tags: usecase, mysql, mariadb + :tickets: 11764 + + Added support for the ``LIMIT`` clause with ``DELETE`` for the MySQL and + MariaDB dialects, to complement the already present option for + ``UPDATE``. The :meth:`.Delete.with_dialect_options` method of the + :func:`.delete` construct accepts parameters for ``mysql_limit`` and + ``mariadb_limit``, allowing users to specify a limit on the number of rows + deleted. Pull request courtesy of Pablo Nicolás Estevez. + + + .. change:: + :tags: bug, mysql, mariadb + + Added logic to ensure that the ``mysql_limit`` and ``mariadb_limit`` + parameters of :meth:`.Update.with_dialect_options` and + :meth:`.Delete.with_dialect_options` when compiled to string will only + compile if the parameter is passed as an integer; a ``ValueError`` is + raised otherwise. + + .. change:: + :tags: bug, orm + :tickets: 11944 + + Fixed bug in how type unions were handled within + :paramref:`_orm.registry.type_annotation_map` as well as + :class:`._orm.Mapped` that made the lookup behavior of ``a | b`` different + from that of ``Union[a, b]``. + + .. change:: + :tags: bug, orm + :tickets: 11955 + + Consistently handle ``TypeAliasType`` (defined in PEP 695) obtained with + the ``type X = int`` syntax introduced in python 3.12. Now in all cases one + such alias must be explicitly added to the type map for it to be usable + inside :class:`.Mapped`. This change also revises the approach added in + :ticket:`11305`, now requiring the ``TypeAliasType`` to be added to the + type map. Documentation on how unions and type alias types are handled by + SQLAlchemy has been added in the + :ref:`orm_declarative_mapped_column_type_map` section of the documentation. + + .. change:: + :tags: feature, oracle + :tickets: 12016 + + Added new table option ``oracle_tablespace`` to specify the ``TABLESPACE`` + option when creating a table in Oracle. This allows users to define the + tablespace in which the table should be created. Pull request courtesy of + Miguel Grillo. + + .. change:: + :tags: orm, bug + :tickets: 12019 + + Fixed regression caused by an internal code change in response to recent + Mypy releases that caused the very unusual case of a list of ORM-mapped + attribute expressions passed to :meth:`.ColumnOperators.in_` to no longer + be accepted. + + .. change:: + :tags: oracle, usecase + :tickets: 12032 + + Use the connection attribute ``max_identifier_length`` available + in oracledb since version 2.5 when determining the identifier length + in the Oracle dialect. + + .. change:: + :tags: bug, sql + :tickets: 12084 + + Fixed issue in "lambda SQL" feature where the tracking of bound parameters + could be corrupted if the same lambda were evaluated across multiple + compile phases, including when using the same lambda across multiple engine + instances or with statement caching disabled. + + + .. 
change:: + :tags: usecase, postgresql + :tickets: 12093 + + The :class:`_postgresql.Range` type now supports + :meth:`_postgresql.Range.__contains__`. Pull request courtesy of Frazer + McLean. + + .. change:: + :tags: bug, oracle + :tickets: 12100 + + Fixed compilation of ``TABLE`` function when used in a ``FROM`` clause in + Oracle Database dialect. + + .. change:: + :tags: bug, oracle + :tickets: 12150 + + Fixed issue in oracledb / cx_oracle dialects where output type handlers for + ``CLOB`` were being routed to ``NVARCHAR`` rather than ``VARCHAR``, causing + a double conversion to take place. + + + .. change:: + :tags: bug, postgresql + :tickets: 12170 + + Fixed issue where creating a table with a primary column of + :class:`_sql.SmallInteger` and using the asyncpg driver would result in + the type being compiled to ``SERIAL`` rather than ``SMALLSERIAL``. + + .. change:: + :tags: bug, orm + :tickets: 12207 + + Fixed issues in type handling within the + :paramref:`_orm.registry.type_annotation_map` feature which prevented the + use of unions, using either pep-604 or ``Union`` syntaxes under future + annotations mode, which contained multiple generic types as elements from + being correctly resolvable. + + .. change:: + :tags: bug, orm + :tickets: 12216 + + Fixed issue in event system which prevented an event listener from being + attached and detached from multiple class-like objects, namely the + :class:`.sessionmaker` or :class:`.scoped_session` targets that assign to + :class:`.Session` subclasses. + + + .. change:: + :tags: bug, postgresql + :tickets: 12220 + + Adjusted the asyncpg dialect so that an empty SQL string, which is valid + for PostgreSQL server, may be successfully processed at the dialect level, + such as when using :meth:`.Connection.exec_driver_sql`. Pull request + courtesy Andrew Jackson. + + + .. change:: + :tags: usecase, sqlite + :tickets: 7398 + + Added SQLite table option to enable ``STRICT`` tables. Pull request + courtesy of Guilherme Crocetti. .. changelog:: :version: 2.0.36 diff --git a/doc/build/changelog/unreleased_20/10720.rst b/doc/build/changelog/unreleased_20/10720.rst deleted file mode 100644 index 98ba0a0dc49..00000000000 --- a/doc/build/changelog/unreleased_20/10720.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: usecase, mariadb - :tickets: 10720 - - Added sql types ``INET4`` and ``INET6`` in the MariaDB dialect. Pull - request courtesy Adam Žurek. diff --git a/doc/build/changelog/unreleased_20/11370.rst b/doc/build/changelog/unreleased_20/11370.rst deleted file mode 100644 index a98940f8b6a..00000000000 --- a/doc/build/changelog/unreleased_20/11370.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11370 - - Fixed issue regarding ``Union`` types that would be present in the - :paramref:`_orm.registry.type_annotation_map` of a :class:`_orm.registry` - or declarative base class, where a :class:`.Mapped` element that included - one of the subtypes present in that ``Union`` would be matched to that - entry, potentially ignoring other entries that matched exactly. The - correct behavior now takes place such that an entry should only match in - :paramref:`_orm.registry.type_annotation_map` exactly, as a ``Union`` type - is a self-contained type. For example, an attribute with ``Mapped[float]`` - would previously match to a :paramref:`_orm.registry.type_annotation_map` - entry ``Union[float, Decimal]``; this will no longer match and will now - only match to an entry that states ``float``. 
Pull request courtesy Frazer - McLean. diff --git a/doc/build/changelog/unreleased_20/11724.rst b/doc/build/changelog/unreleased_20/11724.rst deleted file mode 100644 index 70ebd9e3e2f..00000000000 --- a/doc/build/changelog/unreleased_20/11724.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, postgresql - :tickets: 11724 - - Fixes issue in :meth:`.Dialect.get_multi_indexes` in the PostgreSQL - dialect, where an error would be thrown when attempting to use alembic with - a vector index from the pgvecto.rs extension. diff --git a/doc/build/changelog/unreleased_20/11764.rst b/doc/build/changelog/unreleased_20/11764.rst deleted file mode 100644 index 6e37f86bf16..00000000000 --- a/doc/build/changelog/unreleased_20/11764.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. change:: - :tags: usecase, mysql, mariadb - :tickets: 11764 - - Added support for the ``LIMIT`` clause with ``DELETE`` for the MySQL and - MariaDB dialects, to complement the already present option for - ``UPDATE``. The :meth:`.Delete.with_dialect_options` method of the - :func:`.delete` construct accepts parameters for ``mysql_limit`` and - ``mariadb_limit``, allowing users to specify a limit on the number of rows - deleted. Pull request courtesy of Pablo Nicolás Estevez. - - -.. change:: - :tags: bug, mysql, mariadb - - Added logic to ensure that the ``mysql_limit`` and ``mariadb_limit`` - parameters of :meth:`.Update.with_dialect_options` and - :meth:`.Delete.with_dialect_options` when compiled to string will only - compile if the parameter is passed as an integer; a ``ValueError`` is - raised otherwise. diff --git a/doc/build/changelog/unreleased_20/11944.rst b/doc/build/changelog/unreleased_20/11944.rst deleted file mode 100644 index 0be3cb926d7..00000000000 --- a/doc/build/changelog/unreleased_20/11944.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11944 - - Fixed bug in how type unions were handled within - :paramref:`_orm.registry.type_annotation_map` as well as - :class:`._orm.Mapped` that made the lookup behavior of ``a | b`` different - from that of ``Union[a, b]``. diff --git a/doc/build/changelog/unreleased_20/11955.rst b/doc/build/changelog/unreleased_20/11955.rst deleted file mode 100644 index bf62530f99d..00000000000 --- a/doc/build/changelog/unreleased_20/11955.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 11955 - - Consistently handle ``TypeAliasType`` (defined in PEP 695) obtained with - the ``type X = int`` syntax introduced in python 3.12. Now in all cases one - such alias must be explicitly added to the type map for it to be usable - inside :class:`.Mapped`. This change also revises the approach added in - :ticket:`11305`, now requiring the ``TypeAliasType`` to be added to the - type map. Documentation on how unions and type alias types are handled by - SQLAlchemy has been added in the - :ref:`orm_declarative_mapped_column_type_map` section of the documentation. diff --git a/doc/build/changelog/unreleased_20/12016.rst b/doc/build/changelog/unreleased_20/12016.rst deleted file mode 100644 index e89c25576d6..00000000000 --- a/doc/build/changelog/unreleased_20/12016.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: feature, oracle - :tickets: 12016 - - Added new table option ``oracle_tablespace`` to specify the ``TABLESPACE`` - option when creating a table in Oracle. This allows users to define the - tablespace in which the table should be created. Pull request courtesy of - Miguel Grillo. 
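As an illustrative sketch, a table definition using the new option might look like the following; the table name and tablespace name here are placeholders::

    from sqlalchemy import Column, Integer, MetaData, Table

    metadata = MetaData()

    # "MY_TBS" stands in for whatever tablespace the DBA has provisioned
    accounts = Table(
        "accounts",
        metadata,
        Column("id", Integer, primary_key=True),
        oracle_tablespace="MY_TBS",
    )

When the table is created against the Oracle dialect, a ``TABLESPACE`` clause naming the given tablespace is rendered as part of the ``CREATE TABLE`` statement.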
diff --git a/doc/build/changelog/unreleased_20/12019.rst b/doc/build/changelog/unreleased_20/12019.rst deleted file mode 100644 index 3c7c1f4d01b..00000000000 --- a/doc/build/changelog/unreleased_20/12019.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: orm, bug - :tickets: 12019 - - Fixed regression caused by an internal code change in response to recent - Mypy releases that caused the very unusual case of a list of ORM-mapped - attribute expressions passed to :meth:`.ColumnOperators.in_` to no longer - be accepted. diff --git a/doc/build/changelog/unreleased_20/12032.rst b/doc/build/changelog/unreleased_20/12032.rst deleted file mode 100644 index 5a407329807..00000000000 --- a/doc/build/changelog/unreleased_20/12032.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: oracle, usecase - :tickets: 12032 - - Use the connection attribute ``max_identifier_length`` available - in oracledb since version 2.5 when determining the identifier length - in the Oracle dialect. diff --git a/doc/build/changelog/unreleased_20/12084.rst b/doc/build/changelog/unreleased_20/12084.rst deleted file mode 100644 index 0eef5c9a1cb..00000000000 --- a/doc/build/changelog/unreleased_20/12084.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 12084 - - Fixed issue in "lambda SQL" feature where the tracking of bound parameters - could be corrupted if the same lambda were evaluated across multiple - compile phases, including when using the same lambda across multiple engine - instances or with statement caching disabled. - diff --git a/doc/build/changelog/unreleased_20/12093.rst b/doc/build/changelog/unreleased_20/12093.rst deleted file mode 100644 index 3c6958d9adb..00000000000 --- a/doc/build/changelog/unreleased_20/12093.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: usecase, postgresql - :tickets: 12093 - - The :class:`_postgresql.Range` type now supports - :meth:`_postgresql.Range.__contains__`. Pull request courtesy of Frazer - McLean. diff --git a/doc/build/changelog/unreleased_20/12100.rst b/doc/build/changelog/unreleased_20/12100.rst deleted file mode 100644 index a7526dfd0c4..00000000000 --- a/doc/build/changelog/unreleased_20/12100.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: bug, oracle - :tickets: 12100 - - Fixed compilation of ``TABLE`` function when used in a ``FROM`` clause in - Oracle Database dialect. diff --git a/doc/build/changelog/unreleased_20/12150.rst b/doc/build/changelog/unreleased_20/12150.rst deleted file mode 100644 index a40e4623f21..00000000000 --- a/doc/build/changelog/unreleased_20/12150.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, oracle - :tickets: 12150 - - Fixed issue in oracledb / cx_oracle dialects where output type handlers for - ``CLOB`` were being routed to ``NVARCHAR`` rather than ``VARCHAR``, causing - a double conversion to take place. - diff --git a/doc/build/changelog/unreleased_20/12170.rst b/doc/build/changelog/unreleased_20/12170.rst deleted file mode 100644 index 452181efa37..00000000000 --- a/doc/build/changelog/unreleased_20/12170.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, postgresql - :tickets: 12170 - - Fixed issue where creating a table with a primary column of - :class:`_sql.SmallInteger` and using the asyncpg driver would result in - the type being compiled to ``SERIAL`` rather than ``SMALLSERIAL``. 
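A minimal sketch of the kind of table affected by this fix; the table name is arbitrary::

    from sqlalchemy import Column, MetaData, SmallInteger, Table

    metadata = MetaData()

    # an auto-incrementing SmallInteger primary key; with the fix in place,
    # the asyncpg dialect renders SMALLSERIAL rather than SERIAL for this
    # column when the table is created
    counters = Table(
        "counters",
        metadata,
        Column("id", SmallInteger, primary_key=True),
    )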
diff --git a/doc/build/changelog/unreleased_20/12207.rst b/doc/build/changelog/unreleased_20/12207.rst deleted file mode 100644 index a69f8ae93a9..00000000000 --- a/doc/build/changelog/unreleased_20/12207.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 12207 - - Fixed issues in type handling within the - :paramref:`_orm.registry.type_annotation_map` feature which prevented the - use of unions, using either pep-604 or ``Union`` syntaxes under future - annotations mode, which contained multiple generic types as elements from - being correctly resolvable. diff --git a/doc/build/changelog/unreleased_20/12216.rst b/doc/build/changelog/unreleased_20/12216.rst deleted file mode 100644 index a4126733356..00000000000 --- a/doc/build/changelog/unreleased_20/12216.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 12216 - - Fixed issue in event system which prevented an event listener from being - attached and detached from multiple class-like objects, namely the - :class:`.sessionmaker` or :class:`.scoped_session` targets that assign to - :class:`.Session` subclasses. - diff --git a/doc/build/changelog/unreleased_20/12220.rst b/doc/build/changelog/unreleased_20/12220.rst deleted file mode 100644 index a4b30cca5b1..00000000000 --- a/doc/build/changelog/unreleased_20/12220.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, postgresql - :tickets: 12220 - - Adjusted the asyncpg dialect so that an empty SQL string, which is valid - for PostgreSQL server, may be successfully processed at the dialect level, - such as when using :meth:`.Connection.exec_driver_sql`. Pull request - courtesy Andrew Jackson. - diff --git a/doc/build/changelog/unreleased_20/7398.rst b/doc/build/changelog/unreleased_20/7398.rst deleted file mode 100644 index 1241d175a3a..00000000000 --- a/doc/build/changelog/unreleased_20/7398.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: usecase, sqlite - :tickets: 7398 - - Added SQLite table option to enable ``STRICT`` tables. Pull request - courtesy of Guilherme Crocetti. diff --git a/doc/build/conf.py b/doc/build/conf.py index 9b6bcb14920..412d329e846 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.36" +release = "2.0.37" -release_date = "October 15, 2024" +release_date = "January 9, 2025" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 1509e023d6e899ea3d04c2fc6adc6f01c8b79fcd Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 9 Jan 2025 17:44:09 -0500 Subject: [PATCH 417/544] Version 2.0.38 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 9f1c449751b..d257438a20e 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.38 + :include_notes_from: unreleased_20 + .. 
changelog:: :version: 2.0.37 :released: January 9, 2025 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index c74afd9012f..d67636cf84f 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.37" +__version__ = "2.0.38" def __go(lcls: Any) -> None: From 0c6ab45e49252167ac139c5836f0bd02b5b06b6e Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 13 Jan 2025 08:14:14 -0500 Subject: [PATCH 418/544] update logging_name doc engine logging has not used hex-strings in a very long time Change-Id: I77131e3eb2f72129fde1d9ab6dd4b4e40bc313c6 (cherry picked from commit 214088c42ad61794e315174c41ee92a3c408ae6c) --- doc/build/core/engines.rst | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/doc/build/core/engines.rst b/doc/build/core/engines.rst index 108a939a9ea..8ac57cdaaf3 100644 --- a/doc/build/core/engines.rst +++ b/doc/build/core/engines.rst @@ -588,13 +588,16 @@ getting duplicate log lines. Setting the Logging Name ------------------------- -The logger name of instance such as an :class:`~sqlalchemy.engine.Engine` or -:class:`~sqlalchemy.pool.Pool` defaults to using a truncated hex identifier -string. To set this to a specific name, use the +The logger name for :class:`~sqlalchemy.engine.Engine` or +:class:`~sqlalchemy.pool.Pool` is set to be the module-qualified class name of the +object. This name can be further qualified with an additional name +using the :paramref:`_sa.create_engine.logging_name` and -:paramref:`_sa.create_engine.pool_logging_name` with -:func:`sqlalchemy.create_engine`; the name will be appended to the logging name -``sqlalchemy.engine.Engine``:: +:paramref:`_sa.create_engine.pool_logging_name` parameters with +:func:`sqlalchemy.create_engine`; the name will be appended to existing +class-qualified logging name. This use is recommended for applications that +make use of multiple global :class:`.Engine` instances simultaenously, so +that they may be distinguished in logging:: >>> import logging >>> from sqlalchemy import create_engine From 9eb87f47aeb591fd9d354bd9b3d2918d561e6011 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Fri, 17 Jan 2025 21:16:51 +0100 Subject: [PATCH 419/544] asyncpg: shield connection close in terminate to avoid connection leak Added an additional ``shield()`` call within the connection terminate process of the asyncpg driver, to mitigate an issue where terminate would be prevented from completing under the anyio concurrency library. Fixes #12077 Closes: #12076 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12076 Pull-request-sha: 6ae261ea7668f4c850874666efec6fef658b08c0 Change-Id: Iaec63d02b620201dc60ffdad76ba9d61d427cac1 (cherry picked from commit 2f6ca6554ddd725849dd6b2d32bf495391087bec) --- doc/build/changelog/unreleased_20/12077.rst | 7 +++++++ lib/sqlalchemy/dialects/postgresql/asyncpg.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/12077.rst diff --git a/doc/build/changelog/unreleased_20/12077.rst b/doc/build/changelog/unreleased_20/12077.rst new file mode 100644 index 00000000000..ac1c5a95e50 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12077.rst @@ -0,0 +1,7 @@ +.. 
change:: + :tags: postgresql, usecase, asyncio + :tickets: 12077 + + Added an additional ``shield()`` call within the connection terminate + process of the asyncpg driver, to mitigate an issue where terminate would + be prevented from completing under the anyio concurrency library. diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 523c47abcd0..0cc1e99cca8 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -904,7 +904,7 @@ def terminate(self): try: # try to gracefully close; see #10717 # timeout added in asyncpg 0.14.0 December 2017 - self.await_(self._connection.close(timeout=2)) + self.await_(asyncio.shield(self._connection.close(timeout=2))) except ( asyncio.TimeoutError, asyncio.CancelledError, From c5f03549377f161720294386b04cf2f7b62e868d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 16 Jan 2025 12:14:02 -0500 Subject: [PATCH 420/544] set default iso to None for asyncpg pep-249 wrapper Adjusted the asyncpg connection wrapper so that the asyncpg ``.transaction()`` call sends ``None`` for isolation_level if not otherwise set in the SQLAlchemy dialect/wrapper, thereby allowing asyncpg to make use of the server level setting for isolation_level in the absense of a client-level setting. Previously, this behavior of asyncpg was blocked by a hardcoded ``read_committed``. Fixes: #12159 Change-Id: I2cd878a5059a8fefc9557a9b8e056fedaee2e9a4 (cherry picked from commit 299cdf667d5af96c5db75a923d2fd15eef2dfe26) --- doc/build/changelog/unreleased_20/12159.rst | 10 ++++ lib/sqlalchemy/dialects/postgresql/asyncpg.py | 2 +- test/dialect/postgresql/test_async_pg_py3k.py | 59 +++++++++++++++++++ 3 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/12159.rst diff --git a/doc/build/changelog/unreleased_20/12159.rst b/doc/build/changelog/unreleased_20/12159.rst new file mode 100644 index 00000000000..3babbf9db72 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12159.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, postgresql + :tickets: 12159 + + Adjusted the asyncpg connection wrapper so that the asyncpg + ``.transaction()`` call sends ``None`` for isolation_level if not otherwise + set in the SQLAlchemy dialect/wrapper, thereby allowing asyncpg to make use + of the server level setting for isolation_level in the absense of a + client-level setting. Previously, this behavior of asyncpg was blocked by a + hardcoded ``read_committed``. 
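A rough illustration of the behavior described above; the DSN here is a placeholder::

    from sqlalchemy.ext.asyncio import create_async_engine

    # with no client-side isolation_level given, asyncpg transactions now
    # follow the server / session-level default rather than a hardcoded
    # "read committed"
    engine = create_async_engine("postgresql+asyncpg://scott:tiger@host/test")

    # an explicit client-side setting continues to apply when configured
    engine_rr = create_async_engine(
        "postgresql+asyncpg://scott:tiger@host/test",
        isolation_level="REPEATABLE READ",
    )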
diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 523c47abcd0..41306016058 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -717,7 +717,7 @@ def __init__( ): self.dbapi = dbapi self._connection = connection - self.isolation_level = self._isolation_setting = "read_committed" + self.isolation_level = self._isolation_setting = None self.readonly = False self.deferrable = False self._transaction = None diff --git a/test/dialect/postgresql/test_async_pg_py3k.py b/test/dialect/postgresql/test_async_pg_py3k.py index 1a85c8f89f9..98410f72e89 100644 --- a/test/dialect/postgresql/test_async_pg_py3k.py +++ b/test/dialect/postgresql/test_async_pg_py3k.py @@ -10,12 +10,14 @@ from sqlalchemy import String from sqlalchemy import Table from sqlalchemy import testing +from sqlalchemy.dialects.postgresql import asyncpg as asyncpg_dialect from sqlalchemy.dialects.postgresql import ENUM from sqlalchemy.testing import async_test from sqlalchemy.testing import eq_ from sqlalchemy.testing import expect_raises from sqlalchemy.testing import fixtures from sqlalchemy.testing import mock +from sqlalchemy.util import greenlet_spawn class AsyncPgTest(fixtures.TestBase): @@ -166,6 +168,63 @@ async def async_setup(engine, enums): ], ) + @testing.combinations( + None, + "read committed", + "repeatable read", + "serializable", + argnames="isolation_level", + ) + @async_test + async def test_honor_server_level_iso_setting( + self, async_testing_engine, isolation_level + ): + """test for #12159""" + + engine = async_testing_engine() + + arg, kw = engine.dialect.create_connect_args(engine.url) + + # 1. create an asyncpg.connection directly, set a session level + # isolation level on it (this is similar to server default isolation + # level) + raw_asyncpg_conn = await engine.dialect.dbapi.asyncpg.connect( + *arg, **kw + ) + + if isolation_level: + await raw_asyncpg_conn.execute( + f"set SESSION CHARACTERISTICS AS TRANSACTION " + f"isolation level {isolation_level}" + ) + + # 2. fetch it, confirm the setting took and matches + raw_iso_level = ( + await raw_asyncpg_conn.fetchrow("show transaction isolation level") + )[0] + if isolation_level: + eq_(raw_iso_level, isolation_level.lower()) + + # 3.build our pep-249 wrapper around asyncpg.connection + dbapi_conn = asyncpg_dialect.AsyncAdapt_asyncpg_connection( + engine.dialect.dbapi, + raw_asyncpg_conn, + ) + + # 4. show the isolation level inside of a query. this will + # call asyncpg.connection.transaction() in order to run the + # statement. + cursor = await greenlet_spawn(dbapi_conn.cursor) + await greenlet_spawn( + cursor.execute, "show transaction isolation level" + ) + row = cursor.fetchone() + + # 5. 
see that the raw iso level is maintained + eq_(row[0], raw_iso_level) + + await greenlet_spawn(dbapi_conn.close) + @testing.variation("trans", ["commit", "rollback"]) @async_test async def test_dont_reset_open_transaction( From 7768a33f6576424ec150c156b12a2afd1c6ffdcf Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 4 Dec 2024 09:22:14 -0500 Subject: [PATCH 421/544] document name normalize Fixes: #10789 Change-Id: I795d92c900502e4b2fde7ab11e8adb9b03d5b782 (cherry picked from commit f933b668944e6de0dbaba4d9bf4b16e2591cbb2b) --- lib/sqlalchemy/dialects/oracle/base.py | 143 +++++++++++++++++++++++-- 1 file changed, 134 insertions(+), 9 deletions(-) diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 019b76bc0af..02aa4d53663 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -153,15 +153,140 @@ ----------------- In Oracle Database, the data dictionary represents all case insensitive -identifier names using UPPERCASE text. SQLAlchemy on the other hand considers -an all-lower case identifier name to be case insensitive. The Oracle Database -dialects convert all case insensitive identifiers to and from those two formats -during schema level communication, such as reflection of tables and indexes. -Using an UPPERCASE name on the SQLAlchemy side indicates a case sensitive -identifier, and SQLAlchemy will quote the name - this will cause mismatches -against data dictionary data received from Oracle Database, so unless -identifier names have been truly created as case sensitive (i.e. using quoted -names), all lowercase names should be used on the SQLAlchemy side. +identifier names using UPPERCASE text. This is in contradiction to the +expectations of SQLAlchemy, which assume a case insensitive name is represented +as lowercase text. + +As an example of case insensitive identifier names, consider the following table: + +.. sourcecode:: sql + + CREATE TABLE MyTable (Identifier INTEGER PRIMARY KEY) + +If you were to ask Oracle Database for information about this table, the +table name would be reported as ``MYTABLE`` and the column name would +be reported as ``IDENTIFIER``. Compare to most other databases such as +PostgreSQL and MySQL which would report these names as ``mytable`` and +``identifier``. The names are **not quoted, therefore are case insensitive**. +The special casing of ``MyTable`` and ``Identifier`` would only be maintained +if they were quoted in the table definition: + +.. sourcecode:: sql + + CREATE TABLE "MyTable" ("Identifier" INTEGER PRIMARY KEY) + +When constructing a SQLAlchemy :class:`.Table` object, **an all lowercase name +is considered to be case insensitive**. So the following table assumes +case insensitive names:: + + Table("mytable", metadata, Column("identifier", Integer, primary_key=True)) + +Whereas when mixed case or UPPERCASE names are used, case sensitivity is +assumed:: + + Table("MyTable", metadata, Column("Identifier", Integer, primary_key=True)) + +A similar situation occurs at the database driver level when emitting a +textual SQL SELECT statement and looking at column names in the DBAPI +``cursor.description`` attribute. 
A database like PostgreSQL will normalize +case insensitive names to be lowercase:: + + >>> pg_engine = create_engine("postgresql://scott:tiger@localhost/test") + >>> pg_connection = pg_engine.connect() + >>> result = pg_connection.exec_driver_sql("SELECT 1 AS SomeName") + >>> result.cursor.description + (Column(name='somename', type_code=23),) + +Whereas Oracle normalizes them to UPPERCASE:: + + >>> oracle_engine = create_engine("oracle+oracledb://scott:tiger@oracle18c/xe") + >>> oracle_connection = oracle_engine.connect() + >>> result = oracle_connection.exec_driver_sql( + ... "SELECT 1 AS SomeName FROM DUAL" + ... ) + >>> result.cursor.description + [('SOMENAME', , 127, None, 0, -127, True)] + +In order to achieve cross-database parity for the two cases of a. table +reflection and b. textual-only SQL statement round trips, SQLAlchemy performs a step +called **name normalization** when using the Oracle dialect. This process may +also apply to other third party dialects that have similar UPPERCASE handling +of case insensitive names. + +When using name normalization, SQLAlchemy attempts to detect if a name is +case insensitive by checking if all characters are UPPERCASE letters only; +if so, then it assumes this is a case insensitive name and is delivered as +a lowercase name. + +For table reflection, a tablename that is seen represented as all UPPERCASE +in Oracle Database's catalog tables will be assumed to have a case insensitive +name. This is what allows the ``Table`` definition to use lower case names +and be equally compatible from a reflection point of view on Oracle Database +and all other databases such as PostgreSQL and MySQL:: + + # matches a table created with CREATE TABLE mytable + Table("mytable", metadata, autoload_with=some_engine) + +Above, the all lowercase name ``"mytable"`` is case insensitive; it will match +a table reported by PostgreSQL as ``"mytable"`` and a table reported by +Oracle as ``"MYTABLE"``. If name normalization were not present, it would +not be possible for the above :class:`.Table` definition to be introspectable +in a cross-database way, since we are dealing with a case insensitive name +that is not reported by each database in the same way. + +Case sensitivity can be forced on in this case, such as if we wanted to represent +the quoted tablename ``"MYTABLE"`` with that exact casing, most simply by using +that casing directly, which will be seen as a case sensitive name:: + + # matches a table created with CREATE TABLE "MYTABLE" + Table("MYTABLE", metadata, autoload_with=some_engine) + +For the unusual case of a quoted all-lowercase name, the :class:`.quoted_name` +construct may be used:: + + from sqlalchemy import quoted_name + + # matches a table created with CREATE TABLE "mytable" + Table( + quoted_name("mytable", quote=True), metadata, autoload_with=some_engine + ) + +Name normalization also takes place when handling result sets from **purely +textual SQL strings**, that have no other :class:`.Table` or :class:`.Column` +metadata associated with them. This includes SQL strings executed using +:meth:`.Connection.exec_driver_sql` and SQL strings executed using the +:func:`.text` construct which do not include :class:`.Column` metadata. 
+ +Returning to the Oracle Database SELECT statement, we see that even though +``cursor.description`` reports the column name as ``SOMENAME``, SQLAlchemy +name normalizes this to ``somename``:: + + >>> oracle_engine = create_engine("oracle+oracledb://scott:tiger@oracle18c/xe") + >>> oracle_connection = oracle_engine.connect() + >>> result = oracle_connection.exec_driver_sql( + ... "SELECT 1 AS SomeName FROM DUAL" + ... ) + >>> result.cursor.description + [('SOMENAME', , 127, None, 0, -127, True)] + >>> result.keys() + RMKeyView(['somename']) + +The single scenario where the above behavior produces inaccurate results +is when using an all-uppercase, quoted name. SQLAlchemy has no way to determine +that a particular name in ``cursor.description`` was quoted, and is therefore +case sensitive, or was not quoted, and should be name normalized:: + + >>> result = oracle_connection.exec_driver_sql( + ... 'SELECT 1 AS "SOMENAME" FROM DUAL' + ... ) + >>> result.cursor.description + [('SOMENAME', , 127, None, 0, -127, True)] + >>> result.keys() + RMKeyView(['somename']) + +For this case, a new feature will be available in SQLAlchemy 2.1 to disable +the name normalization behavior in specific cases. + .. _oracle_max_identifier_lengths: From 49df6598926808026953210b17dff818f235abe4 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 22 Jan 2025 21:00:41 +0100 Subject: [PATCH 422/544] use arm runnes on linux instead of emulation Fixes: #12249 Change-Id: I4c56a10d09716aa5e1fc4a1688dbfdf7cfcfd2fb (cherry picked from commit 1f704fb682a0759454a474901b33e0b311044253) # Conflicts: # .github/workflows/run-test.yaml --- .github/workflows/create-wheels.yaml | 18 ++++++++++++------ .github/workflows/run-test.yaml | 18 ++++++++++++++---- 2 files changed, 26 insertions(+), 10 deletions(-) diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index b8e6adffeb1..e3089cd2d1b 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -30,6 +30,7 @@ jobs: # TODO: macos-14 uses arm macs (only python 3.10+) - make arm wheel on it - "macos-13" - "ubuntu-22.04" + - "ubuntu-22.04-arm" linux_archs: # this is only meaningful on linux. 
windows and macos ignore exclude all but one arch - "aarch64" @@ -39,13 +40,17 @@ jobs: # create pure python build - os: ubuntu-22.04 wheel_mode: pure-python - python: "cp-311*" + python: "cp-312*" exclude: - os: "windows-2022" linux_archs: "aarch64" - os: "macos-13" linux_archs: "aarch64" + - os: "ubuntu-22.04" + linux_archs: "aarch64" + - os: "ubuntu-22.04-arm" + linux_archs: "x86_64" fail-fast: false @@ -66,11 +71,12 @@ jobs: (cat setup.cfg) | %{$_ -replace "tag_build.?=.?dev",""} | set-content setup.cfg # See details at https://cibuildwheel.readthedocs.io/en/stable/faq/#emulation - - name: Set up QEMU on linux - if: ${{ runner.os == 'Linux' }} - uses: docker/setup-qemu-action@v3 - with: - platforms: all + # no longer needed since arm runners are now available + # - name: Set up QEMU on linux + # if: ${{ runner.os == 'Linux' }} + # uses: docker/setup-qemu-action@v3 + # with: + # platforms: all - name: Build compiled wheels if: ${{ matrix.wheel_mode == 'compiled' }} diff --git a/.github/workflows/run-test.yaml b/.github/workflows/run-test.yaml index c037b91237f..ea1540edc1b 100644 --- a/.github/workflows/run-test.yaml +++ b/.github/workflows/run-test.yaml @@ -20,13 +20,14 @@ permissions: jobs: run-test: - name: test-${{ matrix.python-version }}-${{ matrix.build-type }}-${{ matrix.architecture }}-${{ matrix.os }} + name: test-${{ matrix.python-version }}-${{ matrix.os }}-${{ matrix.architecture }}-${{ matrix.build-type }} runs-on: ${{ matrix.os }} strategy: # run this job using this matrix, excluding some combinations below. matrix: os: - "ubuntu-22.04" + - "ubuntu-22.04-arm" - "windows-latest" - "macos-latest" - "macos-13" @@ -37,7 +38,7 @@ jobs: - "3.10" - "3.11" - "3.12" - - "3.13.0-alpha - 3.13" + - "3.13" - "pypy-3.10" build-type: - "cext" @@ -53,6 +54,8 @@ jobs: pytest-args: "-k 'not test_autocommit_on and not test_turn_autocommit_off_via_default_iso_level and not test_autocommit_isolation_level'" - os: "ubuntu-22.04" pytest-args: "--dbdriver pysqlite --dbdriver aiosqlite" + - os: "ubuntu-22.04-arm" + pytest-args: "--dbdriver pysqlite --dbdriver aiosqlite" exclude: @@ -61,6 +64,11 @@ jobs: architecture: x86 - os: "ubuntu-22.04" architecture: arm64 + # linux-arm do not have x86 / x64 python + - os: "ubuntu-22.04-arm" + architecture: x86 + - os: "ubuntu-22.04-arm" + architecture: x64 # windows des not have arm64 python - os: "windows-latest" architecture: arm64 @@ -80,9 +88,11 @@ jobs: architecture: arm64 - os: "macos-13" architecture: x86 - # pypy does not have cext or x86 + # pypy does not have cext or x86 or arm on linux - python-version: "pypy-3.10" build-type: "cext" + - os: "ubuntu-22.04-arm" + python-version: "pypy-3.10" - os: "windows-latest" python-version: "pypy-3.10" architecture: x86 @@ -171,7 +181,7 @@ jobs: - "3.10" - "3.11" - "3.12" - - "3.13.0-alpha - 3.13" + - "3.13" tox-env: - mypy - pep484 From 902eb4925959ddf88566df06f3b9cb78498571ca Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 29 Jan 2025 10:10:09 -0500 Subject: [PATCH 423/544] support accept for chains of joineddispatchers Fixed issue where creating an :class:`.Engine` using multiple calls to :meth:`.Engine.execution_options` where a subsequent call involved certain options such as ``isolation_level`` would lead to an internal error involving event registration. 
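Sketched roughly (the backend URL here is arbitrary), the failing pattern was along these lines:

    from sqlalchemy import create_engine

    e1 = create_engine("sqlite://")
    e2 = e1.execution_options(foo="bar")

    # prior to this fix, a further chained call involving an option such as
    # isolation_level could raise an internal event-registration error
    e3 = e2.execution_options(isolation_level="AUTOCOMMIT")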
Fixes: #12289 Change-Id: Iec5fbc0eb0c5a92dda1ea762872ae992ca816685 (cherry picked from commit fc3623990eeeb415fb076ddc96a0c7974beb2050) --- doc/build/changelog/unreleased_20/12289.rst | 8 +++++ lib/sqlalchemy/event/base.py | 8 +++-- test/base/test_events.py | 35 +++++++++++++++++++++ test/engine/test_execute.py | 32 +++++++++++++++++++ 4 files changed, 80 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12289.rst diff --git a/doc/build/changelog/unreleased_20/12289.rst b/doc/build/changelog/unreleased_20/12289.rst new file mode 100644 index 00000000000..7ac111c0f50 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12289.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, engine + :tickets: 12289 + + Fixed issue where creating an :class:`.Engine` using multiple calls to + :meth:`.Engine.execution_options` where a subsequent call involved certain + options such as ``isolation_level`` would lead to an internal error + involving event registration. diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py index a73e86bd2a2..66dc12996bc 100644 --- a/lib/sqlalchemy/event/base.py +++ b/lib/sqlalchemy/event/base.py @@ -380,9 +380,11 @@ def dispatch_is(*types: Type[Any]) -> bool: return all(isinstance(target.dispatch, t) for t in types) def dispatch_parent_is(t: Type[Any]) -> bool: - return isinstance( - cast("_JoinedDispatcher[_ET]", target.dispatch).parent, t - ) + parent = cast("_JoinedDispatcher[_ET]", target.dispatch).parent + while isinstance(parent, _JoinedDispatcher): + parent = cast("_JoinedDispatcher[_ET]", parent).parent + + return isinstance(parent, t) # Mapper, ClassManager, Session override this to # also accept classes, scoped_sessions, sessionmakers, etc. diff --git a/test/base/test_events.py b/test/base/test_events.py index 7a387e8440d..ccb53f2bb37 100644 --- a/test/base/test_events.py +++ b/test/base/test_events.py @@ -978,6 +978,9 @@ class TargetElement(BaseTarget): def __init__(self, parent): self.dispatch = self.dispatch._join(parent.dispatch) + def create(self): + return TargetElement(self) + def run_event(self, arg): list(self.dispatch.event_one) self.dispatch.event_one(self, arg) @@ -1044,6 +1047,38 @@ def test_parent_class_child_class(self): [call(element, 1), call(element, 2), call(element, 3)], ) + def test_join_twice(self): + """test #12289""" + + l1 = Mock() + l2 = Mock() + + first_target_element = self.TargetFactory().create() + second_target_element = first_target_element.create() + + event.listen(second_target_element, "event_one", l2) + event.listen(first_target_element, "event_one", l1) + + second_target_element.run_event(1) + eq_( + l1.mock_calls, + [call(second_target_element, 1)], + ) + eq_( + l2.mock_calls, + [call(second_target_element, 1)], + ) + + first_target_element.run_event(2) + eq_( + l1.mock_calls, + [call(second_target_element, 1), call(first_target_element, 2)], + ) + eq_( + l2.mock_calls, + [call(second_target_element, 1)], + ) + def test_parent_class_child_instance_apply_after(self): l1 = Mock() l2 = Mock() diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index 61c422bb56a..3291fa30478 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -1782,6 +1782,38 @@ def test_per_engine_plus_global(self, testing_engine): eq_(canary.be2.call_count, 1) eq_(canary.be3.call_count, 2) + @testing.requires.ad_hoc_engines + def test_option_engine_registration_issue_one(self): + """test #12289""" + + e1 = create_engine(testing.db.url) + e2 = e1.execution_options(foo="bar") 
+ e3 = e2.execution_options(isolation_level="AUTOCOMMIT") + + eq_( + e3._execution_options, + {"foo": "bar", "isolation_level": "AUTOCOMMIT"}, + ) + + @testing.requires.ad_hoc_engines + def test_option_engine_registration_issue_two(self): + """test #12289""" + + e1 = create_engine(testing.db.url) + e2 = e1.execution_options(foo="bar") + + @event.listens_for(e2, "engine_connect") + def r1(*arg, **kw): + pass + + e3 = e2.execution_options(bat="hoho") + + @event.listens_for(e3, "engine_connect") + def r2(*arg, **kw): + pass + + eq_(e3._execution_options, {"foo": "bar", "bat": "hoho"}) + def test_emit_sql_in_autobegin(self, testing_engine): e1 = testing_engine(config.db_url) From b8bb35a4e2966693ecbb0b3f23ff00f23284a316 Mon Sep 17 00:00:00 2001 From: Martin Burchell Date: Thu, 30 Jan 2025 20:31:15 +0000 Subject: [PATCH 424/544] Fix typo python2 -> python in 2.0 major migration guide (#12250) (cherry picked from commit bc5213d8d03193aae3486dc42c258e00fd0b0769) --- doc/build/changelog/migration_20.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/changelog/migration_20.rst b/doc/build/changelog/migration_20.rst index 794d1d80fb1..523eb638101 100644 --- a/doc/build/changelog/migration_20.rst +++ b/doc/build/changelog/migration_20.rst @@ -250,7 +250,7 @@ With warnings turned on, our program now has a lot to say: .. sourcecode:: text - $ SQLALCHEMY_WARN_20=1 python2 -W always::DeprecationWarning test3.py + $ SQLALCHEMY_WARN_20=1 python -W always::DeprecationWarning test3.py test3.py:9: RemovedIn20Warning: The Engine.execute() function/method is considered legacy as of the 1.x series of SQLAlchemy and will be removed in 2.0. All statement execution in SQLAlchemy 2.0 is performed by the Connection.execute() method of Connection, or in the ORM by the Session.execute() method of Session. (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9) (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9) engine.execute("CREATE TABLE foo (id integer)") /home/classic/dev/sqlalchemy/lib/sqlalchemy/engine/base.py:2856: RemovedIn20Warning: Passing a string to Connection.execute() is deprecated and will be removed in version 2.0. Use the text() construct, or the Connection.exec_driver_sql() method to invoke a driver-level SQL string. 
(Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9) From 0a3a37681dc1cebee081db47082c0a70f1effcc2 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 30 Jan 2025 21:34:00 +0100 Subject: [PATCH 425/544] remove comma in docstring Change-Id: I135c06ddc16f905835b50cb8ea41f13a1ae2e0be (cherry picked from commit 425f45fb285e5994e96a33b458f1a6aa98a8907f) --- lib/sqlalchemy/orm/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py index c84f3b1b3f8..ae0ba1029d1 100644 --- a/lib/sqlalchemy/orm/base.py +++ b/lib/sqlalchemy/orm/base.py @@ -145,7 +145,7 @@ class PassiveFlag(FastIntFlag): """ NO_AUTOFLUSH = 64 - """Loader callables should disable autoflush.""", + """Loader callables should disable autoflush.""" NO_RAISE = 128 """Loader callables should not raise any assertions""" From aa120dd1b1068a19a9d805a5783ace3e7a98c82b Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 30 Jan 2025 20:39:34 +0000 Subject: [PATCH 426/544] Merge "Skip mypy plugin tests if incompatible or missing" into main (cherry picked from commit 5822319e779afd26c8edff276c837491c2c10584) --- test/ext/mypy/test_mypy_plugin_py3k.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/test/ext/mypy/test_mypy_plugin_py3k.py b/test/ext/mypy/test_mypy_plugin_py3k.py index e1aa1f96551..1d75137a042 100644 --- a/test/ext/mypy/test_mypy_plugin_py3k.py +++ b/test/ext/mypy/test_mypy_plugin_py3k.py @@ -2,6 +2,13 @@ import pathlib import shutil +try: + from mypy.version import __version__ as _mypy_version_str +except ImportError: + _mypy_version = None +else: + _mypy_version = tuple(int(x) for x in _mypy_version_str.split(".")) + from sqlalchemy import testing from sqlalchemy.testing import eq_ from sqlalchemy.testing import fixtures @@ -24,7 +31,15 @@ def _incremental_dirs(): return files +def _mypy_missing_or_incompatible(): + return not _mypy_version or _mypy_version > (1, 10, 1) + + class MypyPluginTest(fixtures.MypyTest): + @testing.skip_if( + _mypy_missing_or_incompatible, + "Mypy must be present and compatible (<= 1.10.1)", + ) @testing.combinations( *[ (pathlib.Path(pathname).name, pathname) @@ -75,6 +90,10 @@ def test_incremental(self, mypy_runner, per_func_cachedir, pathname): % (patchfile, result[0]), ) + @testing.skip_if( + _mypy_missing_or_incompatible, + "Mypy must be present and compatible (<= 1.10.1)", + ) @testing.combinations( *( (os.path.basename(path), path, True) From 4956ae62dce29691d3781cb85ec41e32a58db3b4 Mon Sep 17 00:00:00 2001 From: Mingyu Park Date: Sat, 1 Feb 2025 02:43:35 -0500 Subject: [PATCH 427/544] Unable to use InstrumentedAttribute to value mappings in mysql/mariadb on_duplicate_key_update Fixes: #12117 Closes: #12296 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12296 Pull-request-sha: 32a09ebd18a6f97fdb23cc8a8e212342e6c26291 Change-Id: I72701f63b13105e5dc36e63ba2651da2673f1735 (cherry picked from commit 87bf36be84fc876be3e0c436a557733c63e2ac8d) --- doc/build/changelog/unreleased_20/12117.rst | 8 ++++++++ lib/sqlalchemy/dialects/mysql/base.py | 11 ++++++++--- test/dialect/mysql/test_compiler.py | 22 +++++++++++++++++++++ 3 files changed, 38 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12117.rst diff --git a/doc/build/changelog/unreleased_20/12117.rst b/doc/build/changelog/unreleased_20/12117.rst new file mode 100644 index 00000000000..b4da4db1ef1 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12117.rst @@ -0,0 +1,8 @@ +.. 
change:: + :tags: bug, dml, mariadb, mysql + :tickets: 12117 + + Fixed a bug where the :class:`MySQLCompiler` would not properly compile statements + where :meth:`_mysql.Insert.on_duplicate_key_update` was passed values that included + :class:`InstrumentedAttribute` as keys. + Pull request courtesy of mingyu. diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index db887269c9a..1314ee3debf 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1399,9 +1399,14 @@ def visit_on_duplicate_key_update(self, on_duplicate, **kw): else: _on_dup_alias_name = "new" + on_duplicate_update = { + coercions.expect_as_key(roles.DMLColumnRole, key): value + for key, value in on_duplicate.update.items() + } + # traverses through all table columns to preserve table column order - for column in (col for col in cols if col.key in on_duplicate.update): - val = on_duplicate.update[column.key] + for column in (col for col in cols if col.key in on_duplicate_update): + val = on_duplicate_update[column.key] # TODO: this coercion should be up front. we can't cache # SQL constructs with non-bound literals buried in them @@ -1442,7 +1447,7 @@ def replace(obj): name_text = self.preparer.quote(column.name) clauses.append("%s = %s" % (name_text, value_text)) - non_matching = set(on_duplicate.update) - {c.key for c in cols} + non_matching = set(on_duplicate_update) - {c.key for c in cols} if non_matching: util.warn( "Additional column names not matching " diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py index 59d604eace1..8387d4e07c6 100644 --- a/test/dialect/mysql/test_compiler.py +++ b/test/dialect/mysql/test_compiler.py @@ -54,6 +54,9 @@ from sqlalchemy.dialects.mysql import base as mysql from sqlalchemy.dialects.mysql import insert from sqlalchemy.dialects.mysql import match +from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column from sqlalchemy.sql import column from sqlalchemy.sql import delete from sqlalchemy.sql import table @@ -1344,6 +1347,25 @@ def test_mysql8_on_update_dont_dup_alias_name(self): dialect=dialect, ) + def test_on_update_instrumented_attribute_dict(self): + class Base(DeclarativeBase): + pass + + class T(Base): + __tablename__ = "table" + + foo: Mapped[int] = mapped_column(Integer, primary_key=True) + + q = insert(T).values(foo=1).on_duplicate_key_update({T.foo: 2}) + self.assert_compile( + q, + ( + "INSERT INTO `table` (foo) VALUES (%s) " + "ON DUPLICATE KEY UPDATE foo = %s" + ), + {"foo": 1, "param_1": 2}, + ) + class RegexpCommon(testing.AssertsCompiledSQL): def setup_test(self): From d58ba5c5577b12f807f945a679f91aae443a25d8 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Sat, 1 Feb 2025 19:44:14 +0100 Subject: [PATCH 428/544] mypy plugin is removed on 2.1, remove the possibly Change-Id: I93a918dcafd8471d6514b297d973bbbe8100ec48 --- doc/build/orm/extensions/mypy.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/orm/extensions/mypy.rst b/doc/build/orm/extensions/mypy.rst index dbca3f35f91..b7d50c607ad 100644 --- a/doc/build/orm/extensions/mypy.rst +++ b/doc/build/orm/extensions/mypy.rst @@ -11,8 +11,8 @@ the :func:`_orm.mapped_column` construct introduced in SQLAlchemy 2.0. .. deprecated:: 2.0 - **The SQLAlchemy Mypy Plugin is DEPRECATED, and will be removed possibly - as early as the SQLAlchemy 2.1 release. 
We would urge users to please + **The SQLAlchemy Mypy Plugin is DEPRECATED, and will be removed in + the SQLAlchemy 2.1 release. We would urge users to please migrate away from it ASAP. The mypy plugin also works only up until mypy version 1.10.1. version 1.11.0 and greater may not work properly.** From 78445beeb5484f264adecfca5feb0c712929254a Mon Sep 17 00:00:00 2001 From: Christophe Bornet Date: Fri, 31 Jan 2025 07:42:59 -0500 Subject: [PATCH 429/544] Use AsyncAdaptedQueuePool in aiosqlite ### Description Change default pool in `aiosqlite` from `NullPool` to `AsyncAdaptedQueuePool`. This ensures consistency with pysqlite and least surprise when migrating from sync to async. See discussion in https://github.com/sqlalchemy/sqlalchemy/discussions/12285 Non regression tested by existing tests. ### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [x] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #12291 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12291 Pull-request-sha: 5a0872b8d431a6937eaf05fb132578aed5723b6a Change-Id: I96b4d0b5154b34cd26d3ad89774229b0f5d8686f (cherry picked from commit 11bac714a2e83f6f903b1faf36d744854635da66) --- doc/build/changelog/unreleased_20/12285.rst | 6 ++++++ lib/sqlalchemy/dialects/sqlite/aiosqlite.py | 24 ++++++++++++++++++++- 2 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/12285.rst diff --git a/doc/build/changelog/unreleased_20/12285.rst b/doc/build/changelog/unreleased_20/12285.rst new file mode 100644 index 00000000000..2c1451b3608 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12285.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: change, sqlite, aiosqlite, asyncio, pool + :tickets: 12285 + + Changed default connection pool of aiosqlite from NullPool to AsyncAdaptedQueuePool for consistency with pysqlite. + diff --git a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py index c777bf445b0..828022454d4 100644 --- a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py @@ -78,6 +78,28 @@ def do_begin(conn): with the SQLite driver, as this function necessarily will also alter the ".isolation_level" setting. +.. _aiosqlite_pooling: + +Pooling Behavior +---------------- + +The SQLAlchemy ``aiosqlite`` DBAPI establishes the connection pool differently +based on the kind of SQLite database that's requested: + +* When a ``:memory:`` SQLite database is specified, the dialect by default + will use :class:`.StaticPool`. This pool maintains a single + connection, so that all access to the engine + use the same ``:memory:`` database. +* When a file-based database is specified, the dialect will use + :class:`.AsyncAdaptedQueuePool` as the source of connections. + + .. 
versionchanged:: 2.0.38 + + SQLite file database engines now use :class:`.AsyncAdaptedQueuePool` by default. + Previously, :class:`.NullPool` were used. The :class:`.NullPool` class + may be used by specifying it via the + :paramref:`_sa.create_engine.poolclass` parameter. + """ # noqa import asyncio @@ -380,7 +402,7 @@ def import_dbapi(cls): @classmethod def get_pool_class(cls, url): if cls._is_url_file_db(url): - return pool.NullPool + return pool.AsyncAdaptedQueuePool else: return pool.StaticPool From 5e623b989e92a60739714486210cae5d8933db99 Mon Sep 17 00:00:00 2001 From: Martin Burchell Date: Mon, 3 Feb 2025 18:56:55 +0000 Subject: [PATCH 430/544] Fix 'polymorphic' typo in a few places (#12307) (cherry picked from commit 87c8d04d379a70aafb189f18801f0f375d99262b) --- doc/build/orm/declarative_tables.rst | 2 +- lib/sqlalchemy/orm/path_registry.py | 2 +- test/orm/test_core_compilation.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/build/orm/declarative_tables.rst b/doc/build/orm/declarative_tables.rst index aba74f57932..2ec15e3bf58 100644 --- a/doc/build/orm/declarative_tables.rst +++ b/doc/build/orm/declarative_tables.rst @@ -1502,7 +1502,7 @@ mapper configuration:: __mapper_args__ = { "polymorphic_on": __table__.c.type, - "polymorhpic_identity": "person", + "polymorphic_identity": "person", } The "imperative table" form is also used when a non-:class:`_schema.Table` diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py index a15f7f61b45..388e46098d6 100644 --- a/lib/sqlalchemy/orm/path_registry.py +++ b/lib/sqlalchemy/orm/path_registry.py @@ -566,7 +566,7 @@ def __init__( # entities are used. # # here we are trying to distinguish between a path that starts - # on a the with_polymorhpic entity vs. one that starts on a + # on a with_polymorphic entity vs. one that starts on a # normal entity that introduces a with_polymorphic() in the # middle using of_type(): # diff --git a/test/orm/test_core_compilation.py b/test/orm/test_core_compilation.py index 81aa760d9b2..6af9185836b 100644 --- a/test/orm/test_core_compilation.py +++ b/test/orm/test_core_compilation.py @@ -1797,7 +1797,7 @@ class InheritedTest(_poly_fixtures._Polymorphic): run_setup_mappers = "once" -class ExplicitWithPolymorhpicTest( +class ExplicitWithPolymorphicTest( _poly_fixtures._PolymorphicUnions, AssertsCompiledSQL ): __dialect__ = "default" From b6cdcee671509b6bf9a995a18e8d34a2953f4169 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 1 Feb 2025 14:39:57 -0500 Subject: [PATCH 431/544] reorganize column collection init to be local Reorganized the internals by which the `.c` collection on a :class:`.FromClause` gets generated so that it is resilient against the collection being accessed in concurrent fashion. An example is creating a :class:`.Alias` or :class:`.Subquery` and accessing it as a module level variable. This impacts the Oracle dialect which uses such module-level global alias objects but is of general use as well. 
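A sketch of the usage pattern this change is meant to make safe (names and thread count below are arbitrary):

    import threading

    from sqlalchemy import Column, Integer, MetaData, Table

    metadata = MetaData()
    user_table = Table("user", metadata, Column("id", Integer, primary_key=True))

    # a module-level alias; its .c collection is populated lazily on first access
    user_alias = user_table.alias("u")

    def worker():
        # concurrent first access of .c previously risked observing a
        # partially populated collection
        print(user_alias.c.id)

    threads = [threading.Thread(target=worker) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()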
Fixes: #12302 Change-Id: I30cb07c286affce24e2d85e49f9df5b787438d86 (cherry picked from commit 3cd9a5b42f850618141ec459cffe30d0ade0f191) --- doc/build/changelog/unreleased_20/12302.rst | 10 + lib/sqlalchemy/sql/dml.py | 14 +- lib/sqlalchemy/sql/elements.py | 13 +- lib/sqlalchemy/sql/schema.py | 11 +- lib/sqlalchemy/sql/selectable.py | 192 ++++++++++++++------ test/sql/test_selectable.py | 39 ++++ 6 files changed, 222 insertions(+), 57 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12302.rst diff --git a/doc/build/changelog/unreleased_20/12302.rst b/doc/build/changelog/unreleased_20/12302.rst new file mode 100644 index 00000000000..38d45448989 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12302.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, sql + :tickets: 12302 + + Reorganized the internals by which the `.c` collection on a + :class:`.FromClause` gets generated so that it is resilient against the + collection being accessed in concurrent fashion. An example is creating a + :class:`.Alias` or :class:`.Subquery` and accessing it as a module level + variable. This impacts the Oracle dialect which uses such module-level + global alias objects but is of general use as well. diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index 0b92e38bce1..f0e6edbb560 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -23,6 +23,7 @@ from typing import Optional from typing import overload from typing import Sequence +from typing import Set from typing import Tuple from typing import Type from typing import TYPE_CHECKING @@ -42,6 +43,7 @@ from .base import _generative from .base import _select_iterables from .base import ColumnCollection +from .base import ColumnSet from .base import CompileState from .base import DialectKWArgs from .base import Executable @@ -418,10 +420,16 @@ class UpdateBase( is_dml = True def _generate_fromclause_column_proxies( - self, fromclause: FromClause + self, + fromclause: FromClause, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], ) -> None: - fromclause._columns._populate_separate_keys( - col._make_proxy(fromclause) + columns._populate_separate_keys( + col._make_proxy( + fromclause, primary_key=primary_key, foreign_keys=foreign_keys + ) for col in self._all_selected_columns if is_column_element(col) ) diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index f7d37677082..6f20d7efa0d 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -88,6 +88,7 @@ from ._typing import _InfoType from ._typing import _PropagateAttrsType from ._typing import _TypeEngineArgument + from .base import ColumnSet from .cache_key import _CacheKeyTraversalType from .cache_key import CacheKey from .compiler import Compiled @@ -1639,6 +1640,8 @@ def _make_proxy( self, selectable: FromClause, *, + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], name: Optional[str] = None, key: Optional[str] = None, name_is_truncatable: bool = False, @@ -4556,7 +4559,7 @@ def description(self) -> str: return self.name @HasMemoized.memoized_attribute - def _tq_key_label(self): + def _tq_key_label(self) -> Optional[str]: """table qualified label based on column key. 
for table-bound columns this is _; @@ -4614,6 +4617,8 @@ def _make_proxy( self, selectable: FromClause, *, + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], name: Optional[str] = None, key: Optional[str] = None, name_is_truncatable: bool = False, @@ -4794,6 +4799,8 @@ def _make_proxy( self, selectable: FromClause, *, + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], name: Optional[str] = None, compound_select_cols: Optional[Sequence[ColumnElement[Any]]] = None, **kw: Any, @@ -4806,6 +4813,8 @@ def _make_proxy( disallow_is_literal=True, name_is_truncatable=isinstance(name, _truncated_label), compound_select_cols=compound_select_cols, + primary_key=primary_key, + foreign_keys=foreign_keys, ) # there was a note here to remove this assertion, which was here @@ -5040,6 +5049,8 @@ def _make_proxy( self, selectable: FromClause, *, + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], name: Optional[str] = None, key: Optional[str] = None, name_is_truncatable: bool = False, diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 173b38c5fe5..8daa8864d2f 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -95,9 +95,11 @@ from ._typing import _InfoType from ._typing import _TextCoercedExpressionArgument from ._typing import _TypeEngineArgument + from .base import ColumnSet from .base import ReadOnlyColumnCollection from .compiler import DDLCompiler from .elements import BindParameter + from .elements import KeyedColumnElement from .functions import Function from .type_api import TypeEngine from .visitors import anon_map @@ -2617,6 +2619,8 @@ def _merge(self, other: Column[Any]) -> None: def _make_proxy( self, selectable: FromClause, + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], name: Optional[str] = None, key: Optional[str] = None, name_is_truncatable: bool = False, @@ -2686,10 +2690,13 @@ def _make_proxy( c._propagate_attrs = selectable._propagate_attrs if selectable._is_clone_of is not None: c._is_clone_of = selectable._is_clone_of.columns.get(c.key) + if self.primary_key: - selectable.primary_key.add(c) # type: ignore + primary_key.add(c) + if fk: - selectable.foreign_keys.update(fk) # type: ignore + foreign_keys.update(fk) # type: ignore + return c.key, c diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 7660a1dbc74..8aa9f41eb9f 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -240,7 +240,11 @@ def is_derived_from(self, fromclause: Optional[FromClause]) -> bool: raise NotImplementedError() def _generate_fromclause_column_proxies( - self, fromclause: FromClause + self, + fromclause: FromClause, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], ) -> None: """Populate columns into an :class:`.AliasedReturnsRows` object.""" @@ -833,10 +837,17 @@ def description(self) -> str: return getattr(self, "name", self.__class__.__name__ + " object") def _generate_fromclause_column_proxies( - self, fromclause: FromClause + self, + fromclause: FromClause, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], ) -> None: - fromclause._columns._populate_separate_keys( - col._make_proxy(fromclause) for col in self.c + columns._populate_separate_keys( + col._make_proxy( + fromclause, primary_key=primary_key, foreign_keys=foreign_keys + ) + for col in 
self.c ) @util.ro_non_memoized_property @@ -890,10 +901,30 @@ def c(self) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]: """ if "_columns" not in self.__dict__: - self._init_collections() - self._populate_column_collection() + self._setup_collections() return self._columns.as_readonly() + def _setup_collections(self) -> None: + assert "_columns" not in self.__dict__ + assert "primary_key" not in self.__dict__ + assert "foreign_keys" not in self.__dict__ + + _columns: ColumnCollection[Any, Any] = ColumnCollection() + primary_key = ColumnSet() + foreign_keys: Set[KeyedColumnElement[Any]] = set() + + self._populate_column_collection( + columns=_columns, + primary_key=primary_key, + foreign_keys=foreign_keys, + ) + + # assigning these three collections separately is not itself atomic, + # but greatly reduces the surface for problems + self._columns = _columns + self.primary_key = primary_key # type: ignore + self.foreign_keys = foreign_keys # type: ignore + @util.ro_non_memoized_property def entity_namespace(self) -> _EntityNamespace: """Return a namespace used for name-based access in SQL expressions. @@ -920,8 +951,7 @@ def primary_key(self) -> Iterable[NamedColumn[Any]]: iterable collection of :class:`_schema.Column` objects. """ - self._init_collections() - self._populate_column_collection() + self._setup_collections() return self.primary_key @util.ro_memoized_property @@ -938,8 +968,7 @@ def foreign_keys(self) -> Iterable[ForeignKey]: :attr:`_schema.Table.foreign_key_constraints` """ - self._init_collections() - self._populate_column_collection() + self._setup_collections() return self.foreign_keys def _reset_column_collection(self) -> None: @@ -963,20 +992,16 @@ def _reset_column_collection(self) -> None: def _select_iterable(self) -> _SelectIterable: return (c for c in self.c if not _never_select_column(c)) - def _init_collections(self) -> None: - assert "_columns" not in self.__dict__ - assert "primary_key" not in self.__dict__ - assert "foreign_keys" not in self.__dict__ - - self._columns = ColumnCollection() - self.primary_key = ColumnSet() # type: ignore - self.foreign_keys = set() # type: ignore - @property def _cols_populated(self) -> bool: return "_columns" in self.__dict__ - def _populate_column_collection(self) -> None: + def _populate_column_collection( + self, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], + ) -> None: """Called on subclasses to establish the .c collection. 
Each implementation has a different way of establishing @@ -1303,22 +1328,27 @@ def self_group( return FromGrouping(self) @util.preload_module("sqlalchemy.sql.util") - def _populate_column_collection(self) -> None: + def _populate_column_collection( + self, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], + ) -> None: sqlutil = util.preloaded.sql_util - columns: List[KeyedColumnElement[Any]] = [c for c in self.left.c] + [ + _columns: List[KeyedColumnElement[Any]] = [c for c in self.left.c] + [ c for c in self.right.c ] - self.primary_key.extend( # type: ignore + primary_key.extend( # type: ignore sqlutil.reduce_columns( - (c for c in columns if c.primary_key), self.onclause + (c for c in _columns if c.primary_key), self.onclause ) ) - self._columns._populate_separate_keys( - (col._tq_key_label, col) for col in columns + columns._populate_separate_keys( + (col._tq_key_label, col) for col in _columns # type: ignore ) - self.foreign_keys.update( # type: ignore - itertools.chain(*[col.foreign_keys for col in columns]) + foreign_keys.update( + itertools.chain(*[col.foreign_keys for col in _columns]) # type: ignore # noqa: E501 ) def _copy_internals( @@ -1345,7 +1375,7 @@ def _copy_internals( def replace( obj: Union[BinaryExpression[Any], ColumnClause[Any]], **kw: Any, - ) -> Optional[KeyedColumnElement[ColumnElement[Any]]]: + ) -> Optional[KeyedColumnElement[Any]]: if isinstance(obj, ColumnClause) and obj.table in new_froms: newelem = new_froms[obj.table].corresponding_column(obj) return newelem @@ -1701,8 +1731,15 @@ def _refresh_for_new_column(self, column: ColumnElement[Any]) -> None: super()._refresh_for_new_column(column) self.element._refresh_for_new_column(column) - def _populate_column_collection(self) -> None: - self.element._generate_fromclause_column_proxies(self) + def _populate_column_collection( + self, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], + ) -> None: + self.element._generate_fromclause_column_proxies( + self, columns, primary_key=primary_key, foreign_keys=foreign_keys + ) @util.ro_non_memoized_property def description(self) -> str: @@ -2142,11 +2179,26 @@ def _init( self._suffixes = _suffixes super()._init(selectable, name=name) - def _populate_column_collection(self) -> None: + def _populate_column_collection( + self, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], + ) -> None: if self._cte_alias is not None: - self._cte_alias._generate_fromclause_column_proxies(self) + self._cte_alias._generate_fromclause_column_proxies( + self, + columns, + primary_key=primary_key, + foreign_keys=foreign_keys, + ) else: - self.element._generate_fromclause_column_proxies(self) + self.element._generate_fromclause_column_proxies( + self, + columns, + primary_key=primary_key, + foreign_keys=foreign_keys, + ) def alias(self, name: Optional[str] = None, flat: bool = False) -> CTE: """Return an :class:`_expression.Alias` of this @@ -2944,9 +2996,6 @@ class FromGrouping(GroupedElement, FromClause): def __init__(self, element: FromClause): self.element = coercions.expect(roles.FromClauseRole, element) - def _init_collections(self) -> None: - pass - @util.ro_non_memoized_property def columns( self, @@ -3107,9 +3156,6 @@ def __str__(self) -> str: def _refresh_for_new_column(self, column: ColumnElement[Any]) -> None: pass - def 
_init_collections(self) -> None: - pass - @util.ro_memoized_property def description(self) -> str: return self.name @@ -3371,16 +3417,23 @@ def scalar_values(self) -> ScalarValues: """ return ScalarValues(self._column_args, self._data, self.literal_binds) - def _populate_column_collection(self) -> None: + def _populate_column_collection( + self, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], + ) -> None: for c in self._column_args: if c.table is not None and c.table is not self: - _, c = c._make_proxy(self) + _, c = c._make_proxy( + self, primary_key=primary_key, foreign_keys=foreign_keys + ) else: # if the column was used in other contexts, ensure # no memoizations of other FROM clauses. # see test_values.py -> test_auto_proxy_select_direct_col c._reset_memoizations() - self._columns.add(c) + columns.add(c) c.table = self @util.ro_non_memoized_property @@ -3496,6 +3549,9 @@ def selected_columns( def _generate_fromclause_column_proxies( self, subquery: FromClause, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], *, proxy_compound_columns: Optional[ Iterable[Sequence[ColumnElement[Any]]] @@ -3823,13 +3879,20 @@ def _ungroup(self) -> _SB: ... def _generate_fromclause_column_proxies( self, subquery: FromClause, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], *, proxy_compound_columns: Optional[ Iterable[Sequence[ColumnElement[Any]]] ] = None, ) -> None: self.element._generate_fromclause_column_proxies( - subquery, proxy_compound_columns=proxy_compound_columns + subquery, + columns, + proxy_compound_columns=proxy_compound_columns, + primary_key=primary_key, + foreign_keys=foreign_keys, ) @util.ro_non_memoized_property @@ -4513,6 +4576,9 @@ def _ensure_disambiguated_names(self) -> CompoundSelect: def _generate_fromclause_column_proxies( self, subquery: FromClause, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], *, proxy_compound_columns: Optional[ Iterable[Sequence[ColumnElement[Any]]] @@ -4553,7 +4619,11 @@ def _generate_fromclause_column_proxies( # i haven't tried to think what it means for compound nested in # compound select_0._generate_fromclause_column_proxies( - subquery, proxy_compound_columns=extra_col_iterator + subquery, + columns, + proxy_compound_columns=extra_col_iterator, + primary_key=primary_key, + foreign_keys=foreign_keys, ) def _refresh_for_new_column(self, column: ColumnElement[Any]) -> None: @@ -5771,7 +5841,7 @@ def _copy_internals( def replace( obj: Union[BinaryExpression[Any], ColumnClause[Any]], **kw: Any, - ) -> Optional[KeyedColumnElement[ColumnElement[Any]]]: + ) -> Optional[KeyedColumnElement[Any]]: if isinstance(obj, ColumnClause) and obj.table in new_froms: newelem = new_froms[obj.table].corresponding_column(obj) return newelem @@ -6428,6 +6498,9 @@ def _ensure_disambiguated_names(self) -> Select[Any]: def _generate_fromclause_column_proxies( self, subquery: FromClause, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], *, proxy_compound_columns: Optional[ Iterable[Sequence[ColumnElement[Any]]] @@ -6445,6 +6518,8 @@ def _generate_fromclause_column_proxies( name=required_label_name, name_is_truncatable=True, compound_select_cols=extra_cols, + 
primary_key=primary_key, + foreign_keys=foreign_keys, ) for ( ( @@ -6470,6 +6545,8 @@ def _generate_fromclause_column_proxies( key=proxy_key, name=required_label_name, name_is_truncatable=True, + primary_key=primary_key, + foreign_keys=foreign_keys, ) for ( required_label_name, @@ -6481,7 +6558,7 @@ def _generate_fromclause_column_proxies( if is_column_element(c) ] - subquery._columns._populate_separate_keys(prox) + columns._populate_separate_keys(prox) def _needs_parens_for_grouping(self) -> bool: return self._has_row_limiting_clause or bool( @@ -7033,6 +7110,9 @@ def bindparams( def _generate_fromclause_column_proxies( self, fromclause: FromClause, + columns: ColumnCollection[str, KeyedColumnElement[Any]], + primary_key: ColumnSet, + foreign_keys: Set[KeyedColumnElement[Any]], *, proxy_compound_columns: Optional[ Iterable[Sequence[ColumnElement[Any]]] @@ -7042,15 +7122,25 @@ def _generate_fromclause_column_proxies( assert isinstance(fromclause, Subquery) if proxy_compound_columns: - fromclause._columns._populate_separate_keys( - c._make_proxy(fromclause, compound_select_cols=extra_cols) + columns._populate_separate_keys( + c._make_proxy( + fromclause, + compound_select_cols=extra_cols, + primary_key=primary_key, + foreign_keys=foreign_keys, + ) for c, extra_cols in zip( self.column_args, proxy_compound_columns ) ) else: - fromclause._columns._populate_separate_keys( - c._make_proxy(fromclause) for c in self.column_args + columns._populate_separate_keys( + c._make_proxy( + fromclause, + primary_key=primary_key, + foreign_keys=foreign_keys, + ) + for c in self.column_args ) def _scalar_type(self) -> Union[TypeEngine[Any], Any]: diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py index 4a252930a38..6a7be981412 100644 --- a/test/sql/test_selectable.py +++ b/test/sql/test_selectable.py @@ -1,6 +1,9 @@ """Test various algorithmic properties of selectables.""" from itertools import zip_longest +import random +import threading +import time from sqlalchemy import and_ from sqlalchemy import bindparam @@ -4042,3 +4045,39 @@ def test_copy_internals_multiple_nesting(self): a3 = a2._clone() a3._copy_internals() is_(a1.corresponding_column(a3.c.c), a1.c.c) + + +class FromClauseConcurrencyTest(fixtures.TestBase): + """test for issue 12302""" + + @testing.requires.timing_intensive + def test_c_collection(self): + dictionary_meta = MetaData() + all_indexes_table = Table( + "all_indexes", + dictionary_meta, + *[Column(f"col{i}", Integer) for i in range(50)], + ) + + fails = 0 + + def use_table(): + nonlocal fails + try: + for i in range(3): + time.sleep(random.random() * 0.0001) + all_indexes.c.col35 + except: + fails += 1 + raise + + for j in range(1000): + all_indexes = all_indexes_table.alias("a_indexes") + + threads = [threading.Thread(target=use_table) for i in range(5)] + for t in threads: + t.start() + for t in threads: + t.join() + + assert not fails, "one or more runs failed" From d9d9d5316d28890774c0e66f15276735f1e030b7 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 5 Feb 2025 08:37:04 -0500 Subject: [PATCH 432/544] remove None exception in IN Fixed SQL composition bug which impacted caching where using a ``None`` value inside of an ``in_()`` expression would bypass the usual "expanded bind parameter" logic used by the IN construct, which allows proper caching to take place. 
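As a minimal illustrative sketch of the behavior described above (the column name and values are made up, and this snippet is not part of the patch), an IN expression that contains ``None`` now renders with the same single expanded bind parameter as any other list, so the compiled form and its cache key no longer depend on the literal contents::

    # illustrative only; "x" is a hypothetical Integer column
    from sqlalchemy import Integer, column

    expr = column("x", Integer).in_([1, None, 3])

    # with the fix, this prints "x IN (__[POSTCOMPILE_x_1])" rather than
    # inlining NULL directly, so statements differing only in the list
    # contents can share a cache key
    print(expr)
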
Fixes: #12314 References: #12312 Change-Id: I0d2fc4e15c73407379ba368dd4ee32660fc66259 (cherry picked from commit 79505b03b61f622615be2d2bc1434671c29b0cc5) --- doc/build/changelog/unreleased_20/12314.rst | 9 +++++++++ lib/sqlalchemy/sql/coercions.py | 2 -- test/dialect/mssql/test_compiler.py | 6 +++++- test/sql/test_compare.py | 17 +++++++++++++++++ test/sql/test_operators.py | 17 ++++++++++++++++- 5 files changed, 47 insertions(+), 4 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12314.rst diff --git a/doc/build/changelog/unreleased_20/12314.rst b/doc/build/changelog/unreleased_20/12314.rst new file mode 100644 index 00000000000..6d5e83adeba --- /dev/null +++ b/doc/build/changelog/unreleased_20/12314.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, sql + :tickets: 12314 + + Fixed SQL rendering bug which impacted caching where using a ``None`` value + inside of an ``in_()`` expression would bypass the usual "expanded bind + parameter" logic used by the IN construct, which allows proper caching to + take place. + diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index 123b5c556e1..802ce75700b 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -859,8 +859,6 @@ def _literal_coercion(self, element, *, expr, operator, **kw): else: non_literal_expressions[o] = o - elif o is None: - non_literal_expressions[o] = elements.Null() if non_literal_expressions: return elements.ClauseList( diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py index 59b13b91e0b..eb4dba0a079 100644 --- a/test/dialect/mssql/test_compiler.py +++ b/test/dialect/mssql/test_compiler.py @@ -393,7 +393,11 @@ def test_update_to_select_schema(self): "check_post_param": {}, }, ), - (lambda t: t.c.foo.in_([None]), "sometable.foo IN (NULL)", {}), + ( + lambda t: t.c.foo.in_([None]), + "sometable.foo IN (__[POSTCOMPILE_foo_1])", + {}, + ), ) def test_strict_binds(self, expr, compiled, kw): """test the 'strict' compiler binds.""" diff --git a/test/sql/test_compare.py b/test/sql/test_compare.py index 88ac3c315ed..1adfdbf14d7 100644 --- a/test/sql/test_compare.py +++ b/test/sql/test_compare.py @@ -1248,6 +1248,23 @@ def test_cache_key_object_comparators(self, lc1, lc2, lc3): is_true(c1._generate_cache_key() != c3._generate_cache_key()) is_false(c1._generate_cache_key() == c3._generate_cache_key()) + def test_in_with_none(self): + """test #12314""" + + def fixture(): + elements = list( + random_choices([1, 2, None, 3, 4], k=random.randint(1, 7)) + ) + + # slight issue. if the first element is None and not an int, + # the type of the BindParameter goes from Integer to Nulltype. 
+ # but if we set the left side to be Integer then it comes from + # that side, and the vast majority of in_() use cases come from + # a typed column expression, so this is fine + return (column("x", Integer).in_(elements),) + + self._run_cache_key_fixture(fixture, False) + def test_cache_key(self): for fixtures_, compare_values in [ (self.fixtures, True), diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py index 8afe091925a..8ef260a179f 100644 --- a/test/sql/test_operators.py +++ b/test/sql/test_operators.py @@ -2330,8 +2330,23 @@ def test_in_27(self): ) def test_in_28(self): + """revised to test #12314""" self.assert_compile( - self.table1.c.myid.in_([None]), "mytable.myid IN (NULL)" + self.table1.c.myid.in_([None]), + "mytable.myid IN (__[POSTCOMPILE_myid_1])", + ) + + @testing.combinations( + [1, 2, None, 3], + [None, None, None], + [None, 2, 3, 3], + ) + def test_in_null_combinations(self, expr): + """test #12314""" + + self.assert_compile( + self.table1.c.myid.in_(expr), + "mytable.myid IN (__[POSTCOMPILE_myid_1])", ) @testing.combinations(True, False) From 085410d4cfabd45189408f69906409163c48e89d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 6 Feb 2025 13:59:22 -0500 Subject: [PATCH 433/544] changelog edits Change-Id: I61164f4af388d8f4f157ad6afe96ccbb668587a7 (cherry picked from commit f976e7b775eda7013338800889e125937910ad35) --- doc/build/changelog/unreleased_20/12077.rst | 7 ++++--- doc/build/changelog/unreleased_20/12117.rst | 9 +++++---- doc/build/changelog/unreleased_20/12159.rst | 12 ++++++------ doc/build/changelog/unreleased_20/12285.rst | 8 ++++++-- doc/build/changelog/unreleased_20/12289.rst | 6 +++--- doc/build/changelog/unreleased_20/12302.rst | 2 +- doc/build/changelog/unreleased_20/12314.rst | 8 ++++---- doc/build/changelog/whatsnew_20.rst | 5 +++++ 8 files changed, 34 insertions(+), 23 deletions(-) diff --git a/doc/build/changelog/unreleased_20/12077.rst b/doc/build/changelog/unreleased_20/12077.rst index ac1c5a95e50..94511b172d8 100644 --- a/doc/build/changelog/unreleased_20/12077.rst +++ b/doc/build/changelog/unreleased_20/12077.rst @@ -2,6 +2,7 @@ :tags: postgresql, usecase, asyncio :tickets: 12077 - Added an additional ``shield()`` call within the connection terminate - process of the asyncpg driver, to mitigate an issue where terminate would - be prevented from completing under the anyio concurrency library. + Added an additional ``asyncio.shield()`` call within the connection + terminate process of the asyncpg driver, to mitigate an issue where + terminate would be prevented from completing under the anyio concurrency + library. diff --git a/doc/build/changelog/unreleased_20/12117.rst b/doc/build/changelog/unreleased_20/12117.rst index b4da4db1ef1..a82ddc36f8b 100644 --- a/doc/build/changelog/unreleased_20/12117.rst +++ b/doc/build/changelog/unreleased_20/12117.rst @@ -2,7 +2,8 @@ :tags: bug, dml, mariadb, mysql :tickets: 12117 - Fixed a bug where the :class:`MySQLCompiler` would not properly compile statements - where :meth:`_mysql.Insert.on_duplicate_key_update` was passed values that included - :class:`InstrumentedAttribute` as keys. - Pull request courtesy of mingyu. + Fixed a bug where the MySQL statement compiler would not properly compile + statements where :meth:`_mysql.Insert.on_duplicate_key_update` was passed + values that included ORM-mapped attributes (e.g. + :class:`InstrumentedAttribute` objects) as keys. Pull request courtesy of + mingyu. 
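A short, self-contained sketch of the usage pattern this entry refers to (the ``User`` mapping below is hypothetical and not taken from the patch)::

    from sqlalchemy.dialects import mysql
    from sqlalchemy.dialects.mysql import insert
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


    class Base(DeclarativeBase):
        pass


    class User(Base):
        __tablename__ = "user"

        id: Mapped[int] = mapped_column(primary_key=True)
        name: Mapped[str]


    stmt = insert(User).values(id=1, name="spongebob")

    # ORM-mapped attributes (InstrumentedAttribute objects) used as keys;
    # with the fix these compile the same as the string "name" or the
    # Column object would
    stmt = stmt.on_duplicate_key_update({User.name: stmt.inserted.name})

    print(stmt.compile(dialect=mysql.dialect()))
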
diff --git a/doc/build/changelog/unreleased_20/12159.rst b/doc/build/changelog/unreleased_20/12159.rst index 3babbf9db72..50496759faf 100644 --- a/doc/build/changelog/unreleased_20/12159.rst +++ b/doc/build/changelog/unreleased_20/12159.rst @@ -2,9 +2,9 @@ :tags: bug, postgresql :tickets: 12159 - Adjusted the asyncpg connection wrapper so that the asyncpg - ``.transaction()`` call sends ``None`` for isolation_level if not otherwise - set in the SQLAlchemy dialect/wrapper, thereby allowing asyncpg to make use - of the server level setting for isolation_level in the absense of a - client-level setting. Previously, this behavior of asyncpg was blocked by a - hardcoded ``read_committed``. + Adjusted the asyncpg connection wrapper so that the + ``connection.transaction()`` call sent to asyncpg sends ``None`` for + ``isolation_level`` if not otherwise set in the SQLAlchemy dialect/wrapper, + thereby allowing asyncpg to make use of the server level setting for + ``isolation_level`` in the absense of a client-level setting. Previously, + this behavior of asyncpg was blocked by a hardcoded ``read_committed``. diff --git a/doc/build/changelog/unreleased_20/12285.rst b/doc/build/changelog/unreleased_20/12285.rst index 2c1451b3608..5d815f84ca2 100644 --- a/doc/build/changelog/unreleased_20/12285.rst +++ b/doc/build/changelog/unreleased_20/12285.rst @@ -1,6 +1,10 @@ .. change:: - :tags: change, sqlite, aiosqlite, asyncio, pool + :tags: bug, sqlite, aiosqlite, asyncio, pool :tickets: 12285 - Changed default connection pool of aiosqlite from NullPool to AsyncAdaptedQueuePool for consistency with pysqlite. + Changed default connection pool used by the ``aiosqlite`` dialect + from :class:`.NullPool` to :class:`.AsyncAdaptedQueuePool`; this change + should have been made when 2.0 was first released as the ``pysqlite`` + dialect was similarly changed to use :class:`.QueuePool` as detailed + in :ref:`change_7490`. diff --git a/doc/build/changelog/unreleased_20/12289.rst b/doc/build/changelog/unreleased_20/12289.rst index 7ac111c0f50..33bc0f50a0a 100644 --- a/doc/build/changelog/unreleased_20/12289.rst +++ b/doc/build/changelog/unreleased_20/12289.rst @@ -2,7 +2,7 @@ :tags: bug, engine :tickets: 12289 - Fixed issue where creating an :class:`.Engine` using multiple calls to - :meth:`.Engine.execution_options` where a subsequent call involved certain - options such as ``isolation_level`` would lead to an internal error + Fixed event-related issue where invoking :meth:`.Engine.execution_options` + on a :class:`.Engine` multiple times while making use of event-registering + parameters such as ``isolation_level`` would lead to internal errors involving event registration. diff --git a/doc/build/changelog/unreleased_20/12302.rst b/doc/build/changelog/unreleased_20/12302.rst index 38d45448989..43c1f7fafcd 100644 --- a/doc/build/changelog/unreleased_20/12302.rst +++ b/doc/build/changelog/unreleased_20/12302.rst @@ -2,7 +2,7 @@ :tags: bug, sql :tickets: 12302 - Reorganized the internals by which the `.c` collection on a + Reorganized the internals by which the ``.c`` collection on a :class:`.FromClause` gets generated so that it is resilient against the collection being accessed in concurrent fashion. 
An example is creating a :class:`.Alias` or :class:`.Subquery` and accessing it as a module level diff --git a/doc/build/changelog/unreleased_20/12314.rst b/doc/build/changelog/unreleased_20/12314.rst index 6d5e83adeba..626c316bf92 100644 --- a/doc/build/changelog/unreleased_20/12314.rst +++ b/doc/build/changelog/unreleased_20/12314.rst @@ -2,8 +2,8 @@ :tags: bug, sql :tickets: 12314 - Fixed SQL rendering bug which impacted caching where using a ``None`` value - inside of an ``in_()`` expression would bypass the usual "expanded bind - parameter" logic used by the IN construct, which allows proper caching to - take place. + Fixed SQL composition bug which impacted caching where using a ``None`` + value inside of an ``in_()`` expression would bypass the usual "expanded + bind parameter" logic used by the IN construct, which allows proper caching + to take place. diff --git a/doc/build/changelog/whatsnew_20.rst b/doc/build/changelog/whatsnew_20.rst index 230d5893ea3..53e9b3c8eae 100644 --- a/doc/build/changelog/whatsnew_20.rst +++ b/doc/build/changelog/whatsnew_20.rst @@ -2184,6 +2184,11 @@ hold onto database connections after they are released, did in fact have a measurable negative performance impact. As always, the pool class is customizable via the :paramref:`_sa.create_engine.poolclass` parameter. +.. versionchanged:: 2.0.37 - an equivalent change is also made for the + ``aiosqlite`` dialect, using :class:`._pool.AsyncAdaptedQueuePool` instead + of :class:`._pool.NullPool`. The ``aiosqlite`` dialect was not included + in the initial change in error. + .. seealso:: :ref:`pysqlite_threading_pooling` From b24a0181529775e406c66aa15ca59aee41e291ee Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 6 Feb 2025 15:04:30 -0500 Subject: [PATCH 434/544] this is version 2.0.38 Change-Id: I784d0ba9e4afd9a7be6dac71cd04376dedbec211 (cherry picked from commit 960aade5b6ef14966f1bcf10c9a4c95f5f5a11d3) --- doc/build/changelog/whatsnew_20.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/changelog/whatsnew_20.rst b/doc/build/changelog/whatsnew_20.rst index 53e9b3c8eae..59f3273333b 100644 --- a/doc/build/changelog/whatsnew_20.rst +++ b/doc/build/changelog/whatsnew_20.rst @@ -2184,7 +2184,7 @@ hold onto database connections after they are released, did in fact have a measurable negative performance impact. As always, the pool class is customizable via the :paramref:`_sa.create_engine.poolclass` parameter. -.. versionchanged:: 2.0.37 - an equivalent change is also made for the +.. versionchanged:: 2.0.38 - an equivalent change is also made for the ``aiosqlite`` dialect, using :class:`._pool.AsyncAdaptedQueuePool` instead of :class:`._pool.NullPool`. The ``aiosqlite`` dialect was not included in the initial change in error. 
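To illustrate the new default described above (a sketch only; the database URL is arbitrary), an ``aiosqlite`` engine now pools its connections, and the previous behavior remains available by passing :paramref:`_sa.create_engine.poolclass` explicitly::

    from sqlalchemy.ext.asyncio import create_async_engine
    from sqlalchemy.pool import NullPool

    # as of 2.0.38 this engine uses AsyncAdaptedQueuePool by default
    engine = create_async_engine("sqlite+aiosqlite:///./example.db")

    # the prior connection-per-use behavior can be restored explicitly
    engine_no_pool = create_async_engine(
        "sqlite+aiosqlite:///./example.db", poolclass=NullPool
    )
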
From 23f5f3350974d9452f3b844617c31eb5b41474ae Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 6 Feb 2025 15:06:47 -0500 Subject: [PATCH 435/544] - 2.0.38 --- doc/build/changelog/changelog_20.rst | 73 ++++++++++++++++++++- doc/build/changelog/unreleased_20/12077.rst | 8 --- doc/build/changelog/unreleased_20/12117.rst | 9 --- doc/build/changelog/unreleased_20/12159.rst | 10 --- doc/build/changelog/unreleased_20/12285.rst | 10 --- doc/build/changelog/unreleased_20/12289.rst | 8 --- doc/build/changelog/unreleased_20/12302.rst | 10 --- doc/build/changelog/unreleased_20/12314.rst | 9 --- doc/build/conf.py | 4 +- 9 files changed, 74 insertions(+), 67 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/12077.rst delete mode 100644 doc/build/changelog/unreleased_20/12117.rst delete mode 100644 doc/build/changelog/unreleased_20/12159.rst delete mode 100644 doc/build/changelog/unreleased_20/12285.rst delete mode 100644 doc/build/changelog/unreleased_20/12289.rst delete mode 100644 doc/build/changelog/unreleased_20/12302.rst delete mode 100644 doc/build/changelog/unreleased_20/12314.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index d257438a20e..1da57c1a0b5 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,78 @@ .. changelog:: :version: 2.0.38 - :include_notes_from: unreleased_20 + :released: February 6, 2025 + + .. change:: + :tags: postgresql, usecase, asyncio + :tickets: 12077 + + Added an additional ``asyncio.shield()`` call within the connection + terminate process of the asyncpg driver, to mitigate an issue where + terminate would be prevented from completing under the anyio concurrency + library. + + .. change:: + :tags: bug, dml, mariadb, mysql + :tickets: 12117 + + Fixed a bug where the MySQL statement compiler would not properly compile + statements where :meth:`_mysql.Insert.on_duplicate_key_update` was passed + values that included ORM-mapped attributes (e.g. + :class:`InstrumentedAttribute` objects) as keys. Pull request courtesy of + mingyu. + + .. change:: + :tags: bug, postgresql + :tickets: 12159 + + Adjusted the asyncpg connection wrapper so that the + ``connection.transaction()`` call sent to asyncpg sends ``None`` for + ``isolation_level`` if not otherwise set in the SQLAlchemy dialect/wrapper, + thereby allowing asyncpg to make use of the server level setting for + ``isolation_level`` in the absense of a client-level setting. Previously, + this behavior of asyncpg was blocked by a hardcoded ``read_committed``. + + .. change:: + :tags: bug, sqlite, aiosqlite, asyncio, pool + :tickets: 12285 + + Changed default connection pool used by the ``aiosqlite`` dialect + from :class:`.NullPool` to :class:`.AsyncAdaptedQueuePool`; this change + should have been made when 2.0 was first released as the ``pysqlite`` + dialect was similarly changed to use :class:`.QueuePool` as detailed + in :ref:`change_7490`. + + + .. change:: + :tags: bug, engine + :tickets: 12289 + + Fixed event-related issue where invoking :meth:`.Engine.execution_options` + on a :class:`.Engine` multiple times while making use of event-registering + parameters such as ``isolation_level`` would lead to internal errors + involving event registration. + + .. change:: + :tags: bug, sql + :tickets: 12302 + + Reorganized the internals by which the ``.c`` collection on a + :class:`.FromClause` gets generated so that it is resilient against the + collection being accessed in concurrent fashion. 
An example is creating a + :class:`.Alias` or :class:`.Subquery` and accessing it as a module level + variable. This impacts the Oracle dialect which uses such module-level + global alias objects but is of general use as well. + + .. change:: + :tags: bug, sql + :tickets: 12314 + + Fixed SQL composition bug which impacted caching where using a ``None`` + value inside of an ``in_()`` expression would bypass the usual "expanded + bind parameter" logic used by the IN construct, which allows proper caching + to take place. + .. changelog:: :version: 2.0.37 diff --git a/doc/build/changelog/unreleased_20/12077.rst b/doc/build/changelog/unreleased_20/12077.rst deleted file mode 100644 index 94511b172d8..00000000000 --- a/doc/build/changelog/unreleased_20/12077.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: postgresql, usecase, asyncio - :tickets: 12077 - - Added an additional ``asyncio.shield()`` call within the connection - terminate process of the asyncpg driver, to mitigate an issue where - terminate would be prevented from completing under the anyio concurrency - library. diff --git a/doc/build/changelog/unreleased_20/12117.rst b/doc/build/changelog/unreleased_20/12117.rst deleted file mode 100644 index a82ddc36f8b..00000000000 --- a/doc/build/changelog/unreleased_20/12117.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, dml, mariadb, mysql - :tickets: 12117 - - Fixed a bug where the MySQL statement compiler would not properly compile - statements where :meth:`_mysql.Insert.on_duplicate_key_update` was passed - values that included ORM-mapped attributes (e.g. - :class:`InstrumentedAttribute` objects) as keys. Pull request courtesy of - mingyu. diff --git a/doc/build/changelog/unreleased_20/12159.rst b/doc/build/changelog/unreleased_20/12159.rst deleted file mode 100644 index 50496759faf..00000000000 --- a/doc/build/changelog/unreleased_20/12159.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, postgresql - :tickets: 12159 - - Adjusted the asyncpg connection wrapper so that the - ``connection.transaction()`` call sent to asyncpg sends ``None`` for - ``isolation_level`` if not otherwise set in the SQLAlchemy dialect/wrapper, - thereby allowing asyncpg to make use of the server level setting for - ``isolation_level`` in the absense of a client-level setting. Previously, - this behavior of asyncpg was blocked by a hardcoded ``read_committed``. diff --git a/doc/build/changelog/unreleased_20/12285.rst b/doc/build/changelog/unreleased_20/12285.rst deleted file mode 100644 index 5d815f84ca2..00000000000 --- a/doc/build/changelog/unreleased_20/12285.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, sqlite, aiosqlite, asyncio, pool - :tickets: 12285 - - Changed default connection pool used by the ``aiosqlite`` dialect - from :class:`.NullPool` to :class:`.AsyncAdaptedQueuePool`; this change - should have been made when 2.0 was first released as the ``pysqlite`` - dialect was similarly changed to use :class:`.QueuePool` as detailed - in :ref:`change_7490`. - diff --git a/doc/build/changelog/unreleased_20/12289.rst b/doc/build/changelog/unreleased_20/12289.rst deleted file mode 100644 index 33bc0f50a0a..00000000000 --- a/doc/build/changelog/unreleased_20/12289.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
change:: - :tags: bug, engine - :tickets: 12289 - - Fixed event-related issue where invoking :meth:`.Engine.execution_options` - on a :class:`.Engine` multiple times while making use of event-registering - parameters such as ``isolation_level`` would lead to internal errors - involving event registration. diff --git a/doc/build/changelog/unreleased_20/12302.rst b/doc/build/changelog/unreleased_20/12302.rst deleted file mode 100644 index 43c1f7fafcd..00000000000 --- a/doc/build/changelog/unreleased_20/12302.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 12302 - - Reorganized the internals by which the ``.c`` collection on a - :class:`.FromClause` gets generated so that it is resilient against the - collection being accessed in concurrent fashion. An example is creating a - :class:`.Alias` or :class:`.Subquery` and accessing it as a module level - variable. This impacts the Oracle dialect which uses such module-level - global alias objects but is of general use as well. diff --git a/doc/build/changelog/unreleased_20/12314.rst b/doc/build/changelog/unreleased_20/12314.rst deleted file mode 100644 index 626c316bf92..00000000000 --- a/doc/build/changelog/unreleased_20/12314.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 12314 - - Fixed SQL composition bug which impacted caching where using a ``None`` - value inside of an ``in_()`` expression would bypass the usual "expanded - bind parameter" logic used by the IN construct, which allows proper caching - to take place. - diff --git a/doc/build/conf.py b/doc/build/conf.py index 412d329e846..695f9104678 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.37" +release = "2.0.38" -release_date = "January 9, 2025" +release_date = "February 6, 2025" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 5e59923e3097778d85120285b914eab761353d04 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 6 Feb 2025 15:10:27 -0500 Subject: [PATCH 436/544] Version 2.0.39 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 1da57c1a0b5..e5e9a87d9af 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.39 + :include_notes_from: unreleased_20 + .. 
changelog:: :version: 2.0.38 :released: February 6, 2025 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index d67636cf84f..5b9c095ce9c 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.38" +__version__ = "2.0.39" def __go(lcls: Any) -> None: From 701eef33388d2eeb139a6ece3558c4fc9e4d7d6a Mon Sep 17 00:00:00 2001 From: Augustin Prolongeau Date: Fri, 7 Feb 2025 00:18:57 +0100 Subject: [PATCH 437/544] doc(reconecting_engine): fix re-raise after attempts (#12306) * doc(reconecting_engine): fix re-raise after attempts * move re-raise after connection invalidation/rollback, reword log message (cherry picked from commit 47dab9181c86e6a944411470885f6fe18a1fc15f) --- doc/build/faq/connections.rst | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/doc/build/faq/connections.rst b/doc/build/faq/connections.rst index d93a4b1af76..1f3bf1ba140 100644 --- a/doc/build/faq/connections.rst +++ b/doc/build/faq/connections.rst @@ -259,10 +259,10 @@ statement executions:: except engine.dialect.dbapi.Error as raw_dbapi_err: connection = context.root_connection if engine.dialect.is_disconnect(raw_dbapi_err, connection, cursor_obj): - if retry > num_retries: - raise engine.logger.error( - "disconnection error, retrying operation", + "disconnection error, attempt %d/%d", + retry + 1, + num_retries + 1, exc_info=True, ) connection.invalidate() @@ -275,6 +275,9 @@ statement executions:: if trans: trans.rollback() + if retry == num_retries: + raise + time.sleep(retry_interval) context.cursor = cursor_obj = connection.connection.cursor() else: From 11012fcebf37e6f9f9447d20bb8dcc20a2c8b328 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 8 Feb 2025 11:38:53 -0500 Subject: [PATCH 438/544] implement is_derived_from() for DML Fixed bug where using DML returning such as :meth:`.Insert.returning` with an ORM model that has :func:`_orm.column_property` constructs that contain subqueries would fail with an internal error. Fixes: #12326 Change-Id: I419f645769a346c229944b30ac8fd4a0efe1646d (cherry picked from commit b281402140683279c2aca2363f2acdb94929507f) --- doc/build/changelog/unreleased_20/12326.rst | 7 ++ lib/sqlalchemy/sql/dml.py | 10 ++ test/orm/dml/test_bulk_statements.py | 121 ++++++++++++++++++++ 3 files changed, 138 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/12326.rst diff --git a/doc/build/changelog/unreleased_20/12326.rst b/doc/build/changelog/unreleased_20/12326.rst new file mode 100644 index 00000000000..88e5de2f751 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12326.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, orm + :tickets: 12326 + + Fixed bug where using DML returning such as :meth:`.Insert.returning` with + an ORM model that has :func:`_orm.column_property` constructs that contain + subqueries would fail with an internal error. diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index f0e6edbb560..f5071146be2 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -695,6 +695,16 @@ def return_defaults( return self + def is_derived_from(self, fromclause: Optional[FromClause]) -> bool: + """Return ``True`` if this :class:`.ReturnsRows` is + 'derived' from the given :class:`.FromClause`. + + Since these are DMLs, we dont want such statements ever being adapted + so we return False for derives. 
+ + """ + return False + @_generative def returning( self, diff --git a/test/orm/dml/test_bulk_statements.py b/test/orm/dml/test_bulk_statements.py index 992a18947b7..6d69b2250c3 100644 --- a/test/orm/dml/test_bulk_statements.py +++ b/test/orm/dml/test_bulk_statements.py @@ -277,6 +277,86 @@ class User(decl_base): ), ) + @testing.requires.insert_returning + @testing.variation( + "insert_type", + [("values", testing.requires.multivalues_inserts), "bulk"], + ) + def test_returning_col_property( + self, decl_base, insert_type: testing.Variation + ): + """test #12326""" + + class User(ComparableEntity, decl_base): + __tablename__ = "user" + + id: Mapped[int] = mapped_column( + primary_key=True, autoincrement=False + ) + name: Mapped[str] + age: Mapped[int] + + decl_base.metadata.create_all(testing.db) + + a_alias = aliased(User) + User.colprop = column_property( + select(func.max(a_alias.age)) + .where(a_alias.id != User.id) + .scalar_subquery() + ) + + sess = fixture_session() + + if insert_type.values: + stmt = insert(User).values( + [ + dict(id=1, name="john", age=25), + dict(id=2, name="jack", age=47), + dict(id=3, name="jill", age=29), + dict(id=4, name="jane", age=37), + ], + ) + params = None + elif insert_type.bulk: + stmt = insert(User) + params = [ + dict(id=1, name="john", age=25), + dict(id=2, name="jack", age=47), + dict(id=3, name="jill", age=29), + dict(id=4, name="jane", age=37), + ] + else: + insert_type.fail() + + stmt = stmt.returning(User) + + result = sess.execute(stmt, params=params) + + # the RETURNING doesn't have the column property in it. + # so to load these, they are all lazy loaded + with self.sql_execution_asserter() as asserter: + eq_( + result.scalars().all(), + [ + User(id=1, name="john", age=25, colprop=47), + User(id=2, name="jack", age=47, colprop=37), + User(id=3, name="jill", age=29, colprop=47), + User(id=4, name="jane", age=37, colprop=47), + ], + ) + + # assert they're all lazy loaded + asserter.assert_( + *[ + CompiledSQL( + 'SELECT (SELECT max(user_1.age) AS max_1 FROM "user" ' + 'AS user_1 WHERE user_1.id != "user".id) AS anon_1 ' + 'FROM "user" WHERE "user".id = :pk_1' + ) + for i in range(4) + ] + ) + @testing.requires.insert_returning @testing.requires.returning_star @testing.variation( @@ -1080,6 +1160,47 @@ class User(decl_base): ], ) + @testing.requires.update_returning + def test_returning_col_property(self, decl_base): + """test #12326""" + + class User(ComparableEntity, decl_base): + __tablename__ = "user" + + id: Mapped[int] = mapped_column( + primary_key=True, autoincrement=False + ) + name: Mapped[str] + age: Mapped[int] + + decl_base.metadata.create_all(testing.db) + + a_alias = aliased(User) + User.colprop = column_property( + select(func.max(a_alias.age)) + .where(a_alias.id != User.id) + .scalar_subquery() + ) + + sess = fixture_session() + + sess.execute( + insert(User), + [ + dict(id=1, name="john", age=25), + dict(id=2, name="jack", age=47), + dict(id=3, name="jill", age=29), + dict(id=4, name="jane", age=37), + ], + ) + + stmt = ( + update(User).values(age=30).where(User.age == 29).returning(User) + ) + + row = sess.execute(stmt).one() + eq_(row[0], User(id=3, name="jill", age=30, colprop=47)) + class BulkDMLReturningInhTest: use_sentinel = False From 93271d202792e8aa7f17373686fb3ae496d479da Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 9 Feb 2025 18:09:21 -0500 Subject: [PATCH 439/544] only use _DMLReturningColFilter for "bulk insert", not other DML Fixed bug in ORM enabled UPDATE (and theoretically DELETE) where using a 
multi-table DML statement would not allow ORM mapped columns from mappers other than the primary UPDATE mapper to be named in the RETURNING clause; they would be omitted instead and cause a column not found exception. Fixes: #12328 Change-Id: I2223ee506eec447823a3a545eecad1a7a03364a9 (cherry picked from commit 1c7e3f9c94b2e6c441ba635a88573bc4cd88ad7d) --- doc/build/changelog/unreleased_20/12328.rst | 8 +++ lib/sqlalchemy/orm/context.py | 44 +++++++++----- lib/sqlalchemy/orm/query.py | 11 +++- test/orm/dml/test_update_delete_where.py | 65 +++++++++++++++++++++ test/requirements.py | 7 +++ 5 files changed, 118 insertions(+), 17 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12328.rst diff --git a/doc/build/changelog/unreleased_20/12328.rst b/doc/build/changelog/unreleased_20/12328.rst new file mode 100644 index 00000000000..9d9b70965e8 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12328.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, orm + :tickets: 12328 + + Fixed bug in ORM enabled UPDATE (and theoretically DELETE) where using a + multi-table DML statement would not allow ORM mapped columns from mappers + other than the primary UPDATE mapper to be named in the RETURNING clause; + they would be omitted instead and cause a column not found exception. diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index c09c03b78c2..3e32d3c9111 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -148,10 +148,11 @@ class default_load_options(Options): def __init__( self, compile_state: CompileState, - statement: Union[Select[Any], FromStatement[Any]], + statement: Union[Select[Any], FromStatement[Any], UpdateBase], user_passed_query: Union[ Select[Any], FromStatement[Any], + UpdateBase, ], params: _CoreSingleExecuteParams, session: Session, @@ -413,8 +414,8 @@ class default_compile_options(CacheableOptions): attributes: Dict[Any, Any] global_attributes: Dict[Any, Any] - statement: Union[Select[Any], FromStatement[Any]] - select_statement: Union[Select[Any], FromStatement[Any]] + statement: Union[Select[Any], FromStatement[Any], UpdateBase] + select_statement: Union[Select[Any], FromStatement[Any], UpdateBase] _entities: List[_QueryEntity] _polymorphic_adapters: Dict[_InternalEntityType, ORMAdapter] compile_options: Union[ @@ -654,8 +655,14 @@ def _create_entities_collection(cls, query, legacy): ) -class DMLReturningColFilter: - """an adapter used for the DML RETURNING case. +class _DMLBulkInsertReturningColFilter: + """an adapter used for the DML RETURNING case specifically + for ORM bulk insert (or any hypothetical DML that is splitting out a class + hierarchy among multiple DML statements....ORM bulk insert is the only + example right now) + + its main job is to limit the columns in a RETURNING to only a specific + mapped table in a hierarchy. Has a subset of the interface used by :class:`.ORMAdapter` and is used for :class:`._QueryEntity` @@ -851,14 +858,20 @@ def _get_current_adapter(self): return None def setup_dml_returning_compile_state(self, dml_mapper): - """used by BulkORMInsert (and Update / Delete?) 
to set up a handler + """used by BulkORMInsert, Update, Delete to set up a handler for RETURNING to return ORM objects and expressions """ target_mapper = self.statement._propagate_attrs.get( "plugin_subject", None ) - adapter = DMLReturningColFilter(target_mapper, dml_mapper) + + if self.statement.is_insert: + adapter = _DMLBulkInsertReturningColFilter( + target_mapper, dml_mapper + ) + else: + adapter = None if self.compile_options._is_star and (len(self._entities) != 1): raise sa_exc.CompileError( @@ -2535,7 +2548,7 @@ def setup_compile_state(self, compile_state: ORMCompileState) -> None: def setup_dml_returning_compile_state( self, compile_state: ORMCompileState, - adapter: DMLReturningColFilter, + adapter: Optional[_DMLBulkInsertReturningColFilter], ) -> None: raise NotImplementedError() @@ -2737,7 +2750,7 @@ def row_processor(self, context, result): def setup_dml_returning_compile_state( self, compile_state: ORMCompileState, - adapter: DMLReturningColFilter, + adapter: Optional[_DMLBulkInsertReturningColFilter], ) -> None: loading._setup_entity_query( compile_state, @@ -2896,7 +2909,7 @@ def setup_compile_state(self, compile_state): def setup_dml_returning_compile_state( self, compile_state: ORMCompileState, - adapter: DMLReturningColFilter, + adapter: Optional[_DMLBulkInsertReturningColFilter], ) -> None: return self.setup_compile_state(compile_state) @@ -3086,7 +3099,7 @@ def corresponds_to(self, entity): def setup_dml_returning_compile_state( self, compile_state: ORMCompileState, - adapter: DMLReturningColFilter, + adapter: Optional[_DMLBulkInsertReturningColFilter], ) -> None: return self.setup_compile_state(compile_state) @@ -3203,10 +3216,13 @@ def corresponds_to(self, entity): def setup_dml_returning_compile_state( self, compile_state: ORMCompileState, - adapter: DMLReturningColFilter, + adapter: Optional[_DMLBulkInsertReturningColFilter], ) -> None: - self._fetch_column = self.column - column = adapter(self.column, False) + + self._fetch_column = column = self.column + if adapter: + column = adapter(column, False) + if column is not None: compile_state.dedupe_columns.add(column) compile_state.primary_columns.append(column) diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index c7e1ca8ad7c..4dbb3009b39 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -134,6 +134,7 @@ from ..sql._typing import _TypedColumnClauseArgument as _TCCA from ..sql.base import CacheableOptions from ..sql.base import ExecutableOption + from ..sql.dml import UpdateBase from ..sql.elements import ColumnElement from ..sql.elements import Label from ..sql.selectable import _ForUpdateOfArgument @@ -492,7 +493,7 @@ def _get_select_statement_only(self) -> Select[_T]: return cast("Select[_T]", self.statement) @property - def statement(self) -> Union[Select[_T], FromStatement[_T]]: + def statement(self) -> Union[Select[_T], FromStatement[_T], UpdateBase]: """The full SELECT statement represented by this Query. The statement by default will not have disambiguating labels @@ -520,6 +521,8 @@ def statement(self) -> Union[Select[_T], FromStatement[_T]]: # from there, it starts to look much like Query itself won't be # passed into the execute process and won't generate its own cache # key; this will all occur in terms of the ORM-enabled Select. 
+ stmt: Union[Select[_T], FromStatement[_T], UpdateBase] + if not self._compile_options._set_base_alias: # if we don't have legacy top level aliasing features in use # then convert to a future select() directly @@ -789,7 +792,7 @@ def scalar_subquery(self) -> ScalarSelect[Any]: ) @property - def selectable(self) -> Union[Select[_T], FromStatement[_T]]: + def selectable(self) -> Union[Select[_T], FromStatement[_T], UpdateBase]: """Return the :class:`_expression.Select` object emitted by this :class:`_query.Query`. @@ -800,7 +803,9 @@ def selectable(self) -> Union[Select[_T], FromStatement[_T]]: """ return self.__clause_element__() - def __clause_element__(self) -> Union[Select[_T], FromStatement[_T]]: + def __clause_element__( + self, + ) -> Union[Select[_T], FromStatement[_T], UpdateBase]: return ( self._with_compile_options( _enable_eagerloads=False, _render_for_subquery=True diff --git a/test/orm/dml/test_update_delete_where.py b/test/orm/dml/test_update_delete_where.py index 7d06a8618cd..387ce161b86 100644 --- a/test/orm/dml/test_update_delete_where.py +++ b/test/orm/dml/test_update_delete_where.py @@ -78,6 +78,7 @@ def define_tables(cls, metadata): metadata, Column("id", Integer, primary_key=True), Column("user_id", ForeignKey("users.id")), + Column("email_address", String(50)), ) m = MetaData() @@ -118,6 +119,24 @@ def insert_data(cls, connection): ], ) + @testing.fixture + def addresses_data( + self, + ): + addresses = self.tables.addresses + + with testing.db.begin() as connection: + connection.execute( + addresses.insert(), + [ + dict(id=1, user_id=1, email_address="jo1"), + dict(id=2, user_id=1, email_address="jo2"), + dict(id=3, user_id=2, email_address="ja1"), + dict(id=4, user_id=3, email_address="ji1"), + dict(id=5, user_id=4, email_address="jan1"), + ], + ) + @classmethod def setup_mappers(cls): User = cls.classes.User @@ -1324,6 +1343,52 @@ def test_update_evaluate_w_explicit_returning(self): ), ) + @testing.requires.update_from_returning + # can't use evaluate because it can't match the col->col in the WHERE + @testing.combinations("fetch", "auto", argnames="synchronize_session") + def test_update_from_multi_returning( + self, synchronize_session, addresses_data + ): + """test #12327""" + User = self.classes.User + Address = self.classes.Address + + sess = fixture_session() + + john, jack, jill, jane = sess.query(User).order_by(User.id).all() + + with self.sql_execution_asserter() as asserter: + stmt = ( + update(User) + .where(User.id == Address.user_id) + .filter(User.age > 29) + .values({"age": User.age - 10}) + .returning( + User.id, Address.email_address, func.char_length(User.name) + ) + .execution_options(synchronize_session=synchronize_session) + ) + + rows = sess.execute(stmt).all() + eq_(set(rows), {(2, "ja1", 4), (4, "jan1", 4)}) + + # these are simple values, these are now evaluated even with + # the "fetch" strategy, new in 1.4, so there is no expiry + eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 29, 27]) + + asserter.assert_( + CompiledSQL( + "UPDATE users SET age_int=(users.age_int - %(age_int_1)s) " + "FROM addresses " + "WHERE users.id = addresses.user_id AND " + "users.age_int > %(age_int_2)s " + "RETURNING users.id, addresses.email_address, " + "char_length(users.name) AS char_length_1", + [{"age_int_1": 10, "age_int_2": 29}], + dialect="postgresql", + ), + ) + @testing.requires.update_returning @testing.combinations("update", "delete", argnames="crud_type") def test_fetch_w_explicit_returning(self, crud_type): diff --git a/test/requirements.py 
b/test/requirements.py index 67635c6554e..98a98cd74e6 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -493,6 +493,13 @@ def update_from(self): "Backend does not support UPDATE..FROM", ) + @property + def update_from_returning(self): + """Target must support UPDATE..FROM syntax where RETURNING can + return columns from the non-primary FROM clause""" + + return self.update_returning + self.update_from + skip_if("sqlite") + @property def update_from_using_alias(self): """Target must support UPDATE..FROM syntax against an alias""" From c22ad5737717b5dca3b0e52ee967ea1c6945cace Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 10 Feb 2025 15:26:24 -0500 Subject: [PATCH 440/544] reconcile #12326 and #12328 These two issues both involve ORM DML RETURNING. The looser column inclusion rules given in #12328 then included a correlated subquery column_property given in #12326, which does not work in RETURNING. so re-tighten UPDATE/DELETE with a more specific rule to cut out local mapped props that are not persisted columns, but still allow other mapped props through without blocking them. Fixes: #12326 Change-Id: I8fe7b8ab9b85907e562648433fdb3c7ba160c0d0 (cherry picked from commit e88788bb0c1fa596ab63cb787b0438213040b10a) --- lib/sqlalchemy/orm/context.py | 63 +++++++++++++++++++++++++++-------- 1 file changed, 50 insertions(+), 13 deletions(-) diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index 3e32d3c9111..5e91cdf9e14 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -655,14 +655,8 @@ def _create_entities_collection(cls, query, legacy): ) -class _DMLBulkInsertReturningColFilter: - """an adapter used for the DML RETURNING case specifically - for ORM bulk insert (or any hypothetical DML that is splitting out a class - hierarchy among multiple DML statements....ORM bulk insert is the only - example right now) - - its main job is to limit the columns in a RETURNING to only a specific - mapped table in a hierarchy. +class _DMLReturningColFilter: + """a base for an adapter used for the DML RETURNING cases Has a subset of the interface used by :class:`.ORMAdapter` and is used for :class:`._QueryEntity` @@ -696,6 +690,21 @@ def __call__(self, col, as_filter): else: return None + def adapt_check_present(self, col): + raise NotImplementedError() + + +class _DMLBulkInsertReturningColFilter(_DMLReturningColFilter): + """an adapter used for the DML RETURNING case specifically + for ORM bulk insert (or any hypothetical DML that is splitting out a class + hierarchy among multiple DML statements....ORM bulk insert is the only + example right now) + + its main job is to limit the columns in a RETURNING to only a specific + mapped table in a hierarchy. + + """ + def adapt_check_present(self, col): mapper = self.mapper prop = mapper._columntoproperty.get(col, None) @@ -704,6 +713,30 @@ def adapt_check_present(self, col): return mapper.local_table.c.corresponding_column(col) +class _DMLUpdateDeleteReturningColFilter(_DMLReturningColFilter): + """an adapter used for the DML RETURNING case specifically + for ORM enabled UPDATE/DELETE + + its main job is to limit the columns in a RETURNING to include + only direct persisted columns from the immediate selectable, not + expressions like column_property(), or to also allow columns from other + mappers for the UPDATE..FROM use case. 
+ + """ + + def adapt_check_present(self, col): + mapper = self.mapper + prop = mapper._columntoproperty.get(col, None) + if prop is not None: + # if the col is from the immediate mapper, only return a persisted + # column, not any kind of column_property expression + return mapper.persist_selectable.c.corresponding_column(col) + + # if the col is from some other mapper, just return it, assume the + # user knows what they are doing + return col + + @sql.base.CompileState.plugin_for("orm", "orm_from_statement") class ORMFromStatementCompileState(ORMCompileState): _from_obj_alias = None @@ -870,6 +903,10 @@ def setup_dml_returning_compile_state(self, dml_mapper): adapter = _DMLBulkInsertReturningColFilter( target_mapper, dml_mapper ) + elif self.statement.is_update or self.statement.is_delete: + adapter = _DMLUpdateDeleteReturningColFilter( + target_mapper, dml_mapper + ) else: adapter = None @@ -2548,7 +2585,7 @@ def setup_compile_state(self, compile_state: ORMCompileState) -> None: def setup_dml_returning_compile_state( self, compile_state: ORMCompileState, - adapter: Optional[_DMLBulkInsertReturningColFilter], + adapter: Optional[_DMLReturningColFilter], ) -> None: raise NotImplementedError() @@ -2750,7 +2787,7 @@ def row_processor(self, context, result): def setup_dml_returning_compile_state( self, compile_state: ORMCompileState, - adapter: Optional[_DMLBulkInsertReturningColFilter], + adapter: Optional[_DMLReturningColFilter], ) -> None: loading._setup_entity_query( compile_state, @@ -2909,7 +2946,7 @@ def setup_compile_state(self, compile_state): def setup_dml_returning_compile_state( self, compile_state: ORMCompileState, - adapter: Optional[_DMLBulkInsertReturningColFilter], + adapter: Optional[_DMLReturningColFilter], ) -> None: return self.setup_compile_state(compile_state) @@ -3099,7 +3136,7 @@ def corresponds_to(self, entity): def setup_dml_returning_compile_state( self, compile_state: ORMCompileState, - adapter: Optional[_DMLBulkInsertReturningColFilter], + adapter: Optional[_DMLReturningColFilter], ) -> None: return self.setup_compile_state(compile_state) @@ -3216,7 +3253,7 @@ def corresponds_to(self, entity): def setup_dml_returning_compile_state( self, compile_state: ORMCompileState, - adapter: Optional[_DMLBulkInsertReturningColFilter], + adapter: Optional[_DMLReturningColFilter], ) -> None: self._fetch_column = column = self.column From 8b5ac364323b4a29605715649b50676c22f8bef6 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 10 Feb 2025 21:44:00 +0100 Subject: [PATCH 441/544] skip 3.7 on linux arm machines Change-Id: I6c1b4750b6df662d698a51f18bdfe08a305edc93 --- .github/workflows/run-test.yaml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/run-test.yaml b/.github/workflows/run-test.yaml index ea1540edc1b..b4dea776f05 100644 --- a/.github/workflows/run-test.yaml +++ b/.github/workflows/run-test.yaml @@ -59,17 +59,20 @@ jobs: exclude: - # linux do not have x86 / arm64 python + # linux does not have x86 / arm64 python - os: "ubuntu-22.04" architecture: x86 - os: "ubuntu-22.04" architecture: arm64 - # linux-arm do not have x86 / x64 python + # linux-arm does not have x86 / x64 python - os: "ubuntu-22.04-arm" architecture: x86 - os: "ubuntu-22.04-arm" architecture: x64 - # windows des not have arm64 python + # linux-arm does not have 3.7 python + - os: "ubuntu-22.04-arm" + python-version: "3.7" + # windows does not have arm64 python - os: "windows-latest" architecture: arm64 # macos: latests uses arm macs. 
only 3.10+; no x86/x64 From b947785feacbb45ec193880cb0a98c5acd2bcb5a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 9 Feb 2025 18:30:11 -0500 Subject: [PATCH 442/544] try pytest 8.3 we've been pinned under 8.2 for unclear reasons (but likely reasons). see what 8.3 does. current pypi release is 8.3.4 Change-Id: I601335f5604a37e07fd3bb0abb99160e055dd95c (cherry picked from commit 6d78ad98d97dfd3a0917b3bccc29a655405e10a2) --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index e8229efde61..ca7177b2ece 100644 --- a/tox.ini +++ b/tox.ini @@ -50,7 +50,7 @@ install_command= python -I -m pip install --only-binary=pymssql {opts} {packages} deps= - pytest>=7.0.0,<8.2 + pytest>=7.0.0,<8.4 # tracked by https://github.com/pytest-dev/pytest-xdist/issues/907 pytest-xdist!=3.3.0 @@ -204,7 +204,7 @@ extras = [testenv:mypy] deps= - pytest>=7.0.0rc1,<8 + pytest>=7.0.0rc1,<8.4 pytest-xdist greenlet != 0.4.17 importlib_metadata; python_version < '3.8' From ed8d20d2d701a7ab661ff5d541e406ef327ecf74 Mon Sep 17 00:00:00 2001 From: allenyuchen Date: Wed, 12 Feb 2025 12:35:58 -0500 Subject: [PATCH 443/544] fix(AsyncResult): Fix scalar method error due to missing attribute Fixed bug where :meth:`_asyncio.AsyncResult.scalar`, :meth:`_asyncio.AsyncResult.scalar_one_or_none`, and :meth:`_asyncio.AsyncResult.scalar_one` would raise an ``AttributeError`` due to a missing internal attribute. Pull request courtesy Allen Ho. Fixes: #12338 Closes: #12339 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12339 Pull-request-sha: 63ba43365e9624a75e3f206e6b0f4569e3940da6 Change-Id: I44a949e4a942a080338037cd570d4b1dc0d7550d (cherry picked from commit ca092e73a254a3914fd93ca98340ba7762d4cee9) --- doc/build/changelog/unreleased_20/12338.rst | 8 ++++++++ lib/sqlalchemy/ext/asyncio/result.py | 1 + test/ext/asyncio/test_engine_py3k.py | 20 ++++++++++++++++++++ 3 files changed, 29 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/12338.rst diff --git a/doc/build/changelog/unreleased_20/12338.rst b/doc/build/changelog/unreleased_20/12338.rst new file mode 100644 index 00000000000..6a71f08d736 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12338.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, asyncio + :tickets: 12338 + + Fixed bug where :meth:`_asyncio.AsyncResult.scalar`, + :meth:`_asyncio.AsyncResult.scalar_one_or_none`, and + :meth:`_asyncio.AsyncResult.scalar_one` would raise an ``AttributeError`` + due to a missing internal attribute. Pull request courtesy Allen Ho. diff --git a/lib/sqlalchemy/ext/asyncio/result.py b/lib/sqlalchemy/ext/asyncio/result.py index c51e166d916..8003f66afe2 100644 --- a/lib/sqlalchemy/ext/asyncio/result.py +++ b/lib/sqlalchemy/ext/asyncio/result.py @@ -93,6 +93,7 @@ def __init__(self, real_result: Result[_TP]): self._metadata = real_result._metadata self._unique_filter_state = real_result._unique_filter_state + self._source_supports_scalars = real_result._source_supports_scalars self._post_creational_filter = None # BaseCursorResult pre-generates the "_row_getter". 
Use that diff --git a/test/ext/asyncio/test_engine_py3k.py b/test/ext/asyncio/test_engine_py3k.py index 227307e086f..6be408ecaea 100644 --- a/test/ext/asyncio/test_engine_py3k.py +++ b/test/ext/asyncio/test_engine_py3k.py @@ -1376,6 +1376,26 @@ async def test_cursor_close(self, async_engine, case): await conn.run_sync(lambda _: cursor.close()) + @async_test + @testing.variation("case", ["scalar_one", "scalar_one_or_none", "scalar"]) + async def test_stream_scalar(self, async_engine, case: testing.Variation): + users = self.tables.users + async with async_engine.connect() as conn: + result = await conn.stream( + select(users).limit(1).order_by(users.c.user_name) + ) + + if case.scalar_one: + u1 = await result.scalar_one() + elif case.scalar_one_or_none: + u1 = await result.scalar_one_or_none() + elif case.scalar: + u1 = await result.scalar() + else: + case.fail() + + eq_(u1, 1) + class TextSyncDBAPI(fixtures.TestBase): __requires__ = ("asyncio",) From 253b3694b7abc3b8fee82e9a83a719047885d94a Mon Sep 17 00:00:00 2001 From: Mingyu Park Date: Fri, 7 Feb 2025 14:45:26 -0500 Subject: [PATCH 444/544] Support generic types for union and union_all Support generic types for compound selects (:func:`_sql.union`, :func:`_sql.union_all`, :meth:`_sql.Select.union`, :meth:`_sql.Select.union_all`, etc) returning the type of the first select. Fixes: #11922 Closes: #12320 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12320 Pull-request-sha: f914a19f7201cec292056e900436d8c8431b9f87 Change-Id: I4fffa5d3fe93dd3a293b078360e326fea4207c5d (cherry picked from commit fc44b5078b74081b0df94cca9d21b89ed578caf3) --- doc/build/changelog/unreleased_20/11922.rst | 8 ++ .../sql/_selectable_constructors.py | 108 +++++++++++++++--- lib/sqlalchemy/sql/_typing.py | 5 +- lib/sqlalchemy/sql/selectable.py | 67 +++++------ .../plain_files/sql/common_sql_element.py | 79 +++++++++++++ 5 files changed, 218 insertions(+), 49 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11922.rst diff --git a/doc/build/changelog/unreleased_20/11922.rst b/doc/build/changelog/unreleased_20/11922.rst new file mode 100644 index 00000000000..f0e7e3d9787 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11922.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: typing, usecase + :tickets: 11922 + + Support generic types for compound selects (:func:`_sql.union`, + :func:`_sql.union_all`, :meth:`_sql.Select.union`, + :meth:`_sql.Select.union_all`, etc) returning the type of the first select. + Pull request courtesy of Mingyu Park. diff --git a/lib/sqlalchemy/sql/_selectable_constructors.py b/lib/sqlalchemy/sql/_selectable_constructors.py index 1660778c56f..69427334a32 100644 --- a/lib/sqlalchemy/sql/_selectable_constructors.py +++ b/lib/sqlalchemy/sql/_selectable_constructors.py @@ -12,7 +12,6 @@ from typing import overload from typing import Tuple from typing import TYPE_CHECKING -from typing import TypeVar from typing import Union from . 
import coercions @@ -47,6 +46,7 @@ from ._typing import _T7 from ._typing import _T8 from ._typing import _T9 + from ._typing import _TP from ._typing import _TypedColumnClauseArgument as _TCCA from .functions import Function from .selectable import CTE @@ -55,9 +55,6 @@ from .selectable import SelectBase -_T = TypeVar("_T", bound=Any) - - def alias( selectable: FromClause, name: Optional[str] = None, flat: bool = False ) -> NamedFromClause: @@ -106,9 +103,28 @@ def cte( ) +# TODO: mypy requires the _TypedSelectable overloads in all compound select +# constructors since _SelectStatementForCompoundArgument includes +# untyped args that make it return CompoundSelect[Unpack[tuple[Never, ...]]] +# pyright does not have this issue +_TypedSelectable = Union["Select[_TP]", "CompoundSelect[_TP]"] + + +@overload def except_( - *selects: _SelectStatementForCompoundArgument, -) -> CompoundSelect: + *selects: _TypedSelectable[_TP], +) -> CompoundSelect[_TP]: ... + + +@overload +def except_( + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: ... + + +def except_( + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: r"""Return an ``EXCEPT`` of multiple selectables. The returned object is an instance of @@ -121,9 +137,21 @@ def except_( return CompoundSelect._create_except(*selects) +@overload +def except_all( + *selects: _TypedSelectable[_TP], +) -> CompoundSelect[_TP]: ... + + +@overload +def except_all( + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: ... + + def except_all( - *selects: _SelectStatementForCompoundArgument, -) -> CompoundSelect: + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: r"""Return an ``EXCEPT ALL`` of multiple selectables. The returned object is an instance of @@ -181,9 +209,21 @@ def exists( return Exists(__argument) +@overload +def intersect( + *selects: _TypedSelectable[_TP], +) -> CompoundSelect[_TP]: ... + + +@overload +def intersect( + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: ... + + def intersect( - *selects: _SelectStatementForCompoundArgument, -) -> CompoundSelect: + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: r"""Return an ``INTERSECT`` of multiple selectables. The returned object is an instance of @@ -196,9 +236,21 @@ def intersect( return CompoundSelect._create_intersect(*selects) +@overload +def intersect_all( + *selects: _TypedSelectable[_TP], +) -> CompoundSelect[_TP]: ... + + +@overload def intersect_all( - *selects: _SelectStatementForCompoundArgument, -) -> CompoundSelect: + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: ... + + +def intersect_all( + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: r"""Return an ``INTERSECT ALL`` of multiple selectables. The returned object is an instance of @@ -557,9 +609,21 @@ class via the return TableSample._factory(selectable, sampling, name=name, seed=seed) +@overload +def union( + *selects: _TypedSelectable[_TP], +) -> CompoundSelect[_TP]: ... + + +@overload def union( - *selects: _SelectStatementForCompoundArgument, -) -> CompoundSelect: + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: ... + + +def union( + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: r"""Return a ``UNION`` of multiple selectables. 
The returned object is an instance of @@ -579,9 +643,21 @@ def union( return CompoundSelect._create_union(*selects) +@overload +def union_all( + *selects: _TypedSelectable[_TP], +) -> CompoundSelect[_TP]: ... + + +@overload +def union_all( + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: ... + + def union_all( - *selects: _SelectStatementForCompoundArgument, -) -> CompoundSelect: + *selects: _SelectStatementForCompoundArgument[_TP], +) -> CompoundSelect[_TP]: r"""Return a ``UNION ALL`` of multiple selectables. The returned object is an instance of diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index cf9129b479b..b1af53f7777 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -56,6 +56,7 @@ from .roles import FromClauseRole from .schema import Column from .selectable import Alias + from .selectable import CompoundSelect from .selectable import CTE from .selectable import FromClause from .selectable import Join @@ -247,7 +248,9 @@ def dialect(self) -> Dialect: ... """ _SelectStatementForCompoundArgument = Union[ - "SelectBase", roles.CompoundElementRole + "Select[_TP]", + "CompoundSelect[_TP]", + roles.CompoundElementRole, ] """SELECT statement acceptable by ``union()`` and other SQL set operations""" diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 8aa9f41eb9f..5db1e729e7a 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -47,6 +47,7 @@ from . import visitors from ._typing import _ColumnsClauseArgument from ._typing import _no_kw +from ._typing import _T from ._typing import _TP from ._typing import is_column_element from ._typing import is_select_statement @@ -101,9 +102,9 @@ from ..util.typing import Protocol from ..util.typing import Self + and_ = BooleanClauseList.and_ -_T = TypeVar("_T", bound=Any) if TYPE_CHECKING: from ._typing import _ColumnExpressionArgument @@ -286,7 +287,7 @@ class ExecutableReturnsRows(Executable, ReturnsRows): class TypedReturnsRows(ExecutableReturnsRows, Generic[_TP]): - """base for executable statements that return rows.""" + """base for a typed executable statements that return rows.""" class Selectable(ReturnsRows): @@ -2224,7 +2225,7 @@ def alias(self, name: Optional[str] = None, flat: bool = False) -> CTE: _suffixes=self._suffixes, ) - def union(self, *other: _SelectStatementForCompoundArgument) -> CTE: + def union(self, *other: _SelectStatementForCompoundArgument[Any]) -> CTE: r"""Return a new :class:`_expression.CTE` with a SQL ``UNION`` of the original CTE against the given selectables provided as positional arguments. @@ -2253,7 +2254,9 @@ def union(self, *other: _SelectStatementForCompoundArgument) -> CTE: _suffixes=self._suffixes, ) - def union_all(self, *other: _SelectStatementForCompoundArgument) -> CTE: + def union_all( + self, *other: _SelectStatementForCompoundArgument[Any] + ) -> CTE: r"""Return a new :class:`_expression.CTE` with a SQL ``UNION ALL`` of the original CTE against the given selectables provided as positional arguments. @@ -4448,7 +4451,7 @@ class _CompoundSelectKeyword(Enum): INTERSECT_ALL = "INTERSECT ALL" -class CompoundSelect(HasCompileState, GenerativeSelect, ExecutableReturnsRows): +class CompoundSelect(HasCompileState, GenerativeSelect, TypedReturnsRows[_TP]): """Forms the basis of ``UNION``, ``UNION ALL``, and other SELECT-based set operations. 
@@ -4495,7 +4498,7 @@ class CompoundSelect(HasCompileState, GenerativeSelect, ExecutableReturnsRows): def __init__( self, keyword: _CompoundSelectKeyword, - *selects: _SelectStatementForCompoundArgument, + *selects: _SelectStatementForCompoundArgument[_TP], ): self.keyword = keyword self.selects = [ @@ -4509,38 +4512,38 @@ def __init__( @classmethod def _create_union( - cls, *selects: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + cls, *selects: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: return CompoundSelect(_CompoundSelectKeyword.UNION, *selects) @classmethod def _create_union_all( - cls, *selects: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + cls, *selects: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: return CompoundSelect(_CompoundSelectKeyword.UNION_ALL, *selects) @classmethod def _create_except( - cls, *selects: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + cls, *selects: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: return CompoundSelect(_CompoundSelectKeyword.EXCEPT, *selects) @classmethod def _create_except_all( - cls, *selects: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + cls, *selects: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: return CompoundSelect(_CompoundSelectKeyword.EXCEPT_ALL, *selects) @classmethod def _create_intersect( - cls, *selects: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + cls, *selects: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: return CompoundSelect(_CompoundSelectKeyword.INTERSECT, *selects) @classmethod def _create_intersect_all( - cls, *selects: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + cls, *selects: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: return CompoundSelect(_CompoundSelectKeyword.INTERSECT_ALL, *selects) def _scalar_type(self) -> TypeEngine[Any]: @@ -4557,7 +4560,7 @@ def is_derived_from(self, fromclause: Optional[FromClause]) -> bool: return True return False - def set_label_style(self, style: SelectLabelStyle) -> CompoundSelect: + def set_label_style(self, style: SelectLabelStyle) -> Self: if self._label_style is not style: self = self._generate() select_0 = self.selects[0].set_label_style(style) @@ -4565,7 +4568,7 @@ def set_label_style(self, style: SelectLabelStyle) -> CompoundSelect: return self - def _ensure_disambiguated_names(self) -> CompoundSelect: + def _ensure_disambiguated_names(self) -> Self: new_select = self.selects[0]._ensure_disambiguated_names() if new_select is not self.selects[0]: self = self._generate() @@ -6585,8 +6588,8 @@ def self_group( return SelectStatementGrouping(self) def union( - self, *other: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + self, *other: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: r"""Return a SQL ``UNION`` of this select() construct against the given selectables provided as positional arguments. @@ -6604,8 +6607,8 @@ def union( return CompoundSelect._create_union(self, *other) def union_all( - self, *other: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + self, *other: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: r"""Return a SQL ``UNION ALL`` of this select() construct against the given selectables provided as positional arguments. 
@@ -6623,8 +6626,8 @@ def union_all( return CompoundSelect._create_union_all(self, *other) def except_( - self, *other: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + self, *other: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: r"""Return a SQL ``EXCEPT`` of this select() construct against the given selectable provided as positional arguments. @@ -6639,8 +6642,8 @@ def except_( return CompoundSelect._create_except(self, *other) def except_all( - self, *other: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + self, *other: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: r"""Return a SQL ``EXCEPT ALL`` of this select() construct against the given selectables provided as positional arguments. @@ -6655,8 +6658,8 @@ def except_all( return CompoundSelect._create_except_all(self, *other) def intersect( - self, *other: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + self, *other: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: r"""Return a SQL ``INTERSECT`` of this select() construct against the given selectables provided as positional arguments. @@ -6674,8 +6677,8 @@ def intersect( return CompoundSelect._create_intersect(self, *other) def intersect_all( - self, *other: _SelectStatementForCompoundArgument - ) -> CompoundSelect: + self, *other: _SelectStatementForCompoundArgument[_TP] + ) -> CompoundSelect[_TP]: r"""Return a SQL ``INTERSECT ALL`` of this select() construct against the given selectables provided as positional arguments. diff --git a/test/typing/plain_files/sql/common_sql_element.py b/test/typing/plain_files/sql/common_sql_element.py index fb0add31d81..d5b8f883400 100644 --- a/test/typing/plain_files/sql/common_sql_element.py +++ b/test/typing/plain_files/sql/common_sql_element.py @@ -11,14 +11,21 @@ from sqlalchemy import asc from sqlalchemy import Column from sqlalchemy import column +from sqlalchemy import ColumnElement from sqlalchemy import desc +from sqlalchemy import except_ +from sqlalchemy import except_all from sqlalchemy import Integer +from sqlalchemy import intersect +from sqlalchemy import intersect_all from sqlalchemy import literal from sqlalchemy import MetaData from sqlalchemy import select from sqlalchemy import SQLColumnExpression from sqlalchemy import String from sqlalchemy import Table +from sqlalchemy import union +from sqlalchemy import union_all from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column @@ -176,3 +183,75 @@ def core_expr(email: str) -> SQLColumnExpression[bool]: literal("5"): "q", column("q"): "q", } + +# compound selects (issue #11922): + +str_col = ColumnElement[str]() +int_col = ColumnElement[int]() + +first_stmt = select(str_col, int_col) +second_stmt = select(str_col, int_col) +third_stmt = select(int_col, str_col) + +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(union(first_stmt, second_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(union_all(first_stmt, second_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(except_(first_stmt, second_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(except_all(first_stmt, second_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(intersect(first_stmt, second_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(intersect_all(first_stmt, second_stmt)) + +# EXPECTED_TYPE: Result[tuple[str, int]] 
+reveal_type(Session().execute(union(first_stmt, second_stmt))) +# EXPECTED_TYPE: Result[tuple[str, int]] +reveal_type(Session().execute(union_all(first_stmt, second_stmt))) + +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.union(second_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.union_all(second_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.except_(second_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.except_all(second_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.intersect(second_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.intersect_all(second_stmt)) + +# TODO: the following do not error because _SelectStatementForCompoundArgument +# includes untyped elements so the type checker falls back on them when +# the type does not match. Also for the standalone functions mypy +# looses the plot and returns a random type back. See TODO in the +# overloads + +# EXPECTED_TYPE: CompoundSelect[Never] +reveal_type(union(first_stmt, third_stmt)) +# EXPECTED_TYPE: CompoundSelect[Never] +reveal_type(union_all(first_stmt, third_stmt)) +# EXPECTED_TYPE: CompoundSelect[Never] +reveal_type(except_(first_stmt, third_stmt)) +# EXPECTED_TYPE: CompoundSelect[Never] +reveal_type(except_all(first_stmt, third_stmt)) +# EXPECTED_TYPE: CompoundSelect[Never] +reveal_type(intersect(first_stmt, third_stmt)) +# EXPECTED_TYPE: CompoundSelect[Never] +reveal_type(intersect_all(first_stmt, third_stmt)) + +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.union(third_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.union_all(third_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.except_(third_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.except_all(third_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.intersect(third_stmt)) +# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +reveal_type(first_stmt.intersect_all(third_stmt)) From 66f9f7d8094a309a70e17fc7027e43fbfeb2d2ec Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 13 Feb 2025 19:54:11 +0100 Subject: [PATCH 445/544] various improvements to the docs - add create table with partition examples in mysql Change-Id: Idc5c35519a0812f1d63be95c14afb9ce2b00ea93 (cherry picked from commit 9f11b63109cc4d1c5c0f268424fa83bb61460710) --- doc/build/orm/declarative_tables.rst | 2 +- lib/sqlalchemy/dialects/mysql/base.py | 25 +++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/doc/build/orm/declarative_tables.rst b/doc/build/orm/declarative_tables.rst index 2ec15e3bf58..a4b5cbfe66c 100644 --- a/doc/build/orm/declarative_tables.rst +++ b/doc/build/orm/declarative_tables.rst @@ -423,7 +423,7 @@ allow mapping database types that can support multiple Python types, such as The above example maps the union of ``list[int]`` and ``list[str]`` to the Postgresql :class:`_postgresql.JSONB` datatype, while naming a union of ``float, -str, bool`` will match to the :class:`.JSON` datatype. An equivalent +str, bool`` will match to the :class:`_types.JSON` datatype. An equivalent union, stated in the :class:`_orm.Mapped` construct, will match into the corresponding entry in the type map. 
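As a condensed sketch of the union-to-type-map matching described in that doc hunk (assuming a PostgreSQL-oriented declarative base; the class and attribute names here are illustrative only, not part of the patch)::

    from typing import Union

    from sqlalchemy.dialects.postgresql import JSONB
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    json_list = Union[list[int], list[str]]


    class Base(DeclarativeBase):
        # the Union itself is the key in the type map
        type_annotation_map = {json_list: JSONB}


    class Document(Base):
        __tablename__ = "document"

        id: Mapped[int] = mapped_column(primary_key=True)
        # Mapped[json_list] matches the equivalent type_annotation_map
        # entry above, so the column is created as JSONB
        payload: Mapped[json_list]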
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 1314ee3debf..4a52d1b67a7 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -182,6 +182,31 @@ constraints, all participating ``CREATE TABLE`` statements must specify a transactional engine, which in the vast majority of cases is ``InnoDB``. +Partitioning can similarly be specified using similar options. +In the example below the create table will specify ``PARTITION_BY``, +``PARTITIONS``, ``SUBPARTITIONS`` and ``SUBPARTITION_BY``:: + + # can also use mariadb_* prefix + Table( + "testtable", + MetaData(), + Column("id", Integer(), primary_key=True, autoincrement=True), + Column("other_id", Integer(), primary_key=True, autoincrement=False), + mysql_partitions="2", + mysql_partition_by="KEY(other_id)", + mysql_subpartition_by="HASH(some_expr)", + mysql_subpartitions="2", + ) + +This will render: + +.. sourcecode:: sql + + CREATE TABLE testtable ( + id INTEGER NOT NULL AUTO_INCREMENT, + other_id INTEGER NOT NULL, + PRIMARY KEY (id, other_id) + )PARTITION BY KEY(other_id) PARTITIONS 2 SUBPARTITION BY HASH(some_expr) SUBPARTITIONS 2 Case Sensitivity and Table Reflection ------------------------------------- From 7b884a5408dc573d2917d084e5fd45f00886cd8a Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 13 Feb 2025 23:17:12 +0100 Subject: [PATCH 446/544] minor docs fixes Change-Id: I7379bc6904daac711063734d2f43aa5f6e734a0f (cherry picked from commit 13677447a3185f68f613173a23110eade050d6e8) --- doc/build/core/pooling.rst | 2 +- test/dialect/postgresql/test_query.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/core/pooling.rst b/doc/build/core/pooling.rst index 526782b0551..1a4865ba2b9 100644 --- a/doc/build/core/pooling.rst +++ b/doc/build/core/pooling.rst @@ -558,7 +558,7 @@ close these connections out. The difference between FIFO and LIFO is basically whether or not its desirable for the pool to keep a full set of connections ready to go even during idle periods:: - engine = create_engine("postgreql://", pool_use_lifo=True, pool_pre_ping=True) + engine = create_engine("postgresql://", pool_use_lifo=True, pool_pre_ping=True) Above, we also make use of the :paramref:`_sa.create_engine.pool_pre_ping` flag so that connections which are closed from the server side are gracefully diff --git a/test/dialect/postgresql/test_query.py b/test/dialect/postgresql/test_query.py index 9198fb96aea..f8bb9dbc79d 100644 --- a/test/dialect/postgresql/test_query.py +++ b/test/dialect/postgresql/test_query.py @@ -1242,7 +1242,7 @@ def test_tuple_containment(self, connection): class ExtractTest(fixtures.TablesTest): """The rationale behind this test is that for many years we've had a system of embedding type casts into the expressions rendered by visit_extract() - on the postgreql platform. The reason for this cast is not clear. + on the postgresql platform. The reason for this cast is not clear. So here we try to produce a wide range of cases to ensure that these casts are not needed; see [ticket:2740]. 
From dae5c618f378db2f10b9f4c332071319e909c69b Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 17 Feb 2025 21:11:50 +0100 Subject: [PATCH 447/544] Include status in the Pool docs Change-Id: I0a4bfc10f4cd0b7dbd3bf49e0575048b622fa4e8 (cherry picked from commit 890d5873397577865f5012319cdb4db9f793f98c) --- lib/sqlalchemy/pool/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/sqlalchemy/pool/base.py b/lib/sqlalchemy/pool/base.py index 34d02254392..32fdc414a74 100644 --- a/lib/sqlalchemy/pool/base.py +++ b/lib/sqlalchemy/pool/base.py @@ -468,6 +468,7 @@ def _do_return_conn(self, record: ConnectionPoolEntry) -> None: raise NotImplementedError() def status(self) -> str: + """Returns a brief description of the state of this pool.""" raise NotImplementedError() From 1e2e25c03d60e723d6de37366580f644e2c42c73 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 18 Feb 2025 10:20:32 -0500 Subject: [PATCH 448/544] apply _propagate_attrs in _construct_for_list Fixed issue where the "is ORM" flag of a :func:`.select` or other ORM statement would not be propagated to the ORM :class:`.Session` based on a multi-part operator expression alone, e.g. such as ``Cls.attr + Cls.attr + Cls.attr`` or similar, leading to ORM behaviors not taking place for such statements. Fixes: #12357 Change-Id: I61130eeb3c7a32c1830731fd9ad4eb99a64abf7d (cherry picked from commit d0873ec7735f8238d74b860d6a8a85d55b2dbd1d) --- doc/build/changelog/unreleased_20/12357.rst | 9 +++++++++ lib/sqlalchemy/sql/elements.py | 4 ++++ test/orm/test_core_compilation.py | 8 ++++++++ 3 files changed, 21 insertions(+) create mode 100644 doc/build/changelog/unreleased_20/12357.rst diff --git a/doc/build/changelog/unreleased_20/12357.rst b/doc/build/changelog/unreleased_20/12357.rst new file mode 100644 index 00000000000..79fd888ba32 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12357.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, orm + :tickets: 12357 + + Fixed issue where the "is ORM" flag of a :func:`.select` or other ORM + statement would not be propagated to the ORM :class:`.Session` based on a + multi-part operator expression alone, e.g. such as ``Cls.attr + Cls.attr + + Cls.attr`` or similar, leading to ORM behaviors not taking place for such + statements. 
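A minimal sketch of the scenario addressed above, assuming a mapped class ``User`` with an integer ``id`` attribute and an already-configured ``engine`` (both names illustrative)::

    from sqlalchemy import select
    from sqlalchemy.orm import Session

    # a multi-part operator expression built only from mapped attributes
    stmt = select(User.id + User.id + User.id)

    with Session(engine) as session:
        # the expression produced by the chained "+" now carries the ORM
        # propagation flag, so the Session applies ORM-level execution
        # behavior to this statement
        session.execute(stmt)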
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 6f20d7efa0d..fde503aaf9b 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -2977,6 +2977,10 @@ def _construct_for_list( self.clauses = clauses self.operator = operator self.type = type_ + for c in clauses: + if c._propagate_attrs: + self._propagate_attrs = c._propagate_attrs + break return self def _negate(self) -> Any: diff --git a/test/orm/test_core_compilation.py b/test/orm/test_core_compilation.py index 6af9185836b..a961962d916 100644 --- a/test/orm/test_core_compilation.py +++ b/test/orm/test_core_compilation.py @@ -368,6 +368,14 @@ class PropagateAttrsTest(QueryTest): def propagate_cases(): return testing.combinations( (lambda: select(1), False), + (lambda User: select(User.id), True), + (lambda User: select(User.id + User.id), True), + (lambda User: select(User.id + User.id + User.id), True), + (lambda User: select(sum([User.id] * 10, User.id)), True), # type: ignore # noqa: E501 + ( + lambda User: select(literal_column("3") + User.id + User.id), + True, + ), (lambda User: select(func.count(User.id)), True), ( lambda User: select(1).select_from(select(User).subquery()), From b2724f74213a274d24ed23096575cfce0dfb5b8e Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 20 Feb 2025 12:50:25 -0500 Subject: [PATCH 449/544] check that two CTEs aren't just annotated forms of the same thing Fixed issue where using :func:`_orm.aliased` around a :class:`.CTE` construct could cause inappropriate "duplicate CTE" errors in cases where that aliased construct appeared multiple times in a single statement. Fixes: #12364 Change-Id: I9625cd83e9baf5312cdc644b38951353708d3b86 (cherry picked from commit 42ddb1fd5f1e29682bcd6ccc7b835999aafec12e) --- doc/build/changelog/unreleased_20/12364.rst | 7 +++ lib/sqlalchemy/sql/compiler.py | 25 ++++++++--- test/sql/test_cte.py | 49 +++++++++++++++++++-- 3 files changed, 71 insertions(+), 10 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12364.rst diff --git a/doc/build/changelog/unreleased_20/12364.rst b/doc/build/changelog/unreleased_20/12364.rst new file mode 100644 index 00000000000..59f5d24f067 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12364.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, orm + :tickets: 12364 + + Fixed issue where using :func:`_orm.aliased` around a :class:`.CTE` + construct could cause inappropriate "duplicate CTE" errors in cases where + that aliased construct appeared multiple times in a single statement. diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 49e8ce500e8..da476849ea0 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -4074,15 +4074,28 @@ def visit_cte( del self.level_name_by_cte[existing_cte_reference_cte] else: - # if the two CTEs are deep-copy identical, consider them - # the same, **if** they are clones, that is, they came from - # the ORM or other visit method if ( - cte._is_clone_of is not None - or existing_cte._is_clone_of is not None - ) and cte.compare(existing_cte): + # if the two CTEs have the same hash, which we expect + # here means that one/both is an annotated of the other + (hash(cte) == hash(existing_cte)) + # or... + or ( + ( + # if they are clones, i.e. 
they came from the ORM + # or some other visit method + cte._is_clone_of is not None + or existing_cte._is_clone_of is not None + ) + # and are deep-copy identical + and cte.compare(existing_cte) + ) + ): + # then consider these two CTEs the same is_new_cte = False else: + # otherwise these are two CTEs that either will render + # differently, or were indicated separately by the user, + # with the same name raise exc.CompileError( "Multiple, unrelated CTEs found with " "the same name: %r" % cte_name diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py index 383f2adaabd..d0ecc38c86f 100644 --- a/test/sql/test_cte.py +++ b/test/sql/test_cte.py @@ -8,6 +8,7 @@ from sqlalchemy import testing from sqlalchemy import text from sqlalchemy import true +from sqlalchemy import union_all from sqlalchemy import update from sqlalchemy.dialects import mssql from sqlalchemy.engine import default @@ -492,16 +493,22 @@ def test_recursive_union_alias_four(self): ) @testing.combinations(True, False, argnames="identical") - @testing.combinations(True, False, argnames="use_clone") - def test_conflicting_names(self, identical, use_clone): + @testing.variation("clone_type", ["none", "clone", "annotated"]) + def test_conflicting_names(self, identical, clone_type): """test a flat out name conflict.""" s1 = select(1) c1 = s1.cte(name="cte1", recursive=True) - if use_clone: + if clone_type.clone: c2 = c1._clone() if not identical: c2 = c2.union(select(2)) + elif clone_type.annotated: + # this does not seem to trigger the issue that was fixed in + # #12364 howver is still a worthy test + c2 = c1._annotate({"foo": "bar"}) + if not identical: + c2 = c2.union(select(2)) else: if identical: s2 = select(1) @@ -511,12 +518,20 @@ def test_conflicting_names(self, identical, use_clone): s = select(c1, c2) - if use_clone and identical: + if clone_type.clone and identical: self.assert_compile( s, 'WITH RECURSIVE cte1("1") AS (SELECT 1) SELECT cte1.1, ' 'cte1.1 AS "1_1" FROM cte1', ) + elif clone_type.annotated and identical: + # annotated seems to have a slightly different rendering + # scheme here + self.assert_compile( + s, + 'WITH RECURSIVE cte1("1") AS (SELECT 1) SELECT cte1.1, ' + 'cte1.1 AS "1__1" FROM cte1', + ) else: assert_raises_message( CompileError, @@ -524,6 +539,32 @@ def test_conflicting_names(self, identical, use_clone): s.compile, ) + @testing.variation("annotated", [True, False]) + def test_cte_w_annotated(self, annotated): + """test #12364""" + + A = table("a", column("i"), column("j")) + B = table("b", column("i"), column("j")) + + a = select(A).where(A.c.i > A.c.j).cte("filtered_a") + + if annotated: + a = a._annotate({"foo": "bar"}) + + a1 = select(a.c.i, literal(1).label("j")) + b = select(B).join(a, a.c.i == B.c.i).where(B.c.j.is_not(None)) + + query = union_all(a1, b) + self.assert_compile( + query, + "WITH filtered_a AS " + "(SELECT a.i AS i, a.j AS j FROM a WHERE a.i > a.j) " + "SELECT filtered_a.i, :param_1 AS j FROM filtered_a " + "UNION ALL SELECT b.i, b.j " + "FROM b JOIN filtered_a ON filtered_a.i = b.i " + "WHERE b.j IS NOT NULL", + ) + def test_with_recursive_no_name_currently_buggy(self): s1 = select(1) c1 = s1.cte(name="cte1", recursive=True) From adfb332e2f3cf5b39435618b2c151f74d0cb5173 Mon Sep 17 00:00:00 2001 From: KingOfKaste <47917339+KingOfKaste@users.noreply.github.com> Date: Thu, 20 Feb 2025 14:31:42 -0500 Subject: [PATCH 450/544] Fix SQLite error for table with "WITHOUT ROWID" & "STRICT" Fixed issue that omitted the comma between multiple SQLite table extension clauses, currently 
``WITHOUT ROWID`` and ``STRICT``, when both options :paramref:`.Table.sqlite_with_rowid` and :paramref:`.Table.sqlite_strict` were configured at their non-default settings at the same time. Pull request courtesy david-fed. Fixes: #12368 Closes: #12369 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12369 Pull-request-sha: 3c9ceffe8279f5d961a44e6d468f21881bcbc75c Change-Id: I1a44fd2d655d0e6eaad8213a360879daca9e4f11 (cherry picked from commit 48ad8c81115bd01d733fe1a4f78c8c30d7c2abbb) --- doc/build/changelog/unreleased_20/12368.rst | 9 +++++++++ lib/sqlalchemy/dialects/sqlite/base.py | 18 ++++++++++++------ test/dialect/test_sqlite.py | 14 ++++++++++++++ 3 files changed, 35 insertions(+), 6 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12368.rst diff --git a/doc/build/changelog/unreleased_20/12368.rst b/doc/build/changelog/unreleased_20/12368.rst new file mode 100644 index 00000000000..b02f0fb0a9d --- /dev/null +++ b/doc/build/changelog/unreleased_20/12368.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, sqlite + :tickets: 12368 + + Fixed issue that omitted the comma between multiple SQLite table extension + clauses, currently ``WITH ROWID`` and ``STRICT``, when both options + :paramref:`.Table.sqlite_with_rowid` and :paramref:`.Table.sqlite_strict` + were configured at their non-default settings at the same time. Pull + request courtesy david-fed. diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index e0c0f6e8098..96b2414ccec 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -1758,12 +1758,18 @@ def visit_create_index( return text def post_create_table(self, table): - text = "" - if table.dialect_options["sqlite"]["with_rowid"] is False: - text += "\n WITHOUT ROWID" - if table.dialect_options["sqlite"]["strict"] is True: - text += "\n STRICT" - return text + table_options = [] + + if not table.dialect_options["sqlite"]["with_rowid"]: + table_options.append("WITHOUT ROWID") + + if table.dialect_options["sqlite"]["strict"]: + table_options.append("STRICT") + + if table_options: + return "\n " + ",\n ".join(table_options) + else: + return "" class SQLiteTypeCompiler(compiler.GenericTypeCompiler): diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index f73ba4025a1..819bf8aa06b 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -1153,6 +1153,20 @@ def test_create_table_strict(self): "CREATE TABLE atable (id INTEGER) STRICT", ) + def test_create_table_without_rowid_strict(self): + m = MetaData() + table = Table( + "atable", + m, + Column("id", Integer), + sqlite_with_rowid=False, + sqlite_strict=True, + ) + self.assert_compile( + schema.CreateTable(table), + "CREATE TABLE atable (id INTEGER) WITHOUT ROWID, STRICT", + ) + class OnConflictDDLTest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = sqlite.dialect() From 8686885cc78900586dfa8e1ca860e544db79c0d5 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 24 Feb 2025 12:27:50 +0100 Subject: [PATCH 451/544] fix docs typo Fixes: #12371 Change-Id: I86e6e34d407223d66b2cbcb21ec10dc292676449 (cherry picked from commit 15b1e14db21d2fa0bbc7b68e80883efb6334ad30) --- lib/sqlalchemy/sql/schema.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 8daa8864d2f..a6c24ce618d 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -2839,7 +2839,7 @@ def __init__( :param 
ondelete: Optional string. If set, emit ON DELETE when issuing DDL for this constraint. Typical values include CASCADE, - DELETE and RESTRICT. + SET NULL and RESTRICT. :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when issuing DDL for this constraint. @@ -4628,7 +4628,7 @@ def __init__( :param ondelete: Optional string. If set, emit ON DELETE when issuing DDL for this constraint. Typical values include CASCADE, - DELETE and RESTRICT. + SET NULL and RESTRICT. :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when issuing DDL for this constraint. From b6251cb996c59723a2580d2672d9cd4070048c69 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 25 Feb 2025 23:06:55 +0100 Subject: [PATCH 452/544] improve rowmapping key type the accepted keys are also orm attributes, column elements, functions etc, not only columns Change-Id: I354de9b9668bc02b8b305a3c1f065744b28f8030 (cherry picked from commit b2ee1df06b138fc9588ea312d4a477699ec9b5d0) --- lib/sqlalchemy/engine/result.py | 6 +++--- lib/sqlalchemy/orm/mapper.py | 9 +++++---- test/typing/plain_files/sql/typed_results.py | 18 ++++++++++++++++-- 3 files changed, 24 insertions(+), 9 deletions(-) diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 7411fd74f6a..3c81fc60520 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -52,11 +52,11 @@ from sqlalchemy.cyextension.resultproxy import tuplegetter as tuplegetter if typing.TYPE_CHECKING: - from ..sql.schema import Column + from ..sql.elements import SQLCoreOperations from ..sql.type_api import _ResultProcessorType -_KeyType = Union[str, "Column[Any]"] -_KeyIndexType = Union[str, "Column[Any]", int] +_KeyType = Union[str, "SQLCoreOperations[Any]"] +_KeyIndexType = Union[_KeyType, int] # is overridden in cursor using _CursorKeyMapRecType _KeyMapRecType = Any diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index 11010efbcd5..eab2be558f6 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -3440,7 +3440,7 @@ def _result_has_identity_key(self, result, adapter=None): def identity_key_from_row( self, - row: Optional[Union[Row[Any], RowMapping]], + row: Union[Row[Any], RowMapping], identity_token: Optional[Any] = None, adapter: Optional[ORMAdapter] = None, ) -> _IdentityKeyType[_O]: @@ -3459,14 +3459,15 @@ def identity_key_from_row( if adapter: pk_cols = [adapter.columns[c] for c in pk_cols] + mapping: RowMapping if hasattr(row, "_mapping"): - mapping = row._mapping # type: ignore + mapping = row._mapping else: - mapping = cast("Mapping[Any, Any]", row) + mapping = row # type: ignore[assignment] return ( self._identity_class, - tuple(mapping[column] for column in pk_cols), # type: ignore + tuple(mapping[column] for column in pk_cols), identity_token, ) diff --git a/test/typing/plain_files/sql/typed_results.py b/test/typing/plain_files/sql/typed_results.py index c7842a7e799..9ed591815af 100644 --- a/test/typing/plain_files/sql/typed_results.py +++ b/test/typing/plain_files/sql/typed_results.py @@ -9,6 +9,7 @@ from sqlalchemy import Column from sqlalchemy import column from sqlalchemy import create_engine +from sqlalchemy import func from sqlalchemy import insert from sqlalchemy import Integer from sqlalchemy import MetaData @@ -118,9 +119,22 @@ def t_result_ctxmanager() -> None: reveal_type(r4) -def t_core_mappings() -> None: +def t_mappings() -> None: r = connection.execute(select(t_user)).mappings().one() - r.get(t_user.c.id) + 
r["name"] # string + r.get(t_user.c.id) # column + + r2 = connection.execute(select(User)).mappings().one() + r2[User.id] # orm attribute + r2[User.__table__.c.id] # form clause column + + m2 = User.id * 2 + s2 = User.__table__.c.id + 2 + fn = func.abs(User.id) + r3 = connection.execute(select(m2, s2, fn)).mappings().one() + r3[m2] # col element + r3[s2] # also col element + r3[fn] # function def t_entity_varieties() -> None: From efe7c6ed64835901583f27e228f8cf265f35370f Mon Sep 17 00:00:00 2001 From: Karol Gongola Date: Wed, 26 Feb 2025 05:06:16 -0500 Subject: [PATCH 453/544] Add more `requires` to tests for easier dialect tests management ### Description I am just going through starrocks dialect tests. I have figured out that adding some requires for tests may be useful also for other dialects. So this is a proposal of adding them to sqlalchemy. Please let me know if it is aligned with your approach. ### Checklist This pull request is: - [x] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #12362 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12362 Pull-request-sha: 932d341f5f16f0c5cadc39d3a67b0f10297177ce Change-Id: If9fa9f7477040620d131dcbe087fb4b50fd08a08 (cherry picked from commit 24b86ad6e50d4a6723a45b2580f416ca981bab55) --- lib/sqlalchemy/testing/requirements.py | 6 ++++++ lib/sqlalchemy/testing/suite/test_reflection.py | 4 ++++ lib/sqlalchemy/testing/suite/test_select.py | 9 +++++++++ lib/sqlalchemy/testing/suite/test_types.py | 4 ++++ 4 files changed, 23 insertions(+) diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index bbf56a059a1..93541dca70e 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -1826,3 +1826,9 @@ def supports_bitwise_xor(self): def supports_bitwise_shift(self): """Target database supports bitwise left or right shift""" return exclusions.closed() + + @property + def like_escapes(self): + """Target backend supports custom ESCAPE characters + with LIKE comparisons""" + return exclusions.open() diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index 54d0d449a90..2837e9fe0a3 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -220,6 +220,7 @@ def test_has_table_view_schema(self, connection): class HasIndexTest(fixtures.TablesTest): __backend__ = True + __requires__ = ("index_reflection",) @classmethod def define_tables(cls, metadata): @@ -298,6 +299,7 @@ class BizarroCharacterFKResolutionTest(fixtures.TestBase): """tests for #10275""" __backend__ = True + __requires__ = ("foreign_key_constraint_reflection",) @testing.combinations( ("id",), ("(3)",), ("col%p",), ("[brack]",), argnames="columnname" @@ -474,11 +476,13 @@ def test_get_pk_constraint(self, name): assert 
insp.get_pk_constraint(name) @quote_fixtures + @testing.requires.foreign_key_constraint_reflection def test_get_foreign_keys(self, name): insp = inspect(config.db) assert insp.get_foreign_keys(name) @quote_fixtures + @testing.requires.index_reflection def test_get_indexes(self, name): insp = inspect(config.db) assert insp.get_indexes(name) diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py index 7eb5cd0055d..b9e8b11efec 100644 --- a/lib/sqlalchemy/testing/suite/test_select.py +++ b/lib/sqlalchemy/testing/suite/test_select.py @@ -1541,6 +1541,7 @@ def test_startswith_unescaped(self): col = self.tables.some_table.c.data self._test(col.startswith("ab%c"), {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}) + @testing.requires.like_escapes def test_startswith_autoescape(self): col = self.tables.some_table.c.data self._test(col.startswith("ab%c", autoescape=True), {3}) @@ -1552,10 +1553,12 @@ def test_startswith_sqlexpr(self): {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, ) + @testing.requires.like_escapes def test_startswith_escape(self): col = self.tables.some_table.c.data self._test(col.startswith("ab##c", escape="#"), {7}) + @testing.requires.like_escapes def test_startswith_autoescape_escape(self): col = self.tables.some_table.c.data self._test(col.startswith("ab%c", autoescape=True, escape="#"), {3}) @@ -1571,14 +1574,17 @@ def test_endswith_sqlexpr(self): col.endswith(literal_column("'e%fg'")), {1, 2, 3, 4, 5, 6, 7, 8, 9} ) + @testing.requires.like_escapes def test_endswith_autoescape(self): col = self.tables.some_table.c.data self._test(col.endswith("e%fg", autoescape=True), {6}) + @testing.requires.like_escapes def test_endswith_escape(self): col = self.tables.some_table.c.data self._test(col.endswith("e##fg", escape="#"), {9}) + @testing.requires.like_escapes def test_endswith_autoescape_escape(self): col = self.tables.some_table.c.data self._test(col.endswith("e%fg", autoescape=True, escape="#"), {6}) @@ -1588,14 +1594,17 @@ def test_contains_unescaped(self): col = self.tables.some_table.c.data self._test(col.contains("b%cde"), {1, 2, 3, 4, 5, 6, 7, 8, 9}) + @testing.requires.like_escapes def test_contains_autoescape(self): col = self.tables.some_table.c.data self._test(col.contains("b%cde", autoescape=True), {3}) + @testing.requires.like_escapes def test_contains_escape(self): col = self.tables.some_table.c.data self._test(col.contains("b##cde", escape="#"), {7}) + @testing.requires.like_escapes def test_contains_autoescape_escape(self): col = self.tables.some_table.c.data self._test(col.contains("b%cd", autoescape=True, escape="#"), {3}) diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py index de3cd53e345..5f1bf75d504 100644 --- a/lib/sqlalchemy/testing/suite/test_types.py +++ b/lib/sqlalchemy/testing/suite/test_types.py @@ -299,6 +299,7 @@ def test_literal_complex(self, literal_round_trip): class BinaryTest(_LiteralRoundTripFixture, fixtures.TablesTest): __backend__ = True + __requires__ = ("binary_literals",) @classmethod def define_tables(cls, metadata): @@ -1483,6 +1484,7 @@ def default(self, o): return datatype, compare_value, p_s + @testing.requires.legacy_unconditional_json_extract @_index_fixtures(False) def test_index_typed_access(self, datatype, value): data_table = self.tables.data_table @@ -1504,6 +1506,7 @@ def test_index_typed_access(self, datatype, value): eq_(roundtrip, compare_value) is_(type(roundtrip), type(compare_value)) + @testing.requires.legacy_unconditional_json_extract 
@_index_fixtures(True) def test_index_typed_comparison(self, datatype, value): data_table = self.tables.data_table @@ -1528,6 +1531,7 @@ def test_index_typed_comparison(self, datatype, value): # make sure we get a row even if value is None eq_(row, (compare_value,)) + @testing.requires.legacy_unconditional_json_extract @_index_fixtures(True) def test_path_typed_comparison(self, datatype, value): data_table = self.tables.data_table From a4207e8a4a29854c506179dd6a83def42b35f6df Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 27 Feb 2025 12:04:12 -0500 Subject: [PATCH 454/544] allow control of constraint isolation w/ add/drop constraint Added new parameters :paramref:`.AddConstraint.isolate_from_table` and :paramref:`.DropConstraint.isolate_from_table`, defaulting to True, which both document and allow to be controllable the long-standing behavior of these two constructs blocking the given constraint from being included inline within the "CREATE TABLE" sequence, under the assumption that separate add/drop directives were to be used. Fixes: #12382 Change-Id: I53c4170ccb5803f69945ba7aa3d3a143131508eb (cherry picked from commit d6f11d9030b325d5afabf87869a6e3542edda54b) --- doc/build/changelog/unreleased_20/12382.rst | 10 ++++ lib/sqlalchemy/sql/ddl.py | 66 ++++++++++++++++++--- test/sql/test_constraints.py | 36 +++++++---- 3 files changed, 94 insertions(+), 18 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12382.rst diff --git a/doc/build/changelog/unreleased_20/12382.rst b/doc/build/changelog/unreleased_20/12382.rst new file mode 100644 index 00000000000..80f46309695 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12382.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, sql + :tickets: 12382 + + Added new parameters :paramref:`.AddConstraint.isolate_from_table` and + :paramref:`.DropConstraint.isolate_from_table`, defaulting to True, which + both document and allow to be controllable the long-standing behavior of + these two constructs blocking the given constraint from being included + inline within the "CREATE TABLE" sequence, under the assumption that + separate add/drop directives were to be used. diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index 0950043bcba..81a49151cc3 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -751,11 +751,33 @@ class AddConstraint(_CreateBase): __visit_name__ = "add_constraint" - def __init__(self, element): + def __init__( + self, + element: Constraint, + *, + isolate_from_table: bool = True, + ): + """Construct a new :class:`.AddConstraint` construct. + + :param element: a :class:`.Constraint` object + + :param isolate_from_table: optional boolean, defaults to True. Has + the effect of the incoming constraint being isolated from being + included in a CREATE TABLE sequence when associated with a + :class:`.Table`. + + .. versionadded:: 2.0.39 - added + :paramref:`.AddConstraint.isolate_from_table`, defaulting + to True. Previously, the behavior of this parameter was implicitly + turned on in all cases. 
+ + """ super().__init__(element) - element._create_rule = util.portable_instancemethod( - self._create_rule_disable - ) + + if isolate_from_table: + element._create_rule = util.portable_instancemethod( + self._create_rule_disable + ) class DropConstraint(_DropBase): @@ -763,12 +785,40 @@ class DropConstraint(_DropBase): __visit_name__ = "drop_constraint" - def __init__(self, element, cascade=False, if_exists=False, **kw): + def __init__( + self, + element: Constraint, + *, + cascade: bool = False, + if_exists: bool = False, + isolate_from_table: bool = True, + **kw: Any, + ): + """Construct a new :class:`.DropConstraint` construct. + + :param element: a :class:`.Constraint` object + :param cascade: optional boolean, indicates backend-specific + "CASCADE CONSTRAINT" directive should be rendered if available + :param if_exists: optional boolean, indicates backend-specific + "IF EXISTS" directive should be rendered if available + :param isolate_from_table: optional boolean, defaults to True. Has + the effect of the incoming constraint being isolated from being + included in a CREATE TABLE sequence when associated with a + :class:`.Table`. + + .. versionadded:: 2.0.39 - added + :paramref:`.DropConstraint.isolate_from_table`, defaulting + to True. Previously, the behavior of this parameter was implicitly + turned on in all cases. + + """ self.cascade = cascade super().__init__(element, if_exists=if_exists, **kw) - element._create_rule = util.portable_instancemethod( - self._create_rule_disable - ) + + if isolate_from_table: + element._create_rule = util.portable_instancemethod( + self._create_rule_disable + ) class SetTableComment(_CreateDropBase): diff --git a/test/sql/test_constraints.py b/test/sql/test_constraints.py index 93c385ba4d7..ebd44cdcb57 100644 --- a/test/sql/test_constraints.py +++ b/test/sql/test_constraints.py @@ -1219,7 +1219,11 @@ def test_render_ck_constraint_external(self): "CHECK (a < b) DEFERRABLE INITIALLY DEFERRED", ) - def test_external_ck_constraint_cancels_internal(self): + @testing.variation("isolate", [True, False]) + @testing.variation("type_", ["add", "drop"]) + def test_external_ck_constraint_cancels_internal( + self, isolate: testing.Variation, type_: testing.Variation + ): t, t2 = self._constraint_create_fixture() constraint = CheckConstraint( @@ -1230,15 +1234,27 @@ def test_external_ck_constraint_cancels_internal(self): table=t, ) - schema.AddConstraint(constraint) - - # once we make an AddConstraint, - # inline compilation of the CONSTRAINT - # is disabled - self.assert_compile( - schema.CreateTable(t), - "CREATE TABLE tbl (a INTEGER, b INTEGER)", - ) + if type_.add: + cls = schema.AddConstraint + elif type_.drop: + cls = schema.DropConstraint + else: + type_.fail() + + if not isolate: + cls(constraint, isolate_from_table=False) + self.assert_compile( + schema.CreateTable(t), + "CREATE TABLE tbl (a INTEGER, b INTEGER, " + "CONSTRAINT my_test_constraint CHECK (a < b) " + "DEFERRABLE INITIALLY DEFERRED)", + ) + else: + cls(constraint) + self.assert_compile( + schema.CreateTable(t), + "CREATE TABLE tbl (a INTEGER, b INTEGER)", + ) def test_render_drop_constraint(self): t, t2 = self._constraint_create_fixture() From b7e0ebe1ebbe6c0f97247a0854fc9ccfd9f763b1 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 3 Mar 2025 17:01:15 -0500 Subject: [PATCH 455/544] ensure compiler is not optional in create_for_statement() this involved moving some methods around and changing the target of legacy orm/query.py calling upon this method to use an ORM-specific method 
instead Change-Id: Ib977f08e52398d0e082acf7d88abecb9908ca8b6 (cherry picked from commit d9b4d8ff3aae504402d324f3ebf0b8faff78f5dc) --- lib/sqlalchemy/engine/_processors_cy.c | 11358 ++++++++++ lib/sqlalchemy/engine/_row_cy.c | 11171 ++++++++++ lib/sqlalchemy/engine/_util_cy.c | 8853 ++++++++ lib/sqlalchemy/orm/context.py | 55 +- lib/sqlalchemy/orm/query.py | 4 +- lib/sqlalchemy/sql/_util_cy.c | 11241 ++++++++++ lib/sqlalchemy/sql/base.py | 5 +- lib/sqlalchemy/sql/elements.py | 11 +- lib/sqlalchemy/sql/selectable.py | 5 +- lib/sqlalchemy/util/_collections_cy.c | 24882 ++++++++++++++++++++++ lib/sqlalchemy/util/_immutabledict_cy.c | 15840 ++++++++++++++ test/ext/test_hybrid.py | 5 +- test/orm/test_froms.py | 4 +- 13 files changed, 83406 insertions(+), 28 deletions(-) create mode 100644 lib/sqlalchemy/engine/_processors_cy.c create mode 100644 lib/sqlalchemy/engine/_row_cy.c create mode 100644 lib/sqlalchemy/engine/_util_cy.c create mode 100644 lib/sqlalchemy/sql/_util_cy.c create mode 100644 lib/sqlalchemy/util/_collections_cy.c create mode 100644 lib/sqlalchemy/util/_immutabledict_cy.c diff --git a/lib/sqlalchemy/engine/_processors_cy.c b/lib/sqlalchemy/engine/_processors_cy.c new file mode 100644 index 00000000000..46d69c1a4d0 --- /dev/null +++ b/lib/sqlalchemy/engine/_processors_cy.c @@ -0,0 +1,11358 @@ +/* Generated by Cython 3.0.11 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "name": "sqlalchemy.engine._processors_cy", + "sources": [ + "lib/sqlalchemy/engine/_processors_cy.py" + ] + }, + "module_name": "sqlalchemy.engine._processors_cy" +} +END: Cython Metadata */ + +#ifndef PY_SSIZE_T_CLEAN +#define PY_SSIZE_T_CLEAN +#endif /* PY_SSIZE_T_CLEAN */ +#if defined(CYTHON_LIMITED_API) && 0 + #ifndef Py_LIMITED_API + #if CYTHON_LIMITED_API+0 > 0x03030000 + #define Py_LIMITED_API CYTHON_LIMITED_API + #else + #define Py_LIMITED_API 0x03030000 + #endif + #endif +#endif + +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.7+ or Python 3.3+. +#else +#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API +#define __PYX_EXTRA_ABI_MODULE_NAME "limited" +#else +#define __PYX_EXTRA_ABI_MODULE_NAME "" +#endif +#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME +#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI +#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." +#define CYTHON_HEX_VERSION 0x03000BF0 +#define CYTHON_FUTURE_DIVISION 1 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #define HAVE_LONG_LONG +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX +#if defined(GRAALVM_PYTHON) + /* For very preliminary testing purposes. Most variables are set the same as PyPy. 
+ The existence of this section does not imply that anything works or is even tested */ + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 1 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(PYPY_VERSION) + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + 
#endif + #if PY_VERSION_HEX < 0x03090000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(CYTHON_LIMITED_API) + #ifdef Py_LIMITED_API + #undef __PYX_LIMITED_VERSION_HEX + #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API + #endif + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 1 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_CLINE_IN_TRACEBACK + #define CYTHON_CLINE_IN_TRACEBACK 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 1 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #endif + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 1 + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #ifndef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef 
CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 1 + #endif + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 + #endif +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #ifndef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) + #endif + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #if PY_VERSION_HEX < 0x030400a1 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #elif 
!defined(CYTHON_USE_TP_FINALIZE) + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #if PY_VERSION_HEX < 0x030600B1 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #elif !defined(CYTHON_USE_DICT_VERSIONS) + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) + #endif + #if PY_VERSION_HEX < 0x030700A3 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #elif !defined(CYTHON_USE_EXC_INFO_STACK) + #define CYTHON_USE_EXC_INFO_STACK 1 + #endif + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 1 + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if !defined(CYTHON_VECTORCALL) +#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) +#endif +#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(maybe_unused) + #define CYTHON_UNUSED [[maybe_unused]] + #endif + #endif + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR + #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_USE_CPP_STD_MOVE + #if defined(__cplusplus) && (\ + __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) + #define CYTHON_USE_CPP_STD_MOVE 1 + #else + #define CYTHON_USE_CPP_STD_MOVE 0 + #endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned short uint16_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 
uint32_t; + #endif + #endif + #if _MSC_VER < 1300 + #ifdef _WIN64 + typedef unsigned long long __pyx_uintptr_t; + #else + typedef unsigned int __pyx_uintptr_t; + #endif + #else + #ifdef _WIN64 + typedef unsigned __int64 __pyx_uintptr_t; + #else + typedef unsigned __int32 __pyx_uintptr_t; + #endif + #endif +#else + #include + typedef uintptr_t __pyx_uintptr_t; +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif +#ifdef __cplusplus + template + struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; + #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) +#else + #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) +#endif +#if CYTHON_COMPILING_IN_PYPY == 1 + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) +#else + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) +#endif +#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_DefaultClassType PyClass_Type + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_DefaultClassType PyType_Type +#if CYTHON_COMPILING_IN_LIMITED_API + static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *exception_table = NULL; + PyObject *types_module=NULL, *code_type=NULL, *result=NULL; + #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 + PyObject *version_info; + PyObject *py_minor_version = NULL; + #endif + long minor_version = 0; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 + minor_version = 11; + #else + if (!(version_info = PySys_GetObject("version_info"))) goto end; + if 
(!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; + minor_version = PyLong_AsLong(py_minor_version); + Py_DECREF(py_minor_version); + if (minor_version == -1 && PyErr_Occurred()) goto end; + #endif + if (!(types_module = PyImport_ImportModule("types"))) goto end; + if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; + if (minor_version <= 7) { + (void)p; + result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else if (minor_version <= 10) { + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else { + if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); + } + end: + Py_XDECREF(code_type); + Py_XDECREF(exception_table); + Py_XDECREF(types_module); + if (type) { + PyErr_Restore(type, value, traceback); + } + return result; + } + #ifndef CO_OPTIMIZED + #define CO_OPTIMIZED 0x0001 + #endif + #ifndef CO_NEWLOCALS + #define CO_NEWLOCALS 0x0002 + #endif + #ifndef CO_VARARGS + #define CO_VARARGS 0x0004 + #endif + #ifndef CO_VARKEYWORDS + #define CO_VARKEYWORDS 0x0008 + #endif + #ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x0200 + #endif + #ifndef CO_GENERATOR + #define CO_GENERATOR 0x0020 + #endif + #ifndef CO_COROUTINE + #define CO_COROUTINE 0x0080 + #endif +#elif PY_VERSION_HEX >= 0x030B0000 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyCodeObject *result; + PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); + if (!empty_bytes) return NULL; + result = + #if PY_VERSION_HEX >= 0x030C0000 + PyUnstable_Code_NewWithPosOnlyArgs + #else + PyCode_NewWithPosOnlyArgs + #endif + (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); + Py_DECREF(empty_bytes); + return result; + } +#elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif +#endif +#if PY_VERSION_HEX >= 0x030900A4 || defined(Py_IS_TYPE) + #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) +#else + #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) + #define __Pyx_Py_Is(x, y) Py_Is(x, y) +#else + #define __Pyx_Py_Is(x, y) ((x) == (y)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) + #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) +#else + #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) + #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) +#else + #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) + #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) +#else + #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) +#endif +#define 
__Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) +#if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) +#else + #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) +#endif +#ifndef CO_COROUTINE + #define CO_COROUTINE 0x80 +#endif +#ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x200 +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef Py_TPFLAGS_SEQUENCE + #define Py_TPFLAGS_SEQUENCE 0 +#endif +#ifndef Py_TPFLAGS_MAPPING + #define Py_TPFLAGS_MAPPING 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #if PY_VERSION_HEX >= 0x030d00A4 + # define __Pyx_PyCFunctionFast PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords + #else + # define __Pyx_PyCFunctionFast _PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #endif +#endif +#if CYTHON_METH_FASTCALL + #define __Pyx_METH_FASTCALL METH_FASTCALL + #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast + #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords +#else + #define __Pyx_METH_FASTCALL METH_VARARGS + #define __Pyx_PyCFunction_FastCall PyCFunction + #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords +#endif +#if CYTHON_VECTORCALL + #define __pyx_vectorcallfunc vectorcallfunc + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET + #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) +#elif CYTHON_BACKPORT_VECTORCALL + typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, + size_t nargsf, PyObject *kwnames); + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) +#else + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) +#endif +#if PY_MAJOR_VERSION >= 0x030900B1 +#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) +#else +#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) +#endif +#define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) +#elif !CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) +#endif +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) +static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { + return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? 
NULL : ((PyCFunctionObject*)func)->m_self; +} +#endif +static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) { +#if CYTHON_COMPILING_IN_LIMITED_API + return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; +#else + return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +#endif +} +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) +#if __PYX_LIMITED_VERSION_HEX < 0x030900B1 + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) + typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); +#else + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) + #define __Pyx_PyCMethod PyCMethod +#endif +#ifndef METH_METHOD + #define METH_METHOD 0x200 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyThreadState_Current PyThreadState_Get() +#elif !CYTHON_FAST_THREAD_STATE + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) +{ + void *result; + result = PyModule_GetState(op); + if (!result) + Py_FatalError("Couldn't find the module state"); + return result; +} +#endif +#define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) +#else + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} 
+#endif +#if PY_MAJOR_VERSION < 3 + #if CYTHON_COMPILING_IN_PYPY + #if PYPY_VERSION_NUM < 0x07030600 + #if defined(__cplusplus) && __cplusplus >= 201402L + [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] + #elif defined(__GNUC__) || defined(__clang__) + __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) + #elif defined(_MSC_VER) + __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) + #endif + static CYTHON_INLINE int PyGILState_Check(void) { + return 0; + } + #else // PYPY_VERSION_NUM < 0x07030600 + #endif // PYPY_VERSION_NUM < 0x07030600 + #else + static CYTHON_INLINE int PyGILState_Check(void) { + PyThreadState * tstate = _PyThreadState_Current; + return tstate && (tstate == PyGILState_GetThisThreadState()); + } + #endif +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { + PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); + if (res == NULL) PyErr_Clear(); + return res; +} +#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) +#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#else +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { +#if CYTHON_COMPILING_IN_PYPY + return PyDict_GetItem(dict, name); +#else + PyDictEntry *ep; + PyDictObject *mp = (PyDictObject*) dict; + long hash = ((PyStringObject *) name)->ob_shash; + assert(hash != -1); + ep = (mp->ma_lookup)(mp, name, hash); + if (ep == NULL) { + return NULL; + } + return ep->me_value; +#endif +} +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#endif +#if CYTHON_USE_TYPE_SLOTS + #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) + #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) + #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) +#else + #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) + #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) + #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) +#else + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) +#endif +#if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 +#define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ + PyTypeObject *type = Py_TYPE((PyObject*)obj);\ + assert(__Pyx_PyType_HasFeature(type, 
Py_TPFLAGS_HEAPTYPE));\ + PyObject_GC_Del(obj);\ + Py_DECREF(type);\ +} +#else +#define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) + #define __Pyx_PyUnicode_DATA(u) ((void*)u) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) +#elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_READY(op) (0) + #else + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #endif + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) + #else + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) + #endif + #endif +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535U : 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #if !defined(PyUnicode_DecodeUnicodeEscape) + #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) + #endif + #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) + #undef PyUnicode_Contains + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) + #endif + #if !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) + #endif + #if !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) + #endif +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#ifndef PyObject_Unicode + #define PyObject_Unicode PyObject_Str +#endif +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#if CYTHON_COMPILING_IN_CPYTHON + #define __Pyx_PySequence_ListKeepNew(obj)\ + (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? 
__Pyx_NewRef(obj) : PySequence_List(obj)) +#else + #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) +#endif +#if PY_VERSION_HEX >= 0x030900A4 + #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) +#else + #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) +#else + #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) + #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) +#endif +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) +#else + static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { + PyObject *module = PyImport_AddModule(name); + Py_XINCREF(module); + return module; + } +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define __Pyx_Py3Int_Check(op) PyLong_Check(op) + #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#else + #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) + #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + 
#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) + #if !defined(_USE_MATH_DEFINES) + #define _USE_MATH_DEFINES + #endif +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + +#define __PYX_MARK_ERR_POS(f_index, lineno) \ + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } +#define __PYX_ERR(f_index, lineno, Ln_error) \ + { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } + +#ifdef CYTHON_EXTERN_C + #undef __PYX_EXTERN_C + #define __PYX_EXTERN_C CYTHON_EXTERN_C +#elif defined(__PYX_EXTERN_C) + #ifdef _MSC_VER + #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") + #else + #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead. + #endif +#else + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__sqlalchemy__engine___processors_cy +#define __PYX_HAVE_API__sqlalchemy__engine___processors_cy +/* Early includes */ +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && 
__STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s); +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char*); +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +#define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_VERSION_HEX >= 0x030C00A7 + #ifndef _PyLong_SIGN_MASK + #define _PyLong_SIGN_MASK 3 + #endif + #ifndef _PyLong_NON_SIZE_BITS + #define _PyLong_NON_SIZE_BITS 3 + #endif + #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) + #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) + #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) + #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) + #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_SignedDigitCount(x)\ + ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) + #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) + #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) + #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) + #else + #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) + #endif + typedef Py_ssize_t __Pyx_compact_pylong; + typedef size_t __Pyx_compact_upylong; + #else + #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) + #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) + #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) + #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) + #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) + #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) + #define __Pyx_PyLong_CompactValue(x)\ + ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? 
-(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0])) + typedef sdigit __Pyx_compact_pylong; + typedef digit __Pyx_compact_upylong; + #endif + #if PY_VERSION_HEX >= 0x030C00A5 + #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit) + #else + #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit) + #endif +#endif +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +#include +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = (char) c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#include +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +#if !CYTHON_USE_MODULE_STATE 
+static PyObject *__pyx_m = NULL; +#endif +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm = __FILE__; +static const char *__pyx_filename; + +/* #### Code section: filename_table ### */ + +static const char *__pyx_f[] = { + "lib/sqlalchemy/engine/_processors_cy.py", + "", +}; +/* #### Code section: utility_code_proto_before_types ### */ +/* ForceInitThreads.proto */ +#ifndef __PYX_FORCE_INIT_THREADS + #define __PYX_FORCE_INIT_THREADS 0 +#endif + +/* #### Code section: numeric_typedefs ### */ +/* #### Code section: complex_type_declarations ### */ +/* #### Code section: type_declarations ### */ + +/*--- Type declarations ---*/ +struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory; + +/* "sqlalchemy/engine/_processors_cy.py":78 + * + * @cython.cclass + * class to_decimal_processor_factory: # <<<<<<<<<<<<<< + * type_: type + * format_: str + */ +struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory { + PyObject_HEAD + PyTypeObject *type_; + PyObject *format_; +}; + +/* #### Code section: utility_code_proto ### */ + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, Py_ssize_t); + void (*DECREF)(void*, PyObject*, Py_ssize_t); + void (*GOTREF)(void*, PyObject*, Py_ssize_t); + void (*GIVEREF)(void*, PyObject*, Py_ssize_t); + void* (*SetupContext)(const char*, Py_ssize_t, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + } + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)) + #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext() +#endif + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) + #define 
__Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContextNogil() + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_Py_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; Py_XDECREF(tmp);\ + } while (0) +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* TupleAndListFromArray.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n); +static CYTHON_INLINE PyObject* __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n); +#endif + +/* IncludeStringH.proto */ +#include <string.h> + +/* BytesEquals.proto */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); + +/* UnicodeEquals.proto */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); + +/* fastcall.proto */ +#if CYTHON_AVOID_BORROWED_REFS + #define __Pyx_Arg_VARARGS(args, i) PySequence_GetItem(args, i) +#elif CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i) +#else + #define __Pyx_Arg_VARARGS(args, i) PyTuple_GetItem(args, i) +#endif +#if CYTHON_AVOID_BORROWED_REFS + #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) + #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) +#else + #define __Pyx_Arg_NewRef_VARARGS(arg) arg + #define __Pyx_Arg_XDECREF_VARARGS(arg) +#endif +#define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) +#define __Pyx_KwValues_VARARGS(args, nargs) NULL +#define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s) +#define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw) +#if CYTHON_METH_FASTCALL + #define __Pyx_Arg_FASTCALL(args, i) args[i] + #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) + #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) + static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 + CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); + #else + #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) + #endif + #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs + to have the same reference counting */ + #define __Pyx_Arg_XDECREF_FASTCALL(arg) +#else + #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS + #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS + #define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS + #define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS + #define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS + #define 
__Pyx_Arg_NewRef_FASTCALL(arg) __Pyx_Arg_NewRef_VARARGS(arg) + #define __Pyx_Arg_XDECREF_FASTCALL(arg) __Pyx_Arg_XDECREF_VARARGS(arg) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS +#define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start) +#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start) +#else +#define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop) +#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop) +#endif + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, + const char* function_name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* PyObject_Unicode.proto */ +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyObject_Unicode(obj)\ + (likely(PyUnicode_CheckExact(obj)) ? __Pyx_NewRef(obj) : PyObject_Str(obj)) +#else +#define __Pyx_PyObject_Unicode(obj)\ + (likely(PyUnicode_CheckExact(obj)) ? __Pyx_NewRef(obj) : PyObject_Unicode(obj)) +#endif + +/* pybytes_as_double.proto */ +static double __Pyx_SlowPyString_AsDouble(PyObject *obj); +static double __Pyx__PyBytes_AsDouble(PyObject *obj, const char* start, Py_ssize_t length); +static CYTHON_INLINE double __Pyx_PyBytes_AsDouble(PyObject *obj) { + char* as_c_string; + Py_ssize_t size; +#if CYTHON_ASSUME_SAFE_MACROS + as_c_string = PyBytes_AS_STRING(obj); + size = PyBytes_GET_SIZE(obj); +#else + if (PyBytes_AsStringAndSize(obj, &as_c_string, &size) < 0) { + return (double)-1; + } +#endif + return __Pyx__PyBytes_AsDouble(obj, as_c_string, size); +} +static CYTHON_INLINE double __Pyx_PyByteArray_AsDouble(PyObject *obj) { + char* as_c_string; + Py_ssize_t size; +#if CYTHON_ASSUME_SAFE_MACROS + as_c_string = PyByteArray_AS_STRING(obj); + size = PyByteArray_GET_SIZE(obj); +#else + as_c_string = PyByteArray_AsString(obj); + if (as_c_string == NULL) { + return (double)-1; + } + size = PyByteArray_Size(obj); +#endif + return __Pyx__PyBytes_AsDouble(obj, as_c_string, size); +} + +/* pyunicode_as_double.proto */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY && CYTHON_ASSUME_SAFE_MACROS +static const char* __Pyx__PyUnicode_AsDouble_Copy(const void* data, const int kind, char* buffer, Py_ssize_t start, Py_ssize_t end) { + int last_was_punctuation; + Py_ssize_t i; + last_was_punctuation = 1; + for (i=start; i <= end; i++) { + Py_UCS4 chr = PyUnicode_READ(kind, data, i); + int is_punctuation = (chr == '_') | (chr == '.'); + *buffer = (char)chr; + buffer += (chr != '_'); + if (unlikely(chr > 127)) goto parse_failure; + if (unlikely(last_was_punctuation & is_punctuation)) goto parse_failure; + last_was_punctuation = is_punctuation; + } + if (unlikely(last_was_punctuation)) goto parse_failure; + *buffer = '\0'; + return buffer; +parse_failure: + return NULL; +} +static double __Pyx__PyUnicode_AsDouble_inf_nan(const void* data, int kind, Py_ssize_t start, Py_ssize_t length) { + int matches = 1; + Py_UCS4 chr; + Py_UCS4 sign = PyUnicode_READ(kind, data, start); + int is_signed = (sign == '-') | (sign == 
'+'); + start += is_signed; + length -= is_signed; + switch (PyUnicode_READ(kind, data, start)) { + #ifdef Py_NAN + case 'n': + case 'N': + if (unlikely(length != 3)) goto parse_failure; + chr = PyUnicode_READ(kind, data, start+1); + matches &= (chr == 'a') | (chr == 'A'); + chr = PyUnicode_READ(kind, data, start+2); + matches &= (chr == 'n') | (chr == 'N'); + if (unlikely(!matches)) goto parse_failure; + return (sign == '-') ? -Py_NAN : Py_NAN; + #endif + case 'i': + case 'I': + if (unlikely(length < 3)) goto parse_failure; + chr = PyUnicode_READ(kind, data, start+1); + matches &= (chr == 'n') | (chr == 'N'); + chr = PyUnicode_READ(kind, data, start+2); + matches &= (chr == 'f') | (chr == 'F'); + if (likely(length == 3 && matches)) + return (sign == '-') ? -Py_HUGE_VAL : Py_HUGE_VAL; + if (unlikely(length != 8)) goto parse_failure; + chr = PyUnicode_READ(kind, data, start+3); + matches &= (chr == 'i') | (chr == 'I'); + chr = PyUnicode_READ(kind, data, start+4); + matches &= (chr == 'n') | (chr == 'N'); + chr = PyUnicode_READ(kind, data, start+5); + matches &= (chr == 'i') | (chr == 'I'); + chr = PyUnicode_READ(kind, data, start+6); + matches &= (chr == 't') | (chr == 'T'); + chr = PyUnicode_READ(kind, data, start+7); + matches &= (chr == 'y') | (chr == 'Y'); + if (unlikely(!matches)) goto parse_failure; + return (sign == '-') ? -Py_HUGE_VAL : Py_HUGE_VAL; + case '.': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': + break; + default: + goto parse_failure; + } + return 0.0; +parse_failure: + return -1.0; +} +static double __Pyx_PyUnicode_AsDouble_WithSpaces(PyObject *obj) { + double value; + const char *last; + char *end; + Py_ssize_t start, length = PyUnicode_GET_LENGTH(obj); + const int kind = PyUnicode_KIND(obj); + const void* data = PyUnicode_DATA(obj); + start = 0; + while (Py_UNICODE_ISSPACE(PyUnicode_READ(kind, data, start))) + start++; + while (start < length - 1 && Py_UNICODE_ISSPACE(PyUnicode_READ(kind, data, length - 1))) + length--; + length -= start; + if (unlikely(length <= 0)) goto fallback; + value = __Pyx__PyUnicode_AsDouble_inf_nan(data, kind, start, length); + if (unlikely(value == -1.0)) goto fallback; + if (value != 0.0) return value; + if (length < 40) { + char number[40]; + last = __Pyx__PyUnicode_AsDouble_Copy(data, kind, number, start, start + length); + if (unlikely(!last)) goto fallback; + value = PyOS_string_to_double(number, &end, NULL); + } else { + char *number = (char*) PyMem_Malloc((length + 1) * sizeof(char)); + if (unlikely(!number)) goto fallback; + last = __Pyx__PyUnicode_AsDouble_Copy(data, kind, number, start, start + length); + if (unlikely(!last)) { + PyMem_Free(number); + goto fallback; + } + value = PyOS_string_to_double(number, &end, NULL); + PyMem_Free(number); + } + if (likely(end == last) || (value == (double)-1 && PyErr_Occurred())) { + return value; + } +fallback: + return __Pyx_SlowPyString_AsDouble(obj); +} +#endif +static CYTHON_INLINE double __Pyx_PyUnicode_AsDouble(PyObject *obj) { +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY && CYTHON_ASSUME_SAFE_MACROS + if (unlikely(__Pyx_PyUnicode_READY(obj) == -1)) + return (double)-1; + if (likely(PyUnicode_IS_ASCII(obj))) { + const char *s; + Py_ssize_t length; + s = PyUnicode_AsUTF8AndSize(obj, &length); + return __Pyx__PyBytes_AsDouble(obj, s, length); + } + return __Pyx_PyUnicode_AsDouble_WithSpaces(obj); +#else + return __Pyx_SlowPyString_AsDouble(obj); +#endif +} + +/* pynumber_float.proto */ +static CYTHON_INLINE PyObject* 
__Pyx__PyNumber_Float(PyObject* obj); +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : __Pyx__PyNumber_Float(x)) + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#if PY_VERSION_HEX >= 0x030C00A6 +#define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) +#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? (PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL) +#else +#define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL) +#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type) +#endif +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL) +#define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6 +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* PyObjectGetAttrStrNoError.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* PyDictVersioning.proto */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define 
__PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) do {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +#define __Pyx_GetModuleGlobalNameUncached(var, name) do {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#if !CYTHON_VECTORCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif +#if !CYTHON_VECTORCALL +#if PY_VERSION_HEX >= 0x03080000 + #include "frameobject.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif + #define __Pxy_PyFrame_Initialize_Offsets() + #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) +#else + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + 
(assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif +#endif +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectFastCall.proto */ +#define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); + +/* ArgTypeTest.proto */ +#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ + ((likely(__Pyx_IS_TYPE(obj, type) | (none_allowed && (obj == Py_None)))) ? 1 :\ + __Pyx__ArgTypeTest(obj, type, name, exact)) +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); + +/* PyObjectFormatSimple.proto */ +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ + PyObject_Format(s, f)) +#elif PY_MAJOR_VERSION < 3 + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ + likely(PyString_CheckExact(s)) ? PyUnicode_FromEncodedObject(s, NULL, "strict") :\ + PyObject_Format(s, f)) +#elif CYTHON_USE_TYPE_SLOTS + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ + likely(PyLong_CheckExact(s)) ? PyLong_Type.tp_repr(s) :\ + likely(PyFloat_CheckExact(s)) ? PyFloat_Type.tp_repr(s) :\ + PyObject_Format(s, f)) +#else + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ + PyObject_Format(s, f)) +#endif + +/* JoinPyUnicode.proto */ +static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, + Py_UCS4 max_char); + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* KeywordStringCheck.proto */ +static int __Pyx_CheckKeywordStrings(PyObject *kw, const char* function_name, int kw_allowed); + +/* GetAttr3.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); + +/* RaiseUnexpectedTypeError.proto */ +static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); + +/* PySequenceContains.proto */ +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); +} + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* GetAttr.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); + +/* HasAttr.proto */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); + +/* IncludeStructmemberH.proto */ +#include <structmember.h> + +/* FixUpExtensionType.proto */ +#if CYTHON_USE_TYPE_SPECS +static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type); +#endif + +/* PyObjectCallNoArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); + +/* PyObjectGetMethod.proto */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); + +/* PyObjectCallMethod0.proto */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); + +/* ValidateBasesTuple.proto */ +#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS +static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases); +#endif + +/* PyType_Ready.proto */ +CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t); + +/* PyObject_GenericGetAttrNoDict.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr +#endif + +/* PyObject_GenericGetAttr.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr +#endif + +/* SetupReduce.proto */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_setup_reduce(PyObject* type_obj); +#endif + +/* FetchSharedCythonModule.proto */ +static PyObject *__Pyx_FetchSharedCythonABIModule(void); + +/* FetchCommonType.proto */ +#if !CYTHON_USE_TYPE_SPECS +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); +#else +static PyTypeObject* __Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases); +#endif + +/* PyMethodNew.proto */ +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { + PyObject *typesModule=NULL, *methodType=NULL, *result=NULL; + 
CYTHON_UNUSED_VAR(typ); + if (!self) + return __Pyx_NewRef(func); + typesModule = PyImport_ImportModule("types"); + if (!typesModule) return NULL; + methodType = PyObject_GetAttrString(typesModule, "MethodType"); + Py_DECREF(typesModule); + if (!methodType) return NULL; + result = PyObject_CallFunctionObjArgs(methodType, func, self, NULL); + Py_DECREF(methodType); + return result; +} +#elif PY_MAJOR_VERSION >= 3 +static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { + CYTHON_UNUSED_VAR(typ); + if (!self) + return __Pyx_NewRef(func); + return PyMethod_New(func, self); +} +#else + #define __Pyx_PyMethod_New PyMethod_New +#endif + +/* PyVectorcallFastCallDict.proto */ +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw); +#endif + +/* CythonFunctionShared.proto */ +#define __Pyx_CyFunction_USED +#define __Pyx_CYFUNCTION_STATICMETHOD 0x01 +#define __Pyx_CYFUNCTION_CLASSMETHOD 0x02 +#define __Pyx_CYFUNCTION_CCLASS 0x04 +#define __Pyx_CYFUNCTION_COROUTINE 0x08 +#define __Pyx_CyFunction_GetClosure(f)\ + (((__pyx_CyFunctionObject *) (f))->func_closure) +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_CyFunction_GetClassObj(f)\ + (((__pyx_CyFunctionObject *) (f))->func_classobj) +#else + #define __Pyx_CyFunction_GetClassObj(f)\ + ((PyObject*) ((PyCMethodObject *) (f))->mm_class) +#endif +#define __Pyx_CyFunction_SetClassObj(f, classobj)\ + __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj)) +#define __Pyx_CyFunction_Defaults(type, f)\ + ((type *)(((__pyx_CyFunctionObject *) (f))->defaults)) +#define __Pyx_CyFunction_SetDefaultsGetter(f, g)\ + ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g) +typedef struct { +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject_HEAD + PyObject *func; +#elif PY_VERSION_HEX < 0x030900B1 + PyCFunctionObject func; +#else + PyCMethodObject func; +#endif +#if CYTHON_BACKPORT_VECTORCALL + __pyx_vectorcallfunc func_vectorcall; +#endif +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API + PyObject *func_weakreflist; +#endif + PyObject *func_dict; + PyObject *func_name; + PyObject *func_qualname; + PyObject *func_doc; + PyObject *func_globals; + PyObject *func_code; + PyObject *func_closure; +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + PyObject *func_classobj; +#endif + void *defaults; + int defaults_pyobjects; + size_t defaults_size; + int flags; + PyObject *defaults_tuple; + PyObject *defaults_kwdict; + PyObject *(*defaults_getter)(PyObject *); + PyObject *func_annotations; + PyObject *func_is_coroutine; +} __pyx_CyFunctionObject; +#undef __Pyx_CyOrPyCFunction_Check +#define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType) +#define __Pyx_CyOrPyCFunction_Check(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type) +#define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType) +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc); +#undef __Pyx_IsSameCFunction +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCyOrCFunction(func, cfunc) +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj); 
+static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, + size_t size, + int pyobjects); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, + PyObject *tuple); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, + PyObject *dict); +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, + PyObject *dict); +static int __pyx_CyFunction_init(PyObject *module); +#if CYTHON_METH_FASTCALL +static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +#if CYTHON_BACKPORT_VECTORCALL +#define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall) +#else +#define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall) +#endif +#endif + +/* CythonFunction.proto */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +#if !CYTHON_COMPILING_IN_LIMITED_API +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); +#endif + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* GCCDiagnostics.proto */ +#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#define __Pyx_HAS_GCC_DIAGNOSTIC +#endif + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* FormatTypeName.proto */ +#if CYTHON_COMPILING_IN_LIMITED_API +typedef PyObject *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%U" +static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp); +#define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) +#else +typedef const char *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%.200s" +#define __Pyx_PyType_GetName(tp) ((tp)->tp_name) +#define __Pyx_DECREF_TypeName(obj) +#endif + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) +static 
CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static unsigned long __Pyx_get_runtime_version(void); +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +/* #### Code section: module_declarations ### */ + +/* Module declarations from "cython" */ + +/* Module declarations from "sqlalchemy.engine._processors_cy" */ +static PyObject *__pyx_f_10sqlalchemy_6engine_14_processors_cy___pyx_unpickle_to_decimal_processor_factory__set_state(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *, PyObject *); /*proto*/ +/* #### Code section: typeinfo ### */ +/* #### Code section: before_global_var ### */ +#define __Pyx_MODULE_NAME "sqlalchemy.engine._processors_cy" +extern int __pyx_module_is_main_sqlalchemy__engine___processors_cy; +int __pyx_module_is_main_sqlalchemy__engine___processors_cy = 0; + +/* Implementation of "sqlalchemy.engine._processors_cy" */ +/* #### Code section: global_var ### */ +/* #### Code section: string_decls ### */ +static const char __pyx_k_[] = "%."; +static const char __pyx_k_f[] = "f"; +static const char __pyx_k__3[] = "."; +static const char __pyx_k_gc[] = "gc"; +static const char __pyx_k_Any[] = "Any"; +static const char __pyx_k__19[] = "?"; +static const char __pyx_k_new[] = "__new__"; +static const char __pyx_k_bool[] = "bool"; +static const char __pyx_k_date[] = "date"; +static const char __pyx_k_dict[] = "__dict__"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_self[] = "self"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_time[] = "time"; +static const char __pyx_k_type[] = "type_"; +static const char __pyx_k_scale[] = "scale"; +static const char __pyx_k_slots[] = "__slots__"; +static const char __pyx_k_state[] = "state"; +static const char __pyx_k_value[] = "value"; +static const char __pyx_k_dict_2[] = "_dict"; +static const char __pyx_k_enable[] = "enable"; +static const char __pyx_k_format[] = "format_"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_pickle[] = "pickle"; +static const char __pyx_k_reduce[] = "__reduce__"; +static const char __pyx_k_return[] = "return"; +static const char __pyx_k_to_str[] = "to_str"; +static const char __pyx_k_typing[] = "typing"; +static const char __pyx_k_update[] = "update"; +static const 
char __pyx_k_disable[] = "disable"; +static const char __pyx_k_Optional[] = "Optional"; +static const char __pyx_k_date_cls[] = "date_cls"; +static const char __pyx_k_datetime[] = "datetime"; +static const char __pyx_k_getstate[] = "__getstate__"; +static const char __pyx_k_pyx_type[] = "__pyx_type"; +static const char __pyx_k_setstate[] = "__setstate__"; +static const char __pyx_k_time_cls[] = "time_cls"; +static const char __pyx_k_to_float[] = "to_float"; +static const char __pyx_k_isenabled[] = "isenabled"; +static const char __pyx_k_pyx_state[] = "__pyx_state"; +static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; +static const char __pyx_k_pyx_result[] = "__pyx_result"; +static const char __pyx_k_PickleError[] = "PickleError"; +static const char __pyx_k_is_compiled[] = "_is_compiled"; +static const char __pyx_k_str_to_date[] = "str_to_date"; +static const char __pyx_k_str_to_time[] = "str_to_time"; +static const char __pyx_k_Optional_str[] = "Optional[str]"; +static const char __pyx_k_datetime_cls[] = "datetime_cls"; +static const char __pyx_k_is_coroutine[] = "_is_coroutine"; +static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; +static const char __pyx_k_stringsource[] = "<stringsource>"; +static const char __pyx_k_use_setstate[] = "use_setstate"; +static const char __pyx_k_Optional_bool[] = "Optional[bool]"; +static const char __pyx_k_fromisoformat[] = "fromisoformat"; +static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; +static const char __pyx_k_Optional_float[] = "Optional[float]"; +static const char __pyx_k_int_to_boolean[] = "int_to_boolean"; +static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; +static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char __pyx_k_str_to_datetime[] = "str_to_datetime"; +static const char __pyx_k_Optional_date_cls[] = "Optional[date_cls]"; +static const char __pyx_k_Optional_time_cls[] = "Optional[time_cls]"; +static const char __pyx_k_asyncio_coroutines[] = "asyncio.coroutines"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_Optional_datetime_cls[] = "Optional[datetime_cls]"; +static const char __pyx_k_to_decimal_processor_factory[] = "to_decimal_processor_factory"; +static const char __pyx_k_pyx_unpickle_to_decimal_proces[] = "__pyx_unpickle_to_decimal_processor_factory"; +static const char __pyx_k_Incompatible_checksums_0x_x_vs_0[] = "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))"; +static const char __pyx_k_lib_sqlalchemy_engine__processor[] = "lib/sqlalchemy/engine/_processors_cy.py"; +static const char __pyx_k_sqlalchemy_engine__processors_cy[] = "sqlalchemy.engine._processors_cy"; +static const char __pyx_k_to_decimal_processor_factory___r[] = "to_decimal_processor_factory.__reduce_cython__"; +static const char __pyx_k_to_decimal_processor_factory___s[] = "to_decimal_processor_factory.__setstate_cython__"; +/* #### Code section: decls ### */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_2int_to_boolean(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_4to_str(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_6to_float(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); 
/* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_8str_to_datetime(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_10str_to_time(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_12str_to_date(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory___init__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyTypeObject *__pyx_v_type_, PyObject *__pyx_v_scale); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_2__call__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_4__reduce_cython__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_6__setstate_cython__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_14__pyx_unpickle_to_decimal_processor_factory(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_tp_new_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +/* #### Code section: late_includes ### */ +/* #### Code section: module_state ### */ +typedef struct { + PyObject *__pyx_d; + PyObject *__pyx_b; + PyObject *__pyx_cython_runtime; + PyObject *__pyx_empty_tuple; + PyObject *__pyx_empty_bytes; + PyObject *__pyx_empty_unicode; + #ifdef __Pyx_CyFunction_USED + PyTypeObject *__pyx_CyFunctionType; + #endif + #ifdef __Pyx_FusedFunction_USED + PyTypeObject *__pyx_FusedFunctionType; + #endif + #ifdef __Pyx_Generator_USED + PyTypeObject *__pyx_GeneratorType; + #endif + #ifdef __Pyx_IterableCoroutine_USED + PyTypeObject *__pyx_IterableCoroutineType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineAwaitType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineType; + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + PyObject *__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory; + #endif + PyTypeObject *__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory; + PyObject *__pyx_kp_u_; + PyObject *__pyx_n_s_Any; + PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0; + PyObject *__pyx_n_s_Optional; + PyObject *__pyx_kp_s_Optional_bool; + PyObject *__pyx_kp_s_Optional_date_cls; + PyObject *__pyx_kp_s_Optional_datetime_cls; + PyObject *__pyx_kp_s_Optional_float; + PyObject *__pyx_kp_s_Optional_str; + PyObject *__pyx_kp_s_Optional_time_cls; + PyObject *__pyx_n_s_PickleError; + PyObject *__pyx_n_s__19; + PyObject *__pyx_kp_u__3; + PyObject *__pyx_n_s_asyncio_coroutines; + PyObject *__pyx_n_s_bool; + PyObject *__pyx_n_s_cline_in_traceback; + PyObject *__pyx_n_s_date; + PyObject *__pyx_n_s_date_cls; + 
PyObject *__pyx_n_s_datetime; + PyObject *__pyx_n_s_datetime_cls; + PyObject *__pyx_n_s_dict; + PyObject *__pyx_n_s_dict_2; + PyObject *__pyx_kp_u_disable; + PyObject *__pyx_kp_u_enable; + PyObject *__pyx_n_u_f; + PyObject *__pyx_n_u_format; + PyObject *__pyx_n_s_fromisoformat; + PyObject *__pyx_kp_u_gc; + PyObject *__pyx_n_s_getstate; + PyObject *__pyx_n_s_import; + PyObject *__pyx_n_s_int_to_boolean; + PyObject *__pyx_n_s_is_compiled; + PyObject *__pyx_n_s_is_coroutine; + PyObject *__pyx_kp_u_isenabled; + PyObject *__pyx_kp_s_lib_sqlalchemy_engine__processor; + PyObject *__pyx_n_s_main; + PyObject *__pyx_n_s_name; + PyObject *__pyx_n_s_new; + PyObject *__pyx_n_s_pickle; + PyObject *__pyx_n_s_pyx_PickleError; + PyObject *__pyx_n_s_pyx_checksum; + PyObject *__pyx_n_s_pyx_result; + PyObject *__pyx_n_s_pyx_state; + PyObject *__pyx_n_s_pyx_type; + PyObject *__pyx_n_s_pyx_unpickle_to_decimal_proces; + PyObject *__pyx_n_s_reduce; + PyObject *__pyx_n_s_reduce_cython; + PyObject *__pyx_n_s_reduce_ex; + PyObject *__pyx_n_s_return; + PyObject *__pyx_n_s_scale; + PyObject *__pyx_n_s_self; + PyObject *__pyx_n_s_setstate; + PyObject *__pyx_n_s_setstate_cython; + PyObject *__pyx_n_s_slots; + PyObject *__pyx_n_s_sqlalchemy_engine__processors_cy; + PyObject *__pyx_n_s_state; + PyObject *__pyx_n_s_str_to_date; + PyObject *__pyx_n_s_str_to_datetime; + PyObject *__pyx_n_s_str_to_time; + PyObject *__pyx_kp_s_stringsource; + PyObject *__pyx_n_s_test; + PyObject *__pyx_n_s_time; + PyObject *__pyx_n_s_time_cls; + PyObject *__pyx_n_s_to_decimal_processor_factory; + PyObject *__pyx_n_s_to_decimal_processor_factory___r; + PyObject *__pyx_n_s_to_decimal_processor_factory___s; + PyObject *__pyx_n_s_to_float; + PyObject *__pyx_n_s_to_str; + PyObject *__pyx_n_s_type; + PyObject *__pyx_n_u_type; + PyObject *__pyx_n_s_typing; + PyObject *__pyx_n_s_update; + PyObject *__pyx_n_s_use_setstate; + PyObject *__pyx_n_s_value; + PyObject *__pyx_int_61481721; + PyObject *__pyx_int_190945570; + PyObject *__pyx_int_248337392; + PyObject *__pyx_tuple__2; + PyObject *__pyx_tuple__5; + PyObject *__pyx_tuple__12; + PyObject *__pyx_tuple__13; + PyObject *__pyx_tuple__15; + PyObject *__pyx_tuple__17; + PyObject *__pyx_codeobj__4; + PyObject *__pyx_codeobj__6; + PyObject *__pyx_codeobj__7; + PyObject *__pyx_codeobj__8; + PyObject *__pyx_codeobj__9; + PyObject *__pyx_codeobj__10; + PyObject *__pyx_codeobj__11; + PyObject *__pyx_codeobj__14; + PyObject *__pyx_codeobj__16; + PyObject *__pyx_codeobj__18; +} __pyx_mstate; + +#if CYTHON_USE_MODULE_STATE +#ifdef __cplusplus +namespace { + extern struct PyModuleDef __pyx_moduledef; +} /* anonymous namespace */ +#else +static struct PyModuleDef __pyx_moduledef; +#endif + +#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) + +#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) + +#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) +#else +static __pyx_mstate __pyx_mstate_global_static = +#ifdef __cplusplus + {}; +#else + {0}; +#endif +static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; +#endif +/* #### Code section: module_state_clear ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_clear(PyObject *m) { + __pyx_mstate *clear_module_state = __pyx_mstate(m); + if (!clear_module_state) return 0; + Py_CLEAR(clear_module_state->__pyx_d); + Py_CLEAR(clear_module_state->__pyx_b); + Py_CLEAR(clear_module_state->__pyx_cython_runtime); + Py_CLEAR(clear_module_state->__pyx_empty_tuple); + 
Py_CLEAR(clear_module_state->__pyx_empty_bytes); + Py_CLEAR(clear_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_CLEAR(clear_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); + #endif + Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); + Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); + Py_CLEAR(clear_module_state->__pyx_kp_u_); + Py_CLEAR(clear_module_state->__pyx_n_s_Any); + Py_CLEAR(clear_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); + Py_CLEAR(clear_module_state->__pyx_n_s_Optional); + Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_bool); + Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_date_cls); + Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_datetime_cls); + Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_float); + Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_str); + Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_time_cls); + Py_CLEAR(clear_module_state->__pyx_n_s_PickleError); + Py_CLEAR(clear_module_state->__pyx_n_s__19); + Py_CLEAR(clear_module_state->__pyx_kp_u__3); + Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); + Py_CLEAR(clear_module_state->__pyx_n_s_bool); + Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); + Py_CLEAR(clear_module_state->__pyx_n_s_date); + Py_CLEAR(clear_module_state->__pyx_n_s_date_cls); + Py_CLEAR(clear_module_state->__pyx_n_s_datetime); + Py_CLEAR(clear_module_state->__pyx_n_s_datetime_cls); + Py_CLEAR(clear_module_state->__pyx_n_s_dict); + Py_CLEAR(clear_module_state->__pyx_n_s_dict_2); + Py_CLEAR(clear_module_state->__pyx_kp_u_disable); + Py_CLEAR(clear_module_state->__pyx_kp_u_enable); + Py_CLEAR(clear_module_state->__pyx_n_u_f); + Py_CLEAR(clear_module_state->__pyx_n_u_format); + Py_CLEAR(clear_module_state->__pyx_n_s_fromisoformat); + Py_CLEAR(clear_module_state->__pyx_kp_u_gc); + Py_CLEAR(clear_module_state->__pyx_n_s_getstate); + Py_CLEAR(clear_module_state->__pyx_n_s_import); + Py_CLEAR(clear_module_state->__pyx_n_s_int_to_boolean); + Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); + Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); + Py_CLEAR(clear_module_state->__pyx_kp_u_isenabled); + Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_engine__processor); + Py_CLEAR(clear_module_state->__pyx_n_s_main); + Py_CLEAR(clear_module_state->__pyx_n_s_name); + Py_CLEAR(clear_module_state->__pyx_n_s_new); + Py_CLEAR(clear_module_state->__pyx_n_s_pickle); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_PickleError); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_checksum); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_result); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_state); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_type); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_unpickle_to_decimal_proces); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce_ex); + Py_CLEAR(clear_module_state->__pyx_n_s_return); + Py_CLEAR(clear_module_state->__pyx_n_s_scale); + Py_CLEAR(clear_module_state->__pyx_n_s_self); + Py_CLEAR(clear_module_state->__pyx_n_s_setstate); + Py_CLEAR(clear_module_state->__pyx_n_s_setstate_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_slots); + Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_engine__processors_cy); + 
Py_CLEAR(clear_module_state->__pyx_n_s_state); + Py_CLEAR(clear_module_state->__pyx_n_s_str_to_date); + Py_CLEAR(clear_module_state->__pyx_n_s_str_to_datetime); + Py_CLEAR(clear_module_state->__pyx_n_s_str_to_time); + Py_CLEAR(clear_module_state->__pyx_kp_s_stringsource); + Py_CLEAR(clear_module_state->__pyx_n_s_test); + Py_CLEAR(clear_module_state->__pyx_n_s_time); + Py_CLEAR(clear_module_state->__pyx_n_s_time_cls); + Py_CLEAR(clear_module_state->__pyx_n_s_to_decimal_processor_factory); + Py_CLEAR(clear_module_state->__pyx_n_s_to_decimal_processor_factory___r); + Py_CLEAR(clear_module_state->__pyx_n_s_to_decimal_processor_factory___s); + Py_CLEAR(clear_module_state->__pyx_n_s_to_float); + Py_CLEAR(clear_module_state->__pyx_n_s_to_str); + Py_CLEAR(clear_module_state->__pyx_n_s_type); + Py_CLEAR(clear_module_state->__pyx_n_u_type); + Py_CLEAR(clear_module_state->__pyx_n_s_typing); + Py_CLEAR(clear_module_state->__pyx_n_s_update); + Py_CLEAR(clear_module_state->__pyx_n_s_use_setstate); + Py_CLEAR(clear_module_state->__pyx_n_s_value); + Py_CLEAR(clear_module_state->__pyx_int_61481721); + Py_CLEAR(clear_module_state->__pyx_int_190945570); + Py_CLEAR(clear_module_state->__pyx_int_248337392); + Py_CLEAR(clear_module_state->__pyx_tuple__2); + Py_CLEAR(clear_module_state->__pyx_tuple__5); + Py_CLEAR(clear_module_state->__pyx_tuple__12); + Py_CLEAR(clear_module_state->__pyx_tuple__13); + Py_CLEAR(clear_module_state->__pyx_tuple__15); + Py_CLEAR(clear_module_state->__pyx_tuple__17); + Py_CLEAR(clear_module_state->__pyx_codeobj__4); + Py_CLEAR(clear_module_state->__pyx_codeobj__6); + Py_CLEAR(clear_module_state->__pyx_codeobj__7); + Py_CLEAR(clear_module_state->__pyx_codeobj__8); + Py_CLEAR(clear_module_state->__pyx_codeobj__9); + Py_CLEAR(clear_module_state->__pyx_codeobj__10); + Py_CLEAR(clear_module_state->__pyx_codeobj__11); + Py_CLEAR(clear_module_state->__pyx_codeobj__14); + Py_CLEAR(clear_module_state->__pyx_codeobj__16); + Py_CLEAR(clear_module_state->__pyx_codeobj__18); + return 0; +} +#endif +/* #### Code section: module_state_traverse ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { + __pyx_mstate *traverse_module_state = __pyx_mstate(m); + if (!traverse_module_state) return 0; + Py_VISIT(traverse_module_state->__pyx_d); + Py_VISIT(traverse_module_state->__pyx_b); + Py_VISIT(traverse_module_state->__pyx_cython_runtime); + Py_VISIT(traverse_module_state->__pyx_empty_tuple); + Py_VISIT(traverse_module_state->__pyx_empty_bytes); + Py_VISIT(traverse_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_VISIT(traverse_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); + #endif + Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); + Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); + Py_VISIT(traverse_module_state->__pyx_kp_u_); + Py_VISIT(traverse_module_state->__pyx_n_s_Any); + Py_VISIT(traverse_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); + Py_VISIT(traverse_module_state->__pyx_n_s_Optional); + Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_bool); + Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_date_cls); + Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_datetime_cls); + Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_float); + 
Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_str); + Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_time_cls); + Py_VISIT(traverse_module_state->__pyx_n_s_PickleError); + Py_VISIT(traverse_module_state->__pyx_n_s__19); + Py_VISIT(traverse_module_state->__pyx_kp_u__3); + Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); + Py_VISIT(traverse_module_state->__pyx_n_s_bool); + Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); + Py_VISIT(traverse_module_state->__pyx_n_s_date); + Py_VISIT(traverse_module_state->__pyx_n_s_date_cls); + Py_VISIT(traverse_module_state->__pyx_n_s_datetime); + Py_VISIT(traverse_module_state->__pyx_n_s_datetime_cls); + Py_VISIT(traverse_module_state->__pyx_n_s_dict); + Py_VISIT(traverse_module_state->__pyx_n_s_dict_2); + Py_VISIT(traverse_module_state->__pyx_kp_u_disable); + Py_VISIT(traverse_module_state->__pyx_kp_u_enable); + Py_VISIT(traverse_module_state->__pyx_n_u_f); + Py_VISIT(traverse_module_state->__pyx_n_u_format); + Py_VISIT(traverse_module_state->__pyx_n_s_fromisoformat); + Py_VISIT(traverse_module_state->__pyx_kp_u_gc); + Py_VISIT(traverse_module_state->__pyx_n_s_getstate); + Py_VISIT(traverse_module_state->__pyx_n_s_import); + Py_VISIT(traverse_module_state->__pyx_n_s_int_to_boolean); + Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); + Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); + Py_VISIT(traverse_module_state->__pyx_kp_u_isenabled); + Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_engine__processor); + Py_VISIT(traverse_module_state->__pyx_n_s_main); + Py_VISIT(traverse_module_state->__pyx_n_s_name); + Py_VISIT(traverse_module_state->__pyx_n_s_new); + Py_VISIT(traverse_module_state->__pyx_n_s_pickle); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_PickleError); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_checksum); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_result); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_state); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_type); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_unpickle_to_decimal_proces); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce_ex); + Py_VISIT(traverse_module_state->__pyx_n_s_return); + Py_VISIT(traverse_module_state->__pyx_n_s_scale); + Py_VISIT(traverse_module_state->__pyx_n_s_self); + Py_VISIT(traverse_module_state->__pyx_n_s_setstate); + Py_VISIT(traverse_module_state->__pyx_n_s_setstate_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_slots); + Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_engine__processors_cy); + Py_VISIT(traverse_module_state->__pyx_n_s_state); + Py_VISIT(traverse_module_state->__pyx_n_s_str_to_date); + Py_VISIT(traverse_module_state->__pyx_n_s_str_to_datetime); + Py_VISIT(traverse_module_state->__pyx_n_s_str_to_time); + Py_VISIT(traverse_module_state->__pyx_kp_s_stringsource); + Py_VISIT(traverse_module_state->__pyx_n_s_test); + Py_VISIT(traverse_module_state->__pyx_n_s_time); + Py_VISIT(traverse_module_state->__pyx_n_s_time_cls); + Py_VISIT(traverse_module_state->__pyx_n_s_to_decimal_processor_factory); + Py_VISIT(traverse_module_state->__pyx_n_s_to_decimal_processor_factory___r); + Py_VISIT(traverse_module_state->__pyx_n_s_to_decimal_processor_factory___s); + Py_VISIT(traverse_module_state->__pyx_n_s_to_float); + Py_VISIT(traverse_module_state->__pyx_n_s_to_str); + Py_VISIT(traverse_module_state->__pyx_n_s_type); + 
Py_VISIT(traverse_module_state->__pyx_n_u_type); + Py_VISIT(traverse_module_state->__pyx_n_s_typing); + Py_VISIT(traverse_module_state->__pyx_n_s_update); + Py_VISIT(traverse_module_state->__pyx_n_s_use_setstate); + Py_VISIT(traverse_module_state->__pyx_n_s_value); + Py_VISIT(traverse_module_state->__pyx_int_61481721); + Py_VISIT(traverse_module_state->__pyx_int_190945570); + Py_VISIT(traverse_module_state->__pyx_int_248337392); + Py_VISIT(traverse_module_state->__pyx_tuple__2); + Py_VISIT(traverse_module_state->__pyx_tuple__5); + Py_VISIT(traverse_module_state->__pyx_tuple__12); + Py_VISIT(traverse_module_state->__pyx_tuple__13); + Py_VISIT(traverse_module_state->__pyx_tuple__15); + Py_VISIT(traverse_module_state->__pyx_tuple__17); + Py_VISIT(traverse_module_state->__pyx_codeobj__4); + Py_VISIT(traverse_module_state->__pyx_codeobj__6); + Py_VISIT(traverse_module_state->__pyx_codeobj__7); + Py_VISIT(traverse_module_state->__pyx_codeobj__8); + Py_VISIT(traverse_module_state->__pyx_codeobj__9); + Py_VISIT(traverse_module_state->__pyx_codeobj__10); + Py_VISIT(traverse_module_state->__pyx_codeobj__11); + Py_VISIT(traverse_module_state->__pyx_codeobj__14); + Py_VISIT(traverse_module_state->__pyx_codeobj__16); + Py_VISIT(traverse_module_state->__pyx_codeobj__18); + return 0; +} +#endif +/* #### Code section: module_state_defines ### */ +#define __pyx_d __pyx_mstate_global->__pyx_d +#define __pyx_b __pyx_mstate_global->__pyx_b +#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime +#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple +#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes +#define __pyx_empty_unicode __pyx_mstate_global->__pyx_empty_unicode +#ifdef __Pyx_CyFunction_USED +#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType +#endif +#ifdef __Pyx_FusedFunction_USED +#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType +#endif +#ifdef __Pyx_Generator_USED +#define __pyx_GeneratorType __pyx_mstate_global->__pyx_GeneratorType +#endif +#ifdef __Pyx_IterableCoroutine_USED +#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#define __pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory __pyx_mstate_global->__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory +#endif +#define __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory __pyx_mstate_global->__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory +#define __pyx_kp_u_ __pyx_mstate_global->__pyx_kp_u_ +#define __pyx_n_s_Any __pyx_mstate_global->__pyx_n_s_Any +#define __pyx_kp_s_Incompatible_checksums_0x_x_vs_0 __pyx_mstate_global->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0 +#define __pyx_n_s_Optional __pyx_mstate_global->__pyx_n_s_Optional +#define __pyx_kp_s_Optional_bool __pyx_mstate_global->__pyx_kp_s_Optional_bool +#define __pyx_kp_s_Optional_date_cls __pyx_mstate_global->__pyx_kp_s_Optional_date_cls +#define __pyx_kp_s_Optional_datetime_cls __pyx_mstate_global->__pyx_kp_s_Optional_datetime_cls +#define __pyx_kp_s_Optional_float __pyx_mstate_global->__pyx_kp_s_Optional_float +#define __pyx_kp_s_Optional_str 
__pyx_mstate_global->__pyx_kp_s_Optional_str +#define __pyx_kp_s_Optional_time_cls __pyx_mstate_global->__pyx_kp_s_Optional_time_cls +#define __pyx_n_s_PickleError __pyx_mstate_global->__pyx_n_s_PickleError +#define __pyx_n_s__19 __pyx_mstate_global->__pyx_n_s__19 +#define __pyx_kp_u__3 __pyx_mstate_global->__pyx_kp_u__3 +#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines +#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool +#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback +#define __pyx_n_s_date __pyx_mstate_global->__pyx_n_s_date +#define __pyx_n_s_date_cls __pyx_mstate_global->__pyx_n_s_date_cls +#define __pyx_n_s_datetime __pyx_mstate_global->__pyx_n_s_datetime +#define __pyx_n_s_datetime_cls __pyx_mstate_global->__pyx_n_s_datetime_cls +#define __pyx_n_s_dict __pyx_mstate_global->__pyx_n_s_dict +#define __pyx_n_s_dict_2 __pyx_mstate_global->__pyx_n_s_dict_2 +#define __pyx_kp_u_disable __pyx_mstate_global->__pyx_kp_u_disable +#define __pyx_kp_u_enable __pyx_mstate_global->__pyx_kp_u_enable +#define __pyx_n_u_f __pyx_mstate_global->__pyx_n_u_f +#define __pyx_n_u_format __pyx_mstate_global->__pyx_n_u_format +#define __pyx_n_s_fromisoformat __pyx_mstate_global->__pyx_n_s_fromisoformat +#define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc +#define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate +#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import +#define __pyx_n_s_int_to_boolean __pyx_mstate_global->__pyx_n_s_int_to_boolean +#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled +#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine +#define __pyx_kp_u_isenabled __pyx_mstate_global->__pyx_kp_u_isenabled +#define __pyx_kp_s_lib_sqlalchemy_engine__processor __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_engine__processor +#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main +#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name +#define __pyx_n_s_new __pyx_mstate_global->__pyx_n_s_new +#define __pyx_n_s_pickle __pyx_mstate_global->__pyx_n_s_pickle +#define __pyx_n_s_pyx_PickleError __pyx_mstate_global->__pyx_n_s_pyx_PickleError +#define __pyx_n_s_pyx_checksum __pyx_mstate_global->__pyx_n_s_pyx_checksum +#define __pyx_n_s_pyx_result __pyx_mstate_global->__pyx_n_s_pyx_result +#define __pyx_n_s_pyx_state __pyx_mstate_global->__pyx_n_s_pyx_state +#define __pyx_n_s_pyx_type __pyx_mstate_global->__pyx_n_s_pyx_type +#define __pyx_n_s_pyx_unpickle_to_decimal_proces __pyx_mstate_global->__pyx_n_s_pyx_unpickle_to_decimal_proces +#define __pyx_n_s_reduce __pyx_mstate_global->__pyx_n_s_reduce +#define __pyx_n_s_reduce_cython __pyx_mstate_global->__pyx_n_s_reduce_cython +#define __pyx_n_s_reduce_ex __pyx_mstate_global->__pyx_n_s_reduce_ex +#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return +#define __pyx_n_s_scale __pyx_mstate_global->__pyx_n_s_scale +#define __pyx_n_s_self __pyx_mstate_global->__pyx_n_s_self +#define __pyx_n_s_setstate __pyx_mstate_global->__pyx_n_s_setstate +#define __pyx_n_s_setstate_cython __pyx_mstate_global->__pyx_n_s_setstate_cython +#define __pyx_n_s_slots __pyx_mstate_global->__pyx_n_s_slots +#define __pyx_n_s_sqlalchemy_engine__processors_cy __pyx_mstate_global->__pyx_n_s_sqlalchemy_engine__processors_cy +#define __pyx_n_s_state __pyx_mstate_global->__pyx_n_s_state +#define __pyx_n_s_str_to_date __pyx_mstate_global->__pyx_n_s_str_to_date +#define __pyx_n_s_str_to_datetime 
__pyx_mstate_global->__pyx_n_s_str_to_datetime +#define __pyx_n_s_str_to_time __pyx_mstate_global->__pyx_n_s_str_to_time +#define __pyx_kp_s_stringsource __pyx_mstate_global->__pyx_kp_s_stringsource +#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test +#define __pyx_n_s_time __pyx_mstate_global->__pyx_n_s_time +#define __pyx_n_s_time_cls __pyx_mstate_global->__pyx_n_s_time_cls +#define __pyx_n_s_to_decimal_processor_factory __pyx_mstate_global->__pyx_n_s_to_decimal_processor_factory +#define __pyx_n_s_to_decimal_processor_factory___r __pyx_mstate_global->__pyx_n_s_to_decimal_processor_factory___r +#define __pyx_n_s_to_decimal_processor_factory___s __pyx_mstate_global->__pyx_n_s_to_decimal_processor_factory___s +#define __pyx_n_s_to_float __pyx_mstate_global->__pyx_n_s_to_float +#define __pyx_n_s_to_str __pyx_mstate_global->__pyx_n_s_to_str +#define __pyx_n_s_type __pyx_mstate_global->__pyx_n_s_type +#define __pyx_n_u_type __pyx_mstate_global->__pyx_n_u_type +#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing +#define __pyx_n_s_update __pyx_mstate_global->__pyx_n_s_update +#define __pyx_n_s_use_setstate __pyx_mstate_global->__pyx_n_s_use_setstate +#define __pyx_n_s_value __pyx_mstate_global->__pyx_n_s_value +#define __pyx_int_61481721 __pyx_mstate_global->__pyx_int_61481721 +#define __pyx_int_190945570 __pyx_mstate_global->__pyx_int_190945570 +#define __pyx_int_248337392 __pyx_mstate_global->__pyx_int_248337392 +#define __pyx_tuple__2 __pyx_mstate_global->__pyx_tuple__2 +#define __pyx_tuple__5 __pyx_mstate_global->__pyx_tuple__5 +#define __pyx_tuple__12 __pyx_mstate_global->__pyx_tuple__12 +#define __pyx_tuple__13 __pyx_mstate_global->__pyx_tuple__13 +#define __pyx_tuple__15 __pyx_mstate_global->__pyx_tuple__15 +#define __pyx_tuple__17 __pyx_mstate_global->__pyx_tuple__17 +#define __pyx_codeobj__4 __pyx_mstate_global->__pyx_codeobj__4 +#define __pyx_codeobj__6 __pyx_mstate_global->__pyx_codeobj__6 +#define __pyx_codeobj__7 __pyx_mstate_global->__pyx_codeobj__7 +#define __pyx_codeobj__8 __pyx_mstate_global->__pyx_codeobj__8 +#define __pyx_codeobj__9 __pyx_mstate_global->__pyx_codeobj__9 +#define __pyx_codeobj__10 __pyx_mstate_global->__pyx_codeobj__10 +#define __pyx_codeobj__11 __pyx_mstate_global->__pyx_codeobj__11 +#define __pyx_codeobj__14 __pyx_mstate_global->__pyx_codeobj__14 +#define __pyx_codeobj__16 __pyx_mstate_global->__pyx_codeobj__16 +#define __pyx_codeobj__18 __pyx_mstate_global->__pyx_codeobj__18 +/* #### Code section: module_code ### */ + +/* "sqlalchemy/engine/_processors_cy.py":27 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +PyDoc_STRVAR(__pyx_doc_10sqlalchemy_6engine_14_processors_cy__is_compiled, "Utility function to indicate if this module is compiled or not."); +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_6engine_14_processors_cy__is_compiled}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + 
__Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy__is_compiled(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled", 1); + + /* "sqlalchemy/engine/_processors_cy.py":29 + * def _is_compiled() -> bool: + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":27 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_processors_cy.py":35 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def int_to_boolean(value: Any) -> Optional[bool]: + * if value is None: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_3int_to_boolean(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_3int_to_boolean = {"int_to_boolean", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_3int_to_boolean, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_3int_to_boolean(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_value = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("int_to_boolean (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 35, __pyx_L3_error) + else goto 
__pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "int_to_boolean") < 0)) __PYX_ERR(0, 35, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_value = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("int_to_boolean", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 35, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.int_to_boolean", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_2int_to_boolean(__pyx_self, __pyx_v_value); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_2int_to_boolean(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("int_to_boolean", 1); + + /* "sqlalchemy/engine/_processors_cy.py":37 + * @cython.annotation_typing(False) + * def int_to_boolean(value: Any) -> Optional[bool]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return True if value else False + */ + __pyx_t_1 = (__pyx_v_value == Py_None); + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_processors_cy.py":38 + * def int_to_boolean(value: Any) -> Optional[bool]: + * if value is None: + * return None # <<<<<<<<<<<<<< + * return True if value else False + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":37 + * @cython.annotation_typing(False) + * def int_to_boolean(value: Any) -> Optional[bool]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return True if value else False + */ + } + + /* "sqlalchemy/engine/_processors_cy.py":39 + * if value is None: + * return None + * return True if value else False # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(0, 39, __pyx_L1_error) + if (__pyx_t_1) { + __Pyx_INCREF(Py_True); + __pyx_t_2 = Py_True; + } else { + __Pyx_INCREF(Py_False); + __pyx_t_2 = Py_False; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":35 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def int_to_boolean(value: Any) -> Optional[bool]: + * if value is None: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.int_to_boolean", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + 
__pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_processors_cy.py":42 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def to_str(value: Any) -> Optional[str]: + * if value is None: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_5to_str(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_5to_str = {"to_str", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_5to_str, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_5to_str(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_value = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("to_str (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 42, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "to_str") < 0)) __PYX_ERR(0, 42, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_value = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("to_str", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 42, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_str", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_4to_str(__pyx_self, __pyx_v_value); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + 
for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_4to_str(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("to_str", 1); + + /* "sqlalchemy/engine/_processors_cy.py":44 + * @cython.annotation_typing(False) + * def to_str(value: Any) -> Optional[str]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return str(value) + */ + __pyx_t_1 = (__pyx_v_value == Py_None); + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_processors_cy.py":45 + * def to_str(value: Any) -> Optional[str]: + * if value is None: + * return None # <<<<<<<<<<<<<< + * return str(value) + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":44 + * @cython.annotation_typing(False) + * def to_str(value: Any) -> Optional[str]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return str(value) + */ + } + + /* "sqlalchemy/engine/_processors_cy.py":46 + * if value is None: + * return None + * return str(value) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_Unicode(__pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 46, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":42 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def to_str(value: Any) -> Optional[str]: + * if value is None: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_str", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_processors_cy.py":49 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def to_float(value: Any) -> Optional[float]: + * if value is None: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_7to_float(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_7to_float = {"to_float", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_7to_float, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_7to_float(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_value = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("to_float (wrapper)", 0); + #if 
!CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 49, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "to_float") < 0)) __PYX_ERR(0, 49, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_value = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("to_float", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 49, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_float", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_6to_float(__pyx_self, __pyx_v_value); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_6to_float(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("to_float", 1); + + /* "sqlalchemy/engine/_processors_cy.py":51 + * @cython.annotation_typing(False) + * def to_float(value: Any) -> Optional[float]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return float(value) + */ + __pyx_t_1 = (__pyx_v_value == Py_None); + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_processors_cy.py":52 + * def to_float(value: Any) -> Optional[float]: + * if value is None: + * return None # <<<<<<<<<<<<<< + * return float(value) + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":51 + * @cython.annotation_typing(False) + * def to_float(value: Any) -> Optional[float]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return float(value) + */ + } + + /* "sqlalchemy/engine/_processors_cy.py":53 + * if value is None: + * return None + * 
return float(value) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyNumber_Float(__pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":49 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def to_float(value: Any) -> Optional[float]: + * if value is None: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_float", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_processors_cy.py":56 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: + * if value is None: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_9str_to_datetime(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_9str_to_datetime = {"str_to_datetime", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_9str_to_datetime, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_9str_to_datetime(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_value = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("str_to_datetime (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 56, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "str_to_datetime") < 0)) __PYX_ERR(0, 56, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_value = values[0]; + } + 
goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("str_to_datetime", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 56, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_datetime", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_8str_to_datetime(__pyx_self, __pyx_v_value); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_8str_to_datetime(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("str_to_datetime", 1); + + /* "sqlalchemy/engine/_processors_cy.py":58 + * @cython.annotation_typing(False) + * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return datetime_cls.fromisoformat(value) + */ + __pyx_t_1 = (__pyx_v_value == Py_None); + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_processors_cy.py":59 + * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: + * if value is None: + * return None # <<<<<<<<<<<<<< + * return datetime_cls.fromisoformat(value) + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":58 + * @cython.annotation_typing(False) + * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return datetime_cls.fromisoformat(value) + */ + } + + /* "sqlalchemy/engine/_processors_cy.py":60 + * if value is None: + * return None + * return datetime_cls.fromisoformat(value) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_datetime_cls); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 60, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_fromisoformat); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 60, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_value}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 60, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":56 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: + * if value is None: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_datetime", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_processors_cy.py":63 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_time(value: Optional[str]) -> Optional[time_cls]: + * if value is None: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_11str_to_time(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_11str_to_time = {"str_to_time", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_11str_to_time, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_11str_to_time(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_value = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("str_to_time (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 63, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "str_to_time") < 0)) __PYX_ERR(0, 63, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_value = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + 
__Pyx_RaiseArgtupleInvalid("str_to_time", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 63, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_time", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_10str_to_time(__pyx_self, __pyx_v_value); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_10str_to_time(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("str_to_time", 1); + + /* "sqlalchemy/engine/_processors_cy.py":65 + * @cython.annotation_typing(False) + * def str_to_time(value: Optional[str]) -> Optional[time_cls]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return time_cls.fromisoformat(value) + */ + __pyx_t_1 = (__pyx_v_value == Py_None); + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_processors_cy.py":66 + * def str_to_time(value: Optional[str]) -> Optional[time_cls]: + * if value is None: + * return None # <<<<<<<<<<<<<< + * return time_cls.fromisoformat(value) + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":65 + * @cython.annotation_typing(False) + * def str_to_time(value: Optional[str]) -> Optional[time_cls]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return time_cls.fromisoformat(value) + */ + } + + /* "sqlalchemy/engine/_processors_cy.py":67 + * if value is None: + * return None + * return time_cls.fromisoformat(value) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_time_cls); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_fromisoformat); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_value}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto 
__pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":63 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_time(value: Optional[str]) -> Optional[time_cls]: + * if value is None: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_time", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_processors_cy.py":70 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_date(value: Optional[str]) -> Optional[date_cls]: + * if value is None: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_13str_to_date(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_13str_to_date = {"str_to_date", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_13str_to_date, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_13str_to_date(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_value = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("str_to_date (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 70, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "str_to_date") < 0)) __PYX_ERR(0, 70, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_value = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("str_to_date", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 70, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + 
__pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_date", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_12str_to_date(__pyx_self, __pyx_v_value); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_12str_to_date(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("str_to_date", 1); + + /* "sqlalchemy/engine/_processors_cy.py":72 + * @cython.annotation_typing(False) + * def str_to_date(value: Optional[str]) -> Optional[date_cls]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return date_cls.fromisoformat(value) + */ + __pyx_t_1 = (__pyx_v_value == Py_None); + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_processors_cy.py":73 + * def str_to_date(value: Optional[str]) -> Optional[date_cls]: + * if value is None: + * return None # <<<<<<<<<<<<<< + * return date_cls.fromisoformat(value) + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":72 + * @cython.annotation_typing(False) + * def str_to_date(value: Optional[str]) -> Optional[date_cls]: + * if value is None: # <<<<<<<<<<<<<< + * return None + * return date_cls.fromisoformat(value) + */ + } + + /* "sqlalchemy/engine/_processors_cy.py":74 + * if value is None: + * return None + * return date_cls.fromisoformat(value) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_date_cls); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_fromisoformat); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_value}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":70 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_date(value: Optional[str]) -> 
Optional[date_cls]: + * if value is None: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_date", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_processors_cy.py":84 + * __slots__ = ("type_", "format_") + * + * def __init__(self, type_: type, scale: int): # <<<<<<<<<<<<<< + * self.type_ = type_ + * self.format_ = f"%.{scale}f" + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyTypeObject *__pyx_v_type_ = 0; + PyObject *__pyx_v_scale = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[2] = {0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return -1; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_type,&__pyx_n_s_scale,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 2: values[1] = __Pyx_Arg_VARARGS(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_VARARGS(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_type)) != 0)) { + (void)__Pyx_Arg_NewRef_VARARGS(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 84, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_scale)) != 0)) { + (void)__Pyx_Arg_NewRef_VARARGS(values[1]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 84, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 2, 2, 1); __PYX_ERR(0, 84, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 84, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 2)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); + values[1] = __Pyx_Arg_VARARGS(__pyx_args, 1); + } + __pyx_v_type_ = ((PyTypeObject*)values[0]); + __pyx_v_scale = ((PyObject*)values[1]); + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 2, 2, __pyx_nargs); __PYX_ERR(0, 84, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < 
(Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_type_), (&PyType_Type), 0, "type_", 1))) __PYX_ERR(0, 84, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_scale), (&PyInt_Type), 0, "scale", 1))) __PYX_ERR(0, 84, __pyx_L1_error) + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory___init__(((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)__pyx_v_self), __pyx_v_type_, __pyx_v_scale); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory___init__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyTypeObject *__pyx_v_type_, PyObject *__pyx_v_scale) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + Py_UCS4 __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__init__", 1); + + /* "sqlalchemy/engine/_processors_cy.py":85 + * + * def __init__(self, type_: type, scale: int): + * self.type_ = type_ # <<<<<<<<<<<<<< + * self.format_ = f"%.{scale}f" + * + */ + __Pyx_INCREF((PyObject *)__pyx_v_type_); + __Pyx_GIVEREF((PyObject *)__pyx_v_type_); + __Pyx_GOTREF((PyObject *)__pyx_v_self->type_); + __Pyx_DECREF((PyObject *)__pyx_v_self->type_); + __pyx_v_self->type_ = __pyx_v_type_; + + /* "sqlalchemy/engine/_processors_cy.py":86 + * def __init__(self, type_: type, scale: int): + * self.type_ = type_ + * self.format_ = f"%.{scale}f" # <<<<<<<<<<<<<< + * + * def __call__(self, value: Optional[Any]) -> object: + */ + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = 0; + __pyx_t_3 = 127; + __Pyx_INCREF(__pyx_kp_u_); + __pyx_t_2 += 2; + __Pyx_GIVEREF(__pyx_kp_u_); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_kp_u_); + __pyx_t_4 = __Pyx_PyObject_FormatSimple(__pyx_v_scale, __pyx_empty_unicode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; + __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_4); + __pyx_t_4 = 0; + __Pyx_INCREF(__pyx_n_u_f); + __pyx_t_2 += 1; + __Pyx_GIVEREF(__pyx_n_u_f); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_n_u_f); + __pyx_t_4 = __Pyx_PyUnicode_Join(__pyx_t_1, 3, __pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GIVEREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_v_self->format_); + __Pyx_DECREF(__pyx_v_self->format_); + __pyx_v_self->format_ = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":84 + * __slots__ = ("type_", "format_") + * + * def __init__(self, type_: type, scale: int): # <<<<<<<<<<<<<< + * self.type_ = type_ + * self.format_ = f"%.{scale}f" + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_processors_cy.py":88 + * self.format_ = f"%.{scale}f" + * + * def __call__(self, value: Optional[Any]) -> object: # <<<<<<<<<<<<<< + * if value is None: + * return None + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_value = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__call__ (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_VARARGS(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { + (void)__Pyx_Arg_NewRef_VARARGS(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 88, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__call__") < 0)) __PYX_ERR(0, 88, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); + } + __pyx_v_value = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; 
+ __Pyx_RaiseArgtupleInvalid("__call__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 88, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_2__call__(((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)__pyx_v_self), __pyx_v_value); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_2__call__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__call__", 1); + + /* "sqlalchemy/engine/_processors_cy.py":89 + * + * def __call__(self, value: Optional[Any]) -> object: + * if value is None: # <<<<<<<<<<<<<< + * return None + * else: + */ + __pyx_t_1 = (__pyx_v_value == Py_None); + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_processors_cy.py":90 + * def __call__(self, value: Optional[Any]) -> object: + * if value is None: + * return None # <<<<<<<<<<<<<< + * else: + * return self.type_(self.format_ % value) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "sqlalchemy/engine/_processors_cy.py":89 + * + * def __call__(self, value: Optional[Any]) -> object: + * if value is None: # <<<<<<<<<<<<<< + * return None + * else: + */ + } + + /* "sqlalchemy/engine/_processors_cy.py":92 + * return None + * else: + * return self.type_(self.format_ % value) # <<<<<<<<<<<<<< + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyUnicode_FormatSafe(__pyx_v_self->format_, __pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 92, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_v_self->type_), __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 92, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + } + + /* "sqlalchemy/engine/_processors_cy.py":88 + * self.format_ = f"%.{scale}f" + * + * def __call__(self, value: Optional[Any]) -> object: # <<<<<<<<<<<<<< + * if value is None: + * return None + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * 
cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__ = {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_4__reduce_cython__(((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_4__reduce_cython__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 1); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self.format_, self.type_) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_self->format_); + __Pyx_GIVEREF(__pyx_v_self->format_); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->format_)) __PYX_ERR(1, 5, __pyx_L1_error); + __Pyx_INCREF((PyObject *)__pyx_v_self->type_); + __Pyx_GIVEREF((PyObject *)__pyx_v_self->type_); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_self->type_))) __PYX_ERR(1, 5, __pyx_L1_error); + __pyx_v_state = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree 
fragment)":6 + * cdef bint use_setstate + * state = (self.format_, self.type_) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * state = (self.format_, self.type_) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + if (__pyx_t_2) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict)) __PYX_ERR(1, 8, __pyx_L1_error); + __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.format_ is not None or self.type_ is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self.format_, self.type_) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self.format_ is not None or self.type_ is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, None), state + */ + /*else*/ { + __pyx_t_4 = (__pyx_v_self->format_ != ((PyObject*)Py_None)); + if (!__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = (__pyx_v_self->type_ != ((PyTypeObject*)Py_None)); + __pyx_t_2 = __pyx_t_4; + __pyx_L4_bool_binop_done:; + __pyx_v_use_setstate = __pyx_t_2; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.format_ is not None or self.type_ is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, None), state + * else: + */ + if (__pyx_v_use_setstate) { + + /* "(tree fragment)":13 + * use_setstate = self.format_ is not None or self.type_ is not None + * if use_setstate: + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_to_decimal_proces); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, 
((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(__pyx_int_61481721); + __Pyx_GIVEREF(__pyx_int_61481721); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_61481721)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None)) __PYX_ERR(1, 13, __pyx_L1_error); + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_3); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state)) __PYX_ERR(1, 13, __pyx_L1_error); + __pyx_t_3 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self.format_ is not None or self.type_ is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, None), state + * else: + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_pyx_unpickle_to_decimal_proces); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_INCREF(__pyx_int_61481721); + __Pyx_GIVEREF(__pyx_int_61481721); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_61481721)) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state)) __PYX_ERR(1, 15, __pyx_L1_error); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_5); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_5)) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error); + __pyx_t_5 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; 
+} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v___pyx_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 16, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(1, 16, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v___pyx_state = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(1, 16, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + 
__Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_6__setstate_cython__(((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)__pyx_v_self), __pyx_v___pyx_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_6__setstate_cython__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 1); + + /* "(tree fragment)":17 + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_10sqlalchemy_6engine_14_processors_cy___pyx_unpickle_to_decimal_processor_factory__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_to_decimal_processor_factory(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_15__pyx_unpickle_to_decimal_processor_factory(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_15__pyx_unpickle_to_decimal_processor_factory = {"__pyx_unpickle_to_decimal_processor_factory", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_15__pyx_unpickle_to_decimal_processor_factory, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject 
*__pyx_pw_10sqlalchemy_6engine_14_processors_cy_15__pyx_unpickle_to_decimal_processor_factory(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[3] = {0,0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_to_decimal_processor_factory (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_type)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_checksum)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_to_decimal_processor_factory", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_to_decimal_processor_factory", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__pyx_unpickle_to_decimal_processor_factory") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 3)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + 
__Pyx_RaiseArgtupleInvalid("__pyx_unpickle_to_decimal_processor_factory", 1, 3, 3, __pyx_nargs); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.__pyx_unpickle_to_decimal_processor_factory", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_14__pyx_unpickle_to_decimal_processor_factory(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_14__pyx_unpickle_to_decimal_processor_factory(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_to_decimal_processor_factory", 1); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x3aa22f9, 0xecd53f0, 0xb619922): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__2, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0x3aa22f9, 0xecd53f0, 0xb619922): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum + * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + if (__Pyx_PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError)) __PYX_ERR(1, 5, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = 
__pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0x3aa22f9, 0xecd53f0, 0xb619922): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum # <<<<<<<<<<<<<< + * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_v___pyx_PickleError, __pyx_t_1, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x3aa22f9, 0xecd53f0, 0xb619922): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum + * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory), __pyx_n_s_new); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v___pyx_type}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_v___pyx_result = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum + * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_2 = (__pyx_v___pyx_state != Py_None); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef 
__pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_t_1 = __pyx_f_10sqlalchemy_6engine_14_processors_cy___pyx_unpickle_to_decimal_processor_factory__set_state(((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum + * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): + * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_to_decimal_processor_factory(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.__pyx_unpickle_to_decimal_processor_factory", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_10sqlalchemy_6engine_14_processors_cy___pyx_unpickle_to_decimal_processor_factory__set_state(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + unsigned int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_to_decimal_processor_factory__set_state", 1); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef 
__pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): + * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[2]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_t_1))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->format_); + __Pyx_DECREF(__pyx_v___pyx_result->format_); + __pyx_v___pyx_result->format_ = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyType_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || __Pyx_RaiseUnexpectedTypeError("type", __pyx_t_1))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF((PyObject *)__pyx_v___pyx_result->type_); + __Pyx_DECREF((PyObject *)__pyx_v___pyx_result->type_); + __pyx_v___pyx_result->type_ = ((PyTypeObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): + * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[2]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_4 = (__pyx_t_3 > 2); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_2 = __pyx_t_4; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[2]) # <<<<<<<<<<<<<< + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not 
subscriptable"); + __PYX_ERR(1, 14, __pyx_L1_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = NULL; + __pyx_t_8 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_8 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_5}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): + * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[2]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] + * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.__pyx_unpickle_to_decimal_processor_factory__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_tp_new_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *p; + PyObject *o; + #if CYTHON_COMPILING_IN_LIMITED_API + allocfunc alloc_func = (allocfunc)PyType_GetSlot(t, Py_tp_alloc); + o = alloc_func(t, 0); + #else + if (likely(!__Pyx_PyType_HasFeature(t, Py_TPFLAGS_IS_ABSTRACT))) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + #endif + p = ((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)o); + p->type_ = ((PyTypeObject*)Py_None); Py_INCREF(Py_None); + p->format_ = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory(PyObject *o) { + struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *p = (struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely((PY_VERSION_HEX >= 0x03080000 
|| __Pyx_PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE)) && __Pyx_PyObject_GetSlot(o, tp_finalize, destructor)) && !__Pyx_PyObject_GC_IsFinalized(o)) { + if (__Pyx_PyObject_GetSlot(o, tp_dealloc, destructor) == __pyx_tp_dealloc_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->type_); + Py_CLEAR(p->format_); + #if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY + (*Py_TYPE(o)->tp_free)(o); + #else + { + freefunc tp_free = (freefunc)PyType_GetSlot(Py_TYPE(o), Py_tp_free); + if (tp_free) tp_free(o); + } + #endif +} + +static int __pyx_tp_traverse_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *p = (struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)o; + if (p->type_) { + e = (*v)(((PyObject *)p->type_), a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *p = (struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)o; + tmp = ((PyObject*)p->type_); + p->type_ = ((PyTypeObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory[] = { + {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {0, 0, 0, 0} +}; +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory_slots[] = { + {Py_tp_dealloc, (void *)__pyx_tp_dealloc_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory}, + {Py_tp_call, (void *)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_3__call__}, + {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory}, + {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory}, + {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory}, + {Py_tp_init, (void *)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_1__init__}, + {Py_tp_new, (void *)__pyx_tp_new_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory}, + {0, 0}, +}; +static PyType_Spec __pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory_spec = { + "sqlalchemy.engine._processors_cy.to_decimal_processor_factory", + sizeof(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory), + 0, + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, + __pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory_slots, +}; +#else + +static PyTypeObject 
__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory = { + PyVarObject_HEAD_INIT(0, 0) + "sqlalchemy.engine._processors_cy.""to_decimal_processor_factory", /*tp_name*/ + sizeof(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + __pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_3__call__, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, /*tp_traverse*/ + __pyx_tp_clear_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + #if !CYTHON_USE_TYPE_SPECS + 0, /*tp_dictoffset*/ + #endif + __pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + #if CYTHON_USE_TP_FINALIZE + 0, /*tp_finalize*/ + #else + NULL, /*tp_finalize*/ + #endif + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if __PYX_NEED_TP_PRINT_SLOT == 1 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030C0000 + 0, /*tp_watched*/ + #endif + #if PY_VERSION_HEX >= 0x030d00A4 + 0, /*tp_versions_used*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, /*tp_pypy_flags*/ + #endif +}; +#endif + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif +/* #### Code section: pystring_table ### */ + +static int __Pyx_CreateStringTabAndInitStrings(void) { + __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, + {&__pyx_n_s_Any, __pyx_k_Any, sizeof(__pyx_k_Any), 0, 0, 1, 1}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0}, + {&__pyx_n_s_Optional, __pyx_k_Optional, sizeof(__pyx_k_Optional), 0, 0, 1, 
1}, + {&__pyx_kp_s_Optional_bool, __pyx_k_Optional_bool, sizeof(__pyx_k_Optional_bool), 0, 0, 1, 0}, + {&__pyx_kp_s_Optional_date_cls, __pyx_k_Optional_date_cls, sizeof(__pyx_k_Optional_date_cls), 0, 0, 1, 0}, + {&__pyx_kp_s_Optional_datetime_cls, __pyx_k_Optional_datetime_cls, sizeof(__pyx_k_Optional_datetime_cls), 0, 0, 1, 0}, + {&__pyx_kp_s_Optional_float, __pyx_k_Optional_float, sizeof(__pyx_k_Optional_float), 0, 0, 1, 0}, + {&__pyx_kp_s_Optional_str, __pyx_k_Optional_str, sizeof(__pyx_k_Optional_str), 0, 0, 1, 0}, + {&__pyx_kp_s_Optional_time_cls, __pyx_k_Optional_time_cls, sizeof(__pyx_k_Optional_time_cls), 0, 0, 1, 0}, + {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s__19, __pyx_k__19, sizeof(__pyx_k__19), 0, 0, 1, 1}, + {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0}, + {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, + {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_date, __pyx_k_date, sizeof(__pyx_k_date), 0, 0, 1, 1}, + {&__pyx_n_s_date_cls, __pyx_k_date_cls, sizeof(__pyx_k_date_cls), 0, 0, 1, 1}, + {&__pyx_n_s_datetime, __pyx_k_datetime, sizeof(__pyx_k_datetime), 0, 0, 1, 1}, + {&__pyx_n_s_datetime_cls, __pyx_k_datetime_cls, sizeof(__pyx_k_datetime_cls), 0, 0, 1, 1}, + {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, + {&__pyx_n_s_dict_2, __pyx_k_dict_2, sizeof(__pyx_k_dict_2), 0, 0, 1, 1}, + {&__pyx_kp_u_disable, __pyx_k_disable, sizeof(__pyx_k_disable), 0, 1, 0, 0}, + {&__pyx_kp_u_enable, __pyx_k_enable, sizeof(__pyx_k_enable), 0, 1, 0, 0}, + {&__pyx_n_u_f, __pyx_k_f, sizeof(__pyx_k_f), 0, 1, 0, 1}, + {&__pyx_n_u_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 1, 0, 1}, + {&__pyx_n_s_fromisoformat, __pyx_k_fromisoformat, sizeof(__pyx_k_fromisoformat), 0, 0, 1, 1}, + {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_int_to_boolean, __pyx_k_int_to_boolean, sizeof(__pyx_k_int_to_boolean), 0, 0, 1, 1}, + {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, + {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, + {&__pyx_kp_u_isenabled, __pyx_k_isenabled, sizeof(__pyx_k_isenabled), 0, 1, 0, 0}, + {&__pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_k_lib_sqlalchemy_engine__processor, sizeof(__pyx_k_lib_sqlalchemy_engine__processor), 0, 0, 1, 0}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_to_decimal_proces, __pyx_k_pyx_unpickle_to_decimal_proces, 
sizeof(__pyx_k_pyx_unpickle_to_decimal_proces), 0, 0, 1, 1}, + {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, + {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, + {&__pyx_n_s_scale, __pyx_k_scale, sizeof(__pyx_k_scale), 0, 0, 1, 1}, + {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, + {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_slots, __pyx_k_slots, sizeof(__pyx_k_slots), 0, 0, 1, 1}, + {&__pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_k_sqlalchemy_engine__processors_cy, sizeof(__pyx_k_sqlalchemy_engine__processors_cy), 0, 0, 1, 1}, + {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, + {&__pyx_n_s_str_to_date, __pyx_k_str_to_date, sizeof(__pyx_k_str_to_date), 0, 0, 1, 1}, + {&__pyx_n_s_str_to_datetime, __pyx_k_str_to_datetime, sizeof(__pyx_k_str_to_datetime), 0, 0, 1, 1}, + {&__pyx_n_s_str_to_time, __pyx_k_str_to_time, sizeof(__pyx_k_str_to_time), 0, 0, 1, 1}, + {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_time, __pyx_k_time, sizeof(__pyx_k_time), 0, 0, 1, 1}, + {&__pyx_n_s_time_cls, __pyx_k_time_cls, sizeof(__pyx_k_time_cls), 0, 0, 1, 1}, + {&__pyx_n_s_to_decimal_processor_factory, __pyx_k_to_decimal_processor_factory, sizeof(__pyx_k_to_decimal_processor_factory), 0, 0, 1, 1}, + {&__pyx_n_s_to_decimal_processor_factory___r, __pyx_k_to_decimal_processor_factory___r, sizeof(__pyx_k_to_decimal_processor_factory___r), 0, 0, 1, 1}, + {&__pyx_n_s_to_decimal_processor_factory___s, __pyx_k_to_decimal_processor_factory___s, sizeof(__pyx_k_to_decimal_processor_factory___s), 0, 0, 1, 1}, + {&__pyx_n_s_to_float, __pyx_k_to_float, sizeof(__pyx_k_to_float), 0, 0, 1, 1}, + {&__pyx_n_s_to_str, __pyx_k_to_str, sizeof(__pyx_k_to_str), 0, 0, 1, 1}, + {&__pyx_n_s_type, __pyx_k_type, sizeof(__pyx_k_type), 0, 0, 1, 1}, + {&__pyx_n_u_type, __pyx_k_type, sizeof(__pyx_k_type), 0, 1, 0, 1}, + {&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, + {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, + {&__pyx_n_s_use_setstate, __pyx_k_use_setstate, sizeof(__pyx_k_use_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} + }; + return __Pyx_InitStrings(__pyx_string_tab); +} +/* #### Code section: cached_builtins ### */ +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + return 0; +} +/* #### Code section: cached_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0x3aa22f9, 0xecd53f0, 0xb619922): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum + */ + __pyx_tuple__2 = PyTuple_Pack(3, __pyx_int_61481721, __pyx_int_248337392, __pyx_int_190945570); if 
(unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + + /* "sqlalchemy/engine/_processors_cy.py":27 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + __pyx_codeobj__4 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_is_compiled, 27, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__4)) __PYX_ERR(0, 27, __pyx_L1_error) + + /* "sqlalchemy/engine/_processors_cy.py":35 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def int_to_boolean(value: Any) -> Optional[bool]: + * if value is None: + */ + __pyx_tuple__5 = PyTuple_Pack(1, __pyx_n_s_value); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(0, 35, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); + __pyx_codeobj__6 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_int_to_boolean, 35, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__6)) __PYX_ERR(0, 35, __pyx_L1_error) + + /* "sqlalchemy/engine/_processors_cy.py":42 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def to_str(value: Any) -> Optional[str]: + * if value is None: + */ + __pyx_codeobj__7 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_to_str, 42, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__7)) __PYX_ERR(0, 42, __pyx_L1_error) + + /* "sqlalchemy/engine/_processors_cy.py":49 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def to_float(value: Any) -> Optional[float]: + * if value is None: + */ + __pyx_codeobj__8 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_to_float, 49, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__8)) __PYX_ERR(0, 49, __pyx_L1_error) + + /* "sqlalchemy/engine/_processors_cy.py":56 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: + * if value is None: + */ + __pyx_codeobj__9 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_str_to_datetime, 56, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__9)) __PYX_ERR(0, 56, __pyx_L1_error) + + /* "sqlalchemy/engine/_processors_cy.py":63 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_time(value: Optional[str]) -> Optional[time_cls]: + * if value is None: + */ + __pyx_codeobj__10 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, 
__pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_str_to_time, 63, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__10)) __PYX_ERR(0, 63, __pyx_L1_error) + + /* "sqlalchemy/engine/_processors_cy.py":70 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_date(value: Optional[str]) -> Optional[date_cls]: + * if value is None: + */ + __pyx_codeobj__11 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_str_to_date, 70, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__11)) __PYX_ERR(0, 70, __pyx_L1_error) + + /* "sqlalchemy/engine/_processors_cy.py":82 + * format_: str + * + * __slots__ = ("type_", "format_") # <<<<<<<<<<<<<< + * + * def __init__(self, type_: type, scale: int): + */ + __pyx_tuple__12 = PyTuple_Pack(2, __pyx_n_u_type, __pyx_n_u_format); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 82, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__12); + __Pyx_GIVEREF(__pyx_tuple__12); + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + __pyx_tuple__13 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_dict_2, __pyx_n_s_use_setstate); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__13); + __Pyx_GIVEREF(__pyx_tuple__13); + __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__13, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(1, 1, __pyx_L1_error) + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) + */ + __pyx_tuple__15 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pyx_state); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__15); + __Pyx_GIVEREF(__pyx_tuple__15); + __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(1, 16, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __pyx_unpickle_to_decimal_processor_factory(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_tuple__17 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__17); + __Pyx_GIVEREF(__pyx_tuple__17); + __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_to_decimal_proces, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + 
__pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} +/* #### Code section: init_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { + if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_61481721 = PyInt_FromLong(61481721L); if (unlikely(!__pyx_int_61481721)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_190945570 = PyInt_FromLong(190945570L); if (unlikely(!__pyx_int_190945570)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_248337392 = PyInt_FromLong(248337392L); if (unlikely(!__pyx_int_248337392)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: init_globals ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + return 0; +} +/* #### Code section: init_module ### */ + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + #if CYTHON_USE_TYPE_SPECS + __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory_spec, NULL); if (unlikely(!__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory)) __PYX_ERR(0, 78, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory_spec, __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory) < 0) __PYX_ERR(0, 78, __pyx_L1_error) + #else + __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory = &__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory; + #endif + #if !CYTHON_COMPILING_IN_LIMITED_API + #endif + #if !CYTHON_USE_TYPE_SPECS + if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory) < 0) __PYX_ERR(0, 78, __pyx_L1_error) + #endif + #if PY_MAJOR_VERSION < 3 + __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory->tp_print = 0; + #endif + #if 
!CYTHON_COMPILING_IN_LIMITED_API + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory->tp_dictoffset && __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory->tp_getattro == PyObject_GenericGetAttr)) { + __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory->tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + #endif + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_to_decimal_processor_factory, (PyObject *) __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory) < 0) __PYX_ERR(0, 78, __pyx_L1_error) + #if !CYTHON_COMPILING_IN_LIMITED_API + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory) < 0) __PYX_ERR(0, 78, __pyx_L1_error) + #endif + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec__processors_cy(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec__processors_cy}, + {0, NULL} +}; +#endif + +#ifdef __cplusplus +namespace { + struct PyModuleDef __pyx_moduledef = + #else + static struct PyModuleDef __pyx_moduledef = + #endif + { + PyModuleDef_HEAD_INIT, + "_processors_cy", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #elif CYTHON_USE_MODULE_STATE + sizeof(__pyx_mstate), /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + #if CYTHON_USE_MODULE_STATE + __pyx_m_traverse, /* m_traverse */ + __pyx_m_clear, /* m_clear */ + NULL /* m_free */ + #else + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ + #endif + }; + #ifdef __cplusplus +} /* anonymous namespace */ +#endif +#endif + +#ifndef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#elif PY_MAJOR_VERSION < 3 +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" void +#else +#define __Pyx_PyMODINIT_FUNC void +#endif +#else +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyObject * +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC init_processors_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC init_processors_cy(void) +#else +__Pyx_PyMODINIT_FUNC PyInit__processors_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit__processors_cy(void) +#if 
CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? -1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) +#else +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) +#endif +{ + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { +#if CYTHON_COMPILING_IN_LIMITED_API + result = PyModule_AddObject(module, to_name, value); +#else + result = PyDict_SetItemString(moddict, to_name, value); +#endif + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + CYTHON_UNUSED_VAR(def); + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; +#if CYTHON_COMPILING_IN_LIMITED_API + moddict = module; +#else + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; +#endif + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec__processors_cy(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + int stringtab_initialized = 0; + #if CYTHON_USE_MODULE_STATE + int pystate_addmodule_run = 0; + #endif + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module '_processors_cy' has already been imported. 
Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_processors_cy", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #elif CYTHON_USE_MODULE_STATE + __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + { + int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); + __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_processors_cy" pseudovariable */ + if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + pystate_addmodule_run = 1; + } + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #endif + CYTHON_UNUSED_VAR(__pyx_t_1); + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__processors_cy(void)", 0); + if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + PyEval_InitThreads(); + #endif + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + stringtab_initialized = 1; + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_sqlalchemy__engine___processors_cy) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "sqlalchemy.engine._processors_cy")) { + if (unlikely((PyDict_SetItemString(modules, "sqlalchemy.engine._processors_cy", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + if (unlikely((__Pyx_modinit_type_init_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + (void)__Pyx_modinit_type_import_code(); + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "sqlalchemy/engine/_processors_cy.py":10 + * from __future__ import annotations + * + * from datetime import date as date_cls # <<<<<<<<<<<<<< + * from datetime import datetime as datetime_cls + * from datetime import time as time_cls + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_date); + __Pyx_GIVEREF(__pyx_n_s_date); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_date)) __PYX_ERR(0, 10, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_datetime, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_date); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_date_cls, __pyx_t_2) < 0) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":11 + * + * from datetime import date as date_cls + * from datetime import datetime as datetime_cls # <<<<<<<<<<<<<< + * from datetime import time as time_cls + * from typing import Any + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_datetime); + __Pyx_GIVEREF(__pyx_n_s_datetime); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_datetime)) __PYX_ERR(0, 11, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_datetime, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_datetime); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_datetime_cls, __pyx_t_3) < 0) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":12 + * from datetime import date as date_cls + * from datetime import datetime as datetime_cls + * from datetime import time as time_cls # <<<<<<<<<<<<<< + * from typing import Any + * from typing import Optional + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_time); + __Pyx_GIVEREF(__pyx_n_s_time); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_time)) __PYX_ERR(0, 12, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_datetime, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_time); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_time_cls, __pyx_t_2) < 0) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":13 + * from datetime import datetime as datetime_cls + * from datetime import time as time_cls + * from typing import Any # <<<<<<<<<<<<<< + * from typing import Optional + * + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Any); + __Pyx_GIVEREF(__pyx_n_s_Any); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Any)) __PYX_ERR(0, 13, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Any); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Any, __pyx_t_3) < 0) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":14 + * from datetime import time as time_cls + * from typing import Any + * from typing import Optional # <<<<<<<<<<<<<< + * + * # START GENERATED CYTHON IMPORT + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Optional); + __Pyx_GIVEREF(__pyx_n_s_Optional); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Optional)) __PYX_ERR(0, 14, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Optional); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Optional, __pyx_t_2) < 0) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":18 + * # START GENERATED CYTHON IMPORT + * # This section is automatically generated by the script tools/cython_imports.py + * try: # <<<<<<<<<<<<<< + * # NOTE: the cython compiler needs this "import cython" in the file, 
it + * # can't be only "from sqlalchemy.util import cython" with the fallback + */ + { + (void)__pyx_t_1; (void)__pyx_t_4; (void)__pyx_t_5; /* mark used */ + /*try:*/ { + + /* "sqlalchemy/engine/_processors_cy.py":22 + * # can't be only "from sqlalchemy.util import cython" with the fallback + * # in that module + * import cython # <<<<<<<<<<<<<< + * except ModuleNotFoundError: + * from sqlalchemy.util import cython + */ + } + } + + /* "sqlalchemy/engine/_processors_cy.py":27 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 27, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_bool) < 0) __PYX_ERR(0, 27, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_1_is_compiled, 0, __pyx_n_s_is_compiled, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__4)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 27, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_compiled, __pyx_t_2) < 0) __PYX_ERR(0, 27, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":35 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def int_to_boolean(value: Any) -> Optional[bool]: + * if value is None: + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 35, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 35, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_Optional_bool) < 0) __PYX_ERR(0, 35, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_3int_to_boolean, 0, __pyx_n_s_int_to_boolean, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__6)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 35, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_int_to_boolean, __pyx_t_3) < 0) __PYX_ERR(0, 35, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":42 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def to_str(value: Any) -> Optional[str]: + * if value is None: + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 42, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 42, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_Optional_str) < 0) __PYX_ERR(0, 42, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_5to_str, 0, __pyx_n_s_to_str, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__7)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 42, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_to_str, __pyx_t_2) < 0) __PYX_ERR(0, 42, 
__pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":49 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def to_float(value: Any) -> Optional[float]: + * if value is None: + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 49, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 49, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_Optional_float) < 0) __PYX_ERR(0, 49, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_7to_float, 0, __pyx_n_s_to_float, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__8)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 49, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_to_float, __pyx_t_3) < 0) __PYX_ERR(0, 49, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":56 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: + * if value is None: + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 56, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_value, __pyx_kp_s_Optional_str) < 0) __PYX_ERR(0, 56, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_Optional_datetime_cls) < 0) __PYX_ERR(0, 56, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_9str_to_datetime, 0, __pyx_n_s_str_to_datetime, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__9)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 56, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_str_to_datetime, __pyx_t_2) < 0) __PYX_ERR(0, 56, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":63 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_time(value: Optional[str]) -> Optional[time_cls]: + * if value is None: + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_value, __pyx_kp_s_Optional_str) < 0) __PYX_ERR(0, 63, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_Optional_time_cls) < 0) __PYX_ERR(0, 63, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_11str_to_time, 0, __pyx_n_s_str_to_time, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__10)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_str_to_time, __pyx_t_3) < 0) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":70 + * + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def str_to_date(value: Optional[str]) -> Optional[date_cls]: 
+ * if value is None: + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_value, __pyx_kp_s_Optional_str) < 0) __PYX_ERR(0, 70, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_Optional_date_cls) < 0) __PYX_ERR(0, 70, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_13str_to_date, 0, __pyx_n_s_str_to_date, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__11)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_str_to_date, __pyx_t_2) < 0) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":82 + * format_: str + * + * __slots__ = ("type_", "format_") # <<<<<<<<<<<<<< + * + * def __init__(self, type_: type, scale: int): + */ + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, __pyx_n_s_slots, __pyx_tuple__12) < 0) __PYX_ERR(0, 82, __pyx_L1_error) + PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_to_decimal_processor_factory___r, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__14)); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, __pyx_n_s_reduce_cython, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) + */ + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_to_decimal_processor_factory___s, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__16)); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, __pyx_n_s_setstate_cython, __pyx_t_2) < 0) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); + + /* "(tree fragment)":1 + * def __pyx_unpickle_to_decimal_processor_factory(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_2 = 
__Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_15__pyx_unpickle_to_decimal_processor_factory, 0, __pyx_n_s_pyx_unpickle_to_decimal_proces, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__18)); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_to_decimal_proces, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_processors_cy.py":1 + * # engine/_processors_cy.py # <<<<<<<<<<<<<< + * # Copyright (C) 2010-2025 the SQLAlchemy authors and contributors + * # + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + if (__pyx_m) { + if (__pyx_d && stringtab_initialized) { + __Pyx_AddTraceback("init sqlalchemy.engine._processors_cy", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + #if !CYTHON_USE_MODULE_STATE + Py_CLEAR(__pyx_m); + #else + Py_DECREF(__pyx_m); + if (pystate_addmodule_run) { + PyObject *tp, *value, *tb; + PyErr_Fetch(&tp, &value, &tb); + PyState_RemoveModule(&__pyx_moduledef); + PyErr_Restore(tp, value, tb); + } + #endif + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init sqlalchemy.engine._processors_cy"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} +/* #### Code section: cleanup_globals ### */ +/* #### Code section: cleanup_module ### */ +/* #### Code section: main_method ### */ +/* #### Code section: utility_code_pragmas ### */ +#ifdef _MSC_VER +#pragma warning( push ) +/* Warning 4127: conditional expression is constant + * Cython uses constant conditional expressions to allow in inline functions to be optimized at + * compile-time, so this warning is not useful + */ +#pragma warning( disable : 4127 ) +#endif + + + +/* #### Code section: utility_code_def ### */ + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* TupleAndListFromArray */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE void __Pyx_copy_object_array(PyObject *const *CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { + PyObject *v; + Py_ssize_t i; + for (i = 0; i < length; i++) { + v = dest[i] = src[i]; + Py_INCREF(v); + } +} +static CYTHON_INLINE PyObject * +__Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) +{ + PyObject *res; + if (n <= 0) { + Py_INCREF(__pyx_empty_tuple); + return __pyx_empty_tuple; + } + res = PyTuple_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyTupleObject*)res)->ob_item, n); + return res; +} +static CYTHON_INLINE PyObject * +__Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n) +{ + 
PyObject *res; + if (n <= 0) { + return PyList_New(0); + } + res = PyList_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyListObject*)res)->ob_item, n); + return res; +} +#endif + +/* BytesEquals */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + ps1 = PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + return (equals == Py_EQ); + } else { + int result; +#if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) + Py_hash_t hash1, hash2; + hash1 = ((PyBytesObject*)s1)->ob_shash; + hash2 = ((PyBytesObject*)s2)->ob_shash; + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + return (equals == Py_NE); + } +#endif + result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +/* UnicodeEquals */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } +#if CYTHON_USE_UNICODE_INTERNALS + { + Py_hash_t hash1, hash2; + #if CYTHON_PEP393_ENABLED + hash1 = ((PyASCIIObject*)s1)->hash; + hash2 = ((PyASCIIObject*)s2)->hash; + #else + hash1 = ((PyUnicodeObject*)s1)->hash; + hash2 = ((PyUnicodeObject*)s2)->hash; + #endif + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + goto return_ne; + } + } +#endif + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != 
__Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + +/* fastcall */ +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s) +{ + Py_ssize_t i, n = PyTuple_GET_SIZE(kwnames); + for (i = 0; i < n; i++) + { + if (s == PyTuple_GET_ITEM(kwnames, i)) return kwvalues[i]; + } + for (i = 0; i < n; i++) + { + int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); + if (unlikely(eq != 0)) { + if (unlikely(eq < 0)) return NULL; + return kwvalues[i]; + } + } + return NULL; +} +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 +CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { + Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); + PyObject *dict; + dict = PyDict_New(); + if (unlikely(!dict)) + return NULL; + for (i=0; i= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + int kwds_is_tuple = CYTHON_METH_FASTCALL && likely(PyTuple_Check(kwds)); + while (1) { + Py_XDECREF(key); key = NULL; + Py_XDECREF(value); value = NULL; + if (kwds_is_tuple) { + Py_ssize_t size; +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(kwds); +#else + size = PyTuple_Size(kwds); + if (size < 0) goto bad; +#endif + if (pos >= size) break; +#if CYTHON_AVOID_BORROWED_REFS + key = __Pyx_PySequence_ITEM(kwds, pos); + if (!key) goto bad; +#elif CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kwds, pos); +#else + key = PyTuple_GetItem(kwds, pos); + if (!key) goto bad; +#endif + value = kwvalues[pos]; + pos++; + } + else + { + if (!PyDict_Next(kwds, &pos, &key, &value)) break; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + } + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(value); + Py_DECREF(key); +#endif + key = NULL; + value = NULL; + continue; + } +#if !CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + Py_INCREF(value); + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || 
PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = ( + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key) + ); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + Py_XDECREF(key); + Py_XDECREF(value); + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + Py_XDECREF(key); + Py_XDECREF(value); + return -1; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* pybytes_as_double */ +static double __Pyx_SlowPyString_AsDouble(PyObject *obj) { + PyObject *float_value; +#if PY_MAJOR_VERSION >= 3 + float_value = PyFloat_FromString(obj); +#else + float_value = PyFloat_FromString(obj, 0); +#endif + if (likely(float_value)) { +#if CYTHON_ASSUME_SAFE_MACROS + double value = PyFloat_AS_DOUBLE(float_value); +#else + double value = PyFloat_AsDouble(float_value); +#endif + Py_DECREF(float_value); + return value; + } + return (double)-1; +} +static const char* __Pyx__PyBytes_AsDouble_Copy(const char* start, char* buffer, Py_ssize_t length) { + int last_was_punctuation = 1; + Py_ssize_t i; + for (i=0; i < length; i++) { + char chr = start[i]; + int is_punctuation = (chr == '_') | (chr == '.') | (chr == 'e') | (chr == 'E'); + *buffer = chr; + buffer += (chr != '_'); + if (unlikely(last_was_punctuation & is_punctuation)) goto parse_failure; + last_was_punctuation = is_punctuation; + } + if (unlikely(last_was_punctuation)) goto parse_failure; + *buffer = '\0'; + return buffer; +parse_failure: + return NULL; +} +static double __Pyx__PyBytes_AsDouble_inf_nan(const char* start, Py_ssize_t length) { + int matches = 1; + char sign = start[0]; + int is_signed = (sign == '+') | (sign == '-'); + start += is_signed; + length -= is_signed; + switch (start[0]) { + #ifdef Py_NAN + case 'n': + case 'N': + if (unlikely(length != 3)) goto parse_failure; + matches &= (start[1] == 'a' || start[1] == 'A'); + matches &= (start[2] == 'n' || start[2] == 'N'); + if (unlikely(!matches)) goto parse_failure; + return (sign == '-') ? -Py_NAN : Py_NAN; + #endif + case 'i': + case 'I': + if (unlikely(length < 3)) goto parse_failure; + matches &= (start[1] == 'n' || start[1] == 'N'); + matches &= (start[2] == 'f' || start[2] == 'F'); + if (likely(length == 3 && matches)) + return (sign == '-') ? -Py_HUGE_VAL : Py_HUGE_VAL; + if (unlikely(length != 8)) goto parse_failure; + matches &= (start[3] == 'i' || start[3] == 'I'); + matches &= (start[4] == 'n' || start[4] == 'N'); + matches &= (start[5] == 'i' || start[5] == 'I'); + matches &= (start[6] == 't' || start[6] == 'T'); + matches &= (start[7] == 'y' || start[7] == 'Y'); + if (unlikely(!matches)) goto parse_failure; + return (sign == '-') ? 
-Py_HUGE_VAL : Py_HUGE_VAL; + case '.': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': + break; + default: + goto parse_failure; + } + return 0.0; +parse_failure: + return -1.0; +} +static CYTHON_INLINE int __Pyx__PyBytes_AsDouble_IsSpace(char ch) { + return (ch == 0x20) | !((ch < 0x9) | (ch > 0xd)); +} +CYTHON_UNUSED static double __Pyx__PyBytes_AsDouble(PyObject *obj, const char* start, Py_ssize_t length) { + double value; + Py_ssize_t i, digits; + const char *last = start + length; + char *end; + while (__Pyx__PyBytes_AsDouble_IsSpace(*start)) + start++; + while (start < last - 1 && __Pyx__PyBytes_AsDouble_IsSpace(last[-1])) + last--; + length = last - start; + if (unlikely(length <= 0)) goto fallback; + value = __Pyx__PyBytes_AsDouble_inf_nan(start, length); + if (unlikely(value == -1.0)) goto fallback; + if (value != 0.0) return value; + digits = 0; + for (i=0; i < length; digits += start[i++] != '_'); + if (likely(digits == length)) { + value = PyOS_string_to_double(start, &end, NULL); + } else if (digits < 40) { + char number[40]; + last = __Pyx__PyBytes_AsDouble_Copy(start, number, length); + if (unlikely(!last)) goto fallback; + value = PyOS_string_to_double(number, &end, NULL); + } else { + char *number = (char*) PyMem_Malloc((digits + 1) * sizeof(char)); + if (unlikely(!number)) goto fallback; + last = __Pyx__PyBytes_AsDouble_Copy(start, number, length); + if (unlikely(!last)) { + PyMem_Free(number); + goto fallback; + } + value = PyOS_string_to_double(number, &end, NULL); + PyMem_Free(number); + } + if (likely(end == last) || (value == (double)-1 && PyErr_Occurred())) { + return value; + } +fallback: + return __Pyx_SlowPyString_AsDouble(obj); +} + +/* pynumber_float */ +static CYTHON_INLINE PyObject* __Pyx__PyNumber_Float(PyObject* obj) { + double val; + if (PyLong_CheckExact(obj)) { +#if CYTHON_USE_PYLONG_INTERNALS + if (likely(__Pyx_PyLong_IsCompact(obj))) { + val = (double) __Pyx_PyLong_CompactValue(obj); + goto no_error; + } +#endif + val = PyLong_AsDouble(obj); + } else if (PyUnicode_CheckExact(obj)) { + val = __Pyx_PyUnicode_AsDouble(obj); + } else if (PyBytes_CheckExact(obj)) { + val = __Pyx_PyBytes_AsDouble(obj); + } else if (PyByteArray_CheckExact(obj)) { + val = __Pyx_PyByteArray_AsDouble(obj); + } else { + return PyNumber_Float(obj); + } + if (unlikely(val == -1 && PyErr_Occurred())) { + return NULL; + } +#if CYTHON_USE_PYLONG_INTERNALS +no_error: +#endif + return PyFloat_FromDouble(val); +} + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030C00A6 + PyObject *current_exception = tstate->current_exception; + if (unlikely(!current_exception)) return 0; + exc_type = (PyObject*) Py_TYPE(current_exception); + if (exc_type == err) return 1; +#else + exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; +#endif + #if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(exc_type); + #endif + if (unlikely(PyTuple_Check(err))) { + result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + } else { + result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err); + } + #if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(exc_type); + #endif + return result; +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, 
PyObject *value, PyObject *tb) { +#if PY_VERSION_HEX >= 0x030C00A6 + PyObject *tmp_value; + assert(type == NULL || (value != NULL && type == (PyObject*) Py_TYPE(value))); + if (value) { + #if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(((PyBaseExceptionObject*) value)->traceback != tb)) + #endif + PyException_SetTraceback(value, tb); + } + tmp_value = tstate->current_exception; + tstate->current_exception = value; + Py_XDECREF(tmp_value); + Py_XDECREF(type); + Py_XDECREF(tb); +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#endif +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { +#if PY_VERSION_HEX >= 0x030C00A6 + PyObject* exc_value; + exc_value = tstate->current_exception; + tstate->current_exception = 0; + *value = exc_value; + *type = NULL; + *tb = NULL; + if (exc_value) { + *type = (PyObject*) Py_TYPE(exc_value); + Py_INCREF(*type); + #if CYTHON_COMPILING_IN_CPYTHON + *tb = ((PyBaseExceptionObject*) exc_value)->traceback; + Py_XINCREF(*tb); + #else + *tb = PyException_GetTraceback(exc_value); + #endif + } +#else + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#endif +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* PyObjectGetAttrStrNoError */ +#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + (void) PyObject_GetOptionalAttr(obj, attr_name, &result); + return result; +#else +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { + return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); + } +#endif + result = __Pyx_PyObject_GetAttrStr(obj, attr_name); + if (unlikely(!result)) { + __Pyx_PyObject_GetAttrStr_ClearAttributeError(); + } + return result; +#endif +} + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStrNoError(__pyx_b, name); + if (unlikely(!result) && !PyErr_Occurred()) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* PyDictVersioning */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PY_UINT64_T 
__Pyx_get_tp_dict_version(PyObject *obj) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; +} +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#elif CYTHON_COMPILING_IN_LIMITED_API + if (unlikely(!__pyx_m)) { + return NULL; + } + result = PyObject_GetAttr(__pyx_m, name); + if (likely(result)) { + return result; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL && !CYTHON_VECTORCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. 
+ */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? PyDict_Size(kwargs) : 0; + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) { + return NULL; + } + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) { + return NULL; + } + #endif + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = Py_TYPE(func)->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} 
+#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = __Pyx_CyOrPyCFunction_GET_FUNCTION(func); + self = __Pyx_CyOrPyCFunction_GET_SELF(func); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectFastCall */ +#if PY_VERSION_HEX < 0x03090000 || CYTHON_COMPILING_IN_LIMITED_API +static PyObject* __Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs) { + PyObject *argstuple; + PyObject *result = 0; + size_t i; + argstuple = PyTuple_New((Py_ssize_t)nargs); + if (unlikely(!argstuple)) return NULL; + for (i = 0; i < nargs; i++) { + Py_INCREF(args[i]); + if (__Pyx_PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]) < 0) goto bad; + } + result = __Pyx_PyObject_Call(func, argstuple, kwargs); + bad: + Py_DECREF(argstuple); + return result; +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t _nargs, PyObject *kwargs) { + Py_ssize_t nargs = __Pyx_PyVectorcall_NARGS(_nargs); +#if CYTHON_COMPILING_IN_CPYTHON + if (nargs == 0 && kwargs == NULL) { + if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_NOARGS)) + return __Pyx_PyObject_CallMethO(func, NULL); + } + else if (nargs == 1 && kwargs == NULL) { + if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_O)) + return __Pyx_PyObject_CallMethO(func, args[0]); + } +#endif + #if PY_VERSION_HEX < 0x030800B1 + #if CYTHON_FAST_PYCCALL + if (PyCFunction_Check(func)) { + if (kwargs) { + return _PyCFunction_FastCallDict(func, args, nargs, kwargs); + } else { + return _PyCFunction_FastCallKeywords(func, args, nargs, NULL); + } + } + #if PY_VERSION_HEX >= 0x030700A1 + if (!kwargs && __Pyx_IS_TYPE(func, &PyMethodDescr_Type)) { + return _PyMethodDescr_FastCallKeywords(func, args, nargs, NULL); + } + #endif + #endif + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs); + } + #endif + #endif + if (kwargs == NULL) { + #if CYTHON_VECTORCALL + #if PY_VERSION_HEX < 0x03090000 + vectorcallfunc f = _PyVectorcall_Function(func); + #else + vectorcallfunc f = PyVectorcall_Function(func); + #endif + if (f) { + return f(func, args, (size_t)nargs, NULL); + } + #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL + if (__Pyx_CyFunction_CheckExact(func)) { + __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func); + if (f) return f(func, args, (size_t)nargs, NULL); + } + #endif + } + if (nargs == 0) { + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, kwargs); + } + #if PY_VERSION_HEX >= 0x03090000 && !CYTHON_COMPILING_IN_LIMITED_API + return PyObject_VectorcallDict(func, args, (size_t)nargs, kwargs); + #else + return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs); + #endif +} + +/* ArgTypeTest */ +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) +{ + __Pyx_TypeName 
type_name; + __Pyx_TypeName obj_type_name; + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + else if (exact) { + #if PY_MAJOR_VERSION == 2 + if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(__Pyx_TypeCheck(obj, type))) return 1; + } + type_name = __Pyx_PyType_GetName(type); + obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected " __Pyx_FMT_TYPENAME + ", got " __Pyx_FMT_TYPENAME ")", name, type_name, obj_type_name); + __Pyx_DECREF_TypeName(type_name); + __Pyx_DECREF_TypeName(obj_type_name); + return 0; +} + +/* JoinPyUnicode */ +static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, + Py_UCS4 max_char) { +#if CYTHON_USE_UNICODE_INTERNALS && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + PyObject *result_uval; + int result_ukind, kind_shift; + Py_ssize_t i, char_pos; + void *result_udata; + CYTHON_MAYBE_UNUSED_VAR(max_char); +#if CYTHON_PEP393_ENABLED + result_uval = PyUnicode_New(result_ulength, max_char); + if (unlikely(!result_uval)) return NULL; + result_ukind = (max_char <= 255) ? PyUnicode_1BYTE_KIND : (max_char <= 65535) ? PyUnicode_2BYTE_KIND : PyUnicode_4BYTE_KIND; + kind_shift = (result_ukind == PyUnicode_4BYTE_KIND) ? 2 : result_ukind - 1; + result_udata = PyUnicode_DATA(result_uval); +#else + result_uval = PyUnicode_FromUnicode(NULL, result_ulength); + if (unlikely(!result_uval)) return NULL; + result_ukind = sizeof(Py_UNICODE); + kind_shift = (result_ukind == 4) ? 2 : result_ukind - 1; + result_udata = PyUnicode_AS_UNICODE(result_uval); +#endif + assert(kind_shift == 2 || kind_shift == 1 || kind_shift == 0); + char_pos = 0; + for (i=0; i < value_count; i++) { + int ukind; + Py_ssize_t ulength; + void *udata; + PyObject *uval = PyTuple_GET_ITEM(value_tuple, i); + if (unlikely(__Pyx_PyUnicode_READY(uval))) + goto bad; + ulength = __Pyx_PyUnicode_GET_LENGTH(uval); + if (unlikely(!ulength)) + continue; + if (unlikely((PY_SSIZE_T_MAX >> kind_shift) - ulength < char_pos)) + goto overflow; + ukind = __Pyx_PyUnicode_KIND(uval); + udata = __Pyx_PyUnicode_DATA(uval); + if (!CYTHON_PEP393_ENABLED || ukind == result_ukind) { + memcpy((char *)result_udata + (char_pos << kind_shift), udata, (size_t) (ulength << kind_shift)); + } else { + #if PY_VERSION_HEX >= 0x030d0000 + if (unlikely(PyUnicode_CopyCharacters(result_uval, char_pos, uval, 0, ulength) < 0)) goto bad; + #elif CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 || defined(_PyUnicode_FastCopyCharacters) + _PyUnicode_FastCopyCharacters(result_uval, char_pos, uval, 0, ulength); + #else + Py_ssize_t j; + for (j=0; j < ulength; j++) { + Py_UCS4 uchar = __Pyx_PyUnicode_READ(ukind, udata, j); + __Pyx_PyUnicode_WRITE(result_ukind, result_udata, char_pos+j, uchar); + } + #endif + } + char_pos += ulength; + } + return result_uval; +overflow: + PyErr_SetString(PyExc_OverflowError, "join() result is too long for a Python string"); +bad: + Py_DECREF(result_uval); + return NULL; +#else + CYTHON_UNUSED_VAR(max_char); + CYTHON_UNUSED_VAR(result_ulength); + CYTHON_UNUSED_VAR(value_count); + return PyUnicode_Join(__pyx_empty_unicode, value_tuple); +#endif +} + +/* PyObjectCallOneArg */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *args[2] = {NULL, arg}; + return __Pyx_PyObject_FastCall(func, args+1, 1 | 
__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + +/* KeywordStringCheck */ +static int __Pyx_CheckKeywordStrings( + PyObject *kw, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kw, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + if (CYTHON_METH_FASTCALL && likely(PyTuple_Check(kw))) { + Py_ssize_t kwsize; +#if CYTHON_ASSUME_SAFE_MACROS + kwsize = PyTuple_GET_SIZE(kw); +#else + kwsize = PyTuple_Size(kw); + if (kwsize < 0) return 0; +#endif + if (unlikely(kwsize == 0)) + return 1; + if (!kw_allowed) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, 0); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + goto invalid_keyword; + } +#if PY_VERSION_HEX < 0x03090000 + for (pos = 0; pos < kwsize; pos++) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, pos); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } +#endif + return 1; + } + while (PyDict_Next(kw, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if (!kw_allowed && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + +/* GetAttr3 */ +#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 +static PyObject *__Pyx_GetAttr3Default(PyObject *d) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + __Pyx_PyErr_Clear(); + Py_INCREF(d); + return d; +} +#endif +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r; +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + int res = PyObject_GetOptionalAttr(o, n, &r); + return (res != 0) ? r : __Pyx_NewRef(d); +#else + #if CYTHON_USE_TYPE_SLOTS + if (likely(PyString_Check(n))) { + r = __Pyx_PyObject_GetAttrStrNoError(o, n); + if (unlikely(!r) && likely(!PyErr_Occurred())) { + r = __Pyx_NewRef(d); + } + return r; + } + #endif + r = PyObject_GetAttr(o, n); + return (likely(r)) ? 
r : __Pyx_GetAttr3Default(d); +#endif +} + +/* RaiseUnexpectedTypeError */ +static int +__Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) +{ + __Pyx_TypeName obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, + expected, obj_type_name); + __Pyx_DECREF_TypeName(obj_type_name); + return 0; +} + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *module = 0; + PyObject *empty_dict = 0; + PyObject *empty_list = 0; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (unlikely(!py_import)) + goto bad; + if (!from_list) { + empty_list = PyList_New(0); + if (unlikely(!empty_list)) + goto bad; + from_list = empty_list; + } + #endif + empty_dict = PyDict_New(); + if (unlikely(!empty_dict)) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.') != NULL) { + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, 1); + if (unlikely(!module)) { + if (unlikely(!PyErr_ExceptionMatches(PyExc_ImportError))) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (unlikely(!py_level)) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, __pyx_d, empty_dict, from_list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, level); + #endif + } + } +bad: + Py_XDECREF(empty_dict); + Py_XDECREF(empty_list); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + const char* module_name_str = 0; + PyObject* module_name = 0; + PyObject* module_dot = 0; + PyObject* full_name = 0; + PyErr_Clear(); + module_name_str = PyModule_GetName(module); + if (unlikely(!module_name_str)) { goto modbad; } + module_name = PyUnicode_FromString(module_name_str); + if (unlikely(!module_name)) { goto modbad; } + module_dot = PyUnicode_Concat(module_name, __pyx_kp_u__3); + if (unlikely(!module_dot)) { goto modbad; } + full_name = PyUnicode_Concat(module_dot, name); + if (unlikely(!full_name)) { goto modbad; } + #if PY_VERSION_HEX < 0x030700A1 || (CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM < 0x07030400) + { + PyObject *modules = PyImport_GetModuleDict(); + if (unlikely(!modules)) + goto modbad; + value = PyObject_GetItem(modules, full_name); + } + #else + value = PyImport_GetModule(full_name); + #endif + modbad: + Py_XDECREF(full_name); + Py_XDECREF(module_dot); + Py_XDECREF(module_name); + } + if (unlikely(!value)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + __Pyx_PyThreadState_declare + CYTHON_UNUSED_VAR(cause); + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) 
{ + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { + #if PY_VERSION_HEX >= 0x030C00A6 + PyException_SetTraceback(value, tb); + #elif CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + 
Py_XDECREF(tmp_tb); +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* GetItemInt */ +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (unlikely(!j)) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; + PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; + if (mm && mm->mp_subscript) { + PyObject *r, *key = PyInt_FromSsize_t(i); + if (unlikely(!key)) return NULL; + r = mm->mp_subscript(o, key); + Py_DECREF(key); + return r; + } + if (likely(sm && sm->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { + Py_ssize_t l = sm->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return sm->sq_item(o, i); + } + } +#else + if (is_list || !PyMapping_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* GetAttr */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_USE_TYPE_SLOTS +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/* HasAttr */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { + PyObject *r; + if (unlikely(!__Pyx_PyBaseString_Check(n))) { + PyErr_SetString(PyExc_TypeError, + "hasattr(): attribute name must be string"); + return -1; + } + r = __Pyx_GetAttr(o, n); + if (!r) { + PyErr_Clear(); + return 0; + } else { + Py_DECREF(r); + return 1; + } +} + +/* FixUpExtensionType */ +#if CYTHON_USE_TYPE_SPECS +static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { +#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + CYTHON_UNUSED_VAR(spec); + CYTHON_UNUSED_VAR(type); +#else + const PyType_Slot *slot = spec->slots; + while (slot && slot->slot && slot->slot != Py_tp_members) + slot++; + if (slot && slot->slot == Py_tp_members) { + int changed = 0; +#if !(PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON) + const +#endif + PyMemberDef *memb = (PyMemberDef*) slot->pfunc; + while (memb && memb->name) { + if (memb->name[0] == '_' && memb->name[1] == '_') { +#if PY_VERSION_HEX < 0x030900b1 + if (strcmp(memb->name, "__weaklistoffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + type->tp_weaklistoffset = memb->offset; + changed = 1; + } + else if (strcmp(memb->name, "__dictoffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + type->tp_dictoffset = memb->offset; + changed = 1; + } +#if CYTHON_METH_FASTCALL + else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); +#if PY_VERSION_HEX >= 0x030800b4 + type->tp_vectorcall_offset = memb->offset; +#else + type->tp_print = (printfunc) memb->offset; +#endif + changed = 1; + } +#endif +#else + if ((0)); +#endif +#if PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON + else if (strcmp(memb->name, "__module__") == 0) { + PyObject *descr; + assert(memb->type == T_OBJECT); + assert(memb->flags == 0 || memb->flags == READONLY); + descr = PyDescr_NewMember(type, memb); + if (unlikely(!descr)) + return -1; + if (unlikely(PyDict_SetItem(type->tp_dict, PyDescr_NAME(descr), descr) < 0)) { + Py_DECREF(descr); + return -1; + } + Py_DECREF(descr); + changed = 1; + } +#endif + } + memb++; + } + if (changed) + 
PyType_Modified(type); + } +#endif + return 0; +} +#endif + +/* PyObjectCallNoArg */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { + PyObject *arg[2] = {NULL, NULL}; + return __Pyx_PyObject_FastCall(func, arg + 1, 0 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + +/* PyObjectGetMethod */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { + PyObject *attr; +#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP + __Pyx_TypeName type_name; + PyTypeObject *tp = Py_TYPE(obj); + PyObject *descr; + descrgetfunc f = NULL; + PyObject **dictptr, *dict; + int meth_found = 0; + assert (*method == NULL); + if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; + } + if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { + return 0; + } + descr = _PyType_Lookup(tp, name); + if (likely(descr != NULL)) { + Py_INCREF(descr); +#if defined(Py_TPFLAGS_METHOD_DESCRIPTOR) && Py_TPFLAGS_METHOD_DESCRIPTOR + if (__Pyx_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)) +#elif PY_MAJOR_VERSION >= 3 + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type))) + #endif +#else + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr))) + #endif +#endif + { + meth_found = 1; + } else { + f = Py_TYPE(descr)->tp_descr_get; + if (f != NULL && PyDescr_IsData(descr)) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + } + } + dictptr = _PyObject_GetDictPtr(obj); + if (dictptr != NULL && (dict = *dictptr) != NULL) { + Py_INCREF(dict); + attr = __Pyx_PyDict_GetItemStr(dict, name); + if (attr != NULL) { + Py_INCREF(attr); + Py_DECREF(dict); + Py_XDECREF(descr); + goto try_unpack; + } + Py_DECREF(dict); + } + if (meth_found) { + *method = descr; + return 1; + } + if (f != NULL) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + if (likely(descr != NULL)) { + *method = descr; + return 0; + } + type_name = __Pyx_PyType_GetName(tp); + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", + type_name, name); +#else + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", + type_name, PyString_AS_STRING(name)); +#endif + __Pyx_DECREF_TypeName(type_name); + return 0; +#else + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; +#endif +try_unpack: +#if CYTHON_UNPACK_METHODS + if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { + PyObject *function = PyMethod_GET_FUNCTION(attr); + Py_INCREF(function); + Py_DECREF(attr); + *method = function; + return 1; + } +#endif + *method = attr; + return 0; +} + +/* PyObjectCallMethod0 */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { + PyObject *method = NULL, *result = NULL; + int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); + if (likely(is_method)) { + result = __Pyx_PyObject_CallOneArg(method, obj); + Py_DECREF(method); + return result; + } + if (unlikely(!method)) goto bad; + result = __Pyx_PyObject_CallNoArg(method); + Py_DECREF(method); +bad: + return result; +} + +/* 
ValidateBasesTuple */ +#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS +static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases) { + Py_ssize_t i, n; +#if CYTHON_ASSUME_SAFE_MACROS + n = PyTuple_GET_SIZE(bases); +#else + n = PyTuple_Size(bases); + if (n < 0) return -1; +#endif + for (i = 1; i < n; i++) + { +#if CYTHON_AVOID_BORROWED_REFS + PyObject *b0 = PySequence_GetItem(bases, i); + if (!b0) return -1; +#elif CYTHON_ASSUME_SAFE_MACROS + PyObject *b0 = PyTuple_GET_ITEM(bases, i); +#else + PyObject *b0 = PyTuple_GetItem(bases, i); + if (!b0) return -1; +#endif + PyTypeObject *b; +#if PY_MAJOR_VERSION < 3 + if (PyClass_Check(b0)) + { + PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class", + PyString_AS_STRING(((PyClassObject*)b0)->cl_name)); +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } +#endif + b = (PyTypeObject*) b0; + if (!__Pyx_PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE)) + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "base class '" __Pyx_FMT_TYPENAME "' is not a heap type", b_name); + __Pyx_DECREF_TypeName(b_name); +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } + if (dictoffset == 0) + { + Py_ssize_t b_dictoffset = 0; +#if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY + b_dictoffset = b->tp_dictoffset; +#else + PyObject *py_b_dictoffset = PyObject_GetAttrString((PyObject*)b, "__dictoffset__"); + if (!py_b_dictoffset) goto dictoffset_return; + b_dictoffset = PyLong_AsSsize_t(py_b_dictoffset); + Py_DECREF(py_b_dictoffset); + if (b_dictoffset == -1 && PyErr_Occurred()) goto dictoffset_return; +#endif + if (b_dictoffset) { + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "extension type '%.200s' has no __dict__ slot, " + "but base type '" __Pyx_FMT_TYPENAME "' has: " + "either add 'cdef dict __dict__' to the extension type " + "or add '__slots__ = [...]' to the base type", + type_name, b_name); + __Pyx_DECREF_TypeName(b_name); + } +#if !(CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY) + dictoffset_return: +#endif +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } + } +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + } + return 0; +} +#endif + +/* PyType_Ready */ +static int __Pyx_PyType_Ready(PyTypeObject *t) { +#if CYTHON_USE_TYPE_SPECS || !(CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API) || defined(PYSTON_MAJOR_VERSION) + (void)__Pyx_PyObject_CallMethod0; +#if CYTHON_USE_TYPE_SPECS + (void)__Pyx_validate_bases_tuple; +#endif + return PyType_Ready(t); +#else + int r; + PyObject *bases = __Pyx_PyType_GetSlot(t, tp_bases, PyObject*); + if (bases && unlikely(__Pyx_validate_bases_tuple(t->tp_name, t->tp_dictoffset, bases) == -1)) + return -1; +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) + { + int gc_was_enabled; + #if PY_VERSION_HEX >= 0x030A00b1 + gc_was_enabled = PyGC_Disable(); + (void)__Pyx_PyObject_CallMethod0; + #else + PyObject *ret, *py_status; + PyObject *gc = NULL; + #if PY_VERSION_HEX >= 0x030700a1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM+0 >= 0x07030400) + gc = PyImport_GetModule(__pyx_kp_u_gc); + #endif + if (unlikely(!gc)) gc = PyImport_Import(__pyx_kp_u_gc); + if (unlikely(!gc)) return -1; + py_status = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_isenabled); + if (unlikely(!py_status)) { + Py_DECREF(gc); + return -1; + } + gc_was_enabled = 
__Pyx_PyObject_IsTrue(py_status); + Py_DECREF(py_status); + if (gc_was_enabled > 0) { + ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_disable); + if (unlikely(!ret)) { + Py_DECREF(gc); + return -1; + } + Py_DECREF(ret); + } else if (unlikely(gc_was_enabled == -1)) { + Py_DECREF(gc); + return -1; + } + #endif + t->tp_flags |= Py_TPFLAGS_HEAPTYPE; +#if PY_VERSION_HEX >= 0x030A0000 + t->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; +#endif +#else + (void)__Pyx_PyObject_CallMethod0; +#endif + r = PyType_Ready(t); +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) + t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; + #if PY_VERSION_HEX >= 0x030A00b1 + if (gc_was_enabled) + PyGC_Enable(); + #else + if (gc_was_enabled) { + PyObject *tp, *v, *tb; + PyErr_Fetch(&tp, &v, &tb); + ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_enable); + if (likely(ret || r == -1)) { + Py_XDECREF(ret); + PyErr_Restore(tp, v, tb); + } else { + Py_XDECREF(tp); + Py_XDECREF(v); + Py_XDECREF(tb); + r = -1; + } + } + Py_DECREF(gc); + #endif + } +#endif + return r; +#endif +} + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + __Pyx_TypeName type_name = __Pyx_PyType_GetName(tp); + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", + type_name, attr_name); +#else + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", + type_name, PyString_AS_STRING(attr_name)); +#endif + __Pyx_DECREF_TypeName(type_name); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* SetupReduce */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = __Pyx_PyObject_GetAttrStrNoError(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_getstate = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = 
NULL; + PyObject *getstate = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + getstate = _PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate); +#else + getstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_getstate); + if (!getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (getstate) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_getstate = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_getstate); +#else + object_getstate = __Pyx_PyObject_GetAttrStrNoError((PyObject*)&PyBaseObject_Type, __pyx_n_s_getstate); + if (!object_getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (object_getstate != getstate) { + goto __PYX_GOOD; + } + } +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; + if (reduce_ex == object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); + if (likely(reduce_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (reduce == object_reduce || PyErr_Occurred()) { + goto __PYX_BAD; + } + setstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); + if (likely(setstate_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (!setstate || PyErr_Occurred()) { + goto __PYX_BAD; + } + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto __PYX_GOOD; +__PYX_BAD: + if (!PyErr_Occurred()) { + __Pyx_TypeName type_obj_name = + __Pyx_PyType_GetName((PyTypeObject*)type_obj); + PyErr_Format(PyExc_RuntimeError, + "Unable to initialize pickling for " __Pyx_FMT_TYPENAME, type_obj_name); + __Pyx_DECREF_TypeName(type_obj_name); + } + ret = -1; +__PYX_GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); + Py_XDECREF(object_getstate); + Py_XDECREF(getstate); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} +#endif + +/* FetchSharedCythonModule */ +static PyObject *__Pyx_FetchSharedCythonABIModule(void) { + return __Pyx_PyImport_AddModuleRef((char*) 
__PYX_ABI_MODULE_NAME); +} + +/* FetchCommonType */ +static int __Pyx_VerifyCachedType(PyObject *cached_type, + const char *name, + Py_ssize_t basicsize, + Py_ssize_t expected_basicsize) { + if (!PyType_Check(cached_type)) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s is not a type object", name); + return -1; + } + if (basicsize != expected_basicsize) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s has the wrong size, try recompiling", + name); + return -1; + } + return 0; +} +#if !CYTHON_USE_TYPE_SPECS +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { + PyObject* abi_module; + const char* object_name; + PyTypeObject *cached_type = NULL; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + object_name = strrchr(type->tp_name, '.'); + object_name = object_name ? object_name+1 : type->tp_name; + cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + if (__Pyx_VerifyCachedType( + (PyObject *)cached_type, + object_name, + cached_type->tp_basicsize, + type->tp_basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + if (PyType_Ready(type) < 0) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) + goto bad; + Py_INCREF(type); + cached_type = type; +done: + Py_DECREF(abi_module); + return cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#else +static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { + PyObject *abi_module, *cached_type = NULL; + const char* object_name = strrchr(spec->name, '.'); + object_name = object_name ? object_name+1 : spec->name; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + cached_type = PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + Py_ssize_t basicsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_basicsize; + py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); + if (unlikely(!py_basicsize)) goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; +#else + basicsize = likely(PyType_Check(cached_type)) ? 
((PyTypeObject*) cached_type)->tp_basicsize : -1; +#endif + if (__Pyx_VerifyCachedType( + cached_type, + object_name, + basicsize, + spec->basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + CYTHON_UNUSED_VAR(module); + cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); + if (unlikely(!cached_type)) goto bad; + if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; +done: + Py_DECREF(abi_module); + assert(cached_type == NULL || PyType_Check(cached_type)); + return (PyTypeObject *) cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#endif + +/* PyVectorcallFastCallDict */ +#if CYTHON_METH_FASTCALL +static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + PyObject *res = NULL; + PyObject *kwnames; + PyObject **newargs; + PyObject **kwvalues; + Py_ssize_t i, pos; + size_t j; + PyObject *key, *value; + unsigned long keys_are_strings; + Py_ssize_t nkw = PyDict_GET_SIZE(kw); + newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); + if (unlikely(newargs == NULL)) { + PyErr_NoMemory(); + return NULL; + } + for (j = 0; j < nargs; j++) newargs[j] = args[j]; + kwnames = PyTuple_New(nkw); + if (unlikely(kwnames == NULL)) { + PyMem_Free(newargs); + return NULL; + } + kwvalues = newargs + nargs; + pos = i = 0; + keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; + while (PyDict_Next(kw, &pos, &key, &value)) { + keys_are_strings &= Py_TYPE(key)->tp_flags; + Py_INCREF(key); + Py_INCREF(value); + PyTuple_SET_ITEM(kwnames, i, key); + kwvalues[i] = value; + i++; + } + if (unlikely(!keys_are_strings)) { + PyErr_SetString(PyExc_TypeError, "keywords must be strings"); + goto cleanup; + } + res = vc(func, newargs, nargs, kwnames); +cleanup: + Py_DECREF(kwnames); + for (i = 0; i < nkw; i++) + Py_DECREF(kwvalues[i]); + PyMem_Free(newargs); + return res; +} +static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { + return vc(func, args, nargs, NULL); + } + return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); +} +#endif + +/* CythonFunctionShared */ +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + if (__Pyx_CyFunction_Check(func)) { + return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc; + } else if (PyCFunction_Check(func)) { + return PyCFunction_GetFunction(func) == (PyCFunction) cfunc; + } + return 0; +} +#else +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +} +#endif +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + __Pyx_Py_XDECREF_SET( + __Pyx_CyFunction_GetClassObj(f), + ((classobj) ? __Pyx_NewRef(classobj) : NULL)); +#else + __Pyx_Py_XDECREF_SET( + ((PyCMethodObject *) (f))->mm_class, + (PyTypeObject*)((classobj) ? 
__Pyx_NewRef(classobj) : NULL)); +#endif +} +static PyObject * +__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) +{ + CYTHON_UNUSED_VAR(closure); + if (unlikely(op->func_doc == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); + if (unlikely(!op->func_doc)) return NULL; +#else + if (((PyCFunctionObject*)op)->m_ml->ml_doc) { +#if PY_MAJOR_VERSION >= 3 + op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#else + op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#endif + if (unlikely(op->func_doc == NULL)) + return NULL; + } else { + Py_INCREF(Py_None); + return Py_None; + } +#endif + } + Py_INCREF(op->func_doc); + return op->func_doc; +} +static int +__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (value == NULL) { + value = Py_None; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_doc, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_name == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_name = PyObject_GetAttrString(op->func, "__name__"); +#elif PY_MAJOR_VERSION >= 3 + op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#else + op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#endif + if (unlikely(op->func_name == NULL)) + return NULL; + } + Py_INCREF(op->func_name); + return op->func_name; +} +static int +__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__name__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_name, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_qualname); + return op->func_qualname; +} +static int +__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__qualname__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_qualname, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_dict == NULL)) { + op->func_dict = PyDict_New(); + if (unlikely(op->func_dict == NULL)) + return NULL; + } + Py_INCREF(op->func_dict); + return op->func_dict; +} +static int +__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(value == NULL)) { + PyErr_SetString(PyExc_TypeError, + "function's dictionary may not be deleted"); + return -1; + } + if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "setting function's dictionary to a non-dict"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_dict, value); + 
return 0; +} +static PyObject * +__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_globals); + return op->func_globals; +} +static PyObject * +__Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(op); + CYTHON_UNUSED_VAR(context); + Py_INCREF(Py_None); + return Py_None; +} +static PyObject * +__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) +{ + PyObject* result = (op->func_code) ? op->func_code : Py_None; + CYTHON_UNUSED_VAR(context); + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { + int result = 0; + PyObject *res = op->defaults_getter((PyObject *) op); + if (unlikely(!res)) + return -1; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + op->defaults_tuple = PyTuple_GET_ITEM(res, 0); + Py_INCREF(op->defaults_tuple); + op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); + Py_INCREF(op->defaults_kwdict); + #else + op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); + if (unlikely(!op->defaults_tuple)) result = -1; + else { + op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); + if (unlikely(!op->defaults_kwdict)) result = -1; + } + #endif + Py_DECREF(res); + return result; +} +static int +__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__defaults__ must be set to a tuple object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_tuple; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_tuple; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__kwdefaults__ must be set to a dict object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_kwdict; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_kwdict; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value || value == Py_None) { + value = NULL; + } else if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__annotations__ 
must be set to a dict object"); + return -1; + } + Py_XINCREF(value); + __Pyx_Py_XDECREF_SET(op->func_annotations, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->func_annotations; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + result = PyDict_New(); + if (unlikely(!result)) return NULL; + op->func_annotations = result; + } + Py_INCREF(result); + return result; +} +static PyObject * +__Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { + int is_coroutine; + CYTHON_UNUSED_VAR(context); + if (op->func_is_coroutine) { + return __Pyx_NewRef(op->func_is_coroutine); + } + is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; +#if PY_VERSION_HEX >= 0x03050000 + if (is_coroutine) { + PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; + fromlist = PyList_New(1); + if (unlikely(!fromlist)) return NULL; + Py_INCREF(marker); +#if CYTHON_ASSUME_SAFE_MACROS + PyList_SET_ITEM(fromlist, 0, marker); +#else + if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { + Py_DECREF(marker); + Py_DECREF(fromlist); + return NULL; + } +#endif + module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); + Py_DECREF(fromlist); + if (unlikely(!module)) goto ignore; + op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); + Py_DECREF(module); + if (likely(op->func_is_coroutine)) { + return __Pyx_NewRef(op->func_is_coroutine); + } +ignore: + PyErr_Clear(); + } +#endif + op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); + return __Pyx_NewRef(op->func_is_coroutine); +} +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject * +__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_GetAttrString(op->func, "__module__"); +} +static int +__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_SetAttrString(op->func, "__module__", value); +} +#endif +static PyGetSetDef __pyx_CyFunction_getsets[] = { + {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, + {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) 
"__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, + {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, + {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, +#if CYTHON_COMPILING_IN_LIMITED_API + {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, +#endif + {0, 0, 0, 0, 0} +}; +static PyMemberDef __pyx_CyFunction_members[] = { +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, +#endif +#if CYTHON_USE_TYPE_SPECS + {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, +#if CYTHON_METH_FASTCALL +#if CYTHON_BACKPORT_VECTORCALL + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, +#else +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, +#endif +#endif +#endif +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, +#else + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, +#endif +#endif + {0, 0, 0, 0, 0} +}; +static PyObject * +__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) +{ + CYTHON_UNUSED_VAR(args); +#if PY_MAJOR_VERSION >= 3 + Py_INCREF(m->func_qualname); + return m->func_qualname; +#else + return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); +#endif +} +static PyMethodDef __pyx_CyFunction_methods[] = { + {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, + {0, 0, 0, 0} +}; +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) +#else +#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) +#endif +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { +#if !CYTHON_COMPILING_IN_LIMITED_API + PyCFunctionObject *cf = (PyCFunctionObject*) op; +#endif + if (unlikely(op == NULL)) + return NULL; +#if CYTHON_COMPILING_IN_LIMITED_API + op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); + if (unlikely(!op->func)) return NULL; +#endif + op->flags = flags; + __Pyx_CyFunction_weakreflist(op) = NULL; +#if !CYTHON_COMPILING_IN_LIMITED_API + cf->m_ml = ml; + cf->m_self = (PyObject *) op; +#endif + Py_XINCREF(closure); + op->func_closure = closure; +#if !CYTHON_COMPILING_IN_LIMITED_API + Py_XINCREF(module); + cf->m_module = module; +#endif + op->func_dict = NULL; + op->func_name = NULL; + Py_INCREF(qualname); + op->func_qualname = qualname; + op->func_doc = NULL; +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + op->func_classobj = NULL; +#else + ((PyCMethodObject*)op)->mm_class = NULL; +#endif + op->func_globals = globals; + Py_INCREF(op->func_globals); + Py_XINCREF(code); + op->func_code = code; + op->defaults_pyobjects = 0; + op->defaults_size = 0; + op->defaults = NULL; + op->defaults_tuple = NULL; + op->defaults_kwdict = NULL; + op->defaults_getter = NULL; + op->func_annotations = NULL; + op->func_is_coroutine = NULL; +#if 
CYTHON_METH_FASTCALL + switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { + case METH_NOARGS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; + break; + case METH_O: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; + break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; + break; + case METH_FASTCALL | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS; + break; + case METH_VARARGS | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = NULL; + break; + default: + PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); + Py_DECREF(op); + return NULL; + } +#endif + return (PyObject *) op; +} +static int +__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m) +{ + Py_CLEAR(m->func_closure); +#if CYTHON_COMPILING_IN_LIMITED_API + Py_CLEAR(m->func); +#else + Py_CLEAR(((PyCFunctionObject*)m)->m_module); +#endif + Py_CLEAR(m->func_dict); + Py_CLEAR(m->func_name); + Py_CLEAR(m->func_qualname); + Py_CLEAR(m->func_doc); + Py_CLEAR(m->func_globals); + Py_CLEAR(m->func_code); +#if !CYTHON_COMPILING_IN_LIMITED_API +#if PY_VERSION_HEX < 0x030900B1 + Py_CLEAR(__Pyx_CyFunction_GetClassObj(m)); +#else + { + PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class; + ((PyCMethodObject *) (m))->mm_class = NULL; + Py_XDECREF(cls); + } +#endif +#endif + Py_CLEAR(m->defaults_tuple); + Py_CLEAR(m->defaults_kwdict); + Py_CLEAR(m->func_annotations); + Py_CLEAR(m->func_is_coroutine); + if (m->defaults) { + PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); + int i; + for (i = 0; i < m->defaults_pyobjects; i++) + Py_XDECREF(pydefaults[i]); + PyObject_Free(m->defaults); + m->defaults = NULL; + } + return 0; +} +static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m) +{ + if (__Pyx_CyFunction_weakreflist(m) != NULL) + PyObject_ClearWeakRefs((PyObject *) m); + __Pyx_CyFunction_clear(m); + __Pyx_PyHeapTypeObject_GC_Del(m); +} +static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m) +{ + PyObject_GC_UnTrack(m); + __Pyx__CyFunction_dealloc(m); +} +static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg) +{ + Py_VISIT(m->func_closure); +#if CYTHON_COMPILING_IN_LIMITED_API + Py_VISIT(m->func); +#else + Py_VISIT(((PyCFunctionObject*)m)->m_module); +#endif + Py_VISIT(m->func_dict); + Py_VISIT(m->func_name); + Py_VISIT(m->func_qualname); + Py_VISIT(m->func_doc); + Py_VISIT(m->func_globals); + Py_VISIT(m->func_code); +#if !CYTHON_COMPILING_IN_LIMITED_API + Py_VISIT(__Pyx_CyFunction_GetClassObj(m)); +#endif + Py_VISIT(m->defaults_tuple); + Py_VISIT(m->defaults_kwdict); + Py_VISIT(m->func_is_coroutine); + if (m->defaults) { + PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); + int i; + for (i = 0; i < m->defaults_pyobjects; i++) + Py_VISIT(pydefaults[i]); + } + return 0; +} +static PyObject* +__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op) +{ +#if PY_MAJOR_VERSION >= 3 + return PyUnicode_FromFormat("", + op->func_qualname, (void *)op); +#else + return PyString_FromFormat("", + PyString_AsString(op->func_qualname), (void *)op); +#endif +} +static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) { +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *f = ((__pyx_CyFunctionObject*)func)->func; + PyObject *py_name = NULL; + 
PyCFunction meth; + int flags; + meth = PyCFunction_GetFunction(f); + if (unlikely(!meth)) return NULL; + flags = PyCFunction_GetFlags(f); + if (unlikely(flags < 0)) return NULL; +#else + PyCFunctionObject* f = (PyCFunctionObject*)func; + PyCFunction meth = f->m_ml->ml_meth; + int flags = f->m_ml->ml_flags; +#endif + Py_ssize_t size; + switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { + case METH_VARARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) + return (*meth)(self, arg); + break; + case METH_VARARGS | METH_KEYWORDS: + return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); + case METH_NOARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 0)) + return (*meth)(self, NULL); +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + case METH_O: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 1)) { + PyObject *result, *arg0; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + arg0 = PyTuple_GET_ITEM(arg, 0); + #else + arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; + #endif + result = (*meth)(self, arg0); + #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_DECREF(arg0); + #endif + return result; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + default: + PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); + return NULL; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", + py_name); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", + f->m_ml->ml_name); +#endif + return NULL; +} +static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *self, *result; +#if CYTHON_COMPILING_IN_LIMITED_API + self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); + if (unlikely(!self) && PyErr_Occurred()) return NULL; +#else + self = ((PyCFunctionObject*)func)->m_self; +#endif + result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); + return result; +} +static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { + PyObject *result; + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; +#if CYTHON_METH_FASTCALL + 
__pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); + if (vc) { +#if CYTHON_ASSUME_SAFE_MACROS + return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); +#else + (void) &__Pyx_PyVectorcall_FastCallDict; + return PyVectorcall_Call(func, args, kw); +#endif + } +#endif + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + Py_ssize_t argc; + PyObject *new_args; + PyObject *self; +#if CYTHON_ASSUME_SAFE_MACROS + argc = PyTuple_GET_SIZE(args); +#else + argc = PyTuple_Size(args); + if (unlikely(!argc) < 0) return NULL; +#endif + new_args = PyTuple_GetSlice(args, 1, argc); + if (unlikely(!new_args)) + return NULL; + self = PyTuple_GetItem(args, 0); + if (unlikely(!self)) { + Py_DECREF(new_args); +#if PY_MAJOR_VERSION > 2 + PyErr_Format(PyExc_TypeError, + "unbound method %.200S() needs an argument", + cyfunc->func_qualname); +#else + PyErr_SetString(PyExc_TypeError, + "unbound method needs an argument"); +#endif + return NULL; + } + result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); + Py_DECREF(new_args); + } else { + result = __Pyx_CyFunction_Call(func, args, kw); + } + return result; +} +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames) +{ + int ret = 0; + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + if (unlikely(nargs < 1)) { + PyErr_Format(PyExc_TypeError, "%.200s() needs an argument", + ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); + return -1; + } + ret = 1; + } + if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); + return -1; + } + return ret; +} +static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + if (unlikely(nargs != 0)) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + def->ml_name, nargs); + return NULL; + } + return def->ml_meth(self, NULL); +} +static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + if (unlikely(nargs != 1)) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + def->ml_name, 
nargs); + return NULL; + } + return def->ml_meth(self, args[0]); +} +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); +} +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; + PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); +} +#endif +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_CyFunctionType_slots[] = { + {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, + {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, + {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, + {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, + {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, + {Py_tp_methods, (void *)__pyx_CyFunction_methods}, + {Py_tp_members, (void *)__pyx_CyFunction_members}, + {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, + {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, + {0, 0}, +}; +static PyType_Spec __pyx_CyFunctionType_spec = { + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + __pyx_CyFunctionType_slots +}; +#else +static PyTypeObject __pyx_CyFunctionType_type = { + PyVarObject_HEAD_INIT(0, 0) + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, + (destructor) __Pyx_CyFunction_dealloc, +#if !CYTHON_METH_FASTCALL + 0, +#elif CYTHON_BACKPORT_VECTORCALL + (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), +#else + offsetof(PyCFunctionObject, vectorcall), +#endif + 0, + 0, +#if PY_MAJOR_VERSION < 3 + 0, +#else + 0, +#endif + (reprfunc) __Pyx_CyFunction_repr, + 0, + 0, + 0, + 0, + __Pyx_CyFunction_CallAsMethod, + 0, + 0, + 0, + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + 0, + (traverseproc) __Pyx_CyFunction_traverse, + (inquiry) 
__Pyx_CyFunction_clear, + 0, +#if PY_VERSION_HEX < 0x030500A0 + offsetof(__pyx_CyFunctionObject, func_weakreflist), +#else + offsetof(PyCFunctionObject, m_weakreflist), +#endif + 0, + 0, + __pyx_CyFunction_methods, + __pyx_CyFunction_members, + __pyx_CyFunction_getsets, + 0, + 0, + __Pyx_PyMethod_New, + 0, + offsetof(__pyx_CyFunctionObject, func_dict), + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, +#if PY_VERSION_HEX >= 0x030400a1 + 0, +#endif +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, +#endif +#if __PYX_NEED_TP_PRINT_SLOT + 0, +#endif +#if PY_VERSION_HEX >= 0x030C0000 + 0, +#endif +#if PY_VERSION_HEX >= 0x030d00A4 + 0, +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, +#endif +}; +#endif +static int __pyx_CyFunction_init(PyObject *module) { +#if CYTHON_USE_TYPE_SPECS + __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); +#else + CYTHON_UNUSED_VAR(module); + __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); +#endif + if (unlikely(__pyx_CyFunctionType == NULL)) { + return -1; + } + return 0; +} +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults = PyObject_Malloc(size); + if (unlikely(!m->defaults)) + return PyErr_NoMemory(); + memset(m->defaults, 0, size); + m->defaults_pyobjects = pyobjects; + m->defaults_size = size; + return m->defaults; +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_tuple = tuple; + Py_INCREF(tuple); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_kwdict = dict; + Py_INCREF(dict); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->func_annotations = dict; + Py_INCREF(dict); +} + +/* CythonFunction */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { + PyObject *op = __Pyx_CyFunction_Init( + PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), + ml, flags, qualname, closure, module, globals, code + ); + if (likely(op)) { + PyObject_GC_Track(op); + } + return op; +} + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + CYTHON_MAYBE_UNUSED_VAR(tstate); + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = 
PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} +#endif + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, + PyObject *firstlineno, PyObject *name) { + PyObject 
*replace = NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; + replace = PyObject_GetAttrString(code, "replace"); + if (likely(replace)) { + PyObject *result; + result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); + Py_DECREF(replace); + return result; + } + PyErr_Clear(); + #if __PYX_LIMITED_VERSION_HEX < 0x030780000 + { + PyObject *compiled = NULL, *result = NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; + compiled = Py_CompileString( + "out = type(code)(\n" + " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" + " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" + " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" + " code.co_lnotab)\n", "", Py_file_input); + if (!compiled) return NULL; + result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); + Py_DECREF(compiled); + if (!result) PyErr_Print(); + Py_DECREF(result); + result = PyDict_GetItemString(scratch_dict, "out"); + if (result) Py_INCREF(result); + return result; + } + #else + return NULL; + #endif +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; + PyObject *replace = NULL, *getframe = NULL, *frame = NULL; + PyObject *exc_type, *exc_value, *exc_traceback; + int success = 0; + if (c_line) { + (void) __pyx_cfilenm; + (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); + } + PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); + code_object = Py_CompileString("_getframe()", filename, Py_eval_input); + if (unlikely(!code_object)) goto bad; + py_py_line = PyLong_FromLong(py_line); + if (unlikely(!py_py_line)) goto bad; + py_funcname = PyUnicode_FromString(funcname); + if (unlikely(!py_funcname)) goto bad; + dict = PyDict_New(); + if (unlikely(!dict)) goto bad; + { + PyObject *old_code_object = code_object; + code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); + Py_DECREF(old_code_object); + } + if (unlikely(!code_object)) goto bad; + getframe = PySys_GetObject("_getframe"); + if (unlikely(!getframe)) goto bad; + if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad; + frame = PyEval_EvalCode(code_object, dict, dict); + if (unlikely(!frame) || frame == Py_None) goto bad; + success = 1; + bad: + PyErr_Restore(exc_type, exc_value, exc_traceback); + Py_XDECREF(code_object); + Py_XDECREF(py_py_line); + Py_XDECREF(py_funcname); + Py_XDECREF(dict); + Py_XDECREF(replace); + if (success) { + PyTraceBack_Here( + (struct _frame*)frame); + } + Py_XDECREF(frame); +} +#else +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; + #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; + py_srcfile = PyString_FromString(filename); + if (!py_srcfile) goto bad; + #endif + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto 
bad; + funcname = PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + if (!py_funcname) goto bad; + #endif + } + #if PY_MAJOR_VERSION < 3 + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject *ptype, *pvalue, *ptraceback; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) { + /* If the code object creation fails, then we should clear the + fetched exception references and propagate the new exception */ + Py_XDECREF(ptype); + Py_XDECREF(pvalue); + Py_XDECREF(ptraceback); + goto bad; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} +#endif + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(long) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned 
long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(long) <= sizeof(unsigned long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | 
(long)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } + } +#endif + if ((sizeof(long) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + long val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (long) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (long) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (long) -1; + } else { + stepval = v; + } + v = NULL; + val = (long) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((long) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 
0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((long) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (long) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + unsigned char *bytes = (unsigned char *)&value; +#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 + if (is_unsigned) { + return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); + } else { + return PyLong_FromNativeBytes(bytes, sizeof(value), -1); + } +#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 + int one = 1; int little = (int)*(unsigned char *)&one; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); +#else + int one = 1; int little = (int)*(unsigned char *)&one; + PyObject *from_bytes, *result = NULL; + PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; + from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); + if (!from_bytes) return NULL; + py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); + if (!py_bytes) goto limited_bad; + order_str = PyUnicode_FromString(little ? 
"little" : "big"); + if (!order_str) goto limited_bad; + arg_tuple = PyTuple_Pack(2, py_bytes, order_str); + if (!arg_tuple) goto limited_bad; + if (!is_unsigned) { + kwds = PyDict_New(); + if (!kwds) goto limited_bad; + if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; + } + result = PyObject_Call(from_bytes, arg_tuple, kwds); + limited_bad: + Py_XDECREF(kwds); + Py_XDECREF(arg_tuple); + Py_XDECREF(order_str); + Py_XDECREF(py_bytes); + Py_XDECREF(from_bytes); + return result; +#endif + } +} + +/* FormatTypeName */ +#if CYTHON_COMPILING_IN_LIMITED_API +static __Pyx_TypeName +__Pyx_PyType_GetName(PyTypeObject* tp) +{ + PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, + __pyx_n_s_name); + if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { + PyErr_Clear(); + Py_XDECREF(name); + name = __Pyx_NewRef(__pyx_n_s__19); + } + return name; +} +#endif + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(int) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; 
+ } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(int) <= sizeof(unsigned long))) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned 
long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } + } +#endif + if ((sizeof(int) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + int val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (int) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (int) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (int) -1; + } else { + stepval = v; + } + v = NULL; + val = (int) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((int) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 
0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((int) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (int) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (cls == a || cls == b) return 1; + mro = cls->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + PyObject *base = PyTuple_GET_ITEM(mro, i); + if (base == (PyObject *)a || base == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + if (exc_type1) { + return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); + } else { + return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030B00A4 + return Py_Version & ~0xFFUL; +#else + const char* rt_version = Py_GetVersion(); + unsigned long version = 0; + unsigned long factor = 0x01000000UL; + unsigned int digit = 0; + int i = 0; + while (factor) { + while ('0' <= rt_version[i] && rt_version[i] <= '9') { + digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); + ++i; + } + version += factor * digit; + if (rt_version[i] != '.') + break; + digit = 0; + factor >>= 8; + ++i; + } + return version; +#endif +} +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { + const unsigned long MAJOR_MINOR = 0xFFFF0000UL; + if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) + return 0; + if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) + return 1; + { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compile time Python version %d.%d " + "of module '%.100s' " + "%s " + "runtime version %d.%d", + (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), + __Pyx_MODULE_NAME, + (allow_newer) ? 
"was newer than" : "does not match", + (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) + ); + return PyErr_WarnEx(NULL, message, 1); + } +} + +/* InitStrings */ +#if PY_MAJOR_VERSION >= 3 +static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { + if (t.is_unicode | t.is_str) { + if (t.intern) { + *str = PyUnicode_InternFromString(t.s); + } else if (t.encoding) { + *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); + } else { + *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); + } + } else { + *str = PyBytes_FromStringAndSize(t.s, t.n - 1); + } + if (!*str) + return -1; + if (PyObject_Hash(*str) == -1) + return -1; + return 0; +} +#endif +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION >= 3 + __Pyx_InitString(*t, t->p); + #else + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + #endif + ++t; + } + return 0; +} + +#include +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { + size_t len = strlen(s); + if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { + PyErr_SetString(PyExc_OverflowError, "byte string is too long"); + return -1; + } + return (Py_ssize_t) len; +} +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return __Pyx_PyUnicode_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return PyByteArray_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if 
(!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { + __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). " + "The ability to return an instance of a strict subclass of int is deprecated, " + "and may be removed in a future version of Python.", + result_type_name)) { + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; + } + __Pyx_DECREF_TypeName(result_type_name); + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", + type_name, type_name, result_type_name); + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(__Pyx_PyLong_IsCompact(b))) { + return __Pyx_PyLong_CompactValue(b); + } else { + const digit* digits = __Pyx_PyLong_Digits(b); + const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * 
PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +/* #### Code section: utility_code_pragmas_end ### */ +#ifdef _MSC_VER +#pragma warning( pop ) +#endif + + + +/* #### Code section: end ### */ +#endif /* Py_PYTHON_H */ diff --git a/lib/sqlalchemy/engine/_row_cy.c b/lib/sqlalchemy/engine/_row_cy.c new file mode 100644 index 00000000000..1690f68a817 --- /dev/null +++ b/lib/sqlalchemy/engine/_row_cy.c @@ -0,0 +1,11171 @@ +/* Generated by Cython 3.0.11 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "name": "sqlalchemy.engine._row_cy", + "sources": [ + "lib/sqlalchemy/engine/_row_cy.py" + ] + }, + "module_name": "sqlalchemy.engine._row_cy" +} +END: Cython Metadata */ + +#ifndef PY_SSIZE_T_CLEAN +#define PY_SSIZE_T_CLEAN +#endif /* PY_SSIZE_T_CLEAN */ +#if defined(CYTHON_LIMITED_API) && 0 + #ifndef Py_LIMITED_API + #if CYTHON_LIMITED_API+0 > 0x03030000 + #define Py_LIMITED_API CYTHON_LIMITED_API + #else + #define Py_LIMITED_API 0x03030000 + #endif + #endif +#endif + +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.7+ or Python 3.3+. +#else +#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API +#define __PYX_EXTRA_ABI_MODULE_NAME "limited" +#else +#define __PYX_EXTRA_ABI_MODULE_NAME "" +#endif +#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME +#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI +#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." 
+#define CYTHON_HEX_VERSION 0x03000BF0 +#define CYTHON_FUTURE_DIVISION 1 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #define HAVE_LONG_LONG +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX +#if defined(GRAALVM_PYTHON) + /* For very preliminary testing purposes. Most variables are set the same as PyPy. + The existence of this section does not imply that anything works or is even tested */ + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 1 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(PYPY_VERSION) + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + 
#undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + #endif + #if PY_VERSION_HEX < 0x03090000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(CYTHON_LIMITED_API) + #ifdef Py_LIMITED_API + #undef __PYX_LIMITED_VERSION_HEX + #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API + #endif + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 1 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_CLINE_IN_TRACEBACK + #define CYTHON_CLINE_IN_TRACEBACK 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 1 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #endif + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 1 + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef 
CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #ifndef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 1 + #endif + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 + #endif +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #ifndef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define 
CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) + #endif + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #if PY_VERSION_HEX < 0x030400a1 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #elif !defined(CYTHON_USE_TP_FINALIZE) + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #if PY_VERSION_HEX < 0x030600B1 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #elif !defined(CYTHON_USE_DICT_VERSIONS) + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) + #endif + #if PY_VERSION_HEX < 0x030700A3 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #elif !defined(CYTHON_USE_EXC_INFO_STACK) + #define CYTHON_USE_EXC_INFO_STACK 1 + #endif + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 1 + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if !defined(CYTHON_VECTORCALL) +#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) +#endif +#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(maybe_unused) + #define CYTHON_UNUSED [[maybe_unused]] + #endif + #endif + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef 
CYTHON_MAYBE_UNUSED_VAR + #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_USE_CPP_STD_MOVE + #if defined(__cplusplus) && (\ + __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) + #define CYTHON_USE_CPP_STD_MOVE 1 + #else + #define CYTHON_USE_CPP_STD_MOVE 0 + #endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned short uint16_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + #endif + #endif + #if _MSC_VER < 1300 + #ifdef _WIN64 + typedef unsigned long long __pyx_uintptr_t; + #else + typedef unsigned int __pyx_uintptr_t; + #endif + #else + #ifdef _WIN64 + typedef unsigned __int64 __pyx_uintptr_t; + #else + typedef unsigned __int32 __pyx_uintptr_t; + #endif + #endif +#else + #include + typedef uintptr_t __pyx_uintptr_t; +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif +#ifdef __cplusplus + template + struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; + #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) +#else + #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) +#endif +#if CYTHON_COMPILING_IN_PYPY == 1 + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) +#else + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) +#endif +#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_DefaultClassType PyClass_Type + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + 
#define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_DefaultClassType PyType_Type +#if CYTHON_COMPILING_IN_LIMITED_API + static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *exception_table = NULL; + PyObject *types_module=NULL, *code_type=NULL, *result=NULL; + #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 + PyObject *version_info; + PyObject *py_minor_version = NULL; + #endif + long minor_version = 0; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 + minor_version = 11; + #else + if (!(version_info = PySys_GetObject("version_info"))) goto end; + if (!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; + minor_version = PyLong_AsLong(py_minor_version); + Py_DECREF(py_minor_version); + if (minor_version == -1 && PyErr_Occurred()) goto end; + #endif + if (!(types_module = PyImport_ImportModule("types"))) goto end; + if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; + if (minor_version <= 7) { + (void)p; + result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else if (minor_version <= 10) { + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else { + if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); + } + end: + Py_XDECREF(code_type); + Py_XDECREF(exception_table); + Py_XDECREF(types_module); + if (type) { + PyErr_Restore(type, value, traceback); + } + return result; + } + #ifndef CO_OPTIMIZED + #define CO_OPTIMIZED 0x0001 + #endif + #ifndef CO_NEWLOCALS + #define CO_NEWLOCALS 0x0002 + #endif + #ifndef CO_VARARGS + #define CO_VARARGS 0x0004 + #endif + #ifndef CO_VARKEYWORDS + #define CO_VARKEYWORDS 0x0008 + #endif + #ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x0200 + #endif + #ifndef CO_GENERATOR + #define CO_GENERATOR 0x0020 + #endif + #ifndef CO_COROUTINE + #define CO_COROUTINE 0x0080 + #endif +#elif PY_VERSION_HEX >= 0x030B0000 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyCodeObject *result; + PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); + if (!empty_bytes) return NULL; + result = + #if PY_VERSION_HEX >= 0x030C0000 + PyUnstable_Code_NewWithPosOnlyArgs + #else + PyCode_NewWithPosOnlyArgs + #endif + (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); + Py_DECREF(empty_bytes); + return result; + } +#elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif +#endif +#if PY_VERSION_HEX >= 0x030900A4 || 
defined(Py_IS_TYPE) + #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) +#else + #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) + #define __Pyx_Py_Is(x, y) Py_Is(x, y) +#else + #define __Pyx_Py_Is(x, y) ((x) == (y)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) + #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) +#else + #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) + #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) +#else + #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) + #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) +#else + #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) +#endif +#define __Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) +#if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) +#else + #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) +#endif +#ifndef CO_COROUTINE + #define CO_COROUTINE 0x80 +#endif +#ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x200 +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef Py_TPFLAGS_SEQUENCE + #define Py_TPFLAGS_SEQUENCE 0 +#endif +#ifndef Py_TPFLAGS_MAPPING + #define Py_TPFLAGS_MAPPING 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #if PY_VERSION_HEX >= 0x030d00A4 + # define __Pyx_PyCFunctionFast PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords + #else + # define __Pyx_PyCFunctionFast _PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #endif +#endif +#if CYTHON_METH_FASTCALL + #define __Pyx_METH_FASTCALL METH_FASTCALL + #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast + #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords +#else + #define __Pyx_METH_FASTCALL METH_VARARGS + #define __Pyx_PyCFunction_FastCall PyCFunction + #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords +#endif +#if CYTHON_VECTORCALL + #define __pyx_vectorcallfunc vectorcallfunc + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET + #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) +#elif CYTHON_BACKPORT_VECTORCALL + typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, + size_t nargsf, PyObject *kwnames); + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) +#else + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) +#endif +#if PY_MAJOR_VERSION >= 0x030900B1 +#define 
__Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) +#else +#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) +#endif +#define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) +#elif !CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) +#endif +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) +static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { + return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? NULL : ((PyCFunctionObject*)func)->m_self; +} +#endif +static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) { +#if CYTHON_COMPILING_IN_LIMITED_API + return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; +#else + return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +#endif +} +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) +#if __PYX_LIMITED_VERSION_HEX < 0x030900B1 + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) + typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); +#else + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) + #define __Pyx_PyCMethod PyCMethod +#endif +#ifndef METH_METHOD + #define METH_METHOD 0x200 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyThreadState_Current PyThreadState_Get() +#elif !CYTHON_FAST_THREAD_STATE + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) +{ + void *result; + result = PyModule_GetState(op); + if (!result) + Py_FatalError("Couldn't find the module state"); + return result; +} +#endif +#define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) +#else + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = 
PyThread_create_key(); + return 0; +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif +#if PY_MAJOR_VERSION < 3 + #if CYTHON_COMPILING_IN_PYPY + #if PYPY_VERSION_NUM < 0x07030600 + #if defined(__cplusplus) && __cplusplus >= 201402L + [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] + #elif defined(__GNUC__) || defined(__clang__) + __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) + #elif defined(_MSC_VER) + __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) + #endif + static CYTHON_INLINE int PyGILState_Check(void) { + return 0; + } + #else // PYPY_VERSION_NUM < 0x07030600 + #endif // PYPY_VERSION_NUM < 0x07030600 + #else + static CYTHON_INLINE int PyGILState_Check(void) { + PyThreadState * tstate = _PyThreadState_Current; + return tstate && (tstate == PyGILState_GetThisThreadState()); + } + #endif +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { + PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); + if (res == NULL) PyErr_Clear(); + return res; +} +#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) +#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#else +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { +#if CYTHON_COMPILING_IN_PYPY + return PyDict_GetItem(dict, name); +#else + PyDictEntry *ep; + PyDictObject *mp = (PyDictObject*) dict; + long hash = ((PyStringObject *) name)->ob_shash; + assert(hash != -1); + ep = (mp->ma_lookup)(mp, name, hash); + if (ep == NULL) { + return NULL; + } + return ep->me_value; +#endif +} +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#endif +#if CYTHON_USE_TYPE_SLOTS + #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) + #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) + #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) +#else + #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) + #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) + #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) +#else + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) +#endif +#if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 +#define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ + PyTypeObject *type = Py_TYPE((PyObject*)obj);\ + assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE));\ + PyObject_GC_Del(obj);\ + Py_DECREF(type);\ +} +#else +#define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) + #define __Pyx_PyUnicode_DATA(u) ((void*)u) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) +#elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_READY(op) (0) + #else + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #endif + #define __Pyx_PyUnicode_GET_LENGTH(u) 
PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) + #else + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) + #endif + #endif +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535U : 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #if !defined(PyUnicode_DecodeUnicodeEscape) + #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) + #endif + #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) + #undef PyUnicode_Contains + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) + #endif + #if !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) + #endif + #if !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) + #endif +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#ifndef PyObject_Unicode + #define PyObject_Unicode PyObject_Str +#endif +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#if CYTHON_COMPILING_IN_CPYTHON + #define __Pyx_PySequence_ListKeepNew(obj)\ + (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? __Pyx_NewRef(obj) : PySequence_List(obj)) +#else + #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) +#endif +#if PY_VERSION_HEX >= 0x030900A4 + #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) +#else + #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) +#else + #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) + #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) +#endif +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) +#else + static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { + PyObject *module = PyImport_AddModule(name); + Py_XINCREF(module); + return module; + } +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define __Pyx_Py3Int_Check(op) PyLong_Check(op) + #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define 
PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#else + #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) + #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) + #if !defined(_USE_MATH_DEFINES) + #define _USE_MATH_DEFINES + #endif +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + +#define __PYX_MARK_ERR_POS(f_index, lineno) \ + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } +#define __PYX_ERR(f_index, lineno, Ln_error) \ + { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } + +#ifdef CYTHON_EXTERN_C + #undef __PYX_EXTERN_C + #define __PYX_EXTERN_C CYTHON_EXTERN_C +#elif defined(__PYX_EXTERN_C) + #ifdef _MSC_VER + #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") + #else + #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead. 
+ #endif +#else + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__sqlalchemy__engine___row_cy +#define __PYX_HAVE_API__sqlalchemy__engine___row_cy +/* Early includes */ +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) +#endif +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s); +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char*); +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +#define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_VERSION_HEX >= 0x030C00A7 + #ifndef _PyLong_SIGN_MASK + #define _PyLong_SIGN_MASK 3 + #endif + #ifndef _PyLong_NON_SIZE_BITS + #define _PyLong_NON_SIZE_BITS 3 + #endif + #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) + #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) + #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) + #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) + #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_SignedDigitCount(x)\ + ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) + #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) + #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) + #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) + #else + #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) + #endif + typedef Py_ssize_t __Pyx_compact_pylong; + typedef size_t __Pyx_compact_upylong; + #else + #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) + #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) + #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) + #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) + #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) + #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) + #define __Pyx_PyLong_CompactValue(x)\ + ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? 
-(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0]))
+ typedef sdigit __Pyx_compact_pylong;
+ typedef digit __Pyx_compact_upylong;
+ #endif
+ #if PY_VERSION_HEX >= 0x030C00A5
+ #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit)
+ #else
+ #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit)
+ #endif
+#endif
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+#include <string.h>
+static int __Pyx_sys_getdefaultencoding_not_ascii;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ PyObject* ascii_chars_u = NULL;
+ PyObject* ascii_chars_b = NULL;
+ const char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ if (strcmp(default_encoding_c, "ascii") == 0) {
+ __Pyx_sys_getdefaultencoding_not_ascii = 0;
+ } else {
+ char ascii_chars[128];
+ int c;
+ for (c = 0; c < 128; c++) {
+ ascii_chars[c] = (char) c;
+ }
+ __Pyx_sys_getdefaultencoding_not_ascii = 1;
+ ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
+ if (!ascii_chars_u) goto bad;
+ ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
+ if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
+ PyErr_Format(
+ PyExc_ValueError,
+ "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
+ default_encoding_c);
+ goto bad;
+ }
+ Py_DECREF(ascii_chars_u);
+ Py_DECREF(ascii_chars_b);
+ }
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ Py_XDECREF(ascii_chars_u);
+ Py_XDECREF(ascii_chars_b);
+ return -1;
+}
+#endif
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
+#else
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+#include <string.h>
+static char* __PYX_DEFAULT_STRING_ENCODING;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1);
+ if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
+ strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ return -1;
+}
+#endif
+#endif
+
+
+/* Test for GCC > 2.95 */
+#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
+ #define likely(x) __builtin_expect(!!(x), 1)
+ #define unlikely(x) __builtin_expect(!!(x), 0)
+#else /* !__GNUC__ or GCC < 2.95 */
+ #define likely(x) (x)
+ #define unlikely(x) (x)
+#endif /* __GNUC__ */
+static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }
+
+#if !CYTHON_USE_MODULE_STATE
+static PyObject *__pyx_m = NULL; +#endif +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm = __FILE__; +static const char *__pyx_filename; + +/* #### Code section: filename_table ### */ + +static const char *__pyx_f[] = { + "lib/sqlalchemy/engine/_row_cy.py", +}; +/* #### Code section: utility_code_proto_before_types ### */ +/* ForceInitThreads.proto */ +#ifndef __PYX_FORCE_INIT_THREADS + #define __PYX_FORCE_INIT_THREADS 0 +#endif + +/* #### Code section: numeric_typedefs ### */ +/* #### Code section: complex_type_declarations ### */ +/* #### Code section: type_declarations ### */ + +/*--- Type declarations ---*/ +struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow; + +/* "sqlalchemy/engine/_row_cy.py":45 + * + * @cython.cclass + * class BaseRow: # <<<<<<<<<<<<<< + * __slots__ = ("_parent", "_data", "_key_to_index") + * + */ +struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow { + PyObject_HEAD + struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_vtab; + PyObject *_parent; + PyObject *_key_to_index; + PyObject *_data; +}; + + + +struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow { + PyObject *(*_set_attrs)(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *, PyObject *, PyObject *, PyObject *); + PyObject *(*_get_by_key_impl)(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *, PyObject *, int); +}; +static struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_vtabptr_10sqlalchemy_6engine_7_row_cy_BaseRow; +static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *, PyObject *, PyObject *, PyObject *); +/* #### Code section: utility_code_proto ### */ + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, Py_ssize_t); + void (*DECREF)(void*, PyObject*, Py_ssize_t); + void (*GOTREF)(void*, PyObject*, Py_ssize_t); + void (*GIVEREF)(void*, PyObject*, Py_ssize_t); + void* (*SetupContext)(const char*, Py_ssize_t, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + } + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)) + #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext() +#endif + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define 
__Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContextNogil() + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_Py_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; Py_XDECREF(tmp);\ + } while (0) +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#if PY_VERSION_HEX >= 0x030C00A6 +#define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) +#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? 
(PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL)
+#else
+#define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL)
+#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type)
+#endif
+#else
+#define __Pyx_PyThreadState_declare
+#define __Pyx_PyThreadState_assign
+#define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL)
+#define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred()
+#endif
+
+/* PyErrFetchRestore.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
+#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb)
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
+static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6
+#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
+#else
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#endif
+#else
+#define __Pyx_PyErr_Clear() PyErr_Clear()
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb)
+#endif
+
+/* PyObjectGetAttrStr.proto */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);
+#else
+#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
+#endif
+
+/* PyObjectGetAttrStrNoError.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name);
+
+/* GetBuiltinName.proto */
+static PyObject *__Pyx_GetBuiltinName(PyObject *name);
+
+/* TupleAndListFromArray.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n);
+static CYTHON_INLINE PyObject* __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n);
+#endif
+
+/* IncludeStringH.proto */
+#include <string.h>
+
+/* BytesEquals.proto */
+static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals);
+
+/* UnicodeEquals.proto */
+static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals);
+
+/* fastcall.proto */
+#if CYTHON_AVOID_BORROWED_REFS
+ #define __Pyx_Arg_VARARGS(args, i) PySequence_GetItem(args, i)
+#elif CYTHON_ASSUME_SAFE_MACROS
+ #define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i)
+#else
+ #define __Pyx_Arg_VARARGS(args, i) PyTuple_GetItem(args, i)
+#endif
+#if CYTHON_AVOID_BORROWED_REFS
+ #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg)
+ #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg)
+#else
+ #define __Pyx_Arg_NewRef_VARARGS(arg) arg
+ #define 
__Pyx_Arg_XDECREF_VARARGS(arg) +#endif +#define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) +#define __Pyx_KwValues_VARARGS(args, nargs) NULL +#define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s) +#define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw) +#if CYTHON_METH_FASTCALL + #define __Pyx_Arg_FASTCALL(args, i) args[i] + #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) + #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) + static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 + CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); + #else + #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) + #endif + #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs + to have the same reference counting */ + #define __Pyx_Arg_XDECREF_FASTCALL(arg) +#else + #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS + #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS + #define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS + #define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS + #define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS + #define __Pyx_Arg_NewRef_FASTCALL(arg) __Pyx_Arg_NewRef_VARARGS(arg) + #define __Pyx_Arg_XDECREF_FASTCALL(arg) __Pyx_Arg_XDECREF_VARARGS(arg) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS +#define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start) +#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start) +#else +#define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop) +#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop) +#endif + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, + const char* function_name); + +/* ArgTypeTest.proto */ +#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ + ((likely(__Pyx_IS_TYPE(obj, type) | (none_allowed && (obj == Py_None)))) ? 
1 :\ + __Pyx__ArgTypeTest(obj, type, name, exact)) +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); + +/* KeywordStringCheck.proto */ +static int __Pyx_CheckKeywordStrings(PyObject *kw, const char* function_name, int kw_allowed); + +/* PyDictVersioning.proto */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) do {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +#define __Pyx_GetModuleGlobalNameUncached(var, name) do {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#if !CYTHON_VECTORCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif +#if !CYTHON_VECTORCALL +#if PY_VERSION_HEX >= 0x03080000 + #include "frameobject.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif + #define __Pxy_PyFrame_Initialize_Offsets() + #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) +#else + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif +#endif +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectFastCall.proto */ +#define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); + +/* DictGetItem.proto */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); +#define __Pyx_PyObject_Dict_GetItem(obj, name)\ + (likely(PyDict_CheckExact(obj)) ?\ + __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) +#else +#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) +#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) +#endif + +/* RaiseUnexpectedTypeError.proto */ +static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); + +/* GetItemInt.proto */ +#define 
__Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* ObjectGetItem.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject *key); +#else +#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key) +#endif + +/* dict_getitem_default.proto */ +static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value); + +/* UnpackUnboundCMethod.proto */ +typedef struct { + PyObject *type; + PyObject **method_name; + PyCFunction func; + PyObject *method; + int flag; +} __Pyx_CachedCFunction; + +/* CallUnboundCMethod1.proto */ +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#else +#define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg) +#endif + +/* CallUnboundCMethod2.proto */ +static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 +static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2); +#else +#define __Pyx_CallUnboundCMethod2(cfunc, self, arg1, arg2) __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2) +#endif + +/* AssertionsEnabled.proto */ +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define __Pyx_init_assertions_enabled() (0) + #define __pyx_assertions_enabled() (1) +#elif CYTHON_COMPILING_IN_LIMITED_API || (CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030C0000) + static int __pyx_assertions_enabled_flag; + #define __pyx_assertions_enabled() (__pyx_assertions_enabled_flag) + static int __Pyx_init_assertions_enabled(void) { + PyObject *builtins, *debug, *debug_str; + int flag; + builtins = PyEval_GetBuiltins(); + if 
(!builtins) goto bad;
+ debug_str = PyUnicode_FromStringAndSize("__debug__", 9);
+ if (!debug_str) goto bad;
+ debug = PyObject_GetItem(builtins, debug_str);
+ Py_DECREF(debug_str);
+ if (!debug) goto bad;
+ flag = PyObject_IsTrue(debug);
+ Py_DECREF(debug);
+ if (flag == -1) goto bad;
+ __pyx_assertions_enabled_flag = flag;
+ return 0;
+ bad:
+ __pyx_assertions_enabled_flag = 1;
+ return -1;
+ }
+#else
+ #define __Pyx_init_assertions_enabled() (0)
+ #define __pyx_assertions_enabled() (!Py_OptimizeFlag)
+#endif
+
+/* RaiseException.proto */
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause);
+
+/* SetItemInt.proto */
+#define __Pyx_SetItemInt(o, i, v, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\
+ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\
+ __Pyx_SetItemInt_Fast(o, (Py_ssize_t)i, v, is_list, wraparound, boundscheck) :\
+ (is_list ? (PyErr_SetString(PyExc_IndexError, "list assignment index out of range"), -1) :\
+ __Pyx_SetItemInt_Generic(o, to_py_func(i), v)))
+static int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v);
+static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObject *v,
+ int is_list, int wraparound, int boundscheck);
+
+/* PyObject_GenericGetAttrNoDict.proto */
+#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name);
+#else
+#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr
+#endif
+
+/* PyObject_GenericGetAttr.proto */
+#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
+static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name);
+#else
+#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr
+#endif
+
+/* IncludeStructmemberH.proto */
+#include <structmember.h>
+
+/* FixUpExtensionType.proto */
+#if CYTHON_USE_TYPE_SPECS
+static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type);
+#endif
+
+/* PyObjectCallNoArg.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func);
+
+/* PyObjectGetMethod.proto */
+static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method);
+
+/* PyObjectCallMethod0.proto */
+static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name);
+
+/* ValidateBasesTuple.proto */
+#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS
+static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases);
+#endif
+
+/* PyType_Ready.proto */
+CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t);
+
+/* SetVTable.proto */
+static int __Pyx_SetVtable(PyTypeObject* typeptr , void* vtable);
+
+/* GetVTable.proto */
+static void* __Pyx_GetVtable(PyTypeObject *type);
+
+/* MergeVTables.proto */
+#if !CYTHON_COMPILING_IN_LIMITED_API
+static int __Pyx_MergeVtables(PyTypeObject *type);
+#endif
+
+/* Import.proto */
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level);
+
+/* ImportFrom.proto */
+static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name);
+
+/* FetchSharedCythonModule.proto */
+static PyObject *__Pyx_FetchSharedCythonABIModule(void);
+
+/* FetchCommonType.proto */
+#if !CYTHON_USE_TYPE_SPECS
+static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type);
+#else
+static PyTypeObject* __Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, 
PyObject *bases); +#endif + +/* PyMethodNew.proto */ +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { + PyObject *typesModule=NULL, *methodType=NULL, *result=NULL; + CYTHON_UNUSED_VAR(typ); + if (!self) + return __Pyx_NewRef(func); + typesModule = PyImport_ImportModule("types"); + if (!typesModule) return NULL; + methodType = PyObject_GetAttrString(typesModule, "MethodType"); + Py_DECREF(typesModule); + if (!methodType) return NULL; + result = PyObject_CallFunctionObjArgs(methodType, func, self, NULL); + Py_DECREF(methodType); + return result; +} +#elif PY_MAJOR_VERSION >= 3 +static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { + CYTHON_UNUSED_VAR(typ); + if (!self) + return __Pyx_NewRef(func); + return PyMethod_New(func, self); +} +#else + #define __Pyx_PyMethod_New PyMethod_New +#endif + +/* PyVectorcallFastCallDict.proto */ +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw); +#endif + +/* CythonFunctionShared.proto */ +#define __Pyx_CyFunction_USED +#define __Pyx_CYFUNCTION_STATICMETHOD 0x01 +#define __Pyx_CYFUNCTION_CLASSMETHOD 0x02 +#define __Pyx_CYFUNCTION_CCLASS 0x04 +#define __Pyx_CYFUNCTION_COROUTINE 0x08 +#define __Pyx_CyFunction_GetClosure(f)\ + (((__pyx_CyFunctionObject *) (f))->func_closure) +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_CyFunction_GetClassObj(f)\ + (((__pyx_CyFunctionObject *) (f))->func_classobj) +#else + #define __Pyx_CyFunction_GetClassObj(f)\ + ((PyObject*) ((PyCMethodObject *) (f))->mm_class) +#endif +#define __Pyx_CyFunction_SetClassObj(f, classobj)\ + __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj)) +#define __Pyx_CyFunction_Defaults(type, f)\ + ((type *)(((__pyx_CyFunctionObject *) (f))->defaults)) +#define __Pyx_CyFunction_SetDefaultsGetter(f, g)\ + ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g) +typedef struct { +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject_HEAD + PyObject *func; +#elif PY_VERSION_HEX < 0x030900B1 + PyCFunctionObject func; +#else + PyCMethodObject func; +#endif +#if CYTHON_BACKPORT_VECTORCALL + __pyx_vectorcallfunc func_vectorcall; +#endif +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API + PyObject *func_weakreflist; +#endif + PyObject *func_dict; + PyObject *func_name; + PyObject *func_qualname; + PyObject *func_doc; + PyObject *func_globals; + PyObject *func_code; + PyObject *func_closure; +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + PyObject *func_classobj; +#endif + void *defaults; + int defaults_pyobjects; + size_t defaults_size; + int flags; + PyObject *defaults_tuple; + PyObject *defaults_kwdict; + PyObject *(*defaults_getter)(PyObject *); + PyObject *func_annotations; + PyObject *func_is_coroutine; +} __pyx_CyFunctionObject; +#undef __Pyx_CyOrPyCFunction_Check +#define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType) +#define __Pyx_CyOrPyCFunction_Check(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type) +#define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType) +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc); +#undef __Pyx_IsSameCFunction +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCyOrCFunction(func, cfunc) +static PyObject 
*__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj); +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, + size_t size, + int pyobjects); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, + PyObject *tuple); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, + PyObject *dict); +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, + PyObject *dict); +static int __pyx_CyFunction_init(PyObject *module); +#if CYTHON_METH_FASTCALL +static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +#if CYTHON_BACKPORT_VECTORCALL +#define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall) +#else +#define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall) +#endif +#endif + +/* CythonFunction.proto */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +#if !CYTHON_COMPILING_IN_LIMITED_API +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); +#endif + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* FormatTypeName.proto */ +#if CYTHON_COMPILING_IN_LIMITED_API +typedef PyObject *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%U" +static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp); +#define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) +#else +typedef const char *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%.200s" +#define __Pyx_PyType_GetName(tp) ((tp)->tp_name) +#define __Pyx_DECREF_TypeName(obj) +#endif + +/* GCCDiagnostics.proto */ +#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#define __Pyx_HAS_GCC_DIAGNOSTIC +#endif + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static unsigned long __Pyx_get_runtime_version(void); +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +/* #### Code section: module_declarations ### */ +static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject 
*__pyx_v_parent, PyObject *__pyx_v_key_to_index, PyObject *__pyx_v_data); /* proto*/ +static PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__get_by_key_impl(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key, int __pyx_v_attr_err); /* proto*/ + +/* Module declarations from "cython" */ + +/* Module declarations from "sqlalchemy.engine._row_cy" */ +static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy__apply_processors(PyObject *, PyObject *); /*proto*/ +/* #### Code section: typeinfo ### */ +/* #### Code section: before_global_var ### */ +#define __Pyx_MODULE_NAME "sqlalchemy.engine._row_cy" +extern int __pyx_module_is_main_sqlalchemy__engine___row_cy; +int __pyx_module_is_main_sqlalchemy__engine___row_cy = 0; + +/* Implementation of "sqlalchemy.engine._row_cy" */ +/* #### Code section: global_var ### */ +static PyObject *__pyx_builtin_AssertionError; +static PyObject *__pyx_builtin_range; +/* #### Code section: string_decls ### */ +static const char __pyx_k_[] = "."; +static const char __pyx_k_gc[] = "gc"; +static const char __pyx_k_Any[] = "Any"; +static const char __pyx_k__15[] = "?"; +static const char __pyx_k_cls[] = "cls"; +static const char __pyx_k_get[] = "get"; +static const char __pyx_k_key[] = "key"; +static const char __pyx_k_new[] = "__new__"; +static const char __pyx_k_obj[] = "obj"; +static const char __pyx_k_Dict[] = "Dict"; +static const char __pyx_k_List[] = "List"; +static const char __pyx_k_None[] = "None"; +static const char __pyx_k_Type[] = "Type"; +static const char __pyx_k_bool[] = "bool"; +static const char __pyx_k_data[] = "data"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_self[] = "self"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_Tuple[] = "Tuple"; +static const char __pyx_k_class[] = "__class__"; +static const char __pyx_k_range[] = "range"; +static const char __pyx_k_slots[] = "__slots__"; +static const char __pyx_k_state[] = "state"; +static const char __pyx_k_data_2[] = "_data"; +static const char __pyx_k_enable[] = "enable"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_parent[] = "parent"; +static const char __pyx_k_reduce[] = "__reduce__"; +static const char __pyx_k_result[] = "result"; +static const char __pyx_k_return[] = "return"; +static const char __pyx_k_typing[] = "typing"; +static const char __pyx_k_BaseRow[] = "BaseRow"; +static const char __pyx_k_KeyType[] = "_KeyType"; +static const char __pyx_k_disable[] = "disable"; +static const char __pyx_k_Iterator[] = "Iterator"; +static const char __pyx_k_List_Any[] = "List[Any]"; +static const char __pyx_k_Optional[] = "Optional"; +static const char __pyx_k_Sequence[] = "Sequence"; +static const char __pyx_k_getstate[] = "__getstate__"; +static const char __pyx_k_parent_2[] = "_parent"; +static const char __pyx_k_setstate[] = "__setstate__"; +static const char __pyx_k_Tuple_Any[] = "Tuple[Any, ...]"; +static const char __pyx_k_isenabled[] = "isenabled"; +static const char __pyx_k_processors[] = "processors"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_is_compiled[] = "_is_compiled"; +static const char __pyx_k_values_impl[] = "_values_impl"; +static const char __pyx_k_Dict_str_Any[] = "Dict[str, Any]"; +static const char __pyx_k_Type_BaseRow[] = "Type[BaseRow]"; +static const char __pyx_k_is_coroutine[] = "_is_coroutine"; +static const char 
__pyx_k_key_to_index[] = "key_to_index"; +static const char __pyx_k_TYPE_CHECKING[] = "TYPE_CHECKING"; +static const char __pyx_k_Tuple_Any_Any[] = "Tuple[Any, Any]"; +static const char __pyx_k_class_getitem[] = "__class_getitem__"; +static const char __pyx_k_key_not_found[] = "_key_not_found"; +static const char __pyx_k_AssertionError[] = "AssertionError"; +static const char __pyx_k_ProcessorsType[] = "_ProcessorsType"; +static const char __pyx_k_ResultMetaData[] = "ResultMetaData"; +static const char __pyx_k_key_to_index_2[] = "_key_to_index"; +static const char __pyx_k_BaseRow___reduce[] = "BaseRow.__reduce__"; +static const char __pyx_k_to_tuple_instance[] = "_to_tuple_instance"; +static const char __pyx_k_BaseRow___getstate[] = "BaseRow.__getstate__"; +static const char __pyx_k_BaseRow___setstate[] = "BaseRow.__setstate__"; +static const char __pyx_k_asyncio_coroutines[] = "asyncio.coroutines"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_BaseRow__values_impl[] = "BaseRow._values_impl"; +static const char __pyx_k_rowproxy_reconstructor[] = "rowproxy_reconstructor"; +static const char __pyx_k_get_by_key_impl_mapping[] = "_get_by_key_impl_mapping"; +static const char __pyx_k_sqlalchemy_engine__row_cy[] = "sqlalchemy.engine._row_cy"; +static const char __pyx_k_BaseRow__to_tuple_instance[] = "BaseRow._to_tuple_instance"; +static const char __pyx_k_BaseRow__get_by_key_impl_mapping[] = "BaseRow._get_by_key_impl_mapping"; +static const char __pyx_k_lib_sqlalchemy_engine__row_cy_py[] = "lib/sqlalchemy/engine/_row_cy.py"; +/* #### Code section: decls ### */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static int __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_parent, PyObject *__pyx_v_processors, PyObject *__pyx_v_key_to_index, PyObject *__pyx_v_data); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_2__reduce__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_4__getstate__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_6__setstate__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_state); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_8_values_impl(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_10__iter__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ +static Py_ssize_t __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_12__len__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ +static Py_hash_t __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_14__hash__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_16__getitem__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_18_get_by_key_impl_mapping(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key); /* proto 
*/ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_20__getattr__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_name); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_22_to_tuple_instance(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_2rowproxy_reconstructor(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_cls, PyObject *__pyx_v_state); /* proto */ +static PyObject *__pyx_tp_new_10sqlalchemy_6engine_7_row_cy_BaseRow(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_get = {0, 0, 0, 0, 0}; +/* #### Code section: late_includes ### */ +/* #### Code section: module_state ### */ +typedef struct { + PyObject *__pyx_d; + PyObject *__pyx_b; + PyObject *__pyx_cython_runtime; + PyObject *__pyx_empty_tuple; + PyObject *__pyx_empty_bytes; + PyObject *__pyx_empty_unicode; + #ifdef __Pyx_CyFunction_USED + PyTypeObject *__pyx_CyFunctionType; + #endif + #ifdef __Pyx_FusedFunction_USED + PyTypeObject *__pyx_FusedFunctionType; + #endif + #ifdef __Pyx_Generator_USED + PyTypeObject *__pyx_GeneratorType; + #endif + #ifdef __Pyx_IterableCoroutine_USED + PyTypeObject *__pyx_IterableCoroutineType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineAwaitType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineType; + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + PyObject *__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow; + #endif + PyTypeObject *__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow; + PyObject *__pyx_kp_u_; + PyObject *__pyx_n_s_Any; + PyObject *__pyx_n_s_AssertionError; + PyObject *__pyx_n_s_BaseRow; + PyObject *__pyx_n_s_BaseRow___getstate; + PyObject *__pyx_n_s_BaseRow___reduce; + PyObject *__pyx_n_s_BaseRow___setstate; + PyObject *__pyx_n_s_BaseRow__get_by_key_impl_mapping; + PyObject *__pyx_n_s_BaseRow__to_tuple_instance; + PyObject *__pyx_n_s_BaseRow__values_impl; + PyObject *__pyx_n_s_Dict; + PyObject *__pyx_kp_s_Dict_str_Any; + PyObject *__pyx_n_s_Iterator; + PyObject *__pyx_n_s_KeyType; + PyObject *__pyx_n_s_List; + PyObject *__pyx_kp_s_List_Any; + PyObject *__pyx_n_s_None; + PyObject *__pyx_n_s_Optional; + PyObject *__pyx_n_s_ProcessorsType; + PyObject *__pyx_n_s_ResultMetaData; + PyObject *__pyx_n_s_Sequence; + PyObject *__pyx_n_s_TYPE_CHECKING; + PyObject *__pyx_n_s_Tuple; + PyObject *__pyx_kp_s_Tuple_Any; + PyObject *__pyx_kp_s_Tuple_Any_Any; + PyObject *__pyx_n_s_Type; + PyObject *__pyx_kp_s_Type_BaseRow; + PyObject *__pyx_n_s__15; + PyObject *__pyx_n_s_asyncio_coroutines; + PyObject *__pyx_n_s_bool; + PyObject *__pyx_n_s_class; + PyObject *__pyx_n_s_class_getitem; + PyObject *__pyx_n_s_cline_in_traceback; + PyObject *__pyx_n_s_cls; + PyObject *__pyx_n_s_data; + PyObject *__pyx_n_u_data_2; + PyObject *__pyx_kp_u_disable; + PyObject *__pyx_kp_u_enable; + PyObject *__pyx_kp_u_gc; + 
PyObject *__pyx_n_s_get; + PyObject *__pyx_n_s_get_by_key_impl_mapping; + PyObject *__pyx_n_s_getstate; + PyObject *__pyx_n_s_import; + PyObject *__pyx_n_s_is_compiled; + PyObject *__pyx_n_s_is_coroutine; + PyObject *__pyx_kp_u_isenabled; + PyObject *__pyx_n_s_key; + PyObject *__pyx_n_s_key_not_found; + PyObject *__pyx_n_s_key_to_index; + PyObject *__pyx_n_s_key_to_index_2; + PyObject *__pyx_n_u_key_to_index_2; + PyObject *__pyx_kp_s_lib_sqlalchemy_engine__row_cy_py; + PyObject *__pyx_n_s_main; + PyObject *__pyx_n_s_name; + PyObject *__pyx_n_s_new; + PyObject *__pyx_n_s_obj; + PyObject *__pyx_n_s_parent; + PyObject *__pyx_n_u_parent_2; + PyObject *__pyx_n_s_processors; + PyObject *__pyx_n_s_pyx_vtable; + PyObject *__pyx_n_s_range; + PyObject *__pyx_n_s_reduce; + PyObject *__pyx_n_s_result; + PyObject *__pyx_n_s_return; + PyObject *__pyx_n_s_rowproxy_reconstructor; + PyObject *__pyx_n_s_self; + PyObject *__pyx_n_s_setstate; + PyObject *__pyx_n_s_slots; + PyObject *__pyx_n_s_sqlalchemy_engine__row_cy; + PyObject *__pyx_n_s_state; + PyObject *__pyx_n_s_test; + PyObject *__pyx_n_s_to_tuple_instance; + PyObject *__pyx_n_s_typing; + PyObject *__pyx_n_s_values_impl; + PyObject *__pyx_tuple__3; + PyObject *__pyx_tuple__4; + PyObject *__pyx_tuple__7; + PyObject *__pyx_tuple__10; + PyObject *__pyx_tuple__13; + PyObject *__pyx_codeobj__2; + PyObject *__pyx_codeobj__5; + PyObject *__pyx_codeobj__6; + PyObject *__pyx_codeobj__8; + PyObject *__pyx_codeobj__9; + PyObject *__pyx_codeobj__11; + PyObject *__pyx_codeobj__12; + PyObject *__pyx_codeobj__14; +} __pyx_mstate; + +#if CYTHON_USE_MODULE_STATE +#ifdef __cplusplus +namespace { + extern struct PyModuleDef __pyx_moduledef; +} /* anonymous namespace */ +#else +static struct PyModuleDef __pyx_moduledef; +#endif + +#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) + +#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) + +#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) +#else +static __pyx_mstate __pyx_mstate_global_static = +#ifdef __cplusplus + {}; +#else + {0}; +#endif +static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; +#endif +/* #### Code section: module_state_clear ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_clear(PyObject *m) { + __pyx_mstate *clear_module_state = __pyx_mstate(m); + if (!clear_module_state) return 0; + Py_CLEAR(clear_module_state->__pyx_d); + Py_CLEAR(clear_module_state->__pyx_b); + Py_CLEAR(clear_module_state->__pyx_cython_runtime); + Py_CLEAR(clear_module_state->__pyx_empty_tuple); + Py_CLEAR(clear_module_state->__pyx_empty_bytes); + Py_CLEAR(clear_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_CLEAR(clear_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); + #endif + Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); + Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow); + Py_CLEAR(clear_module_state->__pyx_kp_u_); + Py_CLEAR(clear_module_state->__pyx_n_s_Any); + Py_CLEAR(clear_module_state->__pyx_n_s_AssertionError); + Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow); + Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow___getstate); + Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow___reduce); + Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow___setstate); + Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow__get_by_key_impl_mapping); + 
Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow__to_tuple_instance); + Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow__values_impl); + Py_CLEAR(clear_module_state->__pyx_n_s_Dict); + Py_CLEAR(clear_module_state->__pyx_kp_s_Dict_str_Any); + Py_CLEAR(clear_module_state->__pyx_n_s_Iterator); + Py_CLEAR(clear_module_state->__pyx_n_s_KeyType); + Py_CLEAR(clear_module_state->__pyx_n_s_List); + Py_CLEAR(clear_module_state->__pyx_kp_s_List_Any); + Py_CLEAR(clear_module_state->__pyx_n_s_None); + Py_CLEAR(clear_module_state->__pyx_n_s_Optional); + Py_CLEAR(clear_module_state->__pyx_n_s_ProcessorsType); + Py_CLEAR(clear_module_state->__pyx_n_s_ResultMetaData); + Py_CLEAR(clear_module_state->__pyx_n_s_Sequence); + Py_CLEAR(clear_module_state->__pyx_n_s_TYPE_CHECKING); + Py_CLEAR(clear_module_state->__pyx_n_s_Tuple); + Py_CLEAR(clear_module_state->__pyx_kp_s_Tuple_Any); + Py_CLEAR(clear_module_state->__pyx_kp_s_Tuple_Any_Any); + Py_CLEAR(clear_module_state->__pyx_n_s_Type); + Py_CLEAR(clear_module_state->__pyx_kp_s_Type_BaseRow); + Py_CLEAR(clear_module_state->__pyx_n_s__15); + Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); + Py_CLEAR(clear_module_state->__pyx_n_s_bool); + Py_CLEAR(clear_module_state->__pyx_n_s_class); + Py_CLEAR(clear_module_state->__pyx_n_s_class_getitem); + Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); + Py_CLEAR(clear_module_state->__pyx_n_s_cls); + Py_CLEAR(clear_module_state->__pyx_n_s_data); + Py_CLEAR(clear_module_state->__pyx_n_u_data_2); + Py_CLEAR(clear_module_state->__pyx_kp_u_disable); + Py_CLEAR(clear_module_state->__pyx_kp_u_enable); + Py_CLEAR(clear_module_state->__pyx_kp_u_gc); + Py_CLEAR(clear_module_state->__pyx_n_s_get); + Py_CLEAR(clear_module_state->__pyx_n_s_get_by_key_impl_mapping); + Py_CLEAR(clear_module_state->__pyx_n_s_getstate); + Py_CLEAR(clear_module_state->__pyx_n_s_import); + Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); + Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); + Py_CLEAR(clear_module_state->__pyx_kp_u_isenabled); + Py_CLEAR(clear_module_state->__pyx_n_s_key); + Py_CLEAR(clear_module_state->__pyx_n_s_key_not_found); + Py_CLEAR(clear_module_state->__pyx_n_s_key_to_index); + Py_CLEAR(clear_module_state->__pyx_n_s_key_to_index_2); + Py_CLEAR(clear_module_state->__pyx_n_u_key_to_index_2); + Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_engine__row_cy_py); + Py_CLEAR(clear_module_state->__pyx_n_s_main); + Py_CLEAR(clear_module_state->__pyx_n_s_name); + Py_CLEAR(clear_module_state->__pyx_n_s_new); + Py_CLEAR(clear_module_state->__pyx_n_s_obj); + Py_CLEAR(clear_module_state->__pyx_n_s_parent); + Py_CLEAR(clear_module_state->__pyx_n_u_parent_2); + Py_CLEAR(clear_module_state->__pyx_n_s_processors); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_vtable); + Py_CLEAR(clear_module_state->__pyx_n_s_range); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce); + Py_CLEAR(clear_module_state->__pyx_n_s_result); + Py_CLEAR(clear_module_state->__pyx_n_s_return); + Py_CLEAR(clear_module_state->__pyx_n_s_rowproxy_reconstructor); + Py_CLEAR(clear_module_state->__pyx_n_s_self); + Py_CLEAR(clear_module_state->__pyx_n_s_setstate); + Py_CLEAR(clear_module_state->__pyx_n_s_slots); + Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_engine__row_cy); + Py_CLEAR(clear_module_state->__pyx_n_s_state); + Py_CLEAR(clear_module_state->__pyx_n_s_test); + Py_CLEAR(clear_module_state->__pyx_n_s_to_tuple_instance); + Py_CLEAR(clear_module_state->__pyx_n_s_typing); + Py_CLEAR(clear_module_state->__pyx_n_s_values_impl); + 
Py_CLEAR(clear_module_state->__pyx_tuple__3); + Py_CLEAR(clear_module_state->__pyx_tuple__4); + Py_CLEAR(clear_module_state->__pyx_tuple__7); + Py_CLEAR(clear_module_state->__pyx_tuple__10); + Py_CLEAR(clear_module_state->__pyx_tuple__13); + Py_CLEAR(clear_module_state->__pyx_codeobj__2); + Py_CLEAR(clear_module_state->__pyx_codeobj__5); + Py_CLEAR(clear_module_state->__pyx_codeobj__6); + Py_CLEAR(clear_module_state->__pyx_codeobj__8); + Py_CLEAR(clear_module_state->__pyx_codeobj__9); + Py_CLEAR(clear_module_state->__pyx_codeobj__11); + Py_CLEAR(clear_module_state->__pyx_codeobj__12); + Py_CLEAR(clear_module_state->__pyx_codeobj__14); + return 0; +} +#endif +/* #### Code section: module_state_traverse ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { + __pyx_mstate *traverse_module_state = __pyx_mstate(m); + if (!traverse_module_state) return 0; + Py_VISIT(traverse_module_state->__pyx_d); + Py_VISIT(traverse_module_state->__pyx_b); + Py_VISIT(traverse_module_state->__pyx_cython_runtime); + Py_VISIT(traverse_module_state->__pyx_empty_tuple); + Py_VISIT(traverse_module_state->__pyx_empty_bytes); + Py_VISIT(traverse_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_VISIT(traverse_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); + #endif + Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); + Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow); + Py_VISIT(traverse_module_state->__pyx_kp_u_); + Py_VISIT(traverse_module_state->__pyx_n_s_Any); + Py_VISIT(traverse_module_state->__pyx_n_s_AssertionError); + Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow); + Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow___getstate); + Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow___reduce); + Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow___setstate); + Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow__get_by_key_impl_mapping); + Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow__to_tuple_instance); + Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow__values_impl); + Py_VISIT(traverse_module_state->__pyx_n_s_Dict); + Py_VISIT(traverse_module_state->__pyx_kp_s_Dict_str_Any); + Py_VISIT(traverse_module_state->__pyx_n_s_Iterator); + Py_VISIT(traverse_module_state->__pyx_n_s_KeyType); + Py_VISIT(traverse_module_state->__pyx_n_s_List); + Py_VISIT(traverse_module_state->__pyx_kp_s_List_Any); + Py_VISIT(traverse_module_state->__pyx_n_s_None); + Py_VISIT(traverse_module_state->__pyx_n_s_Optional); + Py_VISIT(traverse_module_state->__pyx_n_s_ProcessorsType); + Py_VISIT(traverse_module_state->__pyx_n_s_ResultMetaData); + Py_VISIT(traverse_module_state->__pyx_n_s_Sequence); + Py_VISIT(traverse_module_state->__pyx_n_s_TYPE_CHECKING); + Py_VISIT(traverse_module_state->__pyx_n_s_Tuple); + Py_VISIT(traverse_module_state->__pyx_kp_s_Tuple_Any); + Py_VISIT(traverse_module_state->__pyx_kp_s_Tuple_Any_Any); + Py_VISIT(traverse_module_state->__pyx_n_s_Type); + Py_VISIT(traverse_module_state->__pyx_kp_s_Type_BaseRow); + Py_VISIT(traverse_module_state->__pyx_n_s__15); + Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); + Py_VISIT(traverse_module_state->__pyx_n_s_bool); + Py_VISIT(traverse_module_state->__pyx_n_s_class); + Py_VISIT(traverse_module_state->__pyx_n_s_class_getitem); + Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); + 
Py_VISIT(traverse_module_state->__pyx_n_s_cls); + Py_VISIT(traverse_module_state->__pyx_n_s_data); + Py_VISIT(traverse_module_state->__pyx_n_u_data_2); + Py_VISIT(traverse_module_state->__pyx_kp_u_disable); + Py_VISIT(traverse_module_state->__pyx_kp_u_enable); + Py_VISIT(traverse_module_state->__pyx_kp_u_gc); + Py_VISIT(traverse_module_state->__pyx_n_s_get); + Py_VISIT(traverse_module_state->__pyx_n_s_get_by_key_impl_mapping); + Py_VISIT(traverse_module_state->__pyx_n_s_getstate); + Py_VISIT(traverse_module_state->__pyx_n_s_import); + Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); + Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); + Py_VISIT(traverse_module_state->__pyx_kp_u_isenabled); + Py_VISIT(traverse_module_state->__pyx_n_s_key); + Py_VISIT(traverse_module_state->__pyx_n_s_key_not_found); + Py_VISIT(traverse_module_state->__pyx_n_s_key_to_index); + Py_VISIT(traverse_module_state->__pyx_n_s_key_to_index_2); + Py_VISIT(traverse_module_state->__pyx_n_u_key_to_index_2); + Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_engine__row_cy_py); + Py_VISIT(traverse_module_state->__pyx_n_s_main); + Py_VISIT(traverse_module_state->__pyx_n_s_name); + Py_VISIT(traverse_module_state->__pyx_n_s_new); + Py_VISIT(traverse_module_state->__pyx_n_s_obj); + Py_VISIT(traverse_module_state->__pyx_n_s_parent); + Py_VISIT(traverse_module_state->__pyx_n_u_parent_2); + Py_VISIT(traverse_module_state->__pyx_n_s_processors); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_vtable); + Py_VISIT(traverse_module_state->__pyx_n_s_range); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce); + Py_VISIT(traverse_module_state->__pyx_n_s_result); + Py_VISIT(traverse_module_state->__pyx_n_s_return); + Py_VISIT(traverse_module_state->__pyx_n_s_rowproxy_reconstructor); + Py_VISIT(traverse_module_state->__pyx_n_s_self); + Py_VISIT(traverse_module_state->__pyx_n_s_setstate); + Py_VISIT(traverse_module_state->__pyx_n_s_slots); + Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_engine__row_cy); + Py_VISIT(traverse_module_state->__pyx_n_s_state); + Py_VISIT(traverse_module_state->__pyx_n_s_test); + Py_VISIT(traverse_module_state->__pyx_n_s_to_tuple_instance); + Py_VISIT(traverse_module_state->__pyx_n_s_typing); + Py_VISIT(traverse_module_state->__pyx_n_s_values_impl); + Py_VISIT(traverse_module_state->__pyx_tuple__3); + Py_VISIT(traverse_module_state->__pyx_tuple__4); + Py_VISIT(traverse_module_state->__pyx_tuple__7); + Py_VISIT(traverse_module_state->__pyx_tuple__10); + Py_VISIT(traverse_module_state->__pyx_tuple__13); + Py_VISIT(traverse_module_state->__pyx_codeobj__2); + Py_VISIT(traverse_module_state->__pyx_codeobj__5); + Py_VISIT(traverse_module_state->__pyx_codeobj__6); + Py_VISIT(traverse_module_state->__pyx_codeobj__8); + Py_VISIT(traverse_module_state->__pyx_codeobj__9); + Py_VISIT(traverse_module_state->__pyx_codeobj__11); + Py_VISIT(traverse_module_state->__pyx_codeobj__12); + Py_VISIT(traverse_module_state->__pyx_codeobj__14); + return 0; +} +#endif +/* #### Code section: module_state_defines ### */ +#define __pyx_d __pyx_mstate_global->__pyx_d +#define __pyx_b __pyx_mstate_global->__pyx_b +#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime +#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple +#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes +#define __pyx_empty_unicode __pyx_mstate_global->__pyx_empty_unicode +#ifdef __Pyx_CyFunction_USED +#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType +#endif +#ifdef 
__Pyx_FusedFunction_USED +#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType +#endif +#ifdef __Pyx_Generator_USED +#define __pyx_GeneratorType __pyx_mstate_global->__pyx_GeneratorType +#endif +#ifdef __Pyx_IterableCoroutine_USED +#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#define __pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow __pyx_mstate_global->__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow +#endif +#define __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow __pyx_mstate_global->__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow +#define __pyx_kp_u_ __pyx_mstate_global->__pyx_kp_u_ +#define __pyx_n_s_Any __pyx_mstate_global->__pyx_n_s_Any +#define __pyx_n_s_AssertionError __pyx_mstate_global->__pyx_n_s_AssertionError +#define __pyx_n_s_BaseRow __pyx_mstate_global->__pyx_n_s_BaseRow +#define __pyx_n_s_BaseRow___getstate __pyx_mstate_global->__pyx_n_s_BaseRow___getstate +#define __pyx_n_s_BaseRow___reduce __pyx_mstate_global->__pyx_n_s_BaseRow___reduce +#define __pyx_n_s_BaseRow___setstate __pyx_mstate_global->__pyx_n_s_BaseRow___setstate +#define __pyx_n_s_BaseRow__get_by_key_impl_mapping __pyx_mstate_global->__pyx_n_s_BaseRow__get_by_key_impl_mapping +#define __pyx_n_s_BaseRow__to_tuple_instance __pyx_mstate_global->__pyx_n_s_BaseRow__to_tuple_instance +#define __pyx_n_s_BaseRow__values_impl __pyx_mstate_global->__pyx_n_s_BaseRow__values_impl +#define __pyx_n_s_Dict __pyx_mstate_global->__pyx_n_s_Dict +#define __pyx_kp_s_Dict_str_Any __pyx_mstate_global->__pyx_kp_s_Dict_str_Any +#define __pyx_n_s_Iterator __pyx_mstate_global->__pyx_n_s_Iterator +#define __pyx_n_s_KeyType __pyx_mstate_global->__pyx_n_s_KeyType +#define __pyx_n_s_List __pyx_mstate_global->__pyx_n_s_List +#define __pyx_kp_s_List_Any __pyx_mstate_global->__pyx_kp_s_List_Any +#define __pyx_n_s_None __pyx_mstate_global->__pyx_n_s_None +#define __pyx_n_s_Optional __pyx_mstate_global->__pyx_n_s_Optional +#define __pyx_n_s_ProcessorsType __pyx_mstate_global->__pyx_n_s_ProcessorsType +#define __pyx_n_s_ResultMetaData __pyx_mstate_global->__pyx_n_s_ResultMetaData +#define __pyx_n_s_Sequence __pyx_mstate_global->__pyx_n_s_Sequence +#define __pyx_n_s_TYPE_CHECKING __pyx_mstate_global->__pyx_n_s_TYPE_CHECKING +#define __pyx_n_s_Tuple __pyx_mstate_global->__pyx_n_s_Tuple +#define __pyx_kp_s_Tuple_Any __pyx_mstate_global->__pyx_kp_s_Tuple_Any +#define __pyx_kp_s_Tuple_Any_Any __pyx_mstate_global->__pyx_kp_s_Tuple_Any_Any +#define __pyx_n_s_Type __pyx_mstate_global->__pyx_n_s_Type +#define __pyx_kp_s_Type_BaseRow __pyx_mstate_global->__pyx_kp_s_Type_BaseRow +#define __pyx_n_s__15 __pyx_mstate_global->__pyx_n_s__15 +#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines +#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool +#define __pyx_n_s_class __pyx_mstate_global->__pyx_n_s_class +#define __pyx_n_s_class_getitem __pyx_mstate_global->__pyx_n_s_class_getitem +#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback +#define __pyx_n_s_cls __pyx_mstate_global->__pyx_n_s_cls +#define __pyx_n_s_data __pyx_mstate_global->__pyx_n_s_data +#define __pyx_n_u_data_2 __pyx_mstate_global->__pyx_n_u_data_2 +#define 
__pyx_kp_u_disable __pyx_mstate_global->__pyx_kp_u_disable +#define __pyx_kp_u_enable __pyx_mstate_global->__pyx_kp_u_enable +#define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc +#define __pyx_n_s_get __pyx_mstate_global->__pyx_n_s_get +#define __pyx_n_s_get_by_key_impl_mapping __pyx_mstate_global->__pyx_n_s_get_by_key_impl_mapping +#define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate +#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import +#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled +#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine +#define __pyx_kp_u_isenabled __pyx_mstate_global->__pyx_kp_u_isenabled +#define __pyx_n_s_key __pyx_mstate_global->__pyx_n_s_key +#define __pyx_n_s_key_not_found __pyx_mstate_global->__pyx_n_s_key_not_found +#define __pyx_n_s_key_to_index __pyx_mstate_global->__pyx_n_s_key_to_index +#define __pyx_n_s_key_to_index_2 __pyx_mstate_global->__pyx_n_s_key_to_index_2 +#define __pyx_n_u_key_to_index_2 __pyx_mstate_global->__pyx_n_u_key_to_index_2 +#define __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_engine__row_cy_py +#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main +#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name +#define __pyx_n_s_new __pyx_mstate_global->__pyx_n_s_new +#define __pyx_n_s_obj __pyx_mstate_global->__pyx_n_s_obj +#define __pyx_n_s_parent __pyx_mstate_global->__pyx_n_s_parent +#define __pyx_n_u_parent_2 __pyx_mstate_global->__pyx_n_u_parent_2 +#define __pyx_n_s_processors __pyx_mstate_global->__pyx_n_s_processors +#define __pyx_n_s_pyx_vtable __pyx_mstate_global->__pyx_n_s_pyx_vtable +#define __pyx_n_s_range __pyx_mstate_global->__pyx_n_s_range +#define __pyx_n_s_reduce __pyx_mstate_global->__pyx_n_s_reduce +#define __pyx_n_s_result __pyx_mstate_global->__pyx_n_s_result +#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return +#define __pyx_n_s_rowproxy_reconstructor __pyx_mstate_global->__pyx_n_s_rowproxy_reconstructor +#define __pyx_n_s_self __pyx_mstate_global->__pyx_n_s_self +#define __pyx_n_s_setstate __pyx_mstate_global->__pyx_n_s_setstate +#define __pyx_n_s_slots __pyx_mstate_global->__pyx_n_s_slots +#define __pyx_n_s_sqlalchemy_engine__row_cy __pyx_mstate_global->__pyx_n_s_sqlalchemy_engine__row_cy +#define __pyx_n_s_state __pyx_mstate_global->__pyx_n_s_state +#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test +#define __pyx_n_s_to_tuple_instance __pyx_mstate_global->__pyx_n_s_to_tuple_instance +#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing +#define __pyx_n_s_values_impl __pyx_mstate_global->__pyx_n_s_values_impl +#define __pyx_tuple__3 __pyx_mstate_global->__pyx_tuple__3 +#define __pyx_tuple__4 __pyx_mstate_global->__pyx_tuple__4 +#define __pyx_tuple__7 __pyx_mstate_global->__pyx_tuple__7 +#define __pyx_tuple__10 __pyx_mstate_global->__pyx_tuple__10 +#define __pyx_tuple__13 __pyx_mstate_global->__pyx_tuple__13 +#define __pyx_codeobj__2 __pyx_mstate_global->__pyx_codeobj__2 +#define __pyx_codeobj__5 __pyx_mstate_global->__pyx_codeobj__5 +#define __pyx_codeobj__6 __pyx_mstate_global->__pyx_codeobj__6 +#define __pyx_codeobj__8 __pyx_mstate_global->__pyx_codeobj__8 +#define __pyx_codeobj__9 __pyx_mstate_global->__pyx_codeobj__9 +#define __pyx_codeobj__11 __pyx_mstate_global->__pyx_codeobj__11 +#define __pyx_codeobj__12 __pyx_mstate_global->__pyx_codeobj__12 +#define __pyx_codeobj__14 __pyx_mstate_global->__pyx_codeobj__14 +/* #### Code section: module_code ### */ + +/* 
"sqlalchemy/engine/_row_cy.py":36 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +PyDoc_STRVAR(__pyx_doc_10sqlalchemy_6engine_7_row_cy__is_compiled, "Utility function to indicate if this module is compiled or not."); +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_6engine_7_row_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_6engine_7_row_cy__is_compiled}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy__is_compiled(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled", 1); + + /* "sqlalchemy/engine/_row_cy.py":38 + * def _is_compiled() -> bool: + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":36 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":55 + * _data: Tuple[Any, ...] 
= cython.declare(tuple, visibility="readonly") + * + * def __init__( # <<<<<<<<<<<<<< + * self, + * parent: ResultMetaData, + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +PyDoc_STRVAR(__pyx_doc_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__, "Row objects are constructed by CursorResult objects."); +#if CYTHON_UPDATE_DESCRIPTOR_DOC +struct wrapperbase __pyx_wrapperbase_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__; +#endif +static int __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_parent = 0; + PyObject *__pyx_v_processors = 0; + PyObject *__pyx_v_key_to_index = 0; + PyObject *__pyx_v_data = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[4] = {0,0,0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return -1; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_parent,&__pyx_n_s_processors,&__pyx_n_s_key_to_index,&__pyx_n_s_data,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 4: values[3] = __Pyx_Arg_VARARGS(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = __Pyx_Arg_VARARGS(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = __Pyx_Arg_VARARGS(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_VARARGS(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_parent)) != 0)) { + (void)__Pyx_Arg_NewRef_VARARGS(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 55, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_processors)) != 0)) { + (void)__Pyx_Arg_NewRef_VARARGS(values[1]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 55, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 4, 4, 1); __PYX_ERR(0, 55, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key_to_index)) != 0)) { + (void)__Pyx_Arg_NewRef_VARARGS(values[2]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 55, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 4, 4, 2); __PYX_ERR(0, 55, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_data)) != 0)) { + (void)__Pyx_Arg_NewRef_VARARGS(values[3]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 55, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 4, 4, 3); __PYX_ERR(0, 55, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if 
(unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 55, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 4)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); + values[1] = __Pyx_Arg_VARARGS(__pyx_args, 1); + values[2] = __Pyx_Arg_VARARGS(__pyx_args, 2); + values[3] = __Pyx_Arg_VARARGS(__pyx_args, 3); + } + __pyx_v_parent = values[0]; + __pyx_v_processors = values[1]; + __pyx_v_key_to_index = ((PyObject*)values[2]); + __pyx_v_data = values[3]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 4, 4, __pyx_nargs); __PYX_ERR(0, 55, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_key_to_index), (&PyDict_Type), 0, "key_to_index", 1))) __PYX_ERR(0, 59, __pyx_L1_error) + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self), __pyx_v_parent, __pyx_v_processors, __pyx_v_key_to_index, __pyx_v_data); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_parent, PyObject *__pyx_v_processors, PyObject *__pyx_v_key_to_index, PyObject *__pyx_v_data) { + PyObject *__pyx_v_data_tuple = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__init__", 1); + + /* "sqlalchemy/engine/_row_cy.py":66 + * data_tuple: Tuple[Any, ...] = ( + * _apply_processors(processors, data) + * if processors is not None # <<<<<<<<<<<<<< + * else tuple(data) + * ) + */ + __pyx_t_2 = (__pyx_v_processors != Py_None); + if (__pyx_t_2) { + + /* "sqlalchemy/engine/_row_cy.py":65 + * + * data_tuple: Tuple[Any, ...] 
= ( + * _apply_processors(processors, data) # <<<<<<<<<<<<<< + * if processors is not None + * else tuple(data) + */ + __pyx_t_3 = __pyx_f_10sqlalchemy_6engine_7_row_cy__apply_processors(__pyx_v_processors, __pyx_v_data); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 65, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __pyx_t_3; + __pyx_t_3 = 0; + } else { + + /* "sqlalchemy/engine/_row_cy.py":67 + * _apply_processors(processors, data) + * if processors is not None + * else tuple(data) # <<<<<<<<<<<<<< + * ) + * self._set_attrs(parent, key_to_index, data_tuple) + */ + __pyx_t_3 = __Pyx_PySequence_Tuple(__pyx_v_data); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __pyx_t_3; + __pyx_t_3 = 0; + } + __pyx_v_data_tuple = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/engine/_row_cy.py":69 + * else tuple(data) + * ) + * self._set_attrs(parent, key_to_index, data_tuple) # <<<<<<<<<<<<<< + * + * @cython.cfunc + */ + __pyx_t_1 = __pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs(__pyx_v_self, __pyx_v_parent, __pyx_v_key_to_index, __pyx_v_data_tuple); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 69, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/engine/_row_cy.py":55 + * _data: Tuple[Any, ...] = cython.declare(tuple, visibility="readonly") + * + * def __init__( # <<<<<<<<<<<<<< + * self, + * parent: ResultMetaData, + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_data_tuple); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":71 + * self._set_attrs(parent, key_to_index, data_tuple) + * + * @cython.cfunc # <<<<<<<<<<<<<< + * @cython.inline + * def _set_attrs( # type: ignore[no-untyped-def] # cython crashes + */ + +static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_parent, PyObject *__pyx_v_key_to_index, PyObject *__pyx_v_data) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_set_attrs", 1); + + /* "sqlalchemy/engine/_row_cy.py":81 + * if cython.compiled: + * # cython does not use __setattr__ + * self._parent = parent # <<<<<<<<<<<<<< + * self._key_to_index = key_to_index + * self._data = data + */ + __Pyx_INCREF(__pyx_v_parent); + __Pyx_GIVEREF(__pyx_v_parent); + __Pyx_GOTREF(__pyx_v_self->_parent); + __Pyx_DECREF(__pyx_v_self->_parent); + __pyx_v_self->_parent = __pyx_v_parent; + + /* "sqlalchemy/engine/_row_cy.py":82 + * # cython does not use __setattr__ + * self._parent = parent + * self._key_to_index = key_to_index # <<<<<<<<<<<<<< + * self._data = data + * else: + */ + __Pyx_INCREF(__pyx_v_key_to_index); + __Pyx_GIVEREF(__pyx_v_key_to_index); + __Pyx_GOTREF(__pyx_v_self->_key_to_index); + __Pyx_DECREF(__pyx_v_self->_key_to_index); + __pyx_v_self->_key_to_index = __pyx_v_key_to_index; + + /* "sqlalchemy/engine/_row_cy.py":83 + * self._parent = parent + * self._key_to_index = key_to_index + * self._data = data # <<<<<<<<<<<<<< + * else: + * # python does, so use object.__setattr__ + */ + __Pyx_INCREF(__pyx_v_data); + __Pyx_GIVEREF(__pyx_v_data); + __Pyx_GOTREF(__pyx_v_self->_data); + 
__Pyx_DECREF(__pyx_v_self->_data); + __pyx_v_self->_data = __pyx_v_data; + + /* "sqlalchemy/engine/_row_cy.py":71 + * self._set_attrs(parent, key_to_index, data_tuple) + * + * @cython.cfunc # <<<<<<<<<<<<<< + * @cython.inline + * def _set_attrs( # type: ignore[no-untyped-def] # cython crashes + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":90 + * object.__setattr__(self, "_data", data) + * + * def __reduce__(self) -> Tuple[Any, Any]: # <<<<<<<<<<<<<< + * return ( + * rowproxy_reconstructor, + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__ = {"__reduce__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("__reduce__", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce__", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_2__reduce__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_2__reduce__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + unsigned int __pyx_t_6; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce__", 1); + + /* "sqlalchemy/engine/_row_cy.py":91 + * + * def __reduce__(self) -> Tuple[Any, Any]: + * return ( # <<<<<<<<<<<<<< + * rowproxy_reconstructor, + * (self.__class__, self.__getstate__()), + */ + __Pyx_XDECREF(__pyx_r); + + /* "sqlalchemy/engine/_row_cy.py":92 + * def __reduce__(self) -> Tuple[Any, Any]: + * return ( + * rowproxy_reconstructor, # <<<<<<<<<<<<<< + * (self.__class__, self.__getstate__()), + * ) + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_rowproxy_reconstructor); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 92, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + + /* "sqlalchemy/engine/_row_cy.py":93 + * return ( + * rowproxy_reconstructor, + * (self.__class__, self.__getstate__()), # <<<<<<<<<<<<<< + * ) + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 93, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_getstate); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 93, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + __pyx_t_6 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_6 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_5, NULL}; + __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_6, 0+__pyx_t_6); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 93, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 93, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_2); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2)) __PYX_ERR(0, 93, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_3); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_3)) __PYX_ERR(0, 93, __pyx_L1_error); + __pyx_t_2 = 0; + __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_row_cy.py":92 + * def __reduce__(self) -> Tuple[Any, Any]: + * return ( + * rowproxy_reconstructor, # <<<<<<<<<<<<<< + * (self.__class__, self.__getstate__()), + * ) + */ + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 92, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1)) __PYX_ERR(0, 92, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_4); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_4)) __PYX_ERR(0, 92, __pyx_L1_error); + __pyx_t_1 = 0; + __pyx_t_4 = 0; + __pyx_r = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":90 + * object.__setattr__(self, "_data", data) + * + * def __reduce__(self) -> Tuple[Any, Any]: # <<<<<<<<<<<<<< + * return ( + * rowproxy_reconstructor, + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__reduce__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":96 + * ) + * + * def __getstate__(self) -> Dict[str, Any]: # <<<<<<<<<<<<<< + * return {"_parent": self._parent, "_data": self._data} + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__ = {"__getstate__", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getstate__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("__getstate__", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__getstate__", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_4__getstate__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_4__getstate__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__getstate__", 1); + + /* "sqlalchemy/engine/_row_cy.py":97 + * + * def __getstate__(self) -> Dict[str, Any]: + * return {"_parent": self._parent, "_data": self._data} # <<<<<<<<<<<<<< + * + * def __setstate__(self, state: Dict[str, Any]) -> None: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 97, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_t_1, __pyx_n_u_parent_2, __pyx_v_self->_parent) < 0) __PYX_ERR(0, 97, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_1, __pyx_n_u_data_2, __pyx_v_self->_data) < 0) __PYX_ERR(0, 97, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":96 + * ) + * + * def __getstate__(self) -> Dict[str, Any]: # <<<<<<<<<<<<<< + * return {"_parent": self._parent, "_data": self._data} + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__getstate__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":99 + * return {"_parent": self._parent, "_data": self._data} + * + * def __setstate__(self, state: Dict[str, Any]) -> None: # <<<<<<<<<<<<<< + * parent = state["_parent"] + * self._set_attrs(parent, parent._key_to_index, state["_data"]) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject 
*__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__ = {"__setstate__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_state,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 99, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate__") < 0)) __PYX_ERR(0, 99, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_state = ((PyObject*)values[0]); + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__setstate__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 99, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__setstate__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_state), (&PyDict_Type), 0, "state", 1))) __PYX_ERR(0, 99, __pyx_L1_error) + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_6__setstate__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self), __pyx_v_state); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return 
__pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_6__setstate__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_state) { + PyObject *__pyx_v_parent = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate__", 1); + + /* "sqlalchemy/engine/_row_cy.py":100 + * + * def __setstate__(self, state: Dict[str, Any]) -> None: + * parent = state["_parent"] # <<<<<<<<<<<<<< + * self._set_attrs(parent, parent._key_to_index, state["_data"]) + * + */ + __pyx_t_1 = __Pyx_PyDict_GetItem(__pyx_v_state, __pyx_n_u_parent_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 100, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_parent = __pyx_t_1; + __pyx_t_1 = 0; + + /* "sqlalchemy/engine/_row_cy.py":101 + * def __setstate__(self, state: Dict[str, Any]) -> None: + * parent = state["_parent"] + * self._set_attrs(parent, parent._key_to_index, state["_data"]) # <<<<<<<<<<<<<< + * + * def _values_impl(self) -> List[Any]: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_parent, __pyx_n_s_key_to_index_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyDict_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || __Pyx_RaiseUnexpectedTypeError("dict", __pyx_t_1))) __PYX_ERR(0, 101, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyDict_GetItem(__pyx_v_state, __pyx_n_u_data_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (!(likely(PyTuple_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_t_2))) __PYX_ERR(0, 101, __pyx_L1_error) + __pyx_t_3 = __pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs(__pyx_v_self, __pyx_v_parent, ((PyObject*)__pyx_t_1), ((PyObject*)__pyx_t_2)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_row_cy.py":99 + * return {"_parent": self._parent, "_data": self._data} + * + * def __setstate__(self, state: Dict[str, Any]) -> None: # <<<<<<<<<<<<<< + * parent = state["_parent"] + * self._set_attrs(parent, parent._key_to_index, state["_data"]) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__setstate__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_parent); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":103 + * self._set_attrs(parent, parent._key_to_index, state["_data"]) + * + * def _values_impl(self) -> List[Any]: # <<<<<<<<<<<<<< + * return list(self._data) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl = 
{"_values_impl", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_values_impl (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("_values_impl", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "_values_impl", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_8_values_impl(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_8_values_impl(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_values_impl", 1); + + /* "sqlalchemy/engine/_row_cy.py":104 + * + * def _values_impl(self) -> List[Any]: + * return list(self._data) # <<<<<<<<<<<<<< + * + * def __iter__(self) -> Iterator[Any]: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PySequence_List(__pyx_v_self->_data); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":103 + * self._set_attrs(parent, parent._key_to_index, state["_data"]) + * + * def _values_impl(self) -> List[Any]: # <<<<<<<<<<<<<< + * return list(self._data) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow._values_impl", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":106 + * return list(self._data) + * + * def __iter__(self) -> Iterator[Any]: # <<<<<<<<<<<<<< + * return iter(self._data) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_11__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_11__iter__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_10__iter__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow 
*)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_10__iter__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__iter__", 1); + + /* "sqlalchemy/engine/_row_cy.py":107 + * + * def __iter__(self) -> Iterator[Any]: + * return iter(self._data) # <<<<<<<<<<<<<< + * + * def __len__(self) -> int: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_v_self->_data; + __Pyx_INCREF(__pyx_t_1); + __pyx_t_2 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 107, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":106 + * return list(self._data) + * + * def __iter__(self) -> Iterator[Any]: # <<<<<<<<<<<<<< + * return iter(self._data) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":109 + * return iter(self._data) + * + * def __len__(self) -> int: # <<<<<<<<<<<<<< + * return len(self._data) + * + */ + +/* Python wrapper */ +static Py_ssize_t __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__(PyObject *__pyx_v_self); /*proto*/ +static Py_ssize_t __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + Py_ssize_t __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_12__len__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static Py_ssize_t __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_12__len__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { + Py_ssize_t __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__len__", 1); + + /* "sqlalchemy/engine/_row_cy.py":110 + * + * def __len__(self) -> int: + * return len(self._data) # <<<<<<<<<<<<<< + * + * def __hash__(self) -> int: + */ + __pyx_t_1 = __pyx_v_self->_data; + __Pyx_INCREF(__pyx_t_1); + if (unlikely(__pyx_t_1 == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 110, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PyTuple_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":109 + * return iter(self._data) + * + * def __len__(self) -> int: # <<<<<<<<<<<<<< + * return len(self._data) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + 
__Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__len__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":112 + * return len(self._data) + * + * def __hash__(self) -> int: # <<<<<<<<<<<<<< + * return hash(self._data) + * + */ + +/* Python wrapper */ +static Py_hash_t __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_15__hash__(PyObject *__pyx_v_self); /*proto*/ +static Py_hash_t __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_15__hash__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + Py_hash_t __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__hash__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_14__hash__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static Py_hash_t __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_14__hash__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { + Py_hash_t __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_hash_t __pyx_t_2; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__hash__", 1); + + /* "sqlalchemy/engine/_row_cy.py":113 + * + * def __hash__(self) -> int: + * return hash(self._data) # <<<<<<<<<<<<<< + * + * def __getitem__(self, key: Any) -> Any: + */ + __pyx_t_1 = __pyx_v_self->_data; + __Pyx_INCREF(__pyx_t_1); + __pyx_t_2 = PyObject_Hash(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_hash_t)-1))) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":112 + * return len(self._data) + * + * def __hash__(self) -> int: # <<<<<<<<<<<<<< + * return hash(self._data) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__hash__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + if (unlikely(__pyx_r == -1) && !PyErr_Occurred()) __pyx_r = -2; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":115 + * return hash(self._data) + * + * def __getitem__(self, key: Any) -> Any: # <<<<<<<<<<<<<< + * return self._data[key] + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_17__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_17__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_16__getitem__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self), ((PyObject *)__pyx_v_key)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_16__getitem__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject 
*__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__getitem__", 1); + + /* "sqlalchemy/engine/_row_cy.py":116 + * + * def __getitem__(self, key: Any) -> Any: + * return self._data[key] # <<<<<<<<<<<<<< + * + * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_self->_data == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 116, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PyObject_GetItem(__pyx_v_self->_data, __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":115 + * return hash(self._data) + * + * def __getitem__(self, key: Any) -> Any: # <<<<<<<<<<<<<< + * return self._data[key] + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":118 + * return self._data[key] + * + * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: # <<<<<<<<<<<<<< + * return self._get_by_key_impl(key, False) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping = {"_get_by_key_impl_mapping", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_key = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_get_by_key_impl_mapping (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + 
else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 118, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "_get_by_key_impl_mapping") < 0)) __PYX_ERR(0, 118, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_key = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("_get_by_key_impl_mapping", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 118, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow._get_by_key_impl_mapping", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_18_get_by_key_impl_mapping(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self), __pyx_v_key); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_18_get_by_key_impl_mapping(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_get_by_key_impl_mapping", 1); + + /* "sqlalchemy/engine/_row_cy.py":119 + * + * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: + * return self._get_by_key_impl(key, False) # <<<<<<<<<<<<<< + * + * @cython.cfunc + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self->__pyx_vtab)->_get_by_key_impl(__pyx_v_self, __pyx_v_key, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 119, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":118 + * return self._data[key] + * + * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: # <<<<<<<<<<<<<< + * return self._get_by_key_impl(key, False) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow._get_by_key_impl_mapping", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":121 + * return self._get_by_key_impl(key, False) + * + * @cython.cfunc # <<<<<<<<<<<<<< + * def _get_by_key_impl(self, key: _KeyType, attr_err: cython.bint) -> object: + * index: Optional[int] = self._key_to_index.get(key) + */ + +static PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__get_by_key_impl(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key, int 
__pyx_v_attr_err) { + PyObject *__pyx_v_index = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + unsigned int __pyx_t_6; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_get_by_key_impl", 1); + + /* "sqlalchemy/engine/_row_cy.py":123 + * @cython.cfunc + * def _get_by_key_impl(self, key: _KeyType, attr_err: cython.bint) -> object: + * index: Optional[int] = self._key_to_index.get(key) # <<<<<<<<<<<<<< + * if index is not None: + * return self._data[index] + */ + if (unlikely(__pyx_v_self->_key_to_index == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); + __PYX_ERR(0, 123, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PyDict_GetItemDefault(__pyx_v_self->_key_to_index, __pyx_v_key, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(__Pyx_Py3Int_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || __Pyx_RaiseUnexpectedTypeError("int", __pyx_t_1))) __PYX_ERR(0, 123, __pyx_L1_error) + __pyx_v_index = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/engine/_row_cy.py":124 + * def _get_by_key_impl(self, key: _KeyType, attr_err: cython.bint) -> object: + * index: Optional[int] = self._key_to_index.get(key) + * if index is not None: # <<<<<<<<<<<<<< + * return self._data[index] + * self._parent._key_not_found(key, attr_err) + */ + __pyx_t_2 = (__pyx_v_index != ((PyObject*)Py_None)); + if (__pyx_t_2) { + + /* "sqlalchemy/engine/_row_cy.py":125 + * index: Optional[int] = self._key_to_index.get(key) + * if index is not None: + * return self._data[index] # <<<<<<<<<<<<<< + * self._parent._key_not_found(key, attr_err) + * + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_self->_data == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 125, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PyObject_GetItem(__pyx_v_self->_data, __pyx_v_index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":124 + * def _get_by_key_impl(self, key: _KeyType, attr_err: cython.bint) -> object: + * index: Optional[int] = self._key_to_index.get(key) + * if index is not None: # <<<<<<<<<<<<<< + * return self._data[index] + * self._parent._key_not_found(key, attr_err) + */ + } + + /* "sqlalchemy/engine/_row_cy.py":126 + * if index is not None: + * return self._data[index] + * self._parent._key_not_found(key, attr_err) # <<<<<<<<<<<<<< + * + * @cython.annotation_typing(False) + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_parent, __pyx_n_s_key_not_found); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 126, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_attr_err); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 126, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + __pyx_t_6 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_6 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[3] = {__pyx_t_5, __pyx_v_key, __pyx_t_4}; 
+ __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_6, 2+__pyx_t_6); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 126, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/engine/_row_cy.py":121 + * return self._get_by_key_impl(key, False) + * + * @cython.cfunc # <<<<<<<<<<<<<< + * def _get_by_key_impl(self, key: _KeyType, attr_err: cython.bint) -> object: + * index: Optional[int] = self._key_to_index.get(key) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow._get_by_key_impl", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_index); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":128 + * self._parent._key_not_found(key, attr_err) + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def __getattr__(self, name: str) -> Any: + * return self._get_by_key_impl(name, True) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_21__getattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_name); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_21__getattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_name) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getattr__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_20__getattr__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self), ((PyObject *)__pyx_v_name)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_20__getattr__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_name) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__getattr__", 1); + + /* "sqlalchemy/engine/_row_cy.py":130 + * @cython.annotation_typing(False) + * def __getattr__(self, name: str) -> Any: + * return self._get_by_key_impl(name, True) # <<<<<<<<<<<<<< + * + * def _to_tuple_instance(self) -> Tuple[Any, ...]: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self->__pyx_vtab)->_get_by_key_impl(__pyx_v_self, __pyx_v_name, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":128 + * self._parent._key_not_found(key, attr_err) + * + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def __getattr__(self, name: str) -> Any: + * return self._get_by_key_impl(name, True) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__getattr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + 
__pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":132 + * return self._get_by_key_impl(name, True) + * + * def _to_tuple_instance(self) -> Tuple[Any, ...]: # <<<<<<<<<<<<<< + * return self._data + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance = {"_to_tuple_instance", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_to_tuple_instance (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("_to_tuple_instance", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "_to_tuple_instance", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_22_to_tuple_instance(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_22_to_tuple_instance(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_to_tuple_instance", 1); + + /* "sqlalchemy/engine/_row_cy.py":133 + * + * def _to_tuple_instance(self) -> Tuple[Any, ...]: + * return self._data # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->_data); + __pyx_r = __pyx_v_self->_data; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":132 + * return self._get_by_key_impl(name, True) + * + * def _to_tuple_instance(self) -> Tuple[Any, ...]: # <<<<<<<<<<<<<< + * return self._data + * + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":49 + * + * if cython.compiled: + * _parent: ResultMetaData = cython.declare(object, visibility="readonly") # <<<<<<<<<<<<<< + * _key_to_index: Dict[_KeyType, int] = cython.declare( + * dict, visibility="readonly" + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent_1__get__(PyObject 
*__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent___get__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 1); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->_parent); + __pyx_r = __pyx_v_self->_parent; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":50 + * if cython.compiled: + * _parent: ResultMetaData = cython.declare(object, visibility="readonly") + * _key_to_index: Dict[_KeyType, int] = cython.declare( # <<<<<<<<<<<<<< + * dict, visibility="readonly" + * ) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index_1__get__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index___get__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 1); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->_key_to_index); + __pyx_r = __pyx_v_self->_key_to_index; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":53 + * dict, visibility="readonly" + * ) + * _data: Tuple[Any, ...] 
= cython.declare(tuple, visibility="readonly") # <<<<<<<<<<<<<< + * + * def __init__( + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data_1__get__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data___get__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 1); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->_data); + __pyx_r = __pyx_v_self->_data; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":136 + * + * + * @cython.inline # <<<<<<<<<<<<<< + * @cython.cfunc + * def _apply_processors( + */ + +static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy__apply_processors(PyObject *__pyx_v_proc, PyObject *__pyx_v_data) { + PyObject *__pyx_v_res = 0; + Py_ssize_t __pyx_v_proc_size; + Py_ssize_t __pyx_v_i; + PyObject *__pyx_v_p = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + int __pyx_t_3; + Py_ssize_t __pyx_t_4; + Py_ssize_t __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + unsigned int __pyx_t_9; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_apply_processors", 1); + + /* "sqlalchemy/engine/_row_cy.py":141 + * proc: _ProcessorsType, data: Sequence[Any] + * ) -> Tuple[Any, ...]: + * res: List[Any] = list(data) # <<<<<<<<<<<<<< + * proc_size: cython.Py_ssize_t = len(proc) + * # TODO: would be nice to do this only on the fist row + */ + __pyx_t_1 = PySequence_List(__pyx_v_data); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_res = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/engine/_row_cy.py":142 + * ) -> Tuple[Any, ...]: + * res: List[Any] = list(data) + * proc_size: cython.Py_ssize_t = len(proc) # <<<<<<<<<<<<<< + * # TODO: would be nice to do this only on the fist row + * assert len(res) == proc_size + */ + __pyx_t_2 = PyObject_Length(__pyx_v_proc); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 142, __pyx_L1_error) + __pyx_v_proc_size = __pyx_t_2; + + /* "sqlalchemy/engine/_row_cy.py":144 + * proc_size: cython.Py_ssize_t = len(proc) + * # TODO: would be nice to do this only on the fist row + * assert len(res) == proc_size # <<<<<<<<<<<<<< + * for i in range(proc_size): + * p = proc[i] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + if (unlikely(__pyx_assertions_enabled())) { + __pyx_t_2 = __Pyx_PyList_GET_SIZE(__pyx_v_res); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 144, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_2 == __pyx_v_proc_size); + if (unlikely(!__pyx_t_3)) { + 
__Pyx_Raise(__pyx_builtin_AssertionError, 0, 0, 0); + __PYX_ERR(0, 144, __pyx_L1_error) + } + } + #else + if ((1)); else __PYX_ERR(0, 144, __pyx_L1_error) + #endif + + /* "sqlalchemy/engine/_row_cy.py":145 + * # TODO: would be nice to do this only on the fist row + * assert len(res) == proc_size + * for i in range(proc_size): # <<<<<<<<<<<<<< + * p = proc[i] + * if p is not None: + */ + __pyx_t_2 = __pyx_v_proc_size; + __pyx_t_4 = __pyx_t_2; + for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { + __pyx_v_i = __pyx_t_5; + + /* "sqlalchemy/engine/_row_cy.py":146 + * assert len(res) == proc_size + * for i in range(proc_size): + * p = proc[i] # <<<<<<<<<<<<<< + * if p is not None: + * res[i] = p(res[i]) + */ + __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_proc, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XDECREF_SET(__pyx_v_p, __pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/engine/_row_cy.py":147 + * for i in range(proc_size): + * p = proc[i] + * if p is not None: # <<<<<<<<<<<<<< + * res[i] = p(res[i]) + * return tuple(res) + */ + __pyx_t_3 = (__pyx_v_p != Py_None); + if (__pyx_t_3) { + + /* "sqlalchemy/engine/_row_cy.py":148 + * p = proc[i] + * if p is not None: + * res[i] = p(res[i]) # <<<<<<<<<<<<<< + * return tuple(res) + * + */ + __pyx_t_6 = __Pyx_GetItemInt_List(__pyx_v_res, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 148, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_INCREF(__pyx_v_p); + __pyx_t_7 = __pyx_v_p; __pyx_t_8 = NULL; + __pyx_t_9 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + __pyx_t_9 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_8, __pyx_t_6}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_7, __pyx_callargs+1-__pyx_t_9, 1+__pyx_t_9); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 148, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + if (unlikely((__Pyx_SetItemInt(__pyx_v_res, __pyx_v_i, __pyx_t_1, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1) < 0))) __PYX_ERR(0, 148, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/engine/_row_cy.py":147 + * for i in range(proc_size): + * p = proc[i] + * if p is not None: # <<<<<<<<<<<<<< + * res[i] = p(res[i]) + * return tuple(res) + */ + } + } + + /* "sqlalchemy/engine/_row_cy.py":149 + * if p is not None: + * res[i] = p(res[i]) + * return tuple(res) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyList_AsTuple(__pyx_v_res); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":136 + * + * + * @cython.inline # <<<<<<<<<<<<<< + * @cython.cfunc + * def _apply_processors( + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy._apply_processors", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_res); + 
__Pyx_XDECREF(__pyx_v_p); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_row_cy.py":156 + * # Turn off annotation typing so the compiled version accepts the python + * # class too. + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def rowproxy_reconstructor( + * cls: Type[BaseRow], state: Dict[str, Any] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_3rowproxy_reconstructor(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_3rowproxy_reconstructor = {"rowproxy_reconstructor", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_3rowproxy_reconstructor, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_3rowproxy_reconstructor(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_cls = 0; + PyObject *__pyx_v_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[2] = {0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("rowproxy_reconstructor (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_cls,&__pyx_n_s_state,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_cls)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 156, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 156, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("rowproxy_reconstructor", 1, 2, 2, 1); __PYX_ERR(0, 156, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "rowproxy_reconstructor") < 0)) __PYX_ERR(0, 156, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 2)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + } + __pyx_v_cls = values[0]; + __pyx_v_state = 
values[1]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("rowproxy_reconstructor", 1, 2, 2, __pyx_nargs); __PYX_ERR(0, 156, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.rowproxy_reconstructor", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_2rowproxy_reconstructor(__pyx_self, __pyx_v_cls, __pyx_v_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_2rowproxy_reconstructor(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_cls, PyObject *__pyx_v_state) { + PyObject *__pyx_v_obj = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("rowproxy_reconstructor", 1); + + /* "sqlalchemy/engine/_row_cy.py":160 + * cls: Type[BaseRow], state: Dict[str, Any] + * ) -> BaseRow: + * obj = cls.__new__(cls) # <<<<<<<<<<<<<< + * obj.__setstate__(state) + * return obj + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_cls, __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_cls}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __pyx_v_obj = __pyx_t_1; + __pyx_t_1 = 0; + + /* "sqlalchemy/engine/_row_cy.py":161 + * ) -> BaseRow: + * obj = cls.__new__(cls) + * obj.__setstate__(state) # <<<<<<<<<<<<<< + * return obj + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_obj, __pyx_n_s_setstate); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 161, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_state}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) 
__PYX_ERR(0, 161, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/engine/_row_cy.py":162 + * obj = cls.__new__(cls) + * obj.__setstate__(state) + * return obj # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_obj); + __pyx_r = __pyx_v_obj; + goto __pyx_L0; + + /* "sqlalchemy/engine/_row_cy.py":156 + * # Turn off annotation typing so the compiled version accepts the python + * # class too. + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def rowproxy_reconstructor( + * cls: Type[BaseRow], state: Dict[str, Any] + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.engine._row_cy.rowproxy_reconstructor", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_obj); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} +static struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow __pyx_vtable_10sqlalchemy_6engine_7_row_cy_BaseRow; + +static PyObject *__pyx_tp_new_10sqlalchemy_6engine_7_row_cy_BaseRow(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *p; + PyObject *o; + #if CYTHON_COMPILING_IN_LIMITED_API + allocfunc alloc_func = (allocfunc)PyType_GetSlot(t, Py_tp_alloc); + o = alloc_func(t, 0); + #else + if (likely(!__Pyx_PyType_HasFeature(t, Py_TPFLAGS_IS_ABSTRACT))) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + #endif + p = ((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)o); + p->__pyx_vtab = __pyx_vtabptr_10sqlalchemy_6engine_7_row_cy_BaseRow; + p->_parent = Py_None; Py_INCREF(Py_None); + p->_key_to_index = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_data = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_10sqlalchemy_6engine_7_row_cy_BaseRow(PyObject *o) { + struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *p = (struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely((PY_VERSION_HEX >= 0x03080000 || __Pyx_PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE)) && __Pyx_PyObject_GetSlot(o, tp_finalize, destructor)) && !__Pyx_PyObject_GC_IsFinalized(o)) { + if (__Pyx_PyObject_GetSlot(o, tp_dealloc, destructor) == __pyx_tp_dealloc_10sqlalchemy_6engine_7_row_cy_BaseRow) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_parent); + Py_CLEAR(p->_key_to_index); + Py_CLEAR(p->_data); + #if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY + (*Py_TYPE(o)->tp_free)(o); + #else + { + freefunc tp_free = (freefunc)PyType_GetSlot(Py_TYPE(o), Py_tp_free); + if (tp_free) tp_free(o); + } + #endif +} + +static int __pyx_tp_traverse_10sqlalchemy_6engine_7_row_cy_BaseRow(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *p = (struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)o; + if (p->_parent) { + e = (*v)(p->_parent, a); if (e) return e; + } + if (p->_key_to_index) { + e = (*v)(p->_key_to_index, a); if (e) return e; + } + if (p->_data) { + e = (*v)(p->_data, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_10sqlalchemy_6engine_7_row_cy_BaseRow(PyObject *o) { + 
PyObject* tmp; + struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *p = (struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)o; + tmp = ((PyObject*)p->_parent); + p->_parent = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_key_to_index); + p->_key_to_index = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_data); + p->_data = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} +static PyObject *__pyx_sq_item_10sqlalchemy_6engine_7_row_cy_BaseRow(PyObject *o, Py_ssize_t i) { + PyObject *r; + PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0; + r = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, x); + Py_DECREF(x); + return r; +} + +static PyObject *__pyx_tp_getattro_10sqlalchemy_6engine_7_row_cy_BaseRow(PyObject *o, PyObject *n) { + PyObject *v = __Pyx_PyObject_GenericGetAttr(o, n); + if (!v && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + v = __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_21__getattr__(o, n); + } + return v; +} + +static PyObject *__pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__parent(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent_1__get__(o); +} + +static PyObject *__pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__key_to_index(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index_1__get__(o); +} + +static PyObject *__pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__data(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data_1__get__(o); +} + +static PyMethodDef __pyx_methods_10sqlalchemy_6engine_7_row_cy_BaseRow[] = { + {"__reduce__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__getstate__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__setstate__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"_values_impl", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"_get_by_key_impl_mapping", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__getattr__", (PyCFunction)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_21__getattr__, METH_O|METH_COEXIST, 0}, + {"_to_tuple_instance", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_10sqlalchemy_6engine_7_row_cy_BaseRow[] = { + {(char *)"_parent", __pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__parent, 0, (char *)0, 0}, + {(char *)"_key_to_index", __pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__key_to_index, 0, (char *)0, 0}, + {(char *)"_data", __pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__data, 0, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow_slots[] = { + {Py_tp_dealloc, 
(void *)__pyx_tp_dealloc_10sqlalchemy_6engine_7_row_cy_BaseRow}, + {Py_sq_length, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__}, + {Py_sq_item, (void *)__pyx_sq_item_10sqlalchemy_6engine_7_row_cy_BaseRow}, + {Py_mp_length, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__}, + {Py_mp_subscript, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_17__getitem__}, + {Py_tp_hash, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_15__hash__}, + {Py_tp_getattro, (void *)__pyx_tp_getattro_10sqlalchemy_6engine_7_row_cy_BaseRow}, + {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_6engine_7_row_cy_BaseRow}, + {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_6engine_7_row_cy_BaseRow}, + {Py_tp_iter, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_11__iter__}, + {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_6engine_7_row_cy_BaseRow}, + {Py_tp_getset, (void *)__pyx_getsets_10sqlalchemy_6engine_7_row_cy_BaseRow}, + {Py_tp_init, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_1__init__}, + {Py_tp_new, (void *)__pyx_tp_new_10sqlalchemy_6engine_7_row_cy_BaseRow}, + {0, 0}, +}; +static PyType_Spec __pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow_spec = { + "sqlalchemy.engine._row_cy.BaseRow", + sizeof(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow), + 0, + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, + __pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow_slots, +}; +#else + +static PySequenceMethods __pyx_tp_as_sequence_BaseRow = { + __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__, /*sq_length*/ + 0, /*sq_concat*/ + 0, /*sq_repeat*/ + __pyx_sq_item_10sqlalchemy_6engine_7_row_cy_BaseRow, /*sq_item*/ + 0, /*sq_slice*/ + 0, /*sq_ass_item*/ + 0, /*sq_ass_slice*/ + 0, /*sq_contains*/ + 0, /*sq_inplace_concat*/ + 0, /*sq_inplace_repeat*/ +}; + +static PyMappingMethods __pyx_tp_as_mapping_BaseRow = { + __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__, /*mp_length*/ + __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_17__getitem__, /*mp_subscript*/ + 0, /*mp_ass_subscript*/ +}; + +static PyTypeObject __pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow = { + PyVarObject_HEAD_INIT(0, 0) + "sqlalchemy.engine._row_cy.""BaseRow", /*tp_name*/ + sizeof(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + &__pyx_tp_as_sequence_BaseRow, /*tp_as_sequence*/ + &__pyx_tp_as_mapping_BaseRow, /*tp_as_mapping*/ + __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_15__hash__, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + __pyx_tp_getattro_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_traverse*/ + __pyx_tp_clear_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 
__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_11__iter__, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + #if !CYTHON_USE_TYPE_SPECS + 0, /*tp_dictoffset*/ + #endif + __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + #if CYTHON_USE_TP_FINALIZE + 0, /*tp_finalize*/ + #else + NULL, /*tp_finalize*/ + #endif + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if __PYX_NEED_TP_PRINT_SLOT == 1 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030C0000 + 0, /*tp_watched*/ + #endif + #if PY_VERSION_HEX >= 0x030d00A4 + 0, /*tp_versions_used*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, /*tp_pypy_flags*/ + #endif +}; +#endif + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif +/* #### Code section: pystring_table ### */ + +static int __Pyx_CreateStringTabAndInitStrings(void) { + __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, + {&__pyx_n_s_Any, __pyx_k_Any, sizeof(__pyx_k_Any), 0, 0, 1, 1}, + {&__pyx_n_s_AssertionError, __pyx_k_AssertionError, sizeof(__pyx_k_AssertionError), 0, 0, 1, 1}, + {&__pyx_n_s_BaseRow, __pyx_k_BaseRow, sizeof(__pyx_k_BaseRow), 0, 0, 1, 1}, + {&__pyx_n_s_BaseRow___getstate, __pyx_k_BaseRow___getstate, sizeof(__pyx_k_BaseRow___getstate), 0, 0, 1, 1}, + {&__pyx_n_s_BaseRow___reduce, __pyx_k_BaseRow___reduce, sizeof(__pyx_k_BaseRow___reduce), 0, 0, 1, 1}, + {&__pyx_n_s_BaseRow___setstate, __pyx_k_BaseRow___setstate, sizeof(__pyx_k_BaseRow___setstate), 0, 0, 1, 1}, + {&__pyx_n_s_BaseRow__get_by_key_impl_mapping, __pyx_k_BaseRow__get_by_key_impl_mapping, sizeof(__pyx_k_BaseRow__get_by_key_impl_mapping), 0, 0, 1, 1}, + {&__pyx_n_s_BaseRow__to_tuple_instance, __pyx_k_BaseRow__to_tuple_instance, sizeof(__pyx_k_BaseRow__to_tuple_instance), 0, 0, 1, 1}, + {&__pyx_n_s_BaseRow__values_impl, __pyx_k_BaseRow__values_impl, sizeof(__pyx_k_BaseRow__values_impl), 0, 0, 1, 1}, + {&__pyx_n_s_Dict, __pyx_k_Dict, sizeof(__pyx_k_Dict), 0, 0, 1, 1}, + {&__pyx_kp_s_Dict_str_Any, __pyx_k_Dict_str_Any, sizeof(__pyx_k_Dict_str_Any), 0, 0, 1, 0}, + {&__pyx_n_s_Iterator, __pyx_k_Iterator, sizeof(__pyx_k_Iterator), 0, 0, 1, 1}, + {&__pyx_n_s_KeyType, __pyx_k_KeyType, sizeof(__pyx_k_KeyType), 0, 0, 1, 1}, + {&__pyx_n_s_List, __pyx_k_List, sizeof(__pyx_k_List), 0, 0, 1, 1}, + {&__pyx_kp_s_List_Any, __pyx_k_List_Any, sizeof(__pyx_k_List_Any), 0, 0, 1, 0}, + {&__pyx_n_s_None, __pyx_k_None, sizeof(__pyx_k_None), 0, 0, 1, 1}, + {&__pyx_n_s_Optional, __pyx_k_Optional, sizeof(__pyx_k_Optional), 0, 0, 1, 1}, + {&__pyx_n_s_ProcessorsType, __pyx_k_ProcessorsType, sizeof(__pyx_k_ProcessorsType), 0, 0, 1, 1}, + 
{&__pyx_n_s_ResultMetaData, __pyx_k_ResultMetaData, sizeof(__pyx_k_ResultMetaData), 0, 0, 1, 1}, + {&__pyx_n_s_Sequence, __pyx_k_Sequence, sizeof(__pyx_k_Sequence), 0, 0, 1, 1}, + {&__pyx_n_s_TYPE_CHECKING, __pyx_k_TYPE_CHECKING, sizeof(__pyx_k_TYPE_CHECKING), 0, 0, 1, 1}, + {&__pyx_n_s_Tuple, __pyx_k_Tuple, sizeof(__pyx_k_Tuple), 0, 0, 1, 1}, + {&__pyx_kp_s_Tuple_Any, __pyx_k_Tuple_Any, sizeof(__pyx_k_Tuple_Any), 0, 0, 1, 0}, + {&__pyx_kp_s_Tuple_Any_Any, __pyx_k_Tuple_Any_Any, sizeof(__pyx_k_Tuple_Any_Any), 0, 0, 1, 0}, + {&__pyx_n_s_Type, __pyx_k_Type, sizeof(__pyx_k_Type), 0, 0, 1, 1}, + {&__pyx_kp_s_Type_BaseRow, __pyx_k_Type_BaseRow, sizeof(__pyx_k_Type_BaseRow), 0, 0, 1, 0}, + {&__pyx_n_s__15, __pyx_k__15, sizeof(__pyx_k__15), 0, 0, 1, 1}, + {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, + {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, + {&__pyx_n_s_class, __pyx_k_class, sizeof(__pyx_k_class), 0, 0, 1, 1}, + {&__pyx_n_s_class_getitem, __pyx_k_class_getitem, sizeof(__pyx_k_class_getitem), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_cls, __pyx_k_cls, sizeof(__pyx_k_cls), 0, 0, 1, 1}, + {&__pyx_n_s_data, __pyx_k_data, sizeof(__pyx_k_data), 0, 0, 1, 1}, + {&__pyx_n_u_data_2, __pyx_k_data_2, sizeof(__pyx_k_data_2), 0, 1, 0, 1}, + {&__pyx_kp_u_disable, __pyx_k_disable, sizeof(__pyx_k_disable), 0, 1, 0, 0}, + {&__pyx_kp_u_enable, __pyx_k_enable, sizeof(__pyx_k_enable), 0, 1, 0, 0}, + {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, + {&__pyx_n_s_get, __pyx_k_get, sizeof(__pyx_k_get), 0, 0, 1, 1}, + {&__pyx_n_s_get_by_key_impl_mapping, __pyx_k_get_by_key_impl_mapping, sizeof(__pyx_k_get_by_key_impl_mapping), 0, 0, 1, 1}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, + {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, + {&__pyx_kp_u_isenabled, __pyx_k_isenabled, sizeof(__pyx_k_isenabled), 0, 1, 0, 0}, + {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, + {&__pyx_n_s_key_not_found, __pyx_k_key_not_found, sizeof(__pyx_k_key_not_found), 0, 0, 1, 1}, + {&__pyx_n_s_key_to_index, __pyx_k_key_to_index, sizeof(__pyx_k_key_to_index), 0, 0, 1, 1}, + {&__pyx_n_s_key_to_index_2, __pyx_k_key_to_index_2, sizeof(__pyx_k_key_to_index_2), 0, 0, 1, 1}, + {&__pyx_n_u_key_to_index_2, __pyx_k_key_to_index_2, sizeof(__pyx_k_key_to_index_2), 0, 1, 0, 1}, + {&__pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_k_lib_sqlalchemy_engine__row_cy_py, sizeof(__pyx_k_lib_sqlalchemy_engine__row_cy_py), 0, 0, 1, 0}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_n_s_obj, __pyx_k_obj, sizeof(__pyx_k_obj), 0, 0, 1, 1}, + {&__pyx_n_s_parent, __pyx_k_parent, sizeof(__pyx_k_parent), 0, 0, 1, 1}, + {&__pyx_n_u_parent_2, __pyx_k_parent_2, sizeof(__pyx_k_parent_2), 0, 1, 0, 1}, + {&__pyx_n_s_processors, __pyx_k_processors, sizeof(__pyx_k_processors), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, + 
{&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, + {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, + {&__pyx_n_s_rowproxy_reconstructor, __pyx_k_rowproxy_reconstructor, sizeof(__pyx_k_rowproxy_reconstructor), 0, 0, 1, 1}, + {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, + {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_slots, __pyx_k_slots, sizeof(__pyx_k_slots), 0, 0, 1, 1}, + {&__pyx_n_s_sqlalchemy_engine__row_cy, __pyx_k_sqlalchemy_engine__row_cy, sizeof(__pyx_k_sqlalchemy_engine__row_cy), 0, 0, 1, 1}, + {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_to_tuple_instance, __pyx_k_to_tuple_instance, sizeof(__pyx_k_to_tuple_instance), 0, 0, 1, 1}, + {&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, + {&__pyx_n_s_values_impl, __pyx_k_values_impl, sizeof(__pyx_k_values_impl), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} + }; + return __Pyx_InitStrings(__pyx_string_tab); +} +/* #### Code section: cached_builtins ### */ +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_AssertionError = __Pyx_GetBuiltinName(__pyx_n_s_AssertionError); if (!__pyx_builtin_AssertionError) __PYX_ERR(0, 144, __pyx_L1_error) + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 145, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: cached_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "sqlalchemy/engine/_row_cy.py":36 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_is_compiled, 36, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 36, __pyx_L1_error) + + /* "sqlalchemy/engine/_row_cy.py":46 + * @cython.cclass + * class BaseRow: + * __slots__ = ("_parent", "_data", "_key_to_index") # <<<<<<<<<<<<<< + * + * if cython.compiled: + */ + __pyx_tuple__3 = PyTuple_Pack(3, __pyx_n_u_parent_2, __pyx_n_u_data_2, __pyx_n_u_key_to_index_2); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 46, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__3); + __Pyx_GIVEREF(__pyx_tuple__3); + + /* "sqlalchemy/engine/_row_cy.py":90 + * object.__setattr__(self, "_data", data) + * + * def __reduce__(self) -> Tuple[Any, Any]: # <<<<<<<<<<<<<< + * return ( + * rowproxy_reconstructor, + */ + __pyx_tuple__4 = PyTuple_Pack(1, __pyx_n_s_self); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); + __pyx_codeobj__5 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__4, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_reduce, 90, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__5)) __PYX_ERR(0, 90, 
__pyx_L1_error) + + /* "sqlalchemy/engine/_row_cy.py":96 + * ) + * + * def __getstate__(self) -> Dict[str, Any]: # <<<<<<<<<<<<<< + * return {"_parent": self._parent, "_data": self._data} + * + */ + __pyx_codeobj__6 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__4, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_getstate, 96, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__6)) __PYX_ERR(0, 96, __pyx_L1_error) + + /* "sqlalchemy/engine/_row_cy.py":99 + * return {"_parent": self._parent, "_data": self._data} + * + * def __setstate__(self, state: Dict[str, Any]) -> None: # <<<<<<<<<<<<<< + * parent = state["_parent"] + * self._set_attrs(parent, parent._key_to_index, state["_data"]) + */ + __pyx_tuple__7 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_parent); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); + __pyx_codeobj__8 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__7, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_setstate, 99, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__8)) __PYX_ERR(0, 99, __pyx_L1_error) + + /* "sqlalchemy/engine/_row_cy.py":103 + * self._set_attrs(parent, parent._key_to_index, state["_data"]) + * + * def _values_impl(self) -> List[Any]: # <<<<<<<<<<<<<< + * return list(self._data) + * + */ + __pyx_codeobj__9 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__4, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_values_impl, 103, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__9)) __PYX_ERR(0, 103, __pyx_L1_error) + + /* "sqlalchemy/engine/_row_cy.py":118 + * return self._data[key] + * + * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: # <<<<<<<<<<<<<< + * return self._get_by_key_impl(key, False) + * + */ + __pyx_tuple__10 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_key); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(0, 118, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__10); + __Pyx_GIVEREF(__pyx_tuple__10); + __pyx_codeobj__11 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__10, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_get_by_key_impl_mapping, 118, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__11)) __PYX_ERR(0, 118, __pyx_L1_error) + + /* "sqlalchemy/engine/_row_cy.py":132 + * return self._get_by_key_impl(name, True) + * + * def _to_tuple_instance(self) -> Tuple[Any, ...]: # <<<<<<<<<<<<<< + * return self._data + * + */ + __pyx_codeobj__12 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__4, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_to_tuple_instance, 132, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__12)) __PYX_ERR(0, 132, __pyx_L1_error) + + /* "sqlalchemy/engine/_row_cy.py":156 + * # Turn off annotation typing so the compiled version accepts the python + * # class too. 
+ * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def rowproxy_reconstructor( + * cls: Type[BaseRow], state: Dict[str, Any] + */ + __pyx_tuple__13 = PyTuple_Pack(3, __pyx_n_s_cls, __pyx_n_s_state, __pyx_n_s_obj); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(0, 156, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__13); + __Pyx_GIVEREF(__pyx_tuple__13); + __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__13, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_rowproxy_reconstructor, 156, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(0, 156, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} +/* #### Code section: init_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { + __pyx_umethod_PyDict_Type_get.type = (PyObject*)&PyDict_Type; + __pyx_umethod_PyDict_Type_get.method_name = &__pyx_n_s_get; + if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: init_globals ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + /* AssertionsEnabled.init */ + if (likely(__Pyx_init_assertions_enabled() == 0)); else + +if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L1_error) + + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: init_module ### */ + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __pyx_vtabptr_10sqlalchemy_6engine_7_row_cy_BaseRow = &__pyx_vtable_10sqlalchemy_6engine_7_row_cy_BaseRow; + __pyx_vtable_10sqlalchemy_6engine_7_row_cy_BaseRow._set_attrs = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *, PyObject *, PyObject *, PyObject *))__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs; + __pyx_vtable_10sqlalchemy_6engine_7_row_cy_BaseRow._get_by_key_impl = (PyObject *(*)(struct 
__pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *, PyObject *, int))__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__get_by_key_impl; + #if CYTHON_USE_TYPE_SPECS + __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow_spec, NULL); if (unlikely(!__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow)) __PYX_ERR(0, 45, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow_spec, __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow) < 0) __PYX_ERR(0, 45, __pyx_L1_error) + #else + __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow = &__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow; + #endif + #if !CYTHON_COMPILING_IN_LIMITED_API + #endif + #if !CYTHON_USE_TYPE_SPECS + if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow) < 0) __PYX_ERR(0, 45, __pyx_L1_error) + #endif + #if PY_MAJOR_VERSION < 3 + __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow->tp_print = 0; + #endif + #if CYTHON_UPDATE_DESCRIPTOR_DOC + { + PyObject *wrapper = PyObject_GetAttrString((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, "__init__"); if (unlikely(!wrapper)) __PYX_ERR(0, 45, __pyx_L1_error) + if (__Pyx_IS_TYPE(wrapper, &PyWrapperDescr_Type)) { + __pyx_wrapperbase_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__ = *((PyWrapperDescrObject *)wrapper)->d_base; + __pyx_wrapperbase_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__.doc = __pyx_doc_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__; + ((PyWrapperDescrObject *)wrapper)->d_base = &__pyx_wrapperbase_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__; + } + } + #endif + if (__Pyx_SetVtable(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_vtabptr_10sqlalchemy_6engine_7_row_cy_BaseRow) < 0) __PYX_ERR(0, 45, __pyx_L1_error) + #if !CYTHON_COMPILING_IN_LIMITED_API + if (__Pyx_MergeVtables(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow) < 0) __PYX_ERR(0, 45, __pyx_L1_error) + #endif + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_BaseRow, (PyObject *) __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow) < 0) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec__row_cy(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec__row_cy}, + {0, NULL} +}; +#endif + +#ifdef __cplusplus +namespace { + struct PyModuleDef __pyx_moduledef = + #else + static struct PyModuleDef __pyx_moduledef = + #endif + { + PyModuleDef_HEAD_INIT, + 
"_row_cy", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #elif CYTHON_USE_MODULE_STATE + sizeof(__pyx_mstate), /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + #if CYTHON_USE_MODULE_STATE + __pyx_m_traverse, /* m_traverse */ + __pyx_m_clear, /* m_clear */ + NULL /* m_free */ + #else + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ + #endif + }; + #ifdef __cplusplus +} /* anonymous namespace */ +#endif +#endif + +#ifndef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#elif PY_MAJOR_VERSION < 3 +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" void +#else +#define __Pyx_PyMODINIT_FUNC void +#endif +#else +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyObject * +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC init_row_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC init_row_cy(void) +#else +__Pyx_PyMODINIT_FUNC PyInit__row_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit__row_cy(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? -1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) +#else +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) +#endif +{ + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { +#if CYTHON_COMPILING_IN_LIMITED_API + result = PyModule_AddObject(module, to_name, value); +#else + result = PyDict_SetItemString(moddict, to_name, value); +#endif + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + CYTHON_UNUSED_VAR(def); + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; +#if CYTHON_COMPILING_IN_LIMITED_API + moddict = module; +#else + moddict = PyModule_GetDict(module); + 
if (unlikely(!moddict)) goto bad; +#endif + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec__row_cy(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + int stringtab_initialized = 0; + #if CYTHON_USE_MODULE_STATE + int pystate_addmodule_run = 0; + #endif + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module '_row_cy' has already been imported. Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_row_cy", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #elif CYTHON_USE_MODULE_STATE + __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + { + int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); + __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_row_cy" pseudovariable */ + if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + pystate_addmodule_run = 1; + } + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #endif + CYTHON_UNUSED_VAR(__pyx_t_1); + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__row_cy(void)", 0); + if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, 
__pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + PyEval_InitThreads(); + #endif + /*--- Initialize various global constants etc. ---*/ + if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + stringtab_initialized = 1; + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_sqlalchemy__engine___row_cy) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "sqlalchemy.engine._row_cy")) { + if (unlikely((PyDict_SetItemString(modules, "sqlalchemy.engine._row_cy", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + if (unlikely((__Pyx_modinit_type_init_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + (void)__Pyx_modinit_type_import_code(); + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "sqlalchemy/engine/_row_cy.py":10 + * from __future__ import annotations + * + * from typing import Any # <<<<<<<<<<<<<< + * from typing import Dict + * from typing import Iterator + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Any); + __Pyx_GIVEREF(__pyx_n_s_Any); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Any)) __PYX_ERR(0, 10, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Any); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Any, __pyx_t_2) < 0) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_row_cy.py":11 + * + * from typing import Any + * from typing import Dict # <<<<<<<<<<<<<< + * from typing import Iterator + * from typing import List + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Dict); + __Pyx_GIVEREF(__pyx_n_s_Dict); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Dict)) __PYX_ERR(0, 11, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Dict); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Dict, __pyx_t_3) < 0) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_row_cy.py":12 + * from typing import Any + * from typing import Dict + * from typing import Iterator # <<<<<<<<<<<<<< + * from typing import List + * from typing import Optional + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Iterator); + __Pyx_GIVEREF(__pyx_n_s_Iterator); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Iterator)) __PYX_ERR(0, 12, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Iterator); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Iterator, __pyx_t_2) < 0) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_row_cy.py":13 + * from typing import Dict + * from typing import Iterator + * from typing import List # <<<<<<<<<<<<<< + * from typing import Optional + * from typing import Sequence + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_List); + __Pyx_GIVEREF(__pyx_n_s_List); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_List)) __PYX_ERR(0, 13, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_List); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_List, __pyx_t_3) < 0) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_row_cy.py":14 + * from typing import Iterator + * from typing import List + * from typing import Optional # <<<<<<<<<<<<<< + * from typing import Sequence + * from typing import Tuple + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + 
__Pyx_INCREF(__pyx_n_s_Optional); + __Pyx_GIVEREF(__pyx_n_s_Optional); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Optional)) __PYX_ERR(0, 14, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Optional); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Optional, __pyx_t_2) < 0) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_row_cy.py":15 + * from typing import List + * from typing import Optional + * from typing import Sequence # <<<<<<<<<<<<<< + * from typing import Tuple + * from typing import Type + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Sequence); + __Pyx_GIVEREF(__pyx_n_s_Sequence); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Sequence)) __PYX_ERR(0, 15, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Sequence); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Sequence, __pyx_t_3) < 0) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_row_cy.py":16 + * from typing import Optional + * from typing import Sequence + * from typing import Tuple # <<<<<<<<<<<<<< + * from typing import Type + * from typing import TYPE_CHECKING + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Tuple); + __Pyx_GIVEREF(__pyx_n_s_Tuple); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Tuple)) __PYX_ERR(0, 16, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Tuple); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Tuple, __pyx_t_2) < 0) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_row_cy.py":17 + * from typing import Sequence + * from typing import Tuple + * from typing import Type # <<<<<<<<<<<<<< + * from typing import TYPE_CHECKING + * + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Type); + __Pyx_GIVEREF(__pyx_n_s_Type); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Type)) __PYX_ERR(0, 17, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Type); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, 
__pyx_n_s_Type, __pyx_t_3) < 0) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_row_cy.py":18 + * from typing import Tuple + * from typing import Type + * from typing import TYPE_CHECKING # <<<<<<<<<<<<<< + * + * if TYPE_CHECKING: + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_TYPE_CHECKING); + __Pyx_GIVEREF(__pyx_n_s_TYPE_CHECKING); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_TYPE_CHECKING)) __PYX_ERR(0, 18, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_TYPE_CHECKING); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_TYPE_CHECKING, __pyx_t_2) < 0) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_row_cy.py":20 + * from typing import TYPE_CHECKING + * + * if TYPE_CHECKING: # <<<<<<<<<<<<<< + * from .result import _KeyType + * from .result import _ProcessorsType + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_TYPE_CHECKING); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 20, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely((__pyx_t_4 < 0))) __PYX_ERR(0, 20, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_4) { + + /* "sqlalchemy/engine/_row_cy.py":21 + * + * if TYPE_CHECKING: + * from .result import _KeyType # <<<<<<<<<<<<<< + * from .result import _ProcessorsType + * from .result import ResultMetaData + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_KeyType); + __Pyx_GIVEREF(__pyx_n_s_KeyType); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_KeyType)) __PYX_ERR(0, 21, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_result, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_KeyType); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_KeyType, __pyx_t_3) < 0) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_row_cy.py":22 + * if TYPE_CHECKING: + * from .result import _KeyType + * from .result import _ProcessorsType # <<<<<<<<<<<<<< + * from .result import ResultMetaData + * + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_ProcessorsType); + __Pyx_GIVEREF(__pyx_n_s_ProcessorsType); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_ProcessorsType)) __PYX_ERR(0, 22, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_result, __pyx_t_2, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_ProcessorsType); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if 
(PyDict_SetItem(__pyx_d, __pyx_n_s_ProcessorsType, __pyx_t_2) < 0) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_row_cy.py":23 + * from .result import _KeyType + * from .result import _ProcessorsType + * from .result import ResultMetaData # <<<<<<<<<<<<<< + * + * # START GENERATED CYTHON IMPORT + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_ResultMetaData); + __Pyx_GIVEREF(__pyx_n_s_ResultMetaData); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_ResultMetaData)) __PYX_ERR(0, 23, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_result, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_ResultMetaData); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_ResultMetaData, __pyx_t_3) < 0) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_row_cy.py":20 + * from typing import TYPE_CHECKING + * + * if TYPE_CHECKING: # <<<<<<<<<<<<<< + * from .result import _KeyType + * from .result import _ProcessorsType + */ + } + + /* "sqlalchemy/engine/_row_cy.py":27 + * # START GENERATED CYTHON IMPORT + * # This section is automatically generated by the script tools/cython_imports.py + * try: # <<<<<<<<<<<<<< + * # NOTE: the cython compiler needs this "import cython" in the file, it + * # can't be only "from sqlalchemy.util import cython" with the fallback + */ + { + (void)__pyx_t_1; (void)__pyx_t_5; (void)__pyx_t_6; /* mark used */ + /*try:*/ { + + /* "sqlalchemy/engine/_row_cy.py":31 + * # can't be only "from sqlalchemy.util import cython" with the fallback + * # in that module + * import cython # <<<<<<<<<<<<<< + * except ModuleNotFoundError: + * from sqlalchemy.util import cython + */ + } + } + + /* "sqlalchemy/engine/_row_cy.py":36 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 36, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_bool) < 0) __PYX_ERR(0, 36, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_1_is_compiled, 0, __pyx_n_s_is_compiled, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__2)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 36, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_compiled, __pyx_t_3) < 0) __PYX_ERR(0, 36, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_row_cy.py":46 + * @cython.cclass + * class BaseRow: + * __slots__ = ("_parent", "_data", "_key_to_index") # <<<<<<<<<<<<<< + * + * if cython.compiled: + */ + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_slots, __pyx_tuple__3) < 0) __PYX_ERR(0, 46, __pyx_L1_error) + PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); + + /* 
"sqlalchemy/engine/_row_cy.py":90 + * object.__setattr__(self, "_data", data) + * + * def __reduce__(self) -> Tuple[Any, Any]: # <<<<<<<<<<<<<< + * return ( + * rowproxy_reconstructor, + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_Tuple_Any_Any) < 0) __PYX_ERR(0, 90, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow___reduce, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__5)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_reduce, __pyx_t_2) < 0) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); + + /* "sqlalchemy/engine/_row_cy.py":96 + * ) + * + * def __getstate__(self) -> Dict[str, Any]: # <<<<<<<<<<<<<< + * return {"_parent": self._parent, "_data": self._data} + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 96, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_Dict_str_Any) < 0) __PYX_ERR(0, 96, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow___getstate, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__6)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 96, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_getstate, __pyx_t_3) < 0) __PYX_ERR(0, 96, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); + + /* "sqlalchemy/engine/_row_cy.py":99 + * return {"_parent": self._parent, "_data": self._data} + * + * def __setstate__(self, state: Dict[str, Any]) -> None: # <<<<<<<<<<<<<< + * parent = state["_parent"] + * self._set_attrs(parent, parent._key_to_index, state["_data"]) + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_state, __pyx_kp_s_Dict_str_Any) < 0) __PYX_ERR(0, 99, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 99, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow___setstate, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__8)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_setstate, __pyx_t_2) < 0) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + 
PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); + + /* "sqlalchemy/engine/_row_cy.py":103 + * self._set_attrs(parent, parent._key_to_index, state["_data"]) + * + * def _values_impl(self) -> List[Any]: # <<<<<<<<<<<<<< + * return list(self._data) + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 103, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_List_Any) < 0) __PYX_ERR(0, 103, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow__values_impl, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__9)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 103, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_values_impl, __pyx_t_3) < 0) __PYX_ERR(0, 103, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); + + /* "sqlalchemy/engine/_row_cy.py":118 + * return self._data[key] + * + * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: # <<<<<<<<<<<<<< + * return self._get_by_key_impl(key, False) + * + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 118, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_key, __pyx_n_s_KeyType) < 0) __PYX_ERR(0, 118, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_Any) < 0) __PYX_ERR(0, 118, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow__get_by_key_impl_mapping, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__11)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 118, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_get_by_key_impl_mapping, __pyx_t_2) < 0) __PYX_ERR(0, 118, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); + + /* "sqlalchemy/engine/_row_cy.py":132 + * return self._get_by_key_impl(name, True) + * + * def _to_tuple_instance(self) -> Tuple[Any, ...]: # <<<<<<<<<<<<<< + * return self._data + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_Tuple_Any) < 0) __PYX_ERR(0, 132, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow__to_tuple_instance, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__12)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_to_tuple_instance, __pyx_t_3) < 0) __PYX_ERR(0, 
132, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); + + /* "sqlalchemy/engine/_row_cy.py":156 + * # Turn off annotation typing so the compiled version accepts the python + * # class too. + * @cython.annotation_typing(False) # <<<<<<<<<<<<<< + * def rowproxy_reconstructor( + * cls: Type[BaseRow], state: Dict[str, Any] + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 156, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_cls, __pyx_kp_s_Type_BaseRow) < 0) __PYX_ERR(0, 156, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_state, __pyx_kp_s_Dict_str_Any) < 0) __PYX_ERR(0, 156, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_BaseRow) < 0) __PYX_ERR(0, 156, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_3rowproxy_reconstructor, 0, __pyx_n_s_rowproxy_reconstructor, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__14)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 156, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_rowproxy_reconstructor, __pyx_t_2) < 0) __PYX_ERR(0, 156, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_row_cy.py":1 + * # engine/_row_cy.py # <<<<<<<<<<<<<< + * # Copyright (C) 2010-2025 the SQLAlchemy authors and contributors + * # + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + if (__pyx_m) { + if (__pyx_d && stringtab_initialized) { + __Pyx_AddTraceback("init sqlalchemy.engine._row_cy", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + #if !CYTHON_USE_MODULE_STATE + Py_CLEAR(__pyx_m); + #else + Py_DECREF(__pyx_m); + if (pystate_addmodule_run) { + PyObject *tp, *value, *tb; + PyErr_Fetch(&tp, &value, &tb); + PyState_RemoveModule(&__pyx_moduledef); + PyErr_Restore(tp, value, tb); + } + #endif + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init sqlalchemy.engine._row_cy"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 
0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} +/* #### Code section: cleanup_globals ### */ +/* #### Code section: cleanup_module ### */ +/* #### Code section: main_method ### */ +/* #### Code section: utility_code_pragmas ### */ +#ifdef _MSC_VER +#pragma warning( push ) +/* Warning 4127: conditional expression is constant + * Cython uses constant conditional expressions to allow in inline functions to be optimized at + * compile-time, so this warning is not useful + */ +#pragma warning( disable : 4127 ) +#endif + + + +/* #### Code section: utility_code_def ### */ + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030C00A6 + PyObject *current_exception = tstate->current_exception; + if (unlikely(!current_exception)) return 0; + exc_type = (PyObject*) Py_TYPE(current_exception); + if (exc_type == err) return 1; +#else + exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; +#endif + #if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(exc_type); + #endif + if (unlikely(PyTuple_Check(err))) { + result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + } else { + result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err); + } + #if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(exc_type); + #endif + return result; +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { +#if PY_VERSION_HEX >= 0x030C00A6 + PyObject *tmp_value; + assert(type == NULL || (value != NULL && type == (PyObject*) Py_TYPE(value))); + if (value) { + #if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(((PyBaseExceptionObject*) value)->traceback != tb)) + #endif + PyException_SetTraceback(value, tb); + } + tmp_value = tstate->current_exception; + tstate->current_exception = value; + Py_XDECREF(tmp_value); + Py_XDECREF(type); + Py_XDECREF(tb); +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#endif +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { +#if PY_VERSION_HEX >= 0x030C00A6 + PyObject* exc_value; + exc_value = tstate->current_exception; + tstate->current_exception = 0; + *value = exc_value; + *type = NULL; + *tb = NULL; + if (exc_value) { + *type = (PyObject*) Py_TYPE(exc_value); + Py_INCREF(*type); + #if CYTHON_COMPILING_IN_CPYTHON + *tb = ((PyBaseExceptionObject*) exc_value)->traceback; + Py_XINCREF(*tb); + #else + *tb = PyException_GetTraceback(exc_value); + #endif + } +#else + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb 
= tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#endif +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* PyObjectGetAttrStrNoError */ +#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + (void) PyObject_GetOptionalAttr(obj, attr_name, &result); + return result; +#else +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { + return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); + } +#endif + result = __Pyx_PyObject_GetAttrStr(obj, attr_name); + if (unlikely(!result)) { + __Pyx_PyObject_GetAttrStr_ClearAttributeError(); + } + return result; +#endif +} + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStrNoError(__pyx_b, name); + if (unlikely(!result) && !PyErr_Occurred()) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* TupleAndListFromArray */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE void __Pyx_copy_object_array(PyObject *const *CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { + PyObject *v; + Py_ssize_t i; + for (i = 0; i < length; i++) { + v = dest[i] = src[i]; + Py_INCREF(v); + } +} +static CYTHON_INLINE PyObject * +__Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) +{ + PyObject *res; + if (n <= 0) { + Py_INCREF(__pyx_empty_tuple); + return __pyx_empty_tuple; + } + res = PyTuple_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyTupleObject*)res)->ob_item, n); + return res; +} +static CYTHON_INLINE PyObject * +__Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n) +{ + PyObject *res; + if (n <= 0) { + return PyList_New(0); + } + res = PyList_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyListObject*)res)->ob_item, n); + return res; +} +#endif + +/* BytesEquals */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + ps1 = PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + 
return (equals == Py_EQ); + } else { + int result; +#if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) + Py_hash_t hash1, hash2; + hash1 = ((PyBytesObject*)s1)->ob_shash; + hash2 = ((PyBytesObject*)s2)->ob_shash; + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + return (equals == Py_NE); + } +#endif + result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +/* UnicodeEquals */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } +#if CYTHON_USE_UNICODE_INTERNALS + { + Py_hash_t hash1, hash2; + #if CYTHON_PEP393_ENABLED + hash1 = ((PyASCIIObject*)s1)->hash; + hash2 = ((PyASCIIObject*)s2)->hash; + #else + hash1 = ((PyUnicodeObject*)s1)->hash; + hash2 = ((PyUnicodeObject*)s2)->hash; + #endif + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + goto return_ne; + } + } +#endif + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ) ? 
(result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + +/* fastcall */ +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s) +{ + Py_ssize_t i, n = PyTuple_GET_SIZE(kwnames); + for (i = 0; i < n; i++) + { + if (s == PyTuple_GET_ITEM(kwnames, i)) return kwvalues[i]; + } + for (i = 0; i < n; i++) + { + int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); + if (unlikely(eq != 0)) { + if (unlikely(eq < 0)) return NULL; + return kwvalues[i]; + } + } + return NULL; +} +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 +CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { + Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); + PyObject *dict; + dict = PyDict_New(); + if (unlikely(!dict)) + return NULL; + for (i=0; i= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + int kwds_is_tuple = CYTHON_METH_FASTCALL && likely(PyTuple_Check(kwds)); + while (1) { + Py_XDECREF(key); key = NULL; + Py_XDECREF(value); value = NULL; + if (kwds_is_tuple) { + Py_ssize_t size; +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(kwds); +#else + size = PyTuple_Size(kwds); + if (size < 0) goto bad; +#endif + if (pos >= size) break; +#if CYTHON_AVOID_BORROWED_REFS + key = __Pyx_PySequence_ITEM(kwds, pos); + if (!key) goto bad; +#elif CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kwds, pos); +#else + key = PyTuple_GetItem(kwds, pos); + if (!key) goto bad; +#endif + value = kwvalues[pos]; + pos++; + } + else + { + if (!PyDict_Next(kwds, &pos, &key, &value)) break; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + } + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(value); + Py_DECREF(key); +#endif + key = NULL; + value = NULL; + continue; + } +#if !CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + Py_INCREF(value); + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while 
(argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = ( + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key) + ); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + Py_XDECREF(key); + Py_XDECREF(value); + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + Py_XDECREF(key); + Py_XDECREF(value); + return -1; +} + +/* ArgTypeTest */ +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) +{ + __Pyx_TypeName type_name; + __Pyx_TypeName obj_type_name; + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + else if (exact) { + #if PY_MAJOR_VERSION == 2 + if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(__Pyx_TypeCheck(obj, type))) return 1; + } + type_name = __Pyx_PyType_GetName(type); + obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected " __Pyx_FMT_TYPENAME + ", got " __Pyx_FMT_TYPENAME ")", name, type_name, obj_type_name); + __Pyx_DECREF_TypeName(type_name); + __Pyx_DECREF_TypeName(obj_type_name); + return 0; +} + +/* KeywordStringCheck */ +static int __Pyx_CheckKeywordStrings( + PyObject *kw, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kw, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + if (CYTHON_METH_FASTCALL && likely(PyTuple_Check(kw))) { + Py_ssize_t kwsize; +#if CYTHON_ASSUME_SAFE_MACROS + kwsize = PyTuple_GET_SIZE(kw); +#else + kwsize = PyTuple_Size(kw); + if (kwsize < 0) return 0; +#endif + if (unlikely(kwsize == 0)) + return 1; + if (!kw_allowed) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, 0); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + goto invalid_keyword; + } +#if PY_VERSION_HEX < 0x03090000 + for (pos 
= 0; pos < kwsize; pos++) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, pos); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } +#endif + return 1; + } + while (PyDict_Next(kw, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if (!kw_allowed && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + +/* PyDictVersioning */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; +} +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#elif CYTHON_COMPILING_IN_LIMITED_API + if (unlikely(!__pyx_m)) { + return NULL; + } + result = PyObject_GetAttr(__pyx_m, name); + if (likely(result)) { + return result; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL && !CYTHON_VECTORCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject 
*f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? PyDict_Size(kwargs) : 0; + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) { + return NULL; + } + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) { + return NULL; + } + #endif + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = Py_TYPE(func)->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while 
calling a Python object"))) + return NULL; + #endif + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = __Pyx_CyOrPyCFunction_GET_FUNCTION(func); + self = __Pyx_CyOrPyCFunction_GET_SELF(func); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectFastCall */ +#if PY_VERSION_HEX < 0x03090000 || CYTHON_COMPILING_IN_LIMITED_API +static PyObject* __Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs) { + PyObject *argstuple; + PyObject *result = 0; + size_t i; + argstuple = PyTuple_New((Py_ssize_t)nargs); + if (unlikely(!argstuple)) return NULL; + for (i = 0; i < nargs; i++) { + Py_INCREF(args[i]); + if (__Pyx_PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]) < 0) goto bad; + } + result = __Pyx_PyObject_Call(func, argstuple, kwargs); + bad: + Py_DECREF(argstuple); + return result; +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t _nargs, PyObject *kwargs) { + Py_ssize_t nargs = __Pyx_PyVectorcall_NARGS(_nargs); +#if CYTHON_COMPILING_IN_CPYTHON + if (nargs == 0 && kwargs == NULL) { + if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_NOARGS)) + return __Pyx_PyObject_CallMethO(func, NULL); + } + else if (nargs == 1 && kwargs == NULL) { + if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_O)) + return __Pyx_PyObject_CallMethO(func, args[0]); + } +#endif + #if PY_VERSION_HEX < 0x030800B1 + #if CYTHON_FAST_PYCCALL + if (PyCFunction_Check(func)) { + if (kwargs) { + return _PyCFunction_FastCallDict(func, args, nargs, kwargs); + } else { + return _PyCFunction_FastCallKeywords(func, args, nargs, NULL); + } + } + #if PY_VERSION_HEX >= 0x030700A1 + if (!kwargs && __Pyx_IS_TYPE(func, &PyMethodDescr_Type)) { + return _PyMethodDescr_FastCallKeywords(func, args, nargs, NULL); + } + #endif + #endif + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs); + } + #endif + #endif + if (kwargs == NULL) { + #if CYTHON_VECTORCALL + #if PY_VERSION_HEX < 0x03090000 + vectorcallfunc f = _PyVectorcall_Function(func); + #else + vectorcallfunc f = PyVectorcall_Function(func); + #endif + if (f) { + return f(func, args, (size_t)nargs, NULL); + } + #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL + if (__Pyx_CyFunction_CheckExact(func)) { + __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func); + if (f) return f(func, args, (size_t)nargs, NULL); + } + #endif + } + if (nargs == 0) { + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, kwargs); + } + #if PY_VERSION_HEX >= 0x03090000 && !CYTHON_COMPILING_IN_LIMITED_API + return 
PyObject_VectorcallDict(func, args, (size_t)nargs, kwargs); + #else + return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs); + #endif +} + +/* DictGetItem */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { + PyObject *value; + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (!PyErr_Occurred()) { + if (unlikely(PyTuple_Check(key))) { + PyObject* args = PyTuple_Pack(1, key); + if (likely(args)) { + PyErr_SetObject(PyExc_KeyError, args); + Py_DECREF(args); + } + } else { + PyErr_SetObject(PyExc_KeyError, key); + } + } + return NULL; + } + Py_INCREF(value); + return value; +} +#endif + +/* RaiseUnexpectedTypeError */ +static int +__Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) +{ + __Pyx_TypeName obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, + expected, obj_type_name); + __Pyx_DECREF_TypeName(obj_type_name); + return 0; +} + +/* GetItemInt */ +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (unlikely(!j)) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; + PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; + if (mm && mm->mp_subscript) { + PyObject *r, *key = PyInt_FromSsize_t(i); + if (unlikely(!key)) return NULL; + r = mm->mp_subscript(o, key); + Py_DECREF(key); + return r; + } + if (likely(sm && sm->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { + Py_ssize_t l = sm->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return sm->sq_item(o, i); + } + } +#else + if (is_list || !PyMapping_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* PyObjectCallOneArg */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *args[2] = {NULL, arg}; + return __Pyx_PyObject_FastCall(func, args+1, 1 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + +/* ObjectGetItem */ +#if CYTHON_USE_TYPE_SLOTS +static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject *index) { + PyObject *runerr = NULL; + Py_ssize_t key_value; + key_value = __Pyx_PyIndex_AsSsize_t(index); + if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) { + return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1); + } + if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) { + __Pyx_TypeName index_type_name = __Pyx_PyType_GetName(Py_TYPE(index)); + PyErr_Clear(); + PyErr_Format(PyExc_IndexError, + "cannot fit '" __Pyx_FMT_TYPENAME "' into an index-sized integer", index_type_name); + __Pyx_DECREF_TypeName(index_type_name); + } + return NULL; +} +static PyObject *__Pyx_PyObject_GetItem_Slow(PyObject *obj, PyObject *key) { + __Pyx_TypeName obj_type_name; + if (likely(PyType_Check(obj))) { + PyObject *meth = __Pyx_PyObject_GetAttrStrNoError(obj, __pyx_n_s_class_getitem); + if (!meth) { + PyErr_Clear(); + } else { + PyObject *result = __Pyx_PyObject_CallOneArg(meth, key); + Py_DECREF(meth); + return result; + } + } + obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + PyErr_Format(PyExc_TypeError, + "'" __Pyx_FMT_TYPENAME "' object is not subscriptable", obj_type_name); + __Pyx_DECREF_TypeName(obj_type_name); + return NULL; +} +static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject *key) { + PyTypeObject *tp = Py_TYPE(obj); + PyMappingMethods *mm = tp->tp_as_mapping; + PySequenceMethods *sm = tp->tp_as_sequence; + if (likely(mm && mm->mp_subscript)) { + return mm->mp_subscript(obj, key); + } + if (likely(sm && sm->sq_item)) { + return __Pyx_PyObject_GetIndex(obj, key); + } + return __Pyx_PyObject_GetItem_Slow(obj, key); +} +#endif + +/* UnpackUnboundCMethod */ +static PyObject *__Pyx_SelflessCall(PyObject *method, PyObject *args, PyObject *kwargs) { + PyObject *result; + PyObject *selfless_args = PyTuple_GetSlice(args, 1, PyTuple_Size(args)); + if (unlikely(!selfless_args)) return NULL; + result = PyObject_Call(method, selfless_args, kwargs); + Py_DECREF(selfless_args); + return result; +} +static PyMethodDef __Pyx_UnboundCMethod_Def = { + "CythonUnboundCMethod", + __PYX_REINTERPRET_FUNCION(PyCFunction, __Pyx_SelflessCall), + METH_VARARGS | METH_KEYWORDS, + NULL +}; +static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) { + PyObject *method; + method = 
__Pyx_PyObject_GetAttrStr(target->type, *target->method_name); + if (unlikely(!method)) + return -1; + target->method = method; +#if CYTHON_COMPILING_IN_CPYTHON + #if PY_MAJOR_VERSION >= 3 + if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type))) + #else + if (likely(!__Pyx_CyOrPyCFunction_Check(method))) + #endif + { + PyMethodDescrObject *descr = (PyMethodDescrObject*) method; + target->func = descr->d_method->ml_meth; + target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_STACKLESS); + } else +#endif +#if CYTHON_COMPILING_IN_PYPY +#else + if (PyCFunction_Check(method)) +#endif + { + PyObject *self; + int self_found; +#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY + self = PyObject_GetAttrString(method, "__self__"); + if (!self) { + PyErr_Clear(); + } +#else + self = PyCFunction_GET_SELF(method); +#endif + self_found = (self && self != Py_None); +#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY + Py_XDECREF(self); +#endif + if (self_found) { + PyObject *unbound_method = PyCFunction_New(&__Pyx_UnboundCMethod_Def, method); + if (unlikely(!unbound_method)) return -1; + Py_DECREF(method); + target->method = unbound_method; + } + } + return 0; +} + +/* CallUnboundCMethod1 */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) { + if (likely(cfunc->func)) { + int flag = cfunc->flag; + if (flag == METH_O) { + return (*(cfunc->func))(self, arg); + } else if ((PY_VERSION_HEX >= 0x030600B1) && flag == METH_FASTCALL) { + #if PY_VERSION_HEX >= 0x030700A0 + return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1); + #else + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + #endif + } else if ((PY_VERSION_HEX >= 0x030700A0) && flag == (METH_FASTCALL | METH_KEYWORDS)) { + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + } + } + return __Pyx__CallUnboundCMethod1(cfunc, self, arg); +} +#endif +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(1); + if (unlikely(!args)) goto bad; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + if (cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); + } else { + args = PyTuple_New(2); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 1, arg); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + } +#else + args = PyTuple_Pack(2, self, arg); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); +#endif +bad: + Py_XDECREF(args); + return result; +} + +/* CallUnboundCMethod2 */ +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 +static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2) { + if (likely(cfunc->func)) { + PyObject *args[2] = {arg1, arg2}; + if (cfunc->flag == METH_FASTCALL) { + #if PY_VERSION_HEX >= 0x030700A0 + return 
(*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, args, 2); + #else + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); + #endif + } + #if PY_VERSION_HEX >= 0x030700A0 + if (cfunc->flag == (METH_FASTCALL | METH_KEYWORDS)) + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); + #endif + } + return __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2); +} +#endif +static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(2); + if (unlikely(!args)) goto bad; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + if (cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); + } else { + args = PyTuple_New(3); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 1, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 2, arg2); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + } +#else + args = PyTuple_Pack(3, self, arg1, arg2); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); +#endif +bad: + Py_XDECREF(args); + return result; +} + +/* dict_getitem_default */ +static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) { + PyObject* value; +#if PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (unlikely(PyErr_Occurred())) + return NULL; + value = default_value; + } + Py_INCREF(value); + if ((1)); +#else + if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) { + value = PyDict_GetItem(d, key); + if (unlikely(!value)) { + value = default_value; + } + Py_INCREF(value); + } +#endif + else { + if (default_value == Py_None) + value = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyDict_Type_get, d, key); + else + value = __Pyx_CallUnboundCMethod2(&__pyx_umethod_PyDict_Type_get, d, key, default_value); + } + return value; +} + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + __Pyx_PyThreadState_declare + CYTHON_UNUSED_VAR(cause); + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + 
"raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { + #if PY_VERSION_HEX >= 0x030C00A6 + PyException_SetTraceback(value, tb); + #elif CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* SetItemInt */ +static int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v) { + int r; + if (unlikely(!j)) return -1; + r = PyObject_SetItem(o, j, v); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObject *v, int is_list, + CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = (!wraparound) ? 
i : ((likely(i >= 0)) ? i : i + PyList_GET_SIZE(o)); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o)))) { + PyObject* old = PyList_GET_ITEM(o, n); + Py_INCREF(v); + PyList_SET_ITEM(o, n, v); + Py_DECREF(old); + return 1; + } + } else { + PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; + PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; + if (mm && mm->mp_ass_subscript) { + int r; + PyObject *key = PyInt_FromSsize_t(i); + if (unlikely(!key)) return -1; + r = mm->mp_ass_subscript(o, key, v); + Py_DECREF(key); + return r; + } + if (likely(sm && sm->sq_ass_item)) { + if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { + Py_ssize_t l = sm->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return -1; + PyErr_Clear(); + } + } + return sm->sq_ass_item(o, i, v); + } + } +#else + if (is_list || !PyMapping_Check(o)) + { + return PySequence_SetItem(o, i, v); + } +#endif + return __Pyx_SetItemInt_Generic(o, PyInt_FromSsize_t(i), v); +} + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + __Pyx_TypeName type_name = __Pyx_PyType_GetName(tp); + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", + type_name, attr_name); +#else + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", + type_name, PyString_AS_STRING(attr_name)); +#endif + __Pyx_DECREF_TypeName(type_name); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* FixUpExtensionType */ +#if CYTHON_USE_TYPE_SPECS +static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { +#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + CYTHON_UNUSED_VAR(spec); + CYTHON_UNUSED_VAR(type); +#else + const PyType_Slot *slot = spec->slots; + while (slot && slot->slot && slot->slot != Py_tp_members) + slot++; + if (slot && slot->slot == Py_tp_members) { + int changed = 0; +#if !(PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON) + const +#endif + PyMemberDef *memb = (PyMemberDef*) slot->pfunc; + while (memb && memb->name) { + if (memb->name[0] == '_' && memb->name[1] == '_') { +#if PY_VERSION_HEX < 0x030900b1 + if (strcmp(memb->name, "__weaklistoffset__") == 0) { + assert(memb->type == T_PYSSIZET); 
+ assert(memb->flags == READONLY); + type->tp_weaklistoffset = memb->offset; + changed = 1; + } + else if (strcmp(memb->name, "__dictoffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + type->tp_dictoffset = memb->offset; + changed = 1; + } +#if CYTHON_METH_FASTCALL + else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); +#if PY_VERSION_HEX >= 0x030800b4 + type->tp_vectorcall_offset = memb->offset; +#else + type->tp_print = (printfunc) memb->offset; +#endif + changed = 1; + } +#endif +#else + if ((0)); +#endif +#if PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON + else if (strcmp(memb->name, "__module__") == 0) { + PyObject *descr; + assert(memb->type == T_OBJECT); + assert(memb->flags == 0 || memb->flags == READONLY); + descr = PyDescr_NewMember(type, memb); + if (unlikely(!descr)) + return -1; + if (unlikely(PyDict_SetItem(type->tp_dict, PyDescr_NAME(descr), descr) < 0)) { + Py_DECREF(descr); + return -1; + } + Py_DECREF(descr); + changed = 1; + } +#endif + } + memb++; + } + if (changed) + PyType_Modified(type); + } +#endif + return 0; +} +#endif + +/* PyObjectCallNoArg */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { + PyObject *arg[2] = {NULL, NULL}; + return __Pyx_PyObject_FastCall(func, arg + 1, 0 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + +/* PyObjectGetMethod */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { + PyObject *attr; +#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP + __Pyx_TypeName type_name; + PyTypeObject *tp = Py_TYPE(obj); + PyObject *descr; + descrgetfunc f = NULL; + PyObject **dictptr, *dict; + int meth_found = 0; + assert (*method == NULL); + if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; + } + if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { + return 0; + } + descr = _PyType_Lookup(tp, name); + if (likely(descr != NULL)) { + Py_INCREF(descr); +#if defined(Py_TPFLAGS_METHOD_DESCRIPTOR) && Py_TPFLAGS_METHOD_DESCRIPTOR + if (__Pyx_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)) +#elif PY_MAJOR_VERSION >= 3 + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type))) + #endif +#else + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr))) + #endif +#endif + { + meth_found = 1; + } else { + f = Py_TYPE(descr)->tp_descr_get; + if (f != NULL && PyDescr_IsData(descr)) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + } + } + dictptr = _PyObject_GetDictPtr(obj); + if (dictptr != NULL && (dict = *dictptr) != NULL) { + Py_INCREF(dict); + attr = __Pyx_PyDict_GetItemStr(dict, name); + if (attr != NULL) { + Py_INCREF(attr); + Py_DECREF(dict); + Py_XDECREF(descr); + goto try_unpack; + } + Py_DECREF(dict); + } + if (meth_found) { + *method = descr; + return 1; + } + if (f != NULL) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + if (likely(descr != NULL)) { + *method = descr; + return 0; + } + type_name = __Pyx_PyType_GetName(tp); + PyErr_Format(PyExc_AttributeError, 
+#if PY_MAJOR_VERSION >= 3 + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", + type_name, name); +#else + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", + type_name, PyString_AS_STRING(name)); +#endif + __Pyx_DECREF_TypeName(type_name); + return 0; +#else + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; +#endif +try_unpack: +#if CYTHON_UNPACK_METHODS + if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { + PyObject *function = PyMethod_GET_FUNCTION(attr); + Py_INCREF(function); + Py_DECREF(attr); + *method = function; + return 1; + } +#endif + *method = attr; + return 0; +} + +/* PyObjectCallMethod0 */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { + PyObject *method = NULL, *result = NULL; + int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); + if (likely(is_method)) { + result = __Pyx_PyObject_CallOneArg(method, obj); + Py_DECREF(method); + return result; + } + if (unlikely(!method)) goto bad; + result = __Pyx_PyObject_CallNoArg(method); + Py_DECREF(method); +bad: + return result; +} + +/* ValidateBasesTuple */ +#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS +static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases) { + Py_ssize_t i, n; +#if CYTHON_ASSUME_SAFE_MACROS + n = PyTuple_GET_SIZE(bases); +#else + n = PyTuple_Size(bases); + if (n < 0) return -1; +#endif + for (i = 1; i < n; i++) + { +#if CYTHON_AVOID_BORROWED_REFS + PyObject *b0 = PySequence_GetItem(bases, i); + if (!b0) return -1; +#elif CYTHON_ASSUME_SAFE_MACROS + PyObject *b0 = PyTuple_GET_ITEM(bases, i); +#else + PyObject *b0 = PyTuple_GetItem(bases, i); + if (!b0) return -1; +#endif + PyTypeObject *b; +#if PY_MAJOR_VERSION < 3 + if (PyClass_Check(b0)) + { + PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class", + PyString_AS_STRING(((PyClassObject*)b0)->cl_name)); +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } +#endif + b = (PyTypeObject*) b0; + if (!__Pyx_PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE)) + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "base class '" __Pyx_FMT_TYPENAME "' is not a heap type", b_name); + __Pyx_DECREF_TypeName(b_name); +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } + if (dictoffset == 0) + { + Py_ssize_t b_dictoffset = 0; +#if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY + b_dictoffset = b->tp_dictoffset; +#else + PyObject *py_b_dictoffset = PyObject_GetAttrString((PyObject*)b, "__dictoffset__"); + if (!py_b_dictoffset) goto dictoffset_return; + b_dictoffset = PyLong_AsSsize_t(py_b_dictoffset); + Py_DECREF(py_b_dictoffset); + if (b_dictoffset == -1 && PyErr_Occurred()) goto dictoffset_return; +#endif + if (b_dictoffset) { + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "extension type '%.200s' has no __dict__ slot, " + "but base type '" __Pyx_FMT_TYPENAME "' has: " + "either add 'cdef dict __dict__' to the extension type " + "or add '__slots__ = [...]' to the base type", + type_name, b_name); + __Pyx_DECREF_TypeName(b_name); + } +#if !(CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY) + dictoffset_return: +#endif +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } + } +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + } + return 0; +} +#endif + +/* PyType_Ready */ +static int 
__Pyx_PyType_Ready(PyTypeObject *t) { +#if CYTHON_USE_TYPE_SPECS || !(CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API) || defined(PYSTON_MAJOR_VERSION) + (void)__Pyx_PyObject_CallMethod0; +#if CYTHON_USE_TYPE_SPECS + (void)__Pyx_validate_bases_tuple; +#endif + return PyType_Ready(t); +#else + int r; + PyObject *bases = __Pyx_PyType_GetSlot(t, tp_bases, PyObject*); + if (bases && unlikely(__Pyx_validate_bases_tuple(t->tp_name, t->tp_dictoffset, bases) == -1)) + return -1; +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) + { + int gc_was_enabled; + #if PY_VERSION_HEX >= 0x030A00b1 + gc_was_enabled = PyGC_Disable(); + (void)__Pyx_PyObject_CallMethod0; + #else + PyObject *ret, *py_status; + PyObject *gc = NULL; + #if PY_VERSION_HEX >= 0x030700a1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM+0 >= 0x07030400) + gc = PyImport_GetModule(__pyx_kp_u_gc); + #endif + if (unlikely(!gc)) gc = PyImport_Import(__pyx_kp_u_gc); + if (unlikely(!gc)) return -1; + py_status = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_isenabled); + if (unlikely(!py_status)) { + Py_DECREF(gc); + return -1; + } + gc_was_enabled = __Pyx_PyObject_IsTrue(py_status); + Py_DECREF(py_status); + if (gc_was_enabled > 0) { + ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_disable); + if (unlikely(!ret)) { + Py_DECREF(gc); + return -1; + } + Py_DECREF(ret); + } else if (unlikely(gc_was_enabled == -1)) { + Py_DECREF(gc); + return -1; + } + #endif + t->tp_flags |= Py_TPFLAGS_HEAPTYPE; +#if PY_VERSION_HEX >= 0x030A0000 + t->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; +#endif +#else + (void)__Pyx_PyObject_CallMethod0; +#endif + r = PyType_Ready(t); +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) + t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; + #if PY_VERSION_HEX >= 0x030A00b1 + if (gc_was_enabled) + PyGC_Enable(); + #else + if (gc_was_enabled) { + PyObject *tp, *v, *tb; + PyErr_Fetch(&tp, &v, &tb); + ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_enable); + if (likely(ret || r == -1)) { + Py_XDECREF(ret); + PyErr_Restore(tp, v, tb); + } else { + Py_XDECREF(tp); + Py_XDECREF(v); + Py_XDECREF(tb); + r = -1; + } + } + Py_DECREF(gc); + #endif + } +#endif + return r; +#endif +} + +/* SetVTable */ +static int __Pyx_SetVtable(PyTypeObject *type, void *vtable) { + PyObject *ob = PyCapsule_New(vtable, 0, 0); + if (unlikely(!ob)) + goto bad; +#if CYTHON_COMPILING_IN_LIMITED_API + if (unlikely(PyObject_SetAttr((PyObject *) type, __pyx_n_s_pyx_vtable, ob) < 0)) +#else + if (unlikely(PyDict_SetItem(type->tp_dict, __pyx_n_s_pyx_vtable, ob) < 0)) +#endif + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + +/* GetVTable */ +static void* __Pyx_GetVtable(PyTypeObject *type) { + void* ptr; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *ob = PyObject_GetAttr((PyObject *)type, __pyx_n_s_pyx_vtable); +#else + PyObject *ob = PyObject_GetItem(type->tp_dict, __pyx_n_s_pyx_vtable); +#endif + if (!ob) + goto bad; + ptr = PyCapsule_GetPointer(ob, 0); + if (!ptr && !PyErr_Occurred()) + PyErr_SetString(PyExc_RuntimeError, "invalid vtable found for imported type"); + Py_DECREF(ob); + return ptr; +bad: + Py_XDECREF(ob); + return NULL; +} + +/* MergeVTables */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_MergeVtables(PyTypeObject *type) { + int i; + void** base_vtables; + __Pyx_TypeName tp_base_name; + __Pyx_TypeName base_name; + void* unknown = (void*)-1; + PyObject* bases = type->tp_bases; + int base_depth = 0; + { + PyTypeObject* base = type->tp_base; + while (base) { + base_depth += 
1; + base = base->tp_base; + } + } + base_vtables = (void**) malloc(sizeof(void*) * (size_t)(base_depth + 1)); + base_vtables[0] = unknown; + for (i = 1; i < PyTuple_GET_SIZE(bases); i++) { + void* base_vtable = __Pyx_GetVtable(((PyTypeObject*)PyTuple_GET_ITEM(bases, i))); + if (base_vtable != NULL) { + int j; + PyTypeObject* base = type->tp_base; + for (j = 0; j < base_depth; j++) { + if (base_vtables[j] == unknown) { + base_vtables[j] = __Pyx_GetVtable(base); + base_vtables[j + 1] = unknown; + } + if (base_vtables[j] == base_vtable) { + break; + } else if (base_vtables[j] == NULL) { + goto bad; + } + base = base->tp_base; + } + } + } + PyErr_Clear(); + free(base_vtables); + return 0; +bad: + tp_base_name = __Pyx_PyType_GetName(type->tp_base); + base_name = __Pyx_PyType_GetName((PyTypeObject*)PyTuple_GET_ITEM(bases, i)); + PyErr_Format(PyExc_TypeError, + "multiple bases have vtable conflict: '" __Pyx_FMT_TYPENAME "' and '" __Pyx_FMT_TYPENAME "'", tp_base_name, base_name); + __Pyx_DECREF_TypeName(tp_base_name); + __Pyx_DECREF_TypeName(base_name); + free(base_vtables); + return -1; +} +#endif + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *module = 0; + PyObject *empty_dict = 0; + PyObject *empty_list = 0; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (unlikely(!py_import)) + goto bad; + if (!from_list) { + empty_list = PyList_New(0); + if (unlikely(!empty_list)) + goto bad; + from_list = empty_list; + } + #endif + empty_dict = PyDict_New(); + if (unlikely(!empty_dict)) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.') != NULL) { + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, 1); + if (unlikely(!module)) { + if (unlikely(!PyErr_ExceptionMatches(PyExc_ImportError))) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (unlikely(!py_level)) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, __pyx_d, empty_dict, from_list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, level); + #endif + } + } +bad: + Py_XDECREF(empty_dict); + Py_XDECREF(empty_list); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + const char* module_name_str = 0; + PyObject* module_name = 0; + PyObject* module_dot = 0; + PyObject* full_name = 0; + PyErr_Clear(); + module_name_str = PyModule_GetName(module); + if (unlikely(!module_name_str)) { goto modbad; } + module_name = PyUnicode_FromString(module_name_str); + if (unlikely(!module_name)) { goto modbad; } + module_dot = PyUnicode_Concat(module_name, __pyx_kp_u_); + if (unlikely(!module_dot)) { goto modbad; } + full_name = PyUnicode_Concat(module_dot, name); + if (unlikely(!full_name)) { goto modbad; } + #if PY_VERSION_HEX < 0x030700A1 || (CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM < 0x07030400) + { + PyObject *modules = PyImport_GetModuleDict(); + if (unlikely(!modules)) + goto modbad; + value = PyObject_GetItem(modules, full_name); + } + #else + value = 
PyImport_GetModule(full_name); + #endif + modbad: + Py_XDECREF(full_name); + Py_XDECREF(module_dot); + Py_XDECREF(module_name); + } + if (unlikely(!value)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* FetchSharedCythonModule */ +static PyObject *__Pyx_FetchSharedCythonABIModule(void) { + return __Pyx_PyImport_AddModuleRef((char*) __PYX_ABI_MODULE_NAME); +} + +/* FetchCommonType */ +static int __Pyx_VerifyCachedType(PyObject *cached_type, + const char *name, + Py_ssize_t basicsize, + Py_ssize_t expected_basicsize) { + if (!PyType_Check(cached_type)) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s is not a type object", name); + return -1; + } + if (basicsize != expected_basicsize) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s has the wrong size, try recompiling", + name); + return -1; + } + return 0; +} +#if !CYTHON_USE_TYPE_SPECS +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { + PyObject* abi_module; + const char* object_name; + PyTypeObject *cached_type = NULL; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + object_name = strrchr(type->tp_name, '.'); + object_name = object_name ? object_name+1 : type->tp_name; + cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + if (__Pyx_VerifyCachedType( + (PyObject *)cached_type, + object_name, + cached_type->tp_basicsize, + type->tp_basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + if (PyType_Ready(type) < 0) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) + goto bad; + Py_INCREF(type); + cached_type = type; +done: + Py_DECREF(abi_module); + return cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#else +static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { + PyObject *abi_module, *cached_type = NULL; + const char* object_name = strrchr(spec->name, '.'); + object_name = object_name ? object_name+1 : spec->name; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + cached_type = PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + Py_ssize_t basicsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_basicsize; + py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); + if (unlikely(!py_basicsize)) goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; +#else + basicsize = likely(PyType_Check(cached_type)) ? 
((PyTypeObject*) cached_type)->tp_basicsize : -1; +#endif + if (__Pyx_VerifyCachedType( + cached_type, + object_name, + basicsize, + spec->basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + CYTHON_UNUSED_VAR(module); + cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); + if (unlikely(!cached_type)) goto bad; + if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; +done: + Py_DECREF(abi_module); + assert(cached_type == NULL || PyType_Check(cached_type)); + return (PyTypeObject *) cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#endif + +/* PyVectorcallFastCallDict */ +#if CYTHON_METH_FASTCALL +static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + PyObject *res = NULL; + PyObject *kwnames; + PyObject **newargs; + PyObject **kwvalues; + Py_ssize_t i, pos; + size_t j; + PyObject *key, *value; + unsigned long keys_are_strings; + Py_ssize_t nkw = PyDict_GET_SIZE(kw); + newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); + if (unlikely(newargs == NULL)) { + PyErr_NoMemory(); + return NULL; + } + for (j = 0; j < nargs; j++) newargs[j] = args[j]; + kwnames = PyTuple_New(nkw); + if (unlikely(kwnames == NULL)) { + PyMem_Free(newargs); + return NULL; + } + kwvalues = newargs + nargs; + pos = i = 0; + keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; + while (PyDict_Next(kw, &pos, &key, &value)) { + keys_are_strings &= Py_TYPE(key)->tp_flags; + Py_INCREF(key); + Py_INCREF(value); + PyTuple_SET_ITEM(kwnames, i, key); + kwvalues[i] = value; + i++; + } + if (unlikely(!keys_are_strings)) { + PyErr_SetString(PyExc_TypeError, "keywords must be strings"); + goto cleanup; + } + res = vc(func, newargs, nargs, kwnames); +cleanup: + Py_DECREF(kwnames); + for (i = 0; i < nkw; i++) + Py_DECREF(kwvalues[i]); + PyMem_Free(newargs); + return res; +} +static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { + return vc(func, args, nargs, NULL); + } + return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); +} +#endif + +/* CythonFunctionShared */ +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + if (__Pyx_CyFunction_Check(func)) { + return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc; + } else if (PyCFunction_Check(func)) { + return PyCFunction_GetFunction(func) == (PyCFunction) cfunc; + } + return 0; +} +#else +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +} +#endif +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + __Pyx_Py_XDECREF_SET( + __Pyx_CyFunction_GetClassObj(f), + ((classobj) ? __Pyx_NewRef(classobj) : NULL)); +#else + __Pyx_Py_XDECREF_SET( + ((PyCMethodObject *) (f))->mm_class, + (PyTypeObject*)((classobj) ? 
__Pyx_NewRef(classobj) : NULL)); +#endif +} +static PyObject * +__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) +{ + CYTHON_UNUSED_VAR(closure); + if (unlikely(op->func_doc == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); + if (unlikely(!op->func_doc)) return NULL; +#else + if (((PyCFunctionObject*)op)->m_ml->ml_doc) { +#if PY_MAJOR_VERSION >= 3 + op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#else + op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#endif + if (unlikely(op->func_doc == NULL)) + return NULL; + } else { + Py_INCREF(Py_None); + return Py_None; + } +#endif + } + Py_INCREF(op->func_doc); + return op->func_doc; +} +static int +__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (value == NULL) { + value = Py_None; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_doc, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_name == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_name = PyObject_GetAttrString(op->func, "__name__"); +#elif PY_MAJOR_VERSION >= 3 + op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#else + op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#endif + if (unlikely(op->func_name == NULL)) + return NULL; + } + Py_INCREF(op->func_name); + return op->func_name; +} +static int +__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__name__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_name, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_qualname); + return op->func_qualname; +} +static int +__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__qualname__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_qualname, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_dict == NULL)) { + op->func_dict = PyDict_New(); + if (unlikely(op->func_dict == NULL)) + return NULL; + } + Py_INCREF(op->func_dict); + return op->func_dict; +} +static int +__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(value == NULL)) { + PyErr_SetString(PyExc_TypeError, + "function's dictionary may not be deleted"); + return -1; + } + if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "setting function's dictionary to a non-dict"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_dict, value); + 
return 0; +} +static PyObject * +__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_globals); + return op->func_globals; +} +static PyObject * +__Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(op); + CYTHON_UNUSED_VAR(context); + Py_INCREF(Py_None); + return Py_None; +} +static PyObject * +__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) +{ + PyObject* result = (op->func_code) ? op->func_code : Py_None; + CYTHON_UNUSED_VAR(context); + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { + int result = 0; + PyObject *res = op->defaults_getter((PyObject *) op); + if (unlikely(!res)) + return -1; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + op->defaults_tuple = PyTuple_GET_ITEM(res, 0); + Py_INCREF(op->defaults_tuple); + op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); + Py_INCREF(op->defaults_kwdict); + #else + op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); + if (unlikely(!op->defaults_tuple)) result = -1; + else { + op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); + if (unlikely(!op->defaults_kwdict)) result = -1; + } + #endif + Py_DECREF(res); + return result; +} +static int +__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__defaults__ must be set to a tuple object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_tuple; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_tuple; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__kwdefaults__ must be set to a dict object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_kwdict; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_kwdict; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value || value == Py_None) { + value = NULL; + } else if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__annotations__ 
must be set to a dict object"); + return -1; + } + Py_XINCREF(value); + __Pyx_Py_XDECREF_SET(op->func_annotations, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->func_annotations; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + result = PyDict_New(); + if (unlikely(!result)) return NULL; + op->func_annotations = result; + } + Py_INCREF(result); + return result; +} +static PyObject * +__Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { + int is_coroutine; + CYTHON_UNUSED_VAR(context); + if (op->func_is_coroutine) { + return __Pyx_NewRef(op->func_is_coroutine); + } + is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; +#if PY_VERSION_HEX >= 0x03050000 + if (is_coroutine) { + PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; + fromlist = PyList_New(1); + if (unlikely(!fromlist)) return NULL; + Py_INCREF(marker); +#if CYTHON_ASSUME_SAFE_MACROS + PyList_SET_ITEM(fromlist, 0, marker); +#else + if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { + Py_DECREF(marker); + Py_DECREF(fromlist); + return NULL; + } +#endif + module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); + Py_DECREF(fromlist); + if (unlikely(!module)) goto ignore; + op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); + Py_DECREF(module); + if (likely(op->func_is_coroutine)) { + return __Pyx_NewRef(op->func_is_coroutine); + } +ignore: + PyErr_Clear(); + } +#endif + op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); + return __Pyx_NewRef(op->func_is_coroutine); +} +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject * +__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_GetAttrString(op->func, "__module__"); +} +static int +__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_SetAttrString(op->func, "__module__", value); +} +#endif +static PyGetSetDef __pyx_CyFunction_getsets[] = { + {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, + {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) 
"__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, + {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, + {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, +#if CYTHON_COMPILING_IN_LIMITED_API + {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, +#endif + {0, 0, 0, 0, 0} +}; +static PyMemberDef __pyx_CyFunction_members[] = { +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, +#endif +#if CYTHON_USE_TYPE_SPECS + {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, +#if CYTHON_METH_FASTCALL +#if CYTHON_BACKPORT_VECTORCALL + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, +#else +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, +#endif +#endif +#endif +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, +#else + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, +#endif +#endif + {0, 0, 0, 0, 0} +}; +static PyObject * +__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) +{ + CYTHON_UNUSED_VAR(args); +#if PY_MAJOR_VERSION >= 3 + Py_INCREF(m->func_qualname); + return m->func_qualname; +#else + return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); +#endif +} +static PyMethodDef __pyx_CyFunction_methods[] = { + {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, + {0, 0, 0, 0} +}; +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) +#else +#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) +#endif +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { +#if !CYTHON_COMPILING_IN_LIMITED_API + PyCFunctionObject *cf = (PyCFunctionObject*) op; +#endif + if (unlikely(op == NULL)) + return NULL; +#if CYTHON_COMPILING_IN_LIMITED_API + op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); + if (unlikely(!op->func)) return NULL; +#endif + op->flags = flags; + __Pyx_CyFunction_weakreflist(op) = NULL; +#if !CYTHON_COMPILING_IN_LIMITED_API + cf->m_ml = ml; + cf->m_self = (PyObject *) op; +#endif + Py_XINCREF(closure); + op->func_closure = closure; +#if !CYTHON_COMPILING_IN_LIMITED_API + Py_XINCREF(module); + cf->m_module = module; +#endif + op->func_dict = NULL; + op->func_name = NULL; + Py_INCREF(qualname); + op->func_qualname = qualname; + op->func_doc = NULL; +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + op->func_classobj = NULL; +#else + ((PyCMethodObject*)op)->mm_class = NULL; +#endif + op->func_globals = globals; + Py_INCREF(op->func_globals); + Py_XINCREF(code); + op->func_code = code; + op->defaults_pyobjects = 0; + op->defaults_size = 0; + op->defaults = NULL; + op->defaults_tuple = NULL; + op->defaults_kwdict = NULL; + op->defaults_getter = NULL; + op->func_annotations = NULL; + op->func_is_coroutine = NULL; +#if 
CYTHON_METH_FASTCALL + switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { + case METH_NOARGS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; + break; + case METH_O: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; + break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; + break; + case METH_FASTCALL | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS; + break; + case METH_VARARGS | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = NULL; + break; + default: + PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); + Py_DECREF(op); + return NULL; + } +#endif + return (PyObject *) op; +} +static int +__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m) +{ + Py_CLEAR(m->func_closure); +#if CYTHON_COMPILING_IN_LIMITED_API + Py_CLEAR(m->func); +#else + Py_CLEAR(((PyCFunctionObject*)m)->m_module); +#endif + Py_CLEAR(m->func_dict); + Py_CLEAR(m->func_name); + Py_CLEAR(m->func_qualname); + Py_CLEAR(m->func_doc); + Py_CLEAR(m->func_globals); + Py_CLEAR(m->func_code); +#if !CYTHON_COMPILING_IN_LIMITED_API +#if PY_VERSION_HEX < 0x030900B1 + Py_CLEAR(__Pyx_CyFunction_GetClassObj(m)); +#else + { + PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class; + ((PyCMethodObject *) (m))->mm_class = NULL; + Py_XDECREF(cls); + } +#endif +#endif + Py_CLEAR(m->defaults_tuple); + Py_CLEAR(m->defaults_kwdict); + Py_CLEAR(m->func_annotations); + Py_CLEAR(m->func_is_coroutine); + if (m->defaults) { + PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); + int i; + for (i = 0; i < m->defaults_pyobjects; i++) + Py_XDECREF(pydefaults[i]); + PyObject_Free(m->defaults); + m->defaults = NULL; + } + return 0; +} +static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m) +{ + if (__Pyx_CyFunction_weakreflist(m) != NULL) + PyObject_ClearWeakRefs((PyObject *) m); + __Pyx_CyFunction_clear(m); + __Pyx_PyHeapTypeObject_GC_Del(m); +} +static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m) +{ + PyObject_GC_UnTrack(m); + __Pyx__CyFunction_dealloc(m); +} +static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg) +{ + Py_VISIT(m->func_closure); +#if CYTHON_COMPILING_IN_LIMITED_API + Py_VISIT(m->func); +#else + Py_VISIT(((PyCFunctionObject*)m)->m_module); +#endif + Py_VISIT(m->func_dict); + Py_VISIT(m->func_name); + Py_VISIT(m->func_qualname); + Py_VISIT(m->func_doc); + Py_VISIT(m->func_globals); + Py_VISIT(m->func_code); +#if !CYTHON_COMPILING_IN_LIMITED_API + Py_VISIT(__Pyx_CyFunction_GetClassObj(m)); +#endif + Py_VISIT(m->defaults_tuple); + Py_VISIT(m->defaults_kwdict); + Py_VISIT(m->func_is_coroutine); + if (m->defaults) { + PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); + int i; + for (i = 0; i < m->defaults_pyobjects; i++) + Py_VISIT(pydefaults[i]); + } + return 0; +} +static PyObject* +__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op) +{ +#if PY_MAJOR_VERSION >= 3 + return PyUnicode_FromFormat("", + op->func_qualname, (void *)op); +#else + return PyString_FromFormat("", + PyString_AsString(op->func_qualname), (void *)op); +#endif +} +static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) { +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *f = ((__pyx_CyFunctionObject*)func)->func; + PyObject *py_name = NULL; + 
PyCFunction meth; + int flags; + meth = PyCFunction_GetFunction(f); + if (unlikely(!meth)) return NULL; + flags = PyCFunction_GetFlags(f); + if (unlikely(flags < 0)) return NULL; +#else + PyCFunctionObject* f = (PyCFunctionObject*)func; + PyCFunction meth = f->m_ml->ml_meth; + int flags = f->m_ml->ml_flags; +#endif + Py_ssize_t size; + switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { + case METH_VARARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) + return (*meth)(self, arg); + break; + case METH_VARARGS | METH_KEYWORDS: + return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); + case METH_NOARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 0)) + return (*meth)(self, NULL); +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + case METH_O: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 1)) { + PyObject *result, *arg0; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + arg0 = PyTuple_GET_ITEM(arg, 0); + #else + arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; + #endif + result = (*meth)(self, arg0); + #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_DECREF(arg0); + #endif + return result; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + default: + PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); + return NULL; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", + py_name); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", + f->m_ml->ml_name); +#endif + return NULL; +} +static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *self, *result; +#if CYTHON_COMPILING_IN_LIMITED_API + self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); + if (unlikely(!self) && PyErr_Occurred()) return NULL; +#else + self = ((PyCFunctionObject*)func)->m_self; +#endif + result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); + return result; +} +static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { + PyObject *result; + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; +#if CYTHON_METH_FASTCALL + 
__pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); + if (vc) { +#if CYTHON_ASSUME_SAFE_MACROS + return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); +#else + (void) &__Pyx_PyVectorcall_FastCallDict; + return PyVectorcall_Call(func, args, kw); +#endif + } +#endif + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + Py_ssize_t argc; + PyObject *new_args; + PyObject *self; +#if CYTHON_ASSUME_SAFE_MACROS + argc = PyTuple_GET_SIZE(args); +#else + argc = PyTuple_Size(args); + if (unlikely(!argc) < 0) return NULL; +#endif + new_args = PyTuple_GetSlice(args, 1, argc); + if (unlikely(!new_args)) + return NULL; + self = PyTuple_GetItem(args, 0); + if (unlikely(!self)) { + Py_DECREF(new_args); +#if PY_MAJOR_VERSION > 2 + PyErr_Format(PyExc_TypeError, + "unbound method %.200S() needs an argument", + cyfunc->func_qualname); +#else + PyErr_SetString(PyExc_TypeError, + "unbound method needs an argument"); +#endif + return NULL; + } + result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); + Py_DECREF(new_args); + } else { + result = __Pyx_CyFunction_Call(func, args, kw); + } + return result; +} +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames) +{ + int ret = 0; + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + if (unlikely(nargs < 1)) { + PyErr_Format(PyExc_TypeError, "%.200s() needs an argument", + ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); + return -1; + } + ret = 1; + } + if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); + return -1; + } + return ret; +} +static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + if (unlikely(nargs != 0)) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + def->ml_name, nargs); + return NULL; + } + return def->ml_meth(self, NULL); +} +static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + if (unlikely(nargs != 1)) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + def->ml_name, 
nargs); + return NULL; + } + return def->ml_meth(self, args[0]); +} +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); +} +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; + PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); +} +#endif +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_CyFunctionType_slots[] = { + {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, + {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, + {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, + {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, + {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, + {Py_tp_methods, (void *)__pyx_CyFunction_methods}, + {Py_tp_members, (void *)__pyx_CyFunction_members}, + {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, + {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, + {0, 0}, +}; +static PyType_Spec __pyx_CyFunctionType_spec = { + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + __pyx_CyFunctionType_slots +}; +#else +static PyTypeObject __pyx_CyFunctionType_type = { + PyVarObject_HEAD_INIT(0, 0) + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, + (destructor) __Pyx_CyFunction_dealloc, +#if !CYTHON_METH_FASTCALL + 0, +#elif CYTHON_BACKPORT_VECTORCALL + (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), +#else + offsetof(PyCFunctionObject, vectorcall), +#endif + 0, + 0, +#if PY_MAJOR_VERSION < 3 + 0, +#else + 0, +#endif + (reprfunc) __Pyx_CyFunction_repr, + 0, + 0, + 0, + 0, + __Pyx_CyFunction_CallAsMethod, + 0, + 0, + 0, + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + 0, + (traverseproc) __Pyx_CyFunction_traverse, + (inquiry) 
__Pyx_CyFunction_clear, + 0, +#if PY_VERSION_HEX < 0x030500A0 + offsetof(__pyx_CyFunctionObject, func_weakreflist), +#else + offsetof(PyCFunctionObject, m_weakreflist), +#endif + 0, + 0, + __pyx_CyFunction_methods, + __pyx_CyFunction_members, + __pyx_CyFunction_getsets, + 0, + 0, + __Pyx_PyMethod_New, + 0, + offsetof(__pyx_CyFunctionObject, func_dict), + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, +#if PY_VERSION_HEX >= 0x030400a1 + 0, +#endif +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, +#endif +#if __PYX_NEED_TP_PRINT_SLOT + 0, +#endif +#if PY_VERSION_HEX >= 0x030C0000 + 0, +#endif +#if PY_VERSION_HEX >= 0x030d00A4 + 0, +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, +#endif +}; +#endif +static int __pyx_CyFunction_init(PyObject *module) { +#if CYTHON_USE_TYPE_SPECS + __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); +#else + CYTHON_UNUSED_VAR(module); + __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); +#endif + if (unlikely(__pyx_CyFunctionType == NULL)) { + return -1; + } + return 0; +} +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults = PyObject_Malloc(size); + if (unlikely(!m->defaults)) + return PyErr_NoMemory(); + memset(m->defaults, 0, size); + m->defaults_pyobjects = pyobjects; + m->defaults_size = size; + return m->defaults; +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_tuple = tuple; + Py_INCREF(tuple); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_kwdict = dict; + Py_INCREF(dict); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->func_annotations = dict; + Py_INCREF(dict); +} + +/* CythonFunction */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { + PyObject *op = __Pyx_CyFunction_Init( + PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), + ml, flags, qualname, closure, module, globals, code + ); + if (likely(op)) { + PyObject_GC_Track(op); + } + return op; +} + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + CYTHON_MAYBE_UNUSED_VAR(tstate); + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = 
PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} +#endif + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, + PyObject *firstlineno, PyObject *name) { + PyObject 
*replace = NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; + replace = PyObject_GetAttrString(code, "replace"); + if (likely(replace)) { + PyObject *result; + result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); + Py_DECREF(replace); + return result; + } + PyErr_Clear(); + #if __PYX_LIMITED_VERSION_HEX < 0x030780000 + { + PyObject *compiled = NULL, *result = NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; + compiled = Py_CompileString( + "out = type(code)(\n" + " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" + " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" + " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" + " code.co_lnotab)\n", "", Py_file_input); + if (!compiled) return NULL; + result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); + Py_DECREF(compiled); + if (!result) PyErr_Print(); + Py_DECREF(result); + result = PyDict_GetItemString(scratch_dict, "out"); + if (result) Py_INCREF(result); + return result; + } + #else + return NULL; + #endif +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; + PyObject *replace = NULL, *getframe = NULL, *frame = NULL; + PyObject *exc_type, *exc_value, *exc_traceback; + int success = 0; + if (c_line) { + (void) __pyx_cfilenm; + (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); + } + PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); + code_object = Py_CompileString("_getframe()", filename, Py_eval_input); + if (unlikely(!code_object)) goto bad; + py_py_line = PyLong_FromLong(py_line); + if (unlikely(!py_py_line)) goto bad; + py_funcname = PyUnicode_FromString(funcname); + if (unlikely(!py_funcname)) goto bad; + dict = PyDict_New(); + if (unlikely(!dict)) goto bad; + { + PyObject *old_code_object = code_object; + code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); + Py_DECREF(old_code_object); + } + if (unlikely(!code_object)) goto bad; + getframe = PySys_GetObject("_getframe"); + if (unlikely(!getframe)) goto bad; + if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad; + frame = PyEval_EvalCode(code_object, dict, dict); + if (unlikely(!frame) || frame == Py_None) goto bad; + success = 1; + bad: + PyErr_Restore(exc_type, exc_value, exc_traceback); + Py_XDECREF(code_object); + Py_XDECREF(py_py_line); + Py_XDECREF(py_funcname); + Py_XDECREF(dict); + Py_XDECREF(replace); + if (success) { + PyTraceBack_Here( + (struct _frame*)frame); + } + Py_XDECREF(frame); +} +#else +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; + #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; + py_srcfile = PyString_FromString(filename); + if (!py_srcfile) goto bad; + #endif + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto 
bad; + funcname = PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + if (!py_funcname) goto bad; + #endif + } + #if PY_MAJOR_VERSION < 3 + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject *ptype, *pvalue, *ptraceback; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) { + /* If the code object creation fails, then we should clear the + fetched exception references and propagate the new exception */ + Py_XDECREF(ptype); + Py_XDECREF(pvalue); + Py_XDECREF(ptraceback); + goto bad; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} +#endif + +/* FormatTypeName */ +#if CYTHON_COMPILING_IN_LIMITED_API +static __Pyx_TypeName +__Pyx_PyType_GetName(PyTypeObject* tp) +{ + PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, + __pyx_n_s_name); + if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { + PyErr_Clear(); + Py_XDECREF(name); + name = __Pyx_NewRef(__pyx_n_s__15); + } + return name; +} +#endif + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + unsigned char *bytes = (unsigned char *)&value; +#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 + if (is_unsigned) { + return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); + } else { + return PyLong_FromNativeBytes(bytes, sizeof(value), -1); + } +#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 + int one = 1; int little = (int)*(unsigned char *)&one; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); +#else + int one = 1; int little = (int)*(unsigned char *)&one; + PyObject *from_bytes, *result = NULL; + PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; + from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); + if (!from_bytes) return NULL; + py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); + if (!py_bytes) goto limited_bad; + order_str = PyUnicode_FromString(little ? 
"little" : "big"); + if (!order_str) goto limited_bad; + arg_tuple = PyTuple_Pack(2, py_bytes, order_str); + if (!arg_tuple) goto limited_bad; + if (!is_unsigned) { + kwds = PyDict_New(); + if (!kwds) goto limited_bad; + if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; + } + result = PyObject_Call(from_bytes, arg_tuple, kwds); + limited_bad: + Py_XDECREF(kwds); + Py_XDECREF(arg_tuple); + Py_XDECREF(order_str); + Py_XDECREF(py_bytes); + Py_XDECREF(from_bytes); + return result; +#endif + } +} + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(long) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * 
PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(long) <= sizeof(unsigned long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * 
PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } + } +#endif + if ((sizeof(long) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + long val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (long) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (long) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (long) -1; + } else { + stepval = v; + } + v = NULL; + val = (long) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((long) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 
0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((long) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (long) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(int) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return 
(int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(int) <= sizeof(unsigned long))) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } + } 
+#endif + if ((sizeof(int) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + int val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (int) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (int) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (int) -1; + } else { + stepval = v; + } + v = NULL; + val = (int) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((int) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 
0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((int) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (int) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (cls == a || cls == b) return 1; + mro = cls->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + PyObject *base = PyTuple_GET_ITEM(mro, i); + if (base == (PyObject *)a || base == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + if (exc_type1) { + return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); + } else { + return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030B00A4 + return Py_Version & ~0xFFUL; +#else + const char* rt_version = Py_GetVersion(); + unsigned long version = 0; + unsigned long factor = 0x01000000UL; + unsigned int digit = 0; + int i = 0; + while (factor) { + while ('0' <= rt_version[i] && rt_version[i] <= '9') { + digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); + ++i; + } + version += factor * digit; + if (rt_version[i] != '.') + break; + digit = 0; + factor >>= 8; + ++i; + } + return version; +#endif +} +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { + const unsigned long MAJOR_MINOR = 0xFFFF0000UL; + if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) + return 0; + if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) + return 1; + { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compile time Python version %d.%d " + "of module '%.100s' " + "%s " + "runtime version %d.%d", + (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), + __Pyx_MODULE_NAME, + (allow_newer) ? 
"was newer than" : "does not match", + (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) + ); + return PyErr_WarnEx(NULL, message, 1); + } +} + +/* InitStrings */ +#if PY_MAJOR_VERSION >= 3 +static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { + if (t.is_unicode | t.is_str) { + if (t.intern) { + *str = PyUnicode_InternFromString(t.s); + } else if (t.encoding) { + *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); + } else { + *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); + } + } else { + *str = PyBytes_FromStringAndSize(t.s, t.n - 1); + } + if (!*str) + return -1; + if (PyObject_Hash(*str) == -1) + return -1; + return 0; +} +#endif +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION >= 3 + __Pyx_InitString(*t, t->p); + #else + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + #endif + ++t; + } + return 0; +} + +#include +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { + size_t len = strlen(s); + if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { + PyErr_SetString(PyExc_OverflowError, "byte string is too long"); + return -1; + } + return (Py_ssize_t) len; +} +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return __Pyx_PyUnicode_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return PyByteArray_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if 
(!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { + __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). " + "The ability to return an instance of a strict subclass of int is deprecated, " + "and may be removed in a future version of Python.", + result_type_name)) { + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; + } + __Pyx_DECREF_TypeName(result_type_name); + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", + type_name, type_name, result_type_name); + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(__Pyx_PyLong_IsCompact(b))) { + return __Pyx_PyLong_CompactValue(b); + } else { + const digit* digits = __Pyx_PyLong_Digits(b); + const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * 
PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +/* #### Code section: utility_code_pragmas_end ### */ +#ifdef _MSC_VER +#pragma warning( pop ) +#endif + + + +/* #### Code section: end ### */ +#endif /* Py_PYTHON_H */ diff --git a/lib/sqlalchemy/engine/_util_cy.c b/lib/sqlalchemy/engine/_util_cy.c new file mode 100644 index 00000000000..bab84947303 --- /dev/null +++ b/lib/sqlalchemy/engine/_util_cy.c @@ -0,0 +1,8853 @@ +/* Generated by Cython 3.0.11 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "name": "sqlalchemy.engine._util_cy", + "sources": [ + "lib/sqlalchemy/engine/_util_cy.py" + ] + }, + "module_name": "sqlalchemy.engine._util_cy" +} +END: Cython Metadata */ + +#ifndef PY_SSIZE_T_CLEAN +#define PY_SSIZE_T_CLEAN +#endif /* PY_SSIZE_T_CLEAN */ +#if defined(CYTHON_LIMITED_API) && 0 + #ifndef Py_LIMITED_API + #if CYTHON_LIMITED_API+0 > 0x03030000 + #define Py_LIMITED_API CYTHON_LIMITED_API + #else + #define Py_LIMITED_API 0x03030000 + #endif + #endif +#endif + +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.7+ or Python 3.3+. +#else +#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API +#define __PYX_EXTRA_ABI_MODULE_NAME "limited" +#else +#define __PYX_EXTRA_ABI_MODULE_NAME "" +#endif +#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME +#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI +#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." 
+#define CYTHON_HEX_VERSION 0x03000BF0 +#define CYTHON_FUTURE_DIVISION 1 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #define HAVE_LONG_LONG +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX +#if defined(GRAALVM_PYTHON) + /* For very preliminary testing purposes. Most variables are set the same as PyPy. + The existence of this section does not imply that anything works or is even tested */ + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 1 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(PYPY_VERSION) + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + 
#undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + #endif + #if PY_VERSION_HEX < 0x03090000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(CYTHON_LIMITED_API) + #ifdef Py_LIMITED_API + #undef __PYX_LIMITED_VERSION_HEX + #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API + #endif + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 1 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_CLINE_IN_TRACEBACK + #define CYTHON_CLINE_IN_TRACEBACK 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 1 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #endif + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 1 + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef 
CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #ifndef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 1 + #endif + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 + #endif +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #ifndef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define 
CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) + #endif + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #if PY_VERSION_HEX < 0x030400a1 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #elif !defined(CYTHON_USE_TP_FINALIZE) + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #if PY_VERSION_HEX < 0x030600B1 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #elif !defined(CYTHON_USE_DICT_VERSIONS) + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) + #endif + #if PY_VERSION_HEX < 0x030700A3 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #elif !defined(CYTHON_USE_EXC_INFO_STACK) + #define CYTHON_USE_EXC_INFO_STACK 1 + #endif + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 1 + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if !defined(CYTHON_VECTORCALL) +#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) +#endif +#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(maybe_unused) + #define CYTHON_UNUSED [[maybe_unused]] + #endif + #endif + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef 
CYTHON_MAYBE_UNUSED_VAR + #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_USE_CPP_STD_MOVE + #if defined(__cplusplus) && (\ + __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) + #define CYTHON_USE_CPP_STD_MOVE 1 + #else + #define CYTHON_USE_CPP_STD_MOVE 0 + #endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned short uint16_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + #endif + #endif + #if _MSC_VER < 1300 + #ifdef _WIN64 + typedef unsigned long long __pyx_uintptr_t; + #else + typedef unsigned int __pyx_uintptr_t; + #endif + #else + #ifdef _WIN64 + typedef unsigned __int64 __pyx_uintptr_t; + #else + typedef unsigned __int32 __pyx_uintptr_t; + #endif + #endif +#else + #include + typedef uintptr_t __pyx_uintptr_t; +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif +#ifdef __cplusplus + template + struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; + #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) +#else + #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) +#endif +#if CYTHON_COMPILING_IN_PYPY == 1 + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) +#else + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) +#endif +#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_DefaultClassType PyClass_Type + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + 
#define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_DefaultClassType PyType_Type +#if CYTHON_COMPILING_IN_LIMITED_API + static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *exception_table = NULL; + PyObject *types_module=NULL, *code_type=NULL, *result=NULL; + #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 + PyObject *version_info; + PyObject *py_minor_version = NULL; + #endif + long minor_version = 0; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 + minor_version = 11; + #else + if (!(version_info = PySys_GetObject("version_info"))) goto end; + if (!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; + minor_version = PyLong_AsLong(py_minor_version); + Py_DECREF(py_minor_version); + if (minor_version == -1 && PyErr_Occurred()) goto end; + #endif + if (!(types_module = PyImport_ImportModule("types"))) goto end; + if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; + if (minor_version <= 7) { + (void)p; + result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else if (minor_version <= 10) { + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else { + if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); + } + end: + Py_XDECREF(code_type); + Py_XDECREF(exception_table); + Py_XDECREF(types_module); + if (type) { + PyErr_Restore(type, value, traceback); + } + return result; + } + #ifndef CO_OPTIMIZED + #define CO_OPTIMIZED 0x0001 + #endif + #ifndef CO_NEWLOCALS + #define CO_NEWLOCALS 0x0002 + #endif + #ifndef CO_VARARGS + #define CO_VARARGS 0x0004 + #endif + #ifndef CO_VARKEYWORDS + #define CO_VARKEYWORDS 0x0008 + #endif + #ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x0200 + #endif + #ifndef CO_GENERATOR + #define CO_GENERATOR 0x0020 + #endif + #ifndef CO_COROUTINE + #define CO_COROUTINE 0x0080 + #endif +#elif PY_VERSION_HEX >= 0x030B0000 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyCodeObject *result; + PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); + if (!empty_bytes) return NULL; + result = + #if PY_VERSION_HEX >= 0x030C0000 + PyUnstable_Code_NewWithPosOnlyArgs + #else + PyCode_NewWithPosOnlyArgs + #endif + (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); + Py_DECREF(empty_bytes); + return result; + } +#elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif +#endif +#if PY_VERSION_HEX >= 0x030900A4 || 
defined(Py_IS_TYPE) + #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) +#else + #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) + #define __Pyx_Py_Is(x, y) Py_Is(x, y) +#else + #define __Pyx_Py_Is(x, y) ((x) == (y)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) + #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) +#else + #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) + #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) +#else + #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) + #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) +#else + #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) +#endif +#define __Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) +#if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) +#else + #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) +#endif +#ifndef CO_COROUTINE + #define CO_COROUTINE 0x80 +#endif +#ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x200 +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef Py_TPFLAGS_SEQUENCE + #define Py_TPFLAGS_SEQUENCE 0 +#endif +#ifndef Py_TPFLAGS_MAPPING + #define Py_TPFLAGS_MAPPING 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #if PY_VERSION_HEX >= 0x030d00A4 + # define __Pyx_PyCFunctionFast PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords + #else + # define __Pyx_PyCFunctionFast _PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #endif +#endif +#if CYTHON_METH_FASTCALL + #define __Pyx_METH_FASTCALL METH_FASTCALL + #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast + #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords +#else + #define __Pyx_METH_FASTCALL METH_VARARGS + #define __Pyx_PyCFunction_FastCall PyCFunction + #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords +#endif +#if CYTHON_VECTORCALL + #define __pyx_vectorcallfunc vectorcallfunc + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET + #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) +#elif CYTHON_BACKPORT_VECTORCALL + typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, + size_t nargsf, PyObject *kwnames); + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) +#else + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) +#endif +#if PY_MAJOR_VERSION >= 0x030900B1 +#define 
__Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) +#else +#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) +#endif +#define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) +#elif !CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) +#endif +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) +static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { + return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? NULL : ((PyCFunctionObject*)func)->m_self; +} +#endif +static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) { +#if CYTHON_COMPILING_IN_LIMITED_API + return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; +#else + return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +#endif +} +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) +#if __PYX_LIMITED_VERSION_HEX < 0x030900B1 + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) + typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); +#else + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) + #define __Pyx_PyCMethod PyCMethod +#endif +#ifndef METH_METHOD + #define METH_METHOD 0x200 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyThreadState_Current PyThreadState_Get() +#elif !CYTHON_FAST_THREAD_STATE + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) +{ + void *result; + result = PyModule_GetState(op); + if (!result) + Py_FatalError("Couldn't find the module state"); + return result; +} +#endif +#define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) +#else + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = 
PyThread_create_key(); + return 0; +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif +#if PY_MAJOR_VERSION < 3 + #if CYTHON_COMPILING_IN_PYPY + #if PYPY_VERSION_NUM < 0x07030600 + #if defined(__cplusplus) && __cplusplus >= 201402L + [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] + #elif defined(__GNUC__) || defined(__clang__) + __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) + #elif defined(_MSC_VER) + __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) + #endif + static CYTHON_INLINE int PyGILState_Check(void) { + return 0; + } + #else // PYPY_VERSION_NUM < 0x07030600 + #endif // PYPY_VERSION_NUM < 0x07030600 + #else + static CYTHON_INLINE int PyGILState_Check(void) { + PyThreadState * tstate = _PyThreadState_Current; + return tstate && (tstate == PyGILState_GetThisThreadState()); + } + #endif +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { + PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); + if (res == NULL) PyErr_Clear(); + return res; +} +#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) +#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#else +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { +#if CYTHON_COMPILING_IN_PYPY + return PyDict_GetItem(dict, name); +#else + PyDictEntry *ep; + PyDictObject *mp = (PyDictObject*) dict; + long hash = ((PyStringObject *) name)->ob_shash; + assert(hash != -1); + ep = (mp->ma_lookup)(mp, name, hash); + if (ep == NULL) { + return NULL; + } + return ep->me_value; +#endif +} +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#endif +#if CYTHON_USE_TYPE_SLOTS + #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) + #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) + #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) +#else + #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) + #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) + #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) +#else + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) +#endif +#if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 +#define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ + PyTypeObject *type = Py_TYPE((PyObject*)obj);\ + assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE));\ + PyObject_GC_Del(obj);\ + Py_DECREF(type);\ +} +#else +#define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) + #define __Pyx_PyUnicode_DATA(u) ((void*)u) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) +#elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_READY(op) (0) + #else + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #endif + #define __Pyx_PyUnicode_GET_LENGTH(u) 
PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) + #else + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) + #endif + #endif +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535U : 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #if !defined(PyUnicode_DecodeUnicodeEscape) + #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) + #endif + #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) + #undef PyUnicode_Contains + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) + #endif + #if !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) + #endif + #if !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) + #endif +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#ifndef PyObject_Unicode + #define PyObject_Unicode PyObject_Str +#endif +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#if CYTHON_COMPILING_IN_CPYTHON + #define __Pyx_PySequence_ListKeepNew(obj)\ + (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? __Pyx_NewRef(obj) : PySequence_List(obj)) +#else + #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) +#endif +#if PY_VERSION_HEX >= 0x030900A4 + #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) +#else + #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) +#else + #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) + #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) +#endif +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) +#else + static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { + PyObject *module = PyImport_AddModule(name); + Py_XINCREF(module); + return module; + } +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define __Pyx_Py3Int_Check(op) PyLong_Check(op) + #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define 
PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#else + #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) + #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) + #if !defined(_USE_MATH_DEFINES) + #define _USE_MATH_DEFINES + #endif +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + +#define __PYX_MARK_ERR_POS(f_index, lineno) \ + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } +#define __PYX_ERR(f_index, lineno, Ln_error) \ + { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } + +#ifdef CYTHON_EXTERN_C + #undef __PYX_EXTERN_C + #define __PYX_EXTERN_C CYTHON_EXTERN_C +#elif defined(__PYX_EXTERN_C) + #ifdef _MSC_VER + #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") + #else + #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead. 
+ #endif +#else + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__sqlalchemy__engine___util_cy +#define __PYX_HAVE_API__sqlalchemy__engine___util_cy +/* Early includes */ +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) +#endif +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s); +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char*); +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +#define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_VERSION_HEX >= 0x030C00A7 + #ifndef _PyLong_SIGN_MASK + #define _PyLong_SIGN_MASK 3 + #endif + #ifndef _PyLong_NON_SIZE_BITS + #define _PyLong_NON_SIZE_BITS 3 + #endif + #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) + #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) + #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) + #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) + #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_SignedDigitCount(x)\ + ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) + #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) + #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) + #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) + #else + #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) + #endif + typedef Py_ssize_t __Pyx_compact_pylong; + typedef size_t __Pyx_compact_upylong; + #else + #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) + #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) + #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) + #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) + #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) + #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) + #define __Pyx_PyLong_CompactValue(x)\ + ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? 
-(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0]))
+ typedef sdigit __Pyx_compact_pylong;
+ typedef digit __Pyx_compact_upylong;
+ #endif
+ #if PY_VERSION_HEX >= 0x030C00A5
+ #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit)
+ #else
+ #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit)
+ #endif
+#endif
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+#include <string.h>
+static int __Pyx_sys_getdefaultencoding_not_ascii;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ PyObject* ascii_chars_u = NULL;
+ PyObject* ascii_chars_b = NULL;
+ const char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ if (strcmp(default_encoding_c, "ascii") == 0) {
+ __Pyx_sys_getdefaultencoding_not_ascii = 0;
+ } else {
+ char ascii_chars[128];
+ int c;
+ for (c = 0; c < 128; c++) {
+ ascii_chars[c] = (char) c;
+ }
+ __Pyx_sys_getdefaultencoding_not_ascii = 1;
+ ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
+ if (!ascii_chars_u) goto bad;
+ ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
+ if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
+ PyErr_Format(
+ PyExc_ValueError,
+ "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
+ default_encoding_c);
+ goto bad;
+ }
+ Py_DECREF(ascii_chars_u);
+ Py_DECREF(ascii_chars_b);
+ }
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ Py_XDECREF(ascii_chars_u);
+ Py_XDECREF(ascii_chars_b);
+ return -1;
+}
+#endif
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
+#else
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+#include <string.h>
+static char* __PYX_DEFAULT_STRING_ENCODING;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1);
+ if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
+ strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ return -1;
+}
+#endif
+#endif
+
+
+/* Test for GCC > 2.95 */
+#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
+ #define likely(x) __builtin_expect(!!(x), 1)
+ #define unlikely(x) __builtin_expect(!!(x), 0)
+#else /* !__GNUC__ or GCC < 2.95 */
+ #define likely(x) (x)
+ #define unlikely(x) (x)
+#endif /* __GNUC__ */
+static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }
+
+#if !CYTHON_USE_MODULE_STATE
+static PyObject *__pyx_m = NULL; +#endif +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm = __FILE__; +static const char *__pyx_filename; + +/* #### Code section: filename_table ### */ + +static const char *__pyx_f[] = { + "lib/sqlalchemy/engine/_util_cy.py", +}; +/* #### Code section: utility_code_proto_before_types ### */ +/* ForceInitThreads.proto */ +#ifndef __PYX_FORCE_INIT_THREADS + #define __PYX_FORCE_INIT_THREADS 0 +#endif + +/* #### Code section: numeric_typedefs ### */ +/* #### Code section: complex_type_declarations ### */ +/* #### Code section: type_declarations ### */ + +/*--- Type declarations ---*/ +/* #### Code section: utility_code_proto ### */ + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, Py_ssize_t); + void (*DECREF)(void*, PyObject*, Py_ssize_t); + void (*GOTREF)(void*, PyObject*, Py_ssize_t); + void (*GIVEREF)(void*, PyObject*, Py_ssize_t); + void* (*SetupContext)(const char*, Py_ssize_t, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + } + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)) + #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext() +#endif + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContextNogil() + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) 
Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_Py_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; Py_XDECREF(tmp);\ + } while (0) +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#if PY_VERSION_HEX >= 0x030C00A6 +#define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) +#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? (PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL) +#else +#define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL) +#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type) +#endif +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL) +#define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6 +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static 
CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);
+#else
+#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
+#endif
+
+/* PyObjectGetAttrStrNoError.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name);
+
+/* GetBuiltinName.proto */
+static PyObject *__Pyx_GetBuiltinName(PyObject *name);
+
+/* TupleAndListFromArray.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n);
+static CYTHON_INLINE PyObject* __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n);
+#endif
+
+/* IncludeStringH.proto */
+#include <string.h>
+
+/* BytesEquals.proto */
+static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals);
+
+/* UnicodeEquals.proto */
+static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals);
+
+/* fastcall.proto */
+#if CYTHON_AVOID_BORROWED_REFS
+ #define __Pyx_Arg_VARARGS(args, i) PySequence_GetItem(args, i)
+#elif CYTHON_ASSUME_SAFE_MACROS
+ #define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i)
+#else
+ #define __Pyx_Arg_VARARGS(args, i) PyTuple_GetItem(args, i)
+#endif
+#if CYTHON_AVOID_BORROWED_REFS
+ #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg)
+ #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg)
+#else
+ #define __Pyx_Arg_NewRef_VARARGS(arg) arg
+ #define __Pyx_Arg_XDECREF_VARARGS(arg)
+#endif
+#define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds)
+#define __Pyx_KwValues_VARARGS(args, nargs) NULL
+#define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s)
+#define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw)
+#if CYTHON_METH_FASTCALL
+ #define __Pyx_Arg_FASTCALL(args, i) args[i]
+ #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds)
+ #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs))
+ static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s);
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000
+ CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues);
+ #else
+ #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw)
+ #endif
+ #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs
+ to have the same reference counting */
+ #define __Pyx_Arg_XDECREF_FASTCALL(arg)
+#else
+ #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS
+ #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS
+ #define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS
+ #define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS
+ #define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS
+ #define __Pyx_Arg_NewRef_FASTCALL(arg) __Pyx_Arg_NewRef_VARARGS(arg)
+ #define __Pyx_Arg_XDECREF_FASTCALL(arg) __Pyx_Arg_XDECREF_VARARGS(arg)
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+#define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start)
+#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start)
+#else
+#define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop)
+#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop)
+#endif
+
+/* PyDictVersioning.proto */
+#if CYTHON_USE_DICT_VERSIONS
&& CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) do {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +#define __Pyx_GetModuleGlobalNameUncached(var, name) do {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, + const char* function_name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#if !CYTHON_VECTORCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif +#if !CYTHON_VECTORCALL +#if PY_VERSION_HEX >= 0x03080000 + #include "frameobject.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif + #define __Pxy_PyFrame_Initialize_Offsets() + #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) +#else + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif +#endif +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectFastCall.proto */ +#define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* KeywordStringCheck.proto */ +static int __Pyx_CheckKeywordStrings(PyObject *kw, const char* function_name, int kw_allowed); + +/* RaiseUnexpectedTypeError.proto */ +static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int 
level);
+
+/* ImportFrom.proto */
+static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name);
+
+/* ImportDottedModule.proto */
+static PyObject *__Pyx_ImportDottedModule(PyObject *name, PyObject *parts_tuple);
+#if PY_MAJOR_VERSION >= 3
+static PyObject *__Pyx_ImportDottedModule_WalkParts(PyObject *module, PyObject *name, PyObject *parts_tuple);
+#endif
+
+/* IncludeStructmemberH.proto */
+#include <structmember.h>
+
+/* FixUpExtensionType.proto */
+#if CYTHON_USE_TYPE_SPECS
+static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type);
+#endif
+
+/* FetchSharedCythonModule.proto */
+static PyObject *__Pyx_FetchSharedCythonABIModule(void);
+
+/* FetchCommonType.proto */
+#if !CYTHON_USE_TYPE_SPECS
+static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type);
+#else
+static PyTypeObject* __Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases);
+#endif
+
+/* PyMethodNew.proto */
+#if CYTHON_COMPILING_IN_LIMITED_API
+static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) {
+ PyObject *typesModule=NULL, *methodType=NULL, *result=NULL;
+ CYTHON_UNUSED_VAR(typ);
+ if (!self)
+ return __Pyx_NewRef(func);
+ typesModule = PyImport_ImportModule("types");
+ if (!typesModule) return NULL;
+ methodType = PyObject_GetAttrString(typesModule, "MethodType");
+ Py_DECREF(typesModule);
+ if (!methodType) return NULL;
+ result = PyObject_CallFunctionObjArgs(methodType, func, self, NULL);
+ Py_DECREF(methodType);
+ return result;
+}
+#elif PY_MAJOR_VERSION >= 3
+static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) {
+ CYTHON_UNUSED_VAR(typ);
+ if (!self)
+ return __Pyx_NewRef(func);
+ return PyMethod_New(func, self);
+}
+#else
+ #define __Pyx_PyMethod_New PyMethod_New
+#endif
+
+/* PyVectorcallFastCallDict.proto */
+#if CYTHON_METH_FASTCALL
+static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw);
+#endif
+
+/* CythonFunctionShared.proto */
+#define __Pyx_CyFunction_USED
+#define __Pyx_CYFUNCTION_STATICMETHOD 0x01
+#define __Pyx_CYFUNCTION_CLASSMETHOD 0x02
+#define __Pyx_CYFUNCTION_CCLASS 0x04
+#define __Pyx_CYFUNCTION_COROUTINE 0x08
+#define __Pyx_CyFunction_GetClosure(f)\
+ (((__pyx_CyFunctionObject *) (f))->func_closure)
+#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API
+ #define __Pyx_CyFunction_GetClassObj(f)\
+ (((__pyx_CyFunctionObject *) (f))->func_classobj)
+#else
+ #define __Pyx_CyFunction_GetClassObj(f)\
+ ((PyObject*) ((PyCMethodObject *) (f))->mm_class)
+#endif
+#define __Pyx_CyFunction_SetClassObj(f, classobj)\
+ __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj))
+#define __Pyx_CyFunction_Defaults(type, f)\
+ ((type *)(((__pyx_CyFunctionObject *) (f))->defaults))
+#define __Pyx_CyFunction_SetDefaultsGetter(f, g)\
+ ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g)
+typedef struct {
+#if CYTHON_COMPILING_IN_LIMITED_API
+ PyObject_HEAD
+ PyObject *func;
+#elif PY_VERSION_HEX < 0x030900B1
+ PyCFunctionObject func;
+#else
+ PyCMethodObject func;
+#endif
+#if CYTHON_BACKPORT_VECTORCALL
+ __pyx_vectorcallfunc func_vectorcall;
+#endif
+#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API
+ PyObject *func_weakreflist;
+#endif
+ PyObject *func_dict;
+ PyObject *func_name;
+ PyObject *func_qualname;
+ PyObject *func_doc;
+ PyObject *func_globals;
+ PyObject *func_code;
+ PyObject *func_closure;
+#if PY_VERSION_HEX < 0x030900B1 ||
CYTHON_COMPILING_IN_LIMITED_API + PyObject *func_classobj; +#endif + void *defaults; + int defaults_pyobjects; + size_t defaults_size; + int flags; + PyObject *defaults_tuple; + PyObject *defaults_kwdict; + PyObject *(*defaults_getter)(PyObject *); + PyObject *func_annotations; + PyObject *func_is_coroutine; +} __pyx_CyFunctionObject; +#undef __Pyx_CyOrPyCFunction_Check +#define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType) +#define __Pyx_CyOrPyCFunction_Check(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type) +#define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType) +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc); +#undef __Pyx_IsSameCFunction +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCyOrCFunction(func, cfunc) +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj); +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, + size_t size, + int pyobjects); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, + PyObject *tuple); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, + PyObject *dict); +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, + PyObject *dict); +static int __pyx_CyFunction_init(PyObject *module); +#if CYTHON_METH_FASTCALL +static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +#if CYTHON_BACKPORT_VECTORCALL +#define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall) +#else +#define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall) +#endif +#endif + +/* CythonFunction.proto */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +#if !CYTHON_COMPILING_IN_LIMITED_API +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); +#endif + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* GCCDiagnostics.proto */ +#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#define __Pyx_HAS_GCC_DIAGNOSTIC +#endif + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* FormatTypeName.proto */ +#if CYTHON_COMPILING_IN_LIMITED_API +typedef PyObject *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%U" +static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp); +#define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) +#else +typedef const char *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%.200s" +#define __Pyx_PyType_GetName(tp) ((tp)->tp_name) +#define __Pyx_DECREF_TypeName(obj) +#endif + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static unsigned long __Pyx_get_runtime_version(void); +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +/* #### Code section: module_declarations ### */ + +/* Module declarations from "cython" */ + +/* Module declarations from "sqlalchemy.engine._util_cy" */ +static PyObject *__pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple = 0; 
+static CYTHON_INLINE int __pyx_f_10sqlalchemy_6engine_8_util_cy__is_mapping_or_tuple(PyObject *); /*proto*/ +static int __pyx_f_10sqlalchemy_6engine_8_util_cy__is_contiguous(PyObject *); /*proto*/ +/* #### Code section: typeinfo ### */ +/* #### Code section: before_global_var ### */ +#define __Pyx_MODULE_NAME "sqlalchemy.engine._util_cy" +extern int __pyx_module_is_main_sqlalchemy__engine___util_cy; +int __pyx_module_is_main_sqlalchemy__engine___util_cy = 0; + +/* Implementation of "sqlalchemy.engine._util_cy" */ +/* #### Code section: global_var ### */ +static PyObject *__pyx_builtin_range; +/* #### Code section: string_decls ### */ +static const char __pyx_k__2[] = "."; +static const char __pyx_k__3[] = "*"; +static const char __pyx_k__4[] = ""; +static const char __pyx_k_2_1[] = "2.1"; +static const char __pyx_k_Any[] = "Any"; +static const char __pyx_k__11[] = "?"; +static const char __pyx_k_exc[] = "exc"; +static const char __pyx_k_int[] = "int"; +static const char __pyx_k_bool[] = "bool"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_spec[] = "__spec__"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_util[] = "util"; +static const char __pyx_k_Tuple[] = "Tuple"; +static const char __pyx_k_range[] = "range"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_params[] = "params"; +static const char __pyx_k_result[] = "result"; +static const char __pyx_k_return[] = "return"; +static const char __pyx_k_typing[] = "typing"; +static const char __pyx_k_Mapping[] = "Mapping"; +static const char __pyx_k_indexes[] = "indexes"; +static const char __pyx_k_Optional[] = "Optional"; +static const char __pyx_k_operator[] = "operator"; +static const char __pyx_k_max_index[] = "max_index"; +static const char __pyx_k_interfaces[] = "interfaces"; +static const char __pyx_k_itemgetter[] = "itemgetter"; +static const char __pyx_k_is_compiled[] = "_is_compiled"; +static const char __pyx_k_tuplegetter[] = "tuplegetter"; +static const char __pyx_k_initializing[] = "_initializing"; +static const char __pyx_k_is_coroutine[] = "_is_coroutine"; +static const char __pyx_k_ArgumentError[] = "ArgumentError"; +static const char __pyx_k_TYPE_CHECKING[] = "TYPE_CHECKING"; +static const char __pyx_k_TupleGetterType[] = "_TupleGetterType"; +static const char __pyx_k_collections_abc[] = "collections.abc"; +static const char __pyx_k_warn_deprecated[] = "warn_deprecated"; +static const char __pyx_k_distill_params_20[] = "_distill_params_20"; +static const char __pyx_k_asyncio_coroutines[] = "asyncio.coroutines"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_distill_raw_params[] = "_distill_raw_params"; +static const char __pyx_k_CoreAnyExecuteParams[] = "_CoreAnyExecuteParams"; +static const char __pyx_k_DBAPIAnyExecuteParams[] = "_DBAPIAnyExecuteParams"; +static const char __pyx_k_CoreMultiExecuteParams[] = "_CoreMultiExecuteParams"; +static const char __pyx_k_DBAPIMultiExecuteParams[] = "_DBAPIMultiExecuteParams"; +static const char __pyx_k_sqlalchemy_engine__util_cy[] = "sqlalchemy.engine._util_cy"; +static const char __pyx_k_Optional__CoreAnyExecuteParams[] = "Optional[_CoreAnyExecuteParams]"; +static const char __pyx_k_Empty_parameter_sequence_passed[] = "Empty parameter sequence passed to execute(). 
This use is deprecated and will raise an exception in a future SQLAlchemy release"; +static const char __pyx_k_List_argument_must_consist_only[] = "List argument must consist only of tuples or dictionaries"; +static const char __pyx_k_Optional__DBAPIAnyExecuteParams[] = "Optional[_DBAPIAnyExecuteParams]"; +static const char __pyx_k_lib_sqlalchemy_engine__util_cy_p[] = "lib/sqlalchemy/engine/_util_cy.py"; +static const char __pyx_k_mapping_or_list_expected_for_par[] = "mapping or list expected for parameters"; +static const char __pyx_k_mapping_or_sequence_expected_for[] = "mapping or sequence expected for parameters"; +/* #### Code section: decls ### */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_8_util_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_8_util_cy_2_distill_params_20(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_params); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_8_util_cy_4_distill_raw_params(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_params); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_6engine_8_util_cy_6tuplegetter(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_indexes); /* proto */ +/* #### Code section: late_includes ### */ +/* #### Code section: module_state ### */ +typedef struct { + PyObject *__pyx_d; + PyObject *__pyx_b; + PyObject *__pyx_cython_runtime; + PyObject *__pyx_empty_tuple; + PyObject *__pyx_empty_bytes; + PyObject *__pyx_empty_unicode; + #ifdef __Pyx_CyFunction_USED + PyTypeObject *__pyx_CyFunctionType; + #endif + #ifdef __Pyx_FusedFunction_USED + PyTypeObject *__pyx_FusedFunctionType; + #endif + #ifdef __Pyx_Generator_USED + PyTypeObject *__pyx_GeneratorType; + #endif + #ifdef __Pyx_IterableCoroutine_USED + PyTypeObject *__pyx_IterableCoroutineType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineAwaitType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineType; + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + #endif + PyObject *__pyx_kp_u_2_1; + PyObject *__pyx_n_s_Any; + PyObject *__pyx_n_s_ArgumentError; + PyObject *__pyx_n_s_CoreAnyExecuteParams; + PyObject *__pyx_n_s_CoreMultiExecuteParams; + PyObject *__pyx_n_s_DBAPIAnyExecuteParams; + PyObject *__pyx_n_s_DBAPIMultiExecuteParams; + PyObject *__pyx_kp_u_Empty_parameter_sequence_passed; + PyObject *__pyx_kp_u_List_argument_must_consist_only; + PyObject *__pyx_n_s_Mapping; + PyObject *__pyx_n_s_Optional; + PyObject *__pyx_kp_s_Optional__CoreAnyExecuteParams; + PyObject *__pyx_kp_s_Optional__DBAPIAnyExecuteParams; + PyObject *__pyx_n_s_TYPE_CHECKING; + PyObject *__pyx_n_s_Tuple; + PyObject *__pyx_n_s_TupleGetterType; + PyObject *__pyx_n_s__11; + PyObject *__pyx_kp_u__2; + PyObject *__pyx_n_s__3; + PyObject *__pyx_n_s__4; + PyObject *__pyx_n_s_asyncio_coroutines; + PyObject *__pyx_n_s_bool; + PyObject *__pyx_n_s_cline_in_traceback; + PyObject *__pyx_n_s_collections_abc; + PyObject *__pyx_n_s_distill_params_20; + PyObject *__pyx_n_s_distill_raw_params; + PyObject *__pyx_n_s_exc; + PyObject *__pyx_n_s_import; + PyObject *__pyx_n_s_indexes; + PyObject *__pyx_n_s_initializing; + PyObject *__pyx_n_s_int; + PyObject *__pyx_n_s_interfaces; + PyObject *__pyx_n_s_is_compiled; + PyObject *__pyx_n_s_is_coroutine; + PyObject *__pyx_n_s_itemgetter; + PyObject *__pyx_kp_s_lib_sqlalchemy_engine__util_cy_p; + PyObject *__pyx_n_s_main; + PyObject *__pyx_kp_u_mapping_or_list_expected_for_par; + PyObject 
*__pyx_kp_u_mapping_or_sequence_expected_for; + PyObject *__pyx_n_s_max_index; + PyObject *__pyx_n_s_name; + PyObject *__pyx_n_s_operator; + PyObject *__pyx_n_s_params; + PyObject *__pyx_n_s_range; + PyObject *__pyx_n_s_result; + PyObject *__pyx_n_s_return; + PyObject *__pyx_n_s_spec; + PyObject *__pyx_n_s_sqlalchemy_engine__util_cy; + PyObject *__pyx_n_s_test; + PyObject *__pyx_n_s_tuplegetter; + PyObject *__pyx_n_s_typing; + PyObject *__pyx_n_s_util; + PyObject *__pyx_n_s_warn_deprecated; + PyObject *__pyx_int_1; + PyObject *__pyx_tuple_; + PyObject *__pyx_tuple__6; + PyObject *__pyx_tuple__9; + PyObject *__pyx_codeobj__5; + PyObject *__pyx_codeobj__7; + PyObject *__pyx_codeobj__8; + PyObject *__pyx_codeobj__10; +} __pyx_mstate; + +#if CYTHON_USE_MODULE_STATE +#ifdef __cplusplus +namespace { + extern struct PyModuleDef __pyx_moduledef; +} /* anonymous namespace */ +#else +static struct PyModuleDef __pyx_moduledef; +#endif + +#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) + +#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) + +#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) +#else +static __pyx_mstate __pyx_mstate_global_static = +#ifdef __cplusplus + {}; +#else + {0}; +#endif +static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; +#endif +/* #### Code section: module_state_clear ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_clear(PyObject *m) { + __pyx_mstate *clear_module_state = __pyx_mstate(m); + if (!clear_module_state) return 0; + Py_CLEAR(clear_module_state->__pyx_d); + Py_CLEAR(clear_module_state->__pyx_b); + Py_CLEAR(clear_module_state->__pyx_cython_runtime); + Py_CLEAR(clear_module_state->__pyx_empty_tuple); + Py_CLEAR(clear_module_state->__pyx_empty_bytes); + Py_CLEAR(clear_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_CLEAR(clear_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); + #endif + Py_CLEAR(clear_module_state->__pyx_kp_u_2_1); + Py_CLEAR(clear_module_state->__pyx_n_s_Any); + Py_CLEAR(clear_module_state->__pyx_n_s_ArgumentError); + Py_CLEAR(clear_module_state->__pyx_n_s_CoreAnyExecuteParams); + Py_CLEAR(clear_module_state->__pyx_n_s_CoreMultiExecuteParams); + Py_CLEAR(clear_module_state->__pyx_n_s_DBAPIAnyExecuteParams); + Py_CLEAR(clear_module_state->__pyx_n_s_DBAPIMultiExecuteParams); + Py_CLEAR(clear_module_state->__pyx_kp_u_Empty_parameter_sequence_passed); + Py_CLEAR(clear_module_state->__pyx_kp_u_List_argument_must_consist_only); + Py_CLEAR(clear_module_state->__pyx_n_s_Mapping); + Py_CLEAR(clear_module_state->__pyx_n_s_Optional); + Py_CLEAR(clear_module_state->__pyx_kp_s_Optional__CoreAnyExecuteParams); + Py_CLEAR(clear_module_state->__pyx_kp_s_Optional__DBAPIAnyExecuteParams); + Py_CLEAR(clear_module_state->__pyx_n_s_TYPE_CHECKING); + Py_CLEAR(clear_module_state->__pyx_n_s_Tuple); + Py_CLEAR(clear_module_state->__pyx_n_s_TupleGetterType); + Py_CLEAR(clear_module_state->__pyx_n_s__11); + Py_CLEAR(clear_module_state->__pyx_kp_u__2); + Py_CLEAR(clear_module_state->__pyx_n_s__3); + Py_CLEAR(clear_module_state->__pyx_n_s__4); + Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); + Py_CLEAR(clear_module_state->__pyx_n_s_bool); + Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); + Py_CLEAR(clear_module_state->__pyx_n_s_collections_abc); + Py_CLEAR(clear_module_state->__pyx_n_s_distill_params_20); + 
Py_CLEAR(clear_module_state->__pyx_n_s_distill_raw_params); + Py_CLEAR(clear_module_state->__pyx_n_s_exc); + Py_CLEAR(clear_module_state->__pyx_n_s_import); + Py_CLEAR(clear_module_state->__pyx_n_s_indexes); + Py_CLEAR(clear_module_state->__pyx_n_s_initializing); + Py_CLEAR(clear_module_state->__pyx_n_s_int); + Py_CLEAR(clear_module_state->__pyx_n_s_interfaces); + Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); + Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); + Py_CLEAR(clear_module_state->__pyx_n_s_itemgetter); + Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_engine__util_cy_p); + Py_CLEAR(clear_module_state->__pyx_n_s_main); + Py_CLEAR(clear_module_state->__pyx_kp_u_mapping_or_list_expected_for_par); + Py_CLEAR(clear_module_state->__pyx_kp_u_mapping_or_sequence_expected_for); + Py_CLEAR(clear_module_state->__pyx_n_s_max_index); + Py_CLEAR(clear_module_state->__pyx_n_s_name); + Py_CLEAR(clear_module_state->__pyx_n_s_operator); + Py_CLEAR(clear_module_state->__pyx_n_s_params); + Py_CLEAR(clear_module_state->__pyx_n_s_range); + Py_CLEAR(clear_module_state->__pyx_n_s_result); + Py_CLEAR(clear_module_state->__pyx_n_s_return); + Py_CLEAR(clear_module_state->__pyx_n_s_spec); + Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_engine__util_cy); + Py_CLEAR(clear_module_state->__pyx_n_s_test); + Py_CLEAR(clear_module_state->__pyx_n_s_tuplegetter); + Py_CLEAR(clear_module_state->__pyx_n_s_typing); + Py_CLEAR(clear_module_state->__pyx_n_s_util); + Py_CLEAR(clear_module_state->__pyx_n_s_warn_deprecated); + Py_CLEAR(clear_module_state->__pyx_int_1); + Py_CLEAR(clear_module_state->__pyx_tuple_); + Py_CLEAR(clear_module_state->__pyx_tuple__6); + Py_CLEAR(clear_module_state->__pyx_tuple__9); + Py_CLEAR(clear_module_state->__pyx_codeobj__5); + Py_CLEAR(clear_module_state->__pyx_codeobj__7); + Py_CLEAR(clear_module_state->__pyx_codeobj__8); + Py_CLEAR(clear_module_state->__pyx_codeobj__10); + return 0; +} +#endif +/* #### Code section: module_state_traverse ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { + __pyx_mstate *traverse_module_state = __pyx_mstate(m); + if (!traverse_module_state) return 0; + Py_VISIT(traverse_module_state->__pyx_d); + Py_VISIT(traverse_module_state->__pyx_b); + Py_VISIT(traverse_module_state->__pyx_cython_runtime); + Py_VISIT(traverse_module_state->__pyx_empty_tuple); + Py_VISIT(traverse_module_state->__pyx_empty_bytes); + Py_VISIT(traverse_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_VISIT(traverse_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); + #endif + Py_VISIT(traverse_module_state->__pyx_kp_u_2_1); + Py_VISIT(traverse_module_state->__pyx_n_s_Any); + Py_VISIT(traverse_module_state->__pyx_n_s_ArgumentError); + Py_VISIT(traverse_module_state->__pyx_n_s_CoreAnyExecuteParams); + Py_VISIT(traverse_module_state->__pyx_n_s_CoreMultiExecuteParams); + Py_VISIT(traverse_module_state->__pyx_n_s_DBAPIAnyExecuteParams); + Py_VISIT(traverse_module_state->__pyx_n_s_DBAPIMultiExecuteParams); + Py_VISIT(traverse_module_state->__pyx_kp_u_Empty_parameter_sequence_passed); + Py_VISIT(traverse_module_state->__pyx_kp_u_List_argument_must_consist_only); + Py_VISIT(traverse_module_state->__pyx_n_s_Mapping); + Py_VISIT(traverse_module_state->__pyx_n_s_Optional); + Py_VISIT(traverse_module_state->__pyx_kp_s_Optional__CoreAnyExecuteParams); + 
Py_VISIT(traverse_module_state->__pyx_kp_s_Optional__DBAPIAnyExecuteParams); + Py_VISIT(traverse_module_state->__pyx_n_s_TYPE_CHECKING); + Py_VISIT(traverse_module_state->__pyx_n_s_Tuple); + Py_VISIT(traverse_module_state->__pyx_n_s_TupleGetterType); + Py_VISIT(traverse_module_state->__pyx_n_s__11); + Py_VISIT(traverse_module_state->__pyx_kp_u__2); + Py_VISIT(traverse_module_state->__pyx_n_s__3); + Py_VISIT(traverse_module_state->__pyx_n_s__4); + Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); + Py_VISIT(traverse_module_state->__pyx_n_s_bool); + Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); + Py_VISIT(traverse_module_state->__pyx_n_s_collections_abc); + Py_VISIT(traverse_module_state->__pyx_n_s_distill_params_20); + Py_VISIT(traverse_module_state->__pyx_n_s_distill_raw_params); + Py_VISIT(traverse_module_state->__pyx_n_s_exc); + Py_VISIT(traverse_module_state->__pyx_n_s_import); + Py_VISIT(traverse_module_state->__pyx_n_s_indexes); + Py_VISIT(traverse_module_state->__pyx_n_s_initializing); + Py_VISIT(traverse_module_state->__pyx_n_s_int); + Py_VISIT(traverse_module_state->__pyx_n_s_interfaces); + Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); + Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); + Py_VISIT(traverse_module_state->__pyx_n_s_itemgetter); + Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_engine__util_cy_p); + Py_VISIT(traverse_module_state->__pyx_n_s_main); + Py_VISIT(traverse_module_state->__pyx_kp_u_mapping_or_list_expected_for_par); + Py_VISIT(traverse_module_state->__pyx_kp_u_mapping_or_sequence_expected_for); + Py_VISIT(traverse_module_state->__pyx_n_s_max_index); + Py_VISIT(traverse_module_state->__pyx_n_s_name); + Py_VISIT(traverse_module_state->__pyx_n_s_operator); + Py_VISIT(traverse_module_state->__pyx_n_s_params); + Py_VISIT(traverse_module_state->__pyx_n_s_range); + Py_VISIT(traverse_module_state->__pyx_n_s_result); + Py_VISIT(traverse_module_state->__pyx_n_s_return); + Py_VISIT(traverse_module_state->__pyx_n_s_spec); + Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_engine__util_cy); + Py_VISIT(traverse_module_state->__pyx_n_s_test); + Py_VISIT(traverse_module_state->__pyx_n_s_tuplegetter); + Py_VISIT(traverse_module_state->__pyx_n_s_typing); + Py_VISIT(traverse_module_state->__pyx_n_s_util); + Py_VISIT(traverse_module_state->__pyx_n_s_warn_deprecated); + Py_VISIT(traverse_module_state->__pyx_int_1); + Py_VISIT(traverse_module_state->__pyx_tuple_); + Py_VISIT(traverse_module_state->__pyx_tuple__6); + Py_VISIT(traverse_module_state->__pyx_tuple__9); + Py_VISIT(traverse_module_state->__pyx_codeobj__5); + Py_VISIT(traverse_module_state->__pyx_codeobj__7); + Py_VISIT(traverse_module_state->__pyx_codeobj__8); + Py_VISIT(traverse_module_state->__pyx_codeobj__10); + return 0; +} +#endif +/* #### Code section: module_state_defines ### */ +#define __pyx_d __pyx_mstate_global->__pyx_d +#define __pyx_b __pyx_mstate_global->__pyx_b +#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime +#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple +#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes +#define __pyx_empty_unicode __pyx_mstate_global->__pyx_empty_unicode +#ifdef __Pyx_CyFunction_USED +#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType +#endif +#ifdef __Pyx_FusedFunction_USED +#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType +#endif +#ifdef __Pyx_Generator_USED +#define __pyx_GeneratorType 
__pyx_mstate_global->__pyx_GeneratorType +#endif +#ifdef __Pyx_IterableCoroutine_USED +#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#define __pyx_kp_u_2_1 __pyx_mstate_global->__pyx_kp_u_2_1 +#define __pyx_n_s_Any __pyx_mstate_global->__pyx_n_s_Any +#define __pyx_n_s_ArgumentError __pyx_mstate_global->__pyx_n_s_ArgumentError +#define __pyx_n_s_CoreAnyExecuteParams __pyx_mstate_global->__pyx_n_s_CoreAnyExecuteParams +#define __pyx_n_s_CoreMultiExecuteParams __pyx_mstate_global->__pyx_n_s_CoreMultiExecuteParams +#define __pyx_n_s_DBAPIAnyExecuteParams __pyx_mstate_global->__pyx_n_s_DBAPIAnyExecuteParams +#define __pyx_n_s_DBAPIMultiExecuteParams __pyx_mstate_global->__pyx_n_s_DBAPIMultiExecuteParams +#define __pyx_kp_u_Empty_parameter_sequence_passed __pyx_mstate_global->__pyx_kp_u_Empty_parameter_sequence_passed +#define __pyx_kp_u_List_argument_must_consist_only __pyx_mstate_global->__pyx_kp_u_List_argument_must_consist_only +#define __pyx_n_s_Mapping __pyx_mstate_global->__pyx_n_s_Mapping +#define __pyx_n_s_Optional __pyx_mstate_global->__pyx_n_s_Optional +#define __pyx_kp_s_Optional__CoreAnyExecuteParams __pyx_mstate_global->__pyx_kp_s_Optional__CoreAnyExecuteParams +#define __pyx_kp_s_Optional__DBAPIAnyExecuteParams __pyx_mstate_global->__pyx_kp_s_Optional__DBAPIAnyExecuteParams +#define __pyx_n_s_TYPE_CHECKING __pyx_mstate_global->__pyx_n_s_TYPE_CHECKING +#define __pyx_n_s_Tuple __pyx_mstate_global->__pyx_n_s_Tuple +#define __pyx_n_s_TupleGetterType __pyx_mstate_global->__pyx_n_s_TupleGetterType +#define __pyx_n_s__11 __pyx_mstate_global->__pyx_n_s__11 +#define __pyx_kp_u__2 __pyx_mstate_global->__pyx_kp_u__2 +#define __pyx_n_s__3 __pyx_mstate_global->__pyx_n_s__3 +#define __pyx_n_s__4 __pyx_mstate_global->__pyx_n_s__4 +#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines +#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool +#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback +#define __pyx_n_s_collections_abc __pyx_mstate_global->__pyx_n_s_collections_abc +#define __pyx_n_s_distill_params_20 __pyx_mstate_global->__pyx_n_s_distill_params_20 +#define __pyx_n_s_distill_raw_params __pyx_mstate_global->__pyx_n_s_distill_raw_params +#define __pyx_n_s_exc __pyx_mstate_global->__pyx_n_s_exc +#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import +#define __pyx_n_s_indexes __pyx_mstate_global->__pyx_n_s_indexes +#define __pyx_n_s_initializing __pyx_mstate_global->__pyx_n_s_initializing +#define __pyx_n_s_int __pyx_mstate_global->__pyx_n_s_int +#define __pyx_n_s_interfaces __pyx_mstate_global->__pyx_n_s_interfaces +#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled +#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine +#define __pyx_n_s_itemgetter __pyx_mstate_global->__pyx_n_s_itemgetter +#define __pyx_kp_s_lib_sqlalchemy_engine__util_cy_p __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_engine__util_cy_p +#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main +#define __pyx_kp_u_mapping_or_list_expected_for_par __pyx_mstate_global->__pyx_kp_u_mapping_or_list_expected_for_par +#define 
__pyx_kp_u_mapping_or_sequence_expected_for __pyx_mstate_global->__pyx_kp_u_mapping_or_sequence_expected_for +#define __pyx_n_s_max_index __pyx_mstate_global->__pyx_n_s_max_index +#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name +#define __pyx_n_s_operator __pyx_mstate_global->__pyx_n_s_operator +#define __pyx_n_s_params __pyx_mstate_global->__pyx_n_s_params +#define __pyx_n_s_range __pyx_mstate_global->__pyx_n_s_range +#define __pyx_n_s_result __pyx_mstate_global->__pyx_n_s_result +#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return +#define __pyx_n_s_spec __pyx_mstate_global->__pyx_n_s_spec +#define __pyx_n_s_sqlalchemy_engine__util_cy __pyx_mstate_global->__pyx_n_s_sqlalchemy_engine__util_cy +#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test +#define __pyx_n_s_tuplegetter __pyx_mstate_global->__pyx_n_s_tuplegetter +#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing +#define __pyx_n_s_util __pyx_mstate_global->__pyx_n_s_util +#define __pyx_n_s_warn_deprecated __pyx_mstate_global->__pyx_n_s_warn_deprecated +#define __pyx_int_1 __pyx_mstate_global->__pyx_int_1 +#define __pyx_tuple_ __pyx_mstate_global->__pyx_tuple_ +#define __pyx_tuple__6 __pyx_mstate_global->__pyx_tuple__6 +#define __pyx_tuple__9 __pyx_mstate_global->__pyx_tuple__9 +#define __pyx_codeobj__5 __pyx_mstate_global->__pyx_codeobj__5 +#define __pyx_codeobj__7 __pyx_mstate_global->__pyx_codeobj__7 +#define __pyx_codeobj__8 __pyx_mstate_global->__pyx_codeobj__8 +#define __pyx_codeobj__10 __pyx_mstate_global->__pyx_codeobj__10 +/* #### Code section: module_code ### */ + +/* "sqlalchemy/engine/_util_cy.py":38 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +PyDoc_STRVAR(__pyx_doc_10sqlalchemy_6engine_8_util_cy__is_compiled, "Utility function to indicate if this module is compiled or not."); +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_8_util_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_6engine_8_util_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_6engine_8_util_cy__is_compiled}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_6engine_8_util_cy__is_compiled(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_8_util_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled", 1); + + /* "sqlalchemy/engine/_util_cy.py":40 + * def _is_compiled() -> bool: + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "sqlalchemy/engine/_util_cy.py":38 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate 
if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_util_cy.py":48 + * + * + * @cython.inline # <<<<<<<<<<<<<< + * @cython.cfunc + * def _is_mapping_or_tuple(value: object, /) -> cython.bint: + */ + +static CYTHON_INLINE int __pyx_f_10sqlalchemy_6engine_8_util_cy__is_mapping_or_tuple(PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_is_mapping_or_tuple", 1); + + /* "sqlalchemy/engine/_util_cy.py":52 + * def _is_mapping_or_tuple(value: object, /) -> cython.bint: + * return ( + * isinstance(value, dict) # <<<<<<<<<<<<<< + * or isinstance(value, tuple) + * or isinstance(value, Mapping) + */ + __pyx_t_2 = PyDict_Check(__pyx_v_value); + if (!__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L3_bool_binop_done; + } + + /* "sqlalchemy/engine/_util_cy.py":53 + * return ( + * isinstance(value, dict) + * or isinstance(value, tuple) # <<<<<<<<<<<<<< + * or isinstance(value, Mapping) + * # only do immutabledict or abc.__instancecheck__ for Mapping after + */ + __pyx_t_2 = PyTuple_Check(__pyx_v_value); + if (!__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L3_bool_binop_done; + } + + /* "sqlalchemy/engine/_util_cy.py":54 + * isinstance(value, dict) + * or isinstance(value, tuple) + * or isinstance(value, Mapping) # <<<<<<<<<<<<<< + * # only do immutabledict or abc.__instancecheck__ for Mapping after + * # we've checked for plain dictionaries and would otherwise raise + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 54, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = PyObject_IsInstance(__pyx_v_value, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 54, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_1 = __pyx_t_2; + __pyx_L3_bool_binop_done:; + __pyx_r = __pyx_t_1; + goto __pyx_L0; + + /* "sqlalchemy/engine/_util_cy.py":48 + * + * + * @cython.inline # <<<<<<<<<<<<<< + * @cython.cfunc + * def _is_mapping_or_tuple(value: object, /) -> cython.bint: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.engine._util_cy._is_mapping_or_tuple", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_util_cy.py":61 + * + * # _is_mapping_or_tuple could be inlined if pure python perf is a problem + * def _distill_params_20( # <<<<<<<<<<<<<< + * params: Optional[_CoreAnyExecuteParams], + * ) -> _CoreMultiExecuteParams: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_3_distill_params_20(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_8_util_cy_3_distill_params_20 = {"_distill_params_20", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_8_util_cy_3_distill_params_20, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject 
*__pyx_pw_10sqlalchemy_6engine_8_util_cy_3_distill_params_20(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_params = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_distill_params_20 (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_params,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_params)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 61, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "_distill_params_20") < 0)) __PYX_ERR(0, 61, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_params = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("_distill_params_20", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 61, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._util_cy._distill_params_20", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_8_util_cy_2_distill_params_20(__pyx_self, __pyx_v_params); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_8_util_cy_2_distill_params_20(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_params) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + unsigned int __pyx_t_7; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_distill_params_20", 1); + + /* "sqlalchemy/engine/_util_cy.py":64 + * 
params: Optional[_CoreAnyExecuteParams], + * ) -> _CoreMultiExecuteParams: + * if params is None: # <<<<<<<<<<<<<< + * return _Empty_Tuple + * # Assume list is more likely than tuple + */ + __pyx_t_1 = (__pyx_v_params == Py_None); + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_util_cy.py":65 + * ) -> _CoreMultiExecuteParams: + * if params is None: + * return _Empty_Tuple # <<<<<<<<<<<<<< + * # Assume list is more likely than tuple + * elif isinstance(params, list) or isinstance(params, tuple): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple); + __pyx_r = __pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple; + goto __pyx_L0; + + /* "sqlalchemy/engine/_util_cy.py":64 + * params: Optional[_CoreAnyExecuteParams], + * ) -> _CoreMultiExecuteParams: + * if params is None: # <<<<<<<<<<<<<< + * return _Empty_Tuple + * # Assume list is more likely than tuple + */ + } + + /* "sqlalchemy/engine/_util_cy.py":67 + * return _Empty_Tuple + * # Assume list is more likely than tuple + * elif isinstance(params, list) or isinstance(params, tuple): # <<<<<<<<<<<<<< + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + * if len(params) == 0: + */ + __pyx_t_2 = PyList_Check(__pyx_v_params); + if (!__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_2 = PyTuple_Check(__pyx_v_params); + __pyx_t_1 = __pyx_t_2; + __pyx_L4_bool_binop_done:; + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_util_cy.py":69 + * elif isinstance(params, list) or isinstance(params, tuple): + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + * if len(params) == 0: # <<<<<<<<<<<<<< + * warn_deprecated( + * "Empty parameter sequence passed to execute(). " + */ + __pyx_t_3 = PyObject_Length(__pyx_v_params); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(0, 69, __pyx_L1_error) + __pyx_t_1 = (__pyx_t_3 == 0); + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_util_cy.py":70 + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + * if len(params) == 0: + * warn_deprecated( # <<<<<<<<<<<<<< + * "Empty parameter sequence passed to execute(). " + * "This use is deprecated and will raise an exception in a " + */ + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_warn_deprecated); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "sqlalchemy/engine/_util_cy.py":69 + * elif isinstance(params, list) or isinstance(params, tuple): + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + * if len(params) == 0: # <<<<<<<<<<<<<< + * warn_deprecated( + * "Empty parameter sequence passed to execute(). 
" + */ + goto __pyx_L6; + } + + /* "sqlalchemy/engine/_util_cy.py":76 + * "2.1", + * ) + * elif not _is_mapping_or_tuple(params[0]): # <<<<<<<<<<<<<< + * raise exc.ArgumentError( + * "List argument must consist only of tuples or dictionaries" + */ + __pyx_t_5 = __Pyx_GetItemInt(__pyx_v_params, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 76, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_1 = __pyx_f_10sqlalchemy_6engine_8_util_cy__is_mapping_or_tuple(__pyx_t_5); if (unlikely(__pyx_t_1 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 76, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_2 = (!__pyx_t_1); + if (unlikely(__pyx_t_2)) { + + /* "sqlalchemy/engine/_util_cy.py":77 + * ) + * elif not _is_mapping_or_tuple(params[0]): + * raise exc.ArgumentError( # <<<<<<<<<<<<<< + * "List argument must consist only of tuples or dictionaries" + * ) + */ + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_exc); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 77, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_ArgumentError); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 77, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + __pyx_t_7 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_7 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_kp_u_List_argument_must_consist_only}; + __pyx_t_5 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_7, 1+__pyx_t_7); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 77, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __PYX_ERR(0, 77, __pyx_L1_error) + + /* "sqlalchemy/engine/_util_cy.py":76 + * "2.1", + * ) + * elif not _is_mapping_or_tuple(params[0]): # <<<<<<<<<<<<<< + * raise exc.ArgumentError( + * "List argument must consist only of tuples or dictionaries" + */ + } + __pyx_L6:; + + /* "sqlalchemy/engine/_util_cy.py":80 + * "List argument must consist only of tuples or dictionaries" + * ) + * return params # <<<<<<<<<<<<<< + * elif isinstance(params, dict) or isinstance(params, Mapping): + * # only do immutabledict or abc.__instancecheck__ for Mapping after + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_params); + __pyx_r = __pyx_v_params; + goto __pyx_L0; + + /* "sqlalchemy/engine/_util_cy.py":67 + * return _Empty_Tuple + * # Assume list is more likely than tuple + * elif isinstance(params, list) or isinstance(params, tuple): # <<<<<<<<<<<<<< + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + * if len(params) == 0: + */ + } + + /* "sqlalchemy/engine/_util_cy.py":81 + * ) + * return params + * elif isinstance(params, dict) or isinstance(params, Mapping): # <<<<<<<<<<<<<< + * # only do immutabledict or abc.__instancecheck__ for Mapping after + * # we've checked for plain dictionaries and would otherwise raise + */ + __pyx_t_1 = PyDict_Check(__pyx_v_params); + if (!__pyx_t_1) { + } else { + __pyx_t_2 = __pyx_t_1; + goto __pyx_L7_bool_binop_done; + } + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 
81, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_1 = PyObject_IsInstance(__pyx_v_params, __pyx_t_5); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 81, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_2 = __pyx_t_1; + __pyx_L7_bool_binop_done:; + if (likely(__pyx_t_2)) { + + /* "sqlalchemy/engine/_util_cy.py":84 + * # only do immutabledict or abc.__instancecheck__ for Mapping after + * # we've checked for plain dictionaries and would otherwise raise + * return [params] # <<<<<<<<<<<<<< + * else: + * raise exc.ArgumentError("mapping or list expected for parameters") + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_5 = PyList_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 84, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_params); + __Pyx_GIVEREF(__pyx_v_params); + if (__Pyx_PyList_SET_ITEM(__pyx_t_5, 0, __pyx_v_params)) __PYX_ERR(0, 84, __pyx_L1_error); + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_util_cy.py":81 + * ) + * return params + * elif isinstance(params, dict) or isinstance(params, Mapping): # <<<<<<<<<<<<<< + * # only do immutabledict or abc.__instancecheck__ for Mapping after + * # we've checked for plain dictionaries and would otherwise raise + */ + } + + /* "sqlalchemy/engine/_util_cy.py":86 + * return [params] + * else: + * raise exc.ArgumentError("mapping or list expected for parameters") # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_exc); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_ArgumentError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = NULL; + __pyx_t_7 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_7 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_6, __pyx_kp_u_mapping_or_list_expected_for_par}; + __pyx_t_5 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_7, 1+__pyx_t_7); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __PYX_ERR(0, 86, __pyx_L1_error) + } + + /* "sqlalchemy/engine/_util_cy.py":61 + * + * # _is_mapping_or_tuple could be inlined if pure python perf is a problem + * def _distill_params_20( # <<<<<<<<<<<<<< + * params: Optional[_CoreAnyExecuteParams], + * ) -> _CoreMultiExecuteParams: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("sqlalchemy.engine._util_cy._distill_params_20", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_util_cy.py":89 + * + * + * def _distill_raw_params( # <<<<<<<<<<<<<< + * params: Optional[_DBAPIAnyExecuteParams], + * ) -> _DBAPIMultiExecuteParams: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_5_distill_raw_params(PyObject 
*__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_8_util_cy_5_distill_raw_params = {"_distill_raw_params", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_8_util_cy_5_distill_raw_params, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_5_distill_raw_params(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_params = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_distill_raw_params (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_params,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_params)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 89, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "_distill_raw_params") < 0)) __PYX_ERR(0, 89, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_params = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("_distill_raw_params", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 89, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.engine._util_cy._distill_raw_params", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_8_util_cy_4_distill_raw_params(__pyx_self, __pyx_v_params); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_10sqlalchemy_6engine_8_util_cy_4_distill_raw_params(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_params) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + Py_ssize_t __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + unsigned int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_distill_raw_params", 1); + + /* "sqlalchemy/engine/_util_cy.py":92 + * params: Optional[_DBAPIAnyExecuteParams], + * ) -> _DBAPIMultiExecuteParams: + * if params is None: # <<<<<<<<<<<<<< + * return _Empty_Tuple + * elif isinstance(params, list): + */ + __pyx_t_1 = (__pyx_v_params == Py_None); + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_util_cy.py":93 + * ) -> _DBAPIMultiExecuteParams: + * if params is None: + * return _Empty_Tuple # <<<<<<<<<<<<<< + * elif isinstance(params, list): + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple); + __pyx_r = __pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple; + goto __pyx_L0; + + /* "sqlalchemy/engine/_util_cy.py":92 + * params: Optional[_DBAPIAnyExecuteParams], + * ) -> _DBAPIMultiExecuteParams: + * if params is None: # <<<<<<<<<<<<<< + * return _Empty_Tuple + * elif isinstance(params, list): + */ + } + + /* "sqlalchemy/engine/_util_cy.py":94 + * if params is None: + * return _Empty_Tuple + * elif isinstance(params, list): # <<<<<<<<<<<<<< + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + * if len(params) > 0 and not _is_mapping_or_tuple(params[0]): + */ + __pyx_t_1 = PyList_Check(__pyx_v_params); + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_util_cy.py":96 + * elif isinstance(params, list): + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + * if len(params) > 0 and not _is_mapping_or_tuple(params[0]): # <<<<<<<<<<<<<< + * raise exc.ArgumentError( + * "List argument must consist only of tuples or dictionaries" + */ + __pyx_t_2 = PyObject_Length(__pyx_v_params); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 96, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_2 > 0); + if (__pyx_t_3) { + } else { + __pyx_t_1 = __pyx_t_3; + goto __pyx_L5_bool_binop_done; + } + __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_params, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 96, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __pyx_f_10sqlalchemy_6engine_8_util_cy__is_mapping_or_tuple(__pyx_t_4); if (unlikely(__pyx_t_3 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 96, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_5 = (!__pyx_t_3); + __pyx_t_1 = __pyx_t_5; + __pyx_L5_bool_binop_done:; + if (unlikely(__pyx_t_1)) { + + /* "sqlalchemy/engine/_util_cy.py":97 + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + * if len(params) > 0 and not _is_mapping_or_tuple(params[0]): + * raise exc.ArgumentError( # <<<<<<<<<<<<<< + * "List argument must consist only of tuples or dictionaries" + * ) + */ + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_exc); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 97, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_ArgumentError); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 97, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = NULL; + 
__pyx_t_8 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + __pyx_t_8 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_6, __pyx_kp_u_List_argument_must_consist_only}; + __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_7, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 97, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(0, 97, __pyx_L1_error) + + /* "sqlalchemy/engine/_util_cy.py":96 + * elif isinstance(params, list): + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + * if len(params) > 0 and not _is_mapping_or_tuple(params[0]): # <<<<<<<<<<<<<< + * raise exc.ArgumentError( + * "List argument must consist only of tuples or dictionaries" + */ + } + + /* "sqlalchemy/engine/_util_cy.py":100 + * "List argument must consist only of tuples or dictionaries" + * ) + * return params # <<<<<<<<<<<<<< + * elif _is_mapping_or_tuple(params): + * return [params] # type: ignore[return-value] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_params); + __pyx_r = __pyx_v_params; + goto __pyx_L0; + + /* "sqlalchemy/engine/_util_cy.py":94 + * if params is None: + * return _Empty_Tuple + * elif isinstance(params, list): # <<<<<<<<<<<<<< + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + * if len(params) > 0 and not _is_mapping_or_tuple(params[0]): + */ + } + + /* "sqlalchemy/engine/_util_cy.py":101 + * ) + * return params + * elif _is_mapping_or_tuple(params): # <<<<<<<<<<<<<< + * return [params] # type: ignore[return-value] + * else: + */ + __pyx_t_1 = __pyx_f_10sqlalchemy_6engine_8_util_cy__is_mapping_or_tuple(__pyx_v_params); if (unlikely(__pyx_t_1 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 101, __pyx_L1_error) + if (likely(__pyx_t_1)) { + + /* "sqlalchemy/engine/_util_cy.py":102 + * return params + * elif _is_mapping_or_tuple(params): + * return [params] # type: ignore[return-value] # <<<<<<<<<<<<<< + * else: + * raise exc.ArgumentError("mapping or sequence expected for parameters") + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyList_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 102, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_INCREF(__pyx_v_params); + __Pyx_GIVEREF(__pyx_v_params); + if (__Pyx_PyList_SET_ITEM(__pyx_t_4, 0, __pyx_v_params)) __PYX_ERR(0, 102, __pyx_L1_error); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_util_cy.py":101 + * ) + * return params + * elif _is_mapping_or_tuple(params): # <<<<<<<<<<<<<< + * return [params] # type: ignore[return-value] + * else: + */ + } + + /* "sqlalchemy/engine/_util_cy.py":104 + * return [params] # type: ignore[return-value] + * else: + * raise exc.ArgumentError("mapping or sequence expected for parameters") # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_exc); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 104, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_ArgumentError); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 104, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = 
NULL; + __pyx_t_8 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_8 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_kp_u_mapping_or_sequence_expected_for}; + __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 104, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(0, 104, __pyx_L1_error) + } + + /* "sqlalchemy/engine/_util_cy.py":89 + * + * + * def _distill_raw_params( # <<<<<<<<<<<<<< + * params: Optional[_DBAPIAnyExecuteParams], + * ) -> _DBAPIMultiExecuteParams: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("sqlalchemy.engine._util_cy._distill_raw_params", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_util_cy.py":107 + * + * + * @cython.cfunc # <<<<<<<<<<<<<< + * def _is_contiguous(indexes: Tuple[int, ...]) -> cython.bint: + * i: cython.Py_ssize_t + */ + +static int __pyx_f_10sqlalchemy_6engine_8_util_cy__is_contiguous(PyObject *__pyx_v_indexes) { + Py_ssize_t __pyx_v_i; + Py_ssize_t __pyx_v_prev; + Py_ssize_t __pyx_v_curr; + int __pyx_r; + __Pyx_RefNannyDeclarations + Py_ssize_t __pyx_t_1; + Py_ssize_t __pyx_t_2; + Py_ssize_t __pyx_t_3; + Py_ssize_t __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_is_contiguous", 1); + + /* "sqlalchemy/engine/_util_cy.py":112 + * prev: cython.Py_ssize_t + * curr: cython.Py_ssize_t + * for i in range(1, len(indexes)): # <<<<<<<<<<<<<< + * prev = indexes[i - 1] + * curr = indexes[i] + */ + __pyx_t_1 = __Pyx_PyTuple_GET_SIZE(__pyx_v_indexes); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 112, __pyx_L1_error) + __pyx_t_2 = __pyx_t_1; + for (__pyx_t_3 = 1; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { + __pyx_v_i = __pyx_t_3; + + /* "sqlalchemy/engine/_util_cy.py":113 + * curr: cython.Py_ssize_t + * for i in range(1, len(indexes)): + * prev = indexes[i - 1] # <<<<<<<<<<<<<< + * curr = indexes[i] + * if prev != curr - 1: + */ + __pyx_t_4 = (__pyx_v_i - 1); + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_indexes, __pyx_t_4, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_4 = __Pyx_PyIndex_AsSsize_t(__pyx_t_5); if (unlikely((__pyx_t_4 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_v_prev = __pyx_t_4; + + /* "sqlalchemy/engine/_util_cy.py":114 + * for i in range(1, len(indexes)): + * prev = indexes[i - 1] + * curr = indexes[i] # <<<<<<<<<<<<<< + * if prev != curr - 1: + * return False + */ + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_indexes, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 114, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_4 
= __Pyx_PyIndex_AsSsize_t(__pyx_t_5); if (unlikely((__pyx_t_4 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 114, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_v_curr = __pyx_t_4; + + /* "sqlalchemy/engine/_util_cy.py":115 + * prev = indexes[i - 1] + * curr = indexes[i] + * if prev != curr - 1: # <<<<<<<<<<<<<< + * return False + * return True + */ + __pyx_t_6 = (__pyx_v_prev != (__pyx_v_curr - 1)); + if (__pyx_t_6) { + + /* "sqlalchemy/engine/_util_cy.py":116 + * curr = indexes[i] + * if prev != curr - 1: + * return False # <<<<<<<<<<<<<< + * return True + * + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_util_cy.py":115 + * prev = indexes[i - 1] + * curr = indexes[i] + * if prev != curr - 1: # <<<<<<<<<<<<<< + * return False + * return True + */ + } + } + + /* "sqlalchemy/engine/_util_cy.py":117 + * if prev != curr - 1: + * return False + * return True # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = 1; + goto __pyx_L0; + + /* "sqlalchemy/engine/_util_cy.py":107 + * + * + * @cython.cfunc # <<<<<<<<<<<<<< + * def _is_contiguous(indexes: Tuple[int, ...]) -> cython.bint: + * i: cython.Py_ssize_t + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("sqlalchemy.engine._util_cy._is_contiguous", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/engine/_util_cy.py":120 + * + * + * def tuplegetter(*indexes: int) -> _TupleGetterType: # <<<<<<<<<<<<<< + * max_index: int + * if len(indexes) == 1 or _is_contiguous(indexes): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_7tuplegetter(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_8_util_cy_7tuplegetter = {"tuplegetter", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_6engine_8_util_cy_7tuplegetter, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_7tuplegetter(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_indexes = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("tuplegetter (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "tuplegetter", 0))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_indexes = __pyx_args; + __pyx_r = __pyx_pf_10sqlalchemy_6engine_8_util_cy_6tuplegetter(__pyx_self, __pyx_v_indexes); + + /* function exit code */ + __Pyx_DECREF(__pyx_v_indexes); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_6engine_8_util_cy_6tuplegetter(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_indexes) { + PyObject *__pyx_v_max_index = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + Py_ssize_t __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + unsigned int __pyx_t_9; + int __pyx_lineno = 0; + 
const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("tuplegetter", 1); + + /* "sqlalchemy/engine/_util_cy.py":122 + * def tuplegetter(*indexes: int) -> _TupleGetterType: + * max_index: int + * if len(indexes) == 1 or _is_contiguous(indexes): # <<<<<<<<<<<<<< + * # slice form is faster but returns a list if input is list + * max_index = indexes[-1] + */ + __pyx_t_2 = __Pyx_PyTuple_GET_SIZE(__pyx_v_indexes); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 122, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_2 == 1); + if (!__pyx_t_3) { + } else { + __pyx_t_1 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_3 = __pyx_f_10sqlalchemy_6engine_8_util_cy__is_contiguous(__pyx_v_indexes); if (unlikely(__pyx_t_3 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L1_error) + __pyx_t_1 = __pyx_t_3; + __pyx_L4_bool_binop_done:; + if (__pyx_t_1) { + + /* "sqlalchemy/engine/_util_cy.py":124 + * if len(indexes) == 1 or _is_contiguous(indexes): + * # slice form is faster but returns a list if input is list + * max_index = indexes[-1] # <<<<<<<<<<<<<< + * return operator.itemgetter(slice(indexes[0], max_index + 1)) + * else: + */ + __pyx_t_4 = __Pyx_GetItemInt_Tuple(__pyx_v_indexes, -1L, long, 1, __Pyx_PyInt_From_long, 0, 1, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 124, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + if (!(likely(__Pyx_Py3Int_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None) || __Pyx_RaiseUnexpectedTypeError("int", __pyx_t_4))) __PYX_ERR(0, 124, __pyx_L1_error) + __pyx_v_max_index = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + + /* "sqlalchemy/engine/_util_cy.py":125 + * # slice form is faster but returns a list if input is list + * max_index = indexes[-1] + * return operator.itemgetter(slice(indexes[0], max_index + 1)) # <<<<<<<<<<<<<< + * else: + * return operator.itemgetter(*indexes) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_operator); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_itemgetter); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_indexes, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = PyNumber_Add(__pyx_v_max_index, __pyx_int_1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PySlice_New(__pyx_t_5, __pyx_t_7, Py_None); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = NULL; + __pyx_t_9 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_9 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_8}; + __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_9, 1+__pyx_t_9); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 125, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + 
__pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "sqlalchemy/engine/_util_cy.py":122 + * def tuplegetter(*indexes: int) -> _TupleGetterType: + * max_index: int + * if len(indexes) == 1 or _is_contiguous(indexes): # <<<<<<<<<<<<<< + * # slice form is faster but returns a list if input is list + * max_index = indexes[-1] + */ + } + + /* "sqlalchemy/engine/_util_cy.py":127 + * return operator.itemgetter(slice(indexes[0], max_index + 1)) + * else: + * return operator.itemgetter(*indexes) # <<<<<<<<<<<<<< + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_operator); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_itemgetter); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_v_indexes, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "sqlalchemy/engine/_util_cy.py":120 + * + * + * def tuplegetter(*indexes: int) -> _TupleGetterType: # <<<<<<<<<<<<<< + * max_index: int + * if len(indexes) == 1 or _is_contiguous(indexes): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("sqlalchemy.engine._util_cy.tuplegetter", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_max_index); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif +/* #### Code section: pystring_table ### */ + +static int __Pyx_CreateStringTabAndInitStrings(void) { + __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_u_2_1, __pyx_k_2_1, sizeof(__pyx_k_2_1), 0, 1, 0, 0}, + {&__pyx_n_s_Any, __pyx_k_Any, sizeof(__pyx_k_Any), 0, 0, 1, 1}, + {&__pyx_n_s_ArgumentError, __pyx_k_ArgumentError, sizeof(__pyx_k_ArgumentError), 0, 0, 1, 1}, + {&__pyx_n_s_CoreAnyExecuteParams, __pyx_k_CoreAnyExecuteParams, sizeof(__pyx_k_CoreAnyExecuteParams), 0, 0, 1, 1}, + {&__pyx_n_s_CoreMultiExecuteParams, __pyx_k_CoreMultiExecuteParams, sizeof(__pyx_k_CoreMultiExecuteParams), 0, 0, 1, 1}, + {&__pyx_n_s_DBAPIAnyExecuteParams, __pyx_k_DBAPIAnyExecuteParams, sizeof(__pyx_k_DBAPIAnyExecuteParams), 0, 0, 1, 1}, + {&__pyx_n_s_DBAPIMultiExecuteParams, __pyx_k_DBAPIMultiExecuteParams, sizeof(__pyx_k_DBAPIMultiExecuteParams), 0, 0, 1, 1}, + {&__pyx_kp_u_Empty_parameter_sequence_passed, __pyx_k_Empty_parameter_sequence_passed, sizeof(__pyx_k_Empty_parameter_sequence_passed), 0, 1, 0, 0}, + {&__pyx_kp_u_List_argument_must_consist_only, __pyx_k_List_argument_must_consist_only, sizeof(__pyx_k_List_argument_must_consist_only), 0, 1, 0, 0}, + {&__pyx_n_s_Mapping, __pyx_k_Mapping, sizeof(__pyx_k_Mapping), 0, 0, 1, 1}, + {&__pyx_n_s_Optional, __pyx_k_Optional, sizeof(__pyx_k_Optional), 0, 0, 1, 1}, + {&__pyx_kp_s_Optional__CoreAnyExecuteParams, 
__pyx_k_Optional__CoreAnyExecuteParams, sizeof(__pyx_k_Optional__CoreAnyExecuteParams), 0, 0, 1, 0}, + {&__pyx_kp_s_Optional__DBAPIAnyExecuteParams, __pyx_k_Optional__DBAPIAnyExecuteParams, sizeof(__pyx_k_Optional__DBAPIAnyExecuteParams), 0, 0, 1, 0}, + {&__pyx_n_s_TYPE_CHECKING, __pyx_k_TYPE_CHECKING, sizeof(__pyx_k_TYPE_CHECKING), 0, 0, 1, 1}, + {&__pyx_n_s_Tuple, __pyx_k_Tuple, sizeof(__pyx_k_Tuple), 0, 0, 1, 1}, + {&__pyx_n_s_TupleGetterType, __pyx_k_TupleGetterType, sizeof(__pyx_k_TupleGetterType), 0, 0, 1, 1}, + {&__pyx_n_s__11, __pyx_k__11, sizeof(__pyx_k__11), 0, 0, 1, 1}, + {&__pyx_kp_u__2, __pyx_k__2, sizeof(__pyx_k__2), 0, 1, 0, 0}, + {&__pyx_n_s__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 0, 1, 1}, + {&__pyx_n_s__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 0, 1, 1}, + {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, + {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_collections_abc, __pyx_k_collections_abc, sizeof(__pyx_k_collections_abc), 0, 0, 1, 1}, + {&__pyx_n_s_distill_params_20, __pyx_k_distill_params_20, sizeof(__pyx_k_distill_params_20), 0, 0, 1, 1}, + {&__pyx_n_s_distill_raw_params, __pyx_k_distill_raw_params, sizeof(__pyx_k_distill_raw_params), 0, 0, 1, 1}, + {&__pyx_n_s_exc, __pyx_k_exc, sizeof(__pyx_k_exc), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_indexes, __pyx_k_indexes, sizeof(__pyx_k_indexes), 0, 0, 1, 1}, + {&__pyx_n_s_initializing, __pyx_k_initializing, sizeof(__pyx_k_initializing), 0, 0, 1, 1}, + {&__pyx_n_s_int, __pyx_k_int, sizeof(__pyx_k_int), 0, 0, 1, 1}, + {&__pyx_n_s_interfaces, __pyx_k_interfaces, sizeof(__pyx_k_interfaces), 0, 0, 1, 1}, + {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, + {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, + {&__pyx_n_s_itemgetter, __pyx_k_itemgetter, sizeof(__pyx_k_itemgetter), 0, 0, 1, 1}, + {&__pyx_kp_s_lib_sqlalchemy_engine__util_cy_p, __pyx_k_lib_sqlalchemy_engine__util_cy_p, sizeof(__pyx_k_lib_sqlalchemy_engine__util_cy_p), 0, 0, 1, 0}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_kp_u_mapping_or_list_expected_for_par, __pyx_k_mapping_or_list_expected_for_par, sizeof(__pyx_k_mapping_or_list_expected_for_par), 0, 1, 0, 0}, + {&__pyx_kp_u_mapping_or_sequence_expected_for, __pyx_k_mapping_or_sequence_expected_for, sizeof(__pyx_k_mapping_or_sequence_expected_for), 0, 1, 0, 0}, + {&__pyx_n_s_max_index, __pyx_k_max_index, sizeof(__pyx_k_max_index), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_operator, __pyx_k_operator, sizeof(__pyx_k_operator), 0, 0, 1, 1}, + {&__pyx_n_s_params, __pyx_k_params, sizeof(__pyx_k_params), 0, 0, 1, 1}, + {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, + {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, + {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, + {&__pyx_n_s_spec, __pyx_k_spec, sizeof(__pyx_k_spec), 0, 0, 1, 1}, + {&__pyx_n_s_sqlalchemy_engine__util_cy, __pyx_k_sqlalchemy_engine__util_cy, sizeof(__pyx_k_sqlalchemy_engine__util_cy), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_tuplegetter, __pyx_k_tuplegetter, sizeof(__pyx_k_tuplegetter), 0, 0, 1, 1}, + 
{&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, + {&__pyx_n_s_util, __pyx_k_util, sizeof(__pyx_k_util), 0, 0, 1, 1}, + {&__pyx_n_s_warn_deprecated, __pyx_k_warn_deprecated, sizeof(__pyx_k_warn_deprecated), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} + }; + return __Pyx_InitStrings(__pyx_string_tab); +} +/* #### Code section: cached_builtins ### */ +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 112, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: cached_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "sqlalchemy/engine/_util_cy.py":70 + * # collections_abc.MutableSequence # avoid abc.__instancecheck__ + * if len(params) == 0: + * warn_deprecated( # <<<<<<<<<<<<<< + * "Empty parameter sequence passed to execute(). " + * "This use is deprecated and will raise an exception in a " + */ + __pyx_tuple_ = PyTuple_Pack(2, __pyx_kp_u_Empty_parameter_sequence_passed, __pyx_kp_u_2_1); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + + /* "sqlalchemy/engine/_util_cy.py":38 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + __pyx_codeobj__5 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__util_cy_p, __pyx_n_s_is_compiled, 38, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__5)) __PYX_ERR(0, 38, __pyx_L1_error) + + /* "sqlalchemy/engine/_util_cy.py":61 + * + * # _is_mapping_or_tuple could be inlined if pure python perf is a problem + * def _distill_params_20( # <<<<<<<<<<<<<< + * params: Optional[_CoreAnyExecuteParams], + * ) -> _CoreMultiExecuteParams: + */ + __pyx_tuple__6 = PyTuple_Pack(1, __pyx_n_s_params); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(0, 61, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__6); + __Pyx_GIVEREF(__pyx_tuple__6); + __pyx_codeobj__7 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__6, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__util_cy_p, __pyx_n_s_distill_params_20, 61, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__7)) __PYX_ERR(0, 61, __pyx_L1_error) + + /* "sqlalchemy/engine/_util_cy.py":89 + * + * + * def _distill_raw_params( # <<<<<<<<<<<<<< + * params: Optional[_DBAPIAnyExecuteParams], + * ) -> _DBAPIMultiExecuteParams: + */ + __pyx_codeobj__8 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__6, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__util_cy_p, __pyx_n_s_distill_raw_params, 89, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__8)) __PYX_ERR(0, 89, __pyx_L1_error) + + /* "sqlalchemy/engine/_util_cy.py":120 + * + * + * def tuplegetter(*indexes: int) -> _TupleGetterType: # <<<<<<<<<<<<<< + * max_index: int + * if len(indexes) == 1 or _is_contiguous(indexes): + */ + __pyx_tuple__9 = PyTuple_Pack(2, __pyx_n_s_indexes, __pyx_n_s_max_index); if 
(unlikely(!__pyx_tuple__9)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__9); + __Pyx_GIVEREF(__pyx_tuple__9); + __pyx_codeobj__10 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__9, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__util_cy_p, __pyx_n_s_tuplegetter, 120, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__10)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} +/* #### Code section: init_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { + if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: init_globals ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + return 0; +} +/* #### Code section: init_module ### */ + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple = ((PyObject*)Py_None); Py_INCREF(Py_None); + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ 
+static int __pyx_pymod_exec__util_cy(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec__util_cy}, + {0, NULL} +}; +#endif + +#ifdef __cplusplus +namespace { + struct PyModuleDef __pyx_moduledef = + #else + static struct PyModuleDef __pyx_moduledef = + #endif + { + PyModuleDef_HEAD_INIT, + "_util_cy", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #elif CYTHON_USE_MODULE_STATE + sizeof(__pyx_mstate), /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + #if CYTHON_USE_MODULE_STATE + __pyx_m_traverse, /* m_traverse */ + __pyx_m_clear, /* m_clear */ + NULL /* m_free */ + #else + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ + #endif + }; + #ifdef __cplusplus +} /* anonymous namespace */ +#endif +#endif + +#ifndef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#elif PY_MAJOR_VERSION < 3 +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" void +#else +#define __Pyx_PyMODINIT_FUNC void +#endif +#else +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyObject * +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC init_util_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC init_util_cy(void) +#else +__Pyx_PyMODINIT_FUNC PyInit__util_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit__util_cy(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? 
-1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) +#else +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) +#endif +{ + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { +#if CYTHON_COMPILING_IN_LIMITED_API + result = PyModule_AddObject(module, to_name, value); +#else + result = PyDict_SetItemString(moddict, to_name, value); +#endif + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + CYTHON_UNUSED_VAR(def); + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; +#if CYTHON_COMPILING_IN_LIMITED_API + moddict = module; +#else + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; +#endif + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec__util_cy(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + int stringtab_initialized = 0; + #if CYTHON_USE_MODULE_STATE + int pystate_addmodule_run = 0; + #endif + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module '_util_cy' has already been imported. 
Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_util_cy", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #elif CYTHON_USE_MODULE_STATE + __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + { + int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); + __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_util_cy" pseudovariable */ + if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + pystate_addmodule_run = 1; + } + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #endif + CYTHON_UNUSED_VAR(__pyx_t_1); + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__util_cy(void)", 0); + if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + PyEval_InitThreads(); + #endif + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + stringtab_initialized = 1; + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_sqlalchemy__engine___util_cy) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "sqlalchemy.engine._util_cy")) { + if (unlikely((PyDict_SetItemString(modules, "sqlalchemy.engine._util_cy", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + (void)__Pyx_modinit_type_init_code(); + (void)__Pyx_modinit_type_import_code(); + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "sqlalchemy/engine/_util_cy.py":10 + * from __future__ import annotations + * + * from collections.abc import Mapping # <<<<<<<<<<<<<< + * import operator + * from typing import Any + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Mapping); + __Pyx_GIVEREF(__pyx_n_s_Mapping); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Mapping)) __PYX_ERR(0, 10, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_collections_abc, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Mapping, __pyx_t_2) < 0) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_util_cy.py":11 + * + * from collections.abc import Mapping + * import operator # <<<<<<<<<<<<<< + * from typing import Any + * from typing import Optional + */ + __pyx_t_3 = __Pyx_ImportDottedModule(__pyx_n_s_operator, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_operator, __pyx_t_3) < 0) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_util_cy.py":12 + * from collections.abc import Mapping + * import operator + * from typing import Any # <<<<<<<<<<<<<< + * from typing import Optional + * from typing import Tuple + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Any); + __Pyx_GIVEREF(__pyx_n_s_Any); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Any)) 
__PYX_ERR(0, 12, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Any); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Any, __pyx_t_3) < 0) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_util_cy.py":13 + * import operator + * from typing import Any + * from typing import Optional # <<<<<<<<<<<<<< + * from typing import Tuple + * from typing import TYPE_CHECKING + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Optional); + __Pyx_GIVEREF(__pyx_n_s_Optional); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Optional)) __PYX_ERR(0, 13, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Optional); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Optional, __pyx_t_2) < 0) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_util_cy.py":14 + * from typing import Any + * from typing import Optional + * from typing import Tuple # <<<<<<<<<<<<<< + * from typing import TYPE_CHECKING + * + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Tuple); + __Pyx_GIVEREF(__pyx_n_s_Tuple); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Tuple)) __PYX_ERR(0, 14, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Tuple); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Tuple, __pyx_t_3) < 0) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_util_cy.py":15 + * from typing import Optional + * from typing import Tuple + * from typing import TYPE_CHECKING # <<<<<<<<<<<<<< + * + * from .. 
import exc + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_TYPE_CHECKING); + __Pyx_GIVEREF(__pyx_n_s_TYPE_CHECKING); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_TYPE_CHECKING)) __PYX_ERR(0, 15, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_TYPE_CHECKING); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_TYPE_CHECKING, __pyx_t_2) < 0) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_util_cy.py":17 + * from typing import TYPE_CHECKING + * + * from .. import exc # <<<<<<<<<<<<<< + * from ..util import warn_deprecated + * + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_exc); + __Pyx_GIVEREF(__pyx_n_s_exc); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_exc)) __PYX_ERR(0, 17, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s__4, __pyx_t_3, 2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_exc); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_exc, __pyx_t_3) < 0) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_util_cy.py":18 + * + * from .. 
import exc + * from ..util import warn_deprecated # <<<<<<<<<<<<<< + * + * if TYPE_CHECKING: + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_warn_deprecated); + __Pyx_GIVEREF(__pyx_n_s_warn_deprecated); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_warn_deprecated)) __PYX_ERR(0, 18, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_util, __pyx_t_2, 2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_warn_deprecated); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_warn_deprecated, __pyx_t_2) < 0) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_util_cy.py":20 + * from ..util import warn_deprecated + * + * if TYPE_CHECKING: # <<<<<<<<<<<<<< + * from .interfaces import _CoreAnyExecuteParams + * from .interfaces import _CoreMultiExecuteParams + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_TYPE_CHECKING); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 20, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely((__pyx_t_4 < 0))) __PYX_ERR(0, 20, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_4) { + + /* "sqlalchemy/engine/_util_cy.py":21 + * + * if TYPE_CHECKING: + * from .interfaces import _CoreAnyExecuteParams # <<<<<<<<<<<<<< + * from .interfaces import _CoreMultiExecuteParams + * from .interfaces import _DBAPIAnyExecuteParams + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_CoreAnyExecuteParams); + __Pyx_GIVEREF(__pyx_n_s_CoreAnyExecuteParams); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_CoreAnyExecuteParams)) __PYX_ERR(0, 21, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_interfaces, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_CoreAnyExecuteParams); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CoreAnyExecuteParams, __pyx_t_3) < 0) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_util_cy.py":22 + * if TYPE_CHECKING: + * from .interfaces import _CoreAnyExecuteParams + * from .interfaces import _CoreMultiExecuteParams # <<<<<<<<<<<<<< + * from .interfaces import _DBAPIAnyExecuteParams + * from .interfaces import _DBAPIMultiExecuteParams + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_CoreMultiExecuteParams); + __Pyx_GIVEREF(__pyx_n_s_CoreMultiExecuteParams); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_CoreMultiExecuteParams)) __PYX_ERR(0, 22, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_interfaces, __pyx_t_2, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_CoreMultiExecuteParams); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CoreMultiExecuteParams, __pyx_t_2) < 0) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_util_cy.py":23 + * from .interfaces import _CoreAnyExecuteParams + * from .interfaces import _CoreMultiExecuteParams + * from .interfaces import _DBAPIAnyExecuteParams # <<<<<<<<<<<<<< + * from .interfaces import _DBAPIMultiExecuteParams + * from .result import _TupleGetterType + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_DBAPIAnyExecuteParams); + __Pyx_GIVEREF(__pyx_n_s_DBAPIAnyExecuteParams); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_DBAPIAnyExecuteParams)) __PYX_ERR(0, 23, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_interfaces, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_DBAPIAnyExecuteParams); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_DBAPIAnyExecuteParams, __pyx_t_3) < 0) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_util_cy.py":24 + * from .interfaces import _CoreMultiExecuteParams + * from .interfaces import _DBAPIAnyExecuteParams + * from .interfaces import _DBAPIMultiExecuteParams # <<<<<<<<<<<<<< + * from .result import _TupleGetterType + * + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 24, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_DBAPIMultiExecuteParams); + __Pyx_GIVEREF(__pyx_n_s_DBAPIMultiExecuteParams); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_DBAPIMultiExecuteParams)) __PYX_ERR(0, 24, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_interfaces, __pyx_t_2, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 24, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_DBAPIMultiExecuteParams); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 24, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_DBAPIMultiExecuteParams, __pyx_t_2) < 0) __PYX_ERR(0, 24, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_util_cy.py":25 + * from .interfaces import _DBAPIAnyExecuteParams + * from .interfaces import _DBAPIMultiExecuteParams + * from .result import _TupleGetterType # <<<<<<<<<<<<<< + * + * # START GENERATED CYTHON IMPORT + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_TupleGetterType); + __Pyx_GIVEREF(__pyx_n_s_TupleGetterType); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_TupleGetterType)) __PYX_ERR(0, 25, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_result, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_TupleGetterType); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_TupleGetterType, __pyx_t_3) < 0) __PYX_ERR(0, 25, 
__pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_util_cy.py":20 + * from ..util import warn_deprecated + * + * if TYPE_CHECKING: # <<<<<<<<<<<<<< + * from .interfaces import _CoreAnyExecuteParams + * from .interfaces import _CoreMultiExecuteParams + */ + } + + /* "sqlalchemy/engine/_util_cy.py":29 + * # START GENERATED CYTHON IMPORT + * # This section is automatically generated by the script tools/cython_imports.py + * try: # <<<<<<<<<<<<<< + * # NOTE: the cython compiler needs this "import cython" in the file, it + * # can't be only "from sqlalchemy.util import cython" with the fallback + */ + { + (void)__pyx_t_1; (void)__pyx_t_5; (void)__pyx_t_6; /* mark used */ + /*try:*/ { + + /* "sqlalchemy/engine/_util_cy.py":33 + * # can't be only "from sqlalchemy.util import cython" with the fallback + * # in that module + * import cython # <<<<<<<<<<<<<< + * except ModuleNotFoundError: + * from sqlalchemy.util import cython + */ + } + } + + /* "sqlalchemy/engine/_util_cy.py":38 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 38, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_bool) < 0) __PYX_ERR(0, 38, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_8_util_cy_1_is_compiled, 0, __pyx_n_s_is_compiled, NULL, __pyx_n_s_sqlalchemy_engine__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__5)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 38, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_compiled, __pyx_t_3) < 0) __PYX_ERR(0, 38, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_util_cy.py":45 + * # END GENERATED CYTHON IMPORT + * + * _Empty_Tuple: Tuple[Any, ...] 
= cython.declare(tuple, ()) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_INCREF(__pyx_empty_tuple); + __Pyx_XGOTREF(__pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple); + __Pyx_DECREF_SET(__pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple, __pyx_empty_tuple); + __Pyx_GIVEREF(__pyx_empty_tuple); + + /* "sqlalchemy/engine/_util_cy.py":61 + * + * # _is_mapping_or_tuple could be inlined if pure python perf is a problem + * def _distill_params_20( # <<<<<<<<<<<<<< + * params: Optional[_CoreAnyExecuteParams], + * ) -> _CoreMultiExecuteParams: + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 61, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_params, __pyx_kp_s_Optional__CoreAnyExecuteParams) < 0) __PYX_ERR(0, 61, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_CoreMultiExecuteParams) < 0) __PYX_ERR(0, 61, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_8_util_cy_3_distill_params_20, 0, __pyx_n_s_distill_params_20, NULL, __pyx_n_s_sqlalchemy_engine__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__7)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 61, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_distill_params_20, __pyx_t_2) < 0) __PYX_ERR(0, 61, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/engine/_util_cy.py":89 + * + * + * def _distill_raw_params( # <<<<<<<<<<<<<< + * params: Optional[_DBAPIAnyExecuteParams], + * ) -> _DBAPIMultiExecuteParams: + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 89, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_params, __pyx_kp_s_Optional__DBAPIAnyExecuteParams) < 0) __PYX_ERR(0, 89, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_DBAPIMultiExecuteParams) < 0) __PYX_ERR(0, 89, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_8_util_cy_5_distill_raw_params, 0, __pyx_n_s_distill_raw_params, NULL, __pyx_n_s_sqlalchemy_engine__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__8)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 89, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_distill_raw_params, __pyx_t_3) < 0) __PYX_ERR(0, 89, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/engine/_util_cy.py":120 + * + * + * def tuplegetter(*indexes: int) -> _TupleGetterType: # <<<<<<<<<<<<<< + * max_index: int + * if len(indexes) == 1 or _is_contiguous(indexes): + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_indexes, __pyx_n_s_int) < 0) __PYX_ERR(0, 120, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_TupleGetterType) < 0) __PYX_ERR(0, 120, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_8_util_cy_7tuplegetter, 0, __pyx_n_s_tuplegetter, NULL, __pyx_n_s_sqlalchemy_engine__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__10)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + 
if (PyDict_SetItem(__pyx_d, __pyx_n_s_tuplegetter, __pyx_t_2) < 0) __PYX_ERR(0, 120, __pyx_L1_error)
+  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+
+  /* "sqlalchemy/engine/_util_cy.py":1
+ * # engine/_util_cy.py # <<<<<<<<<<<<<<
+ * # Copyright (C) 2010-2025 the SQLAlchemy authors and contributors
+ * #
+ */
+  __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error)
+  __Pyx_GOTREF(__pyx_t_2);
+  if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+
+  /*--- Wrapped vars code ---*/
+
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  if (__pyx_m) {
+    if (__pyx_d && stringtab_initialized) {
+      __Pyx_AddTraceback("init sqlalchemy.engine._util_cy", __pyx_clineno, __pyx_lineno, __pyx_filename);
+    }
+    #if !CYTHON_USE_MODULE_STATE
+    Py_CLEAR(__pyx_m);
+    #else
+    Py_DECREF(__pyx_m);
+    if (pystate_addmodule_run) {
+      PyObject *tp, *value, *tb;
+      PyErr_Fetch(&tp, &value, &tb);
+      PyState_RemoveModule(&__pyx_moduledef);
+      PyErr_Restore(tp, value, tb);
+    }
+    #endif
+  } else if (!PyErr_Occurred()) {
+    PyErr_SetString(PyExc_ImportError, "init sqlalchemy.engine._util_cy");
+  }
+  __pyx_L0:;
+  __Pyx_RefNannyFinishContext();
+  #if CYTHON_PEP489_MULTI_PHASE_INIT
+  return (__pyx_m != NULL) ? 0 : -1;
+  #elif PY_MAJOR_VERSION >= 3
+  return __pyx_m;
+  #else
+  return;
+  #endif
+}
+/* #### Code section: cleanup_globals ### */
+/* #### Code section: cleanup_module ### */
+/* #### Code section: main_method ### */
+/* #### Code section: utility_code_pragmas ### */
+#ifdef _MSC_VER
+#pragma warning( push )
+/* Warning 4127: conditional expression is constant
+ * Cython uses constant conditional expressions to allow in inline functions to be optimized at
+ * compile-time, so this warning is not useful
+ */
+#pragma warning( disable : 4127 )
+#endif
+
+
+
+/* #### Code section: utility_code_def ### */
+
+/* --- Runtime support code --- */
+/* Refnanny */
+#if CYTHON_REFNANNY
+static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
+    PyObject *m = NULL, *p = NULL;
+    void *r = NULL;
+    m = PyImport_ImportModule(modname);
+    if (!m) goto end;
+    p = PyObject_GetAttrString(m, "RefNannyAPI");
+    if (!p) goto end;
+    r = PyLong_AsVoidPtr(p);
+end:
+    Py_XDECREF(p);
+    Py_XDECREF(m);
+    return (__Pyx_RefNannyAPIStruct *)r;
+}
+#endif
+
+/* PyErrExceptionMatches */
+#if CYTHON_FAST_THREAD_STATE
+static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+    Py_ssize_t i, n;
+    n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+    for (i=0; i<n; i++) {
+        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
+    }
+#endif
+    for (i=0; i<n; i++) {
+        if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
+    }
+    return 0;
+}
+static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {
+    int result;
+    PyObject *exc_type;
+#if PY_VERSION_HEX >= 0x030C00A6
+    PyObject *current_exception = tstate->current_exception;
+    if (unlikely(!current_exception)) return 0;
+    exc_type = (PyObject*) Py_TYPE(current_exception);
+    if (exc_type == err) return 1;
+#else
+    exc_type = tstate->curexc_type;
+    if (exc_type == err) return 1;
+    if (unlikely(!exc_type)) return 0;
+#endif
+    #if CYTHON_AVOID_BORROWED_REFS
+    Py_INCREF(exc_type);
+    #endif
+    if (unlikely(PyTuple_Check(err))) {
+        result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
+    } else {
+        result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
+    }
+    #if CYTHON_AVOID_BORROWED_REFS
+    Py_DECREF(exc_type);
+    #endif
+    return result;
+}
+#endif
+
+/* PyErrFetchRestore */
+#if CYTHON_FAST_THREAD_STATE
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
+#if PY_VERSION_HEX >= 0x030C00A6
+    PyObject *tmp_value;
+    assert(type == 
NULL || (value != NULL && type == (PyObject*) Py_TYPE(value))); + if (value) { + #if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(((PyBaseExceptionObject*) value)->traceback != tb)) + #endif + PyException_SetTraceback(value, tb); + } + tmp_value = tstate->current_exception; + tstate->current_exception = value; + Py_XDECREF(tmp_value); + Py_XDECREF(type); + Py_XDECREF(tb); +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#endif +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { +#if PY_VERSION_HEX >= 0x030C00A6 + PyObject* exc_value; + exc_value = tstate->current_exception; + tstate->current_exception = 0; + *value = exc_value; + *type = NULL; + *tb = NULL; + if (exc_value) { + *type = (PyObject*) Py_TYPE(exc_value); + Py_INCREF(*type); + #if CYTHON_COMPILING_IN_CPYTHON + *tb = ((PyBaseExceptionObject*) exc_value)->traceback; + Py_XINCREF(*tb); + #else + *tb = PyException_GetTraceback(exc_value); + #endif + } +#else + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#endif +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* PyObjectGetAttrStrNoError */ +#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + (void) PyObject_GetOptionalAttr(obj, attr_name, &result); + return result; +#else +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { + return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); + } +#endif + result = __Pyx_PyObject_GetAttrStr(obj, attr_name); + if (unlikely(!result)) { + __Pyx_PyObject_GetAttrStr_ClearAttributeError(); + } + return result; +#endif +} + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStrNoError(__pyx_b, name); + if (unlikely(!result) && !PyErr_Occurred()) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* TupleAndListFromArray */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE void __Pyx_copy_object_array(PyObject *const *CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { + PyObject *v; + Py_ssize_t i; + for (i = 0; i 
< length; i++) { + v = dest[i] = src[i]; + Py_INCREF(v); + } +} +static CYTHON_INLINE PyObject * +__Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) +{ + PyObject *res; + if (n <= 0) { + Py_INCREF(__pyx_empty_tuple); + return __pyx_empty_tuple; + } + res = PyTuple_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyTupleObject*)res)->ob_item, n); + return res; +} +static CYTHON_INLINE PyObject * +__Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n) +{ + PyObject *res; + if (n <= 0) { + return PyList_New(0); + } + res = PyList_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyListObject*)res)->ob_item, n); + return res; +} +#endif + +/* BytesEquals */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + ps1 = PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + return (equals == Py_EQ); + } else { + int result; +#if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) + Py_hash_t hash1, hash2; + hash1 = ((PyBytesObject*)s1)->ob_shash; + hash2 = ((PyBytesObject*)s2)->ob_shash; + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + return (equals == Py_NE); + } +#endif + result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +/* UnicodeEquals */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } +#if CYTHON_USE_UNICODE_INTERNALS + { + Py_hash_t hash1, hash2; + #if 
CYTHON_PEP393_ENABLED + hash1 = ((PyASCIIObject*)s1)->hash; + hash2 = ((PyASCIIObject*)s2)->hash; + #else + hash1 = ((PyUnicodeObject*)s1)->hash; + hash2 = ((PyUnicodeObject*)s2)->hash; + #endif + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + goto return_ne; + } + } +#endif + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + +/* fastcall */ +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s) +{ + Py_ssize_t i, n = PyTuple_GET_SIZE(kwnames); + for (i = 0; i < n; i++) + { + if (s == PyTuple_GET_ITEM(kwnames, i)) return kwvalues[i]; + } + for (i = 0; i < n; i++) + { + int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); + if (unlikely(eq != 0)) { + if (unlikely(eq < 0)) return NULL; + return kwvalues[i]; + } + } + return NULL; +} +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 +CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { + Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); + PyObject *dict; + dict = PyDict_New(); + if (unlikely(!dict)) + return NULL; + for (i=0; itp_dict; + return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? 
__PYX_GET_DICT_VERSION(*dictptr) : 0; +} +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#elif CYTHON_COMPILING_IN_LIMITED_API + if (unlikely(!__pyx_m)) { + return NULL; + } + result = PyObject_GetAttr(__pyx_m, name); + if (likely(result)) { + return result; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + int kwds_is_tuple = CYTHON_METH_FASTCALL && likely(PyTuple_Check(kwds)); + while (1) { + Py_XDECREF(key); key = NULL; + Py_XDECREF(value); value = NULL; + if (kwds_is_tuple) { + Py_ssize_t size; +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(kwds); +#else + size = PyTuple_Size(kwds); + if (size < 0) goto bad; +#endif + if (pos >= size) break; +#if CYTHON_AVOID_BORROWED_REFS + key = __Pyx_PySequence_ITEM(kwds, pos); + if (!key) goto bad; +#elif CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kwds, pos); +#else + key = PyTuple_GetItem(kwds, pos); + if (!key) goto bad; +#endif + value = kwvalues[pos]; + pos++; + } + else + { + if (!PyDict_Next(kwds, &pos, &key, &value)) break; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + } + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(value); + Py_DECREF(key); +#endif + key = NULL; + value = NULL; + continue; + } +#if !CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + Py_INCREF(value); + name = first_kw_arg; 
+ #if PY_MAJOR_VERSION < 3 + if (likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = ( + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key) + ); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + Py_XDECREF(key); + Py_XDECREF(value); + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + Py_XDECREF(key); + Py_XDECREF(value); + return -1; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = Py_TYPE(func)->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* GetItemInt */ +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (unlikely(!j)) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; + PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; + if (mm && mm->mp_subscript) { + PyObject *r, *key = PyInt_FromSsize_t(i); + if (unlikely(!key)) return NULL; + r = mm->mp_subscript(o, key); + Py_DECREF(key); + return r; + } + if (likely(sm && sm->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { + Py_ssize_t l = sm->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return sm->sq_item(o, i); + } + } +#else + if (is_list || !PyMapping_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL && !CYTHON_VECTORCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) { + return NULL; + } + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) { + return NULL; + } + #endif + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = __Pyx_CyOrPyCFunction_GET_FUNCTION(func); + self = __Pyx_CyOrPyCFunction_GET_SELF(func); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectFastCall */ +#if PY_VERSION_HEX < 0x03090000 || CYTHON_COMPILING_IN_LIMITED_API +static PyObject* __Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs) { + PyObject *argstuple; + PyObject *result = 0; + size_t i; + argstuple = PyTuple_New((Py_ssize_t)nargs); + if (unlikely(!argstuple)) return NULL; + for (i = 0; i < nargs; i++) { + Py_INCREF(args[i]); + if (__Pyx_PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]) < 0) goto bad; + } + result = __Pyx_PyObject_Call(func, argstuple, kwargs); + bad: + Py_DECREF(argstuple); + return result; +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t _nargs, PyObject *kwargs) { + Py_ssize_t nargs = __Pyx_PyVectorcall_NARGS(_nargs); +#if CYTHON_COMPILING_IN_CPYTHON + if (nargs == 0 && kwargs == NULL) { + if 
(__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_NOARGS)) + return __Pyx_PyObject_CallMethO(func, NULL); + } + else if (nargs == 1 && kwargs == NULL) { + if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_O)) + return __Pyx_PyObject_CallMethO(func, args[0]); + } +#endif + #if PY_VERSION_HEX < 0x030800B1 + #if CYTHON_FAST_PYCCALL + if (PyCFunction_Check(func)) { + if (kwargs) { + return _PyCFunction_FastCallDict(func, args, nargs, kwargs); + } else { + return _PyCFunction_FastCallKeywords(func, args, nargs, NULL); + } + } + #if PY_VERSION_HEX >= 0x030700A1 + if (!kwargs && __Pyx_IS_TYPE(func, &PyMethodDescr_Type)) { + return _PyMethodDescr_FastCallKeywords(func, args, nargs, NULL); + } + #endif + #endif + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs); + } + #endif + #endif + if (kwargs == NULL) { + #if CYTHON_VECTORCALL + #if PY_VERSION_HEX < 0x03090000 + vectorcallfunc f = _PyVectorcall_Function(func); + #else + vectorcallfunc f = PyVectorcall_Function(func); + #endif + if (f) { + return f(func, args, (size_t)nargs, NULL); + } + #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL + if (__Pyx_CyFunction_CheckExact(func)) { + __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func); + if (f) return f(func, args, (size_t)nargs, NULL); + } + #endif + } + if (nargs == 0) { + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, kwargs); + } + #if PY_VERSION_HEX >= 0x03090000 && !CYTHON_COMPILING_IN_LIMITED_API + return PyObject_VectorcallDict(func, args, (size_t)nargs, kwargs); + #else + return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs); + #endif +} + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + __Pyx_PyThreadState_declare + CYTHON_UNUSED_VAR(cause); + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + 
value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { + #if PY_VERSION_HEX >= 0x030C00A6 + PyException_SetTraceback(value, tb); + #elif CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* KeywordStringCheck */ +static int __Pyx_CheckKeywordStrings( + PyObject *kw, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kw, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + if (CYTHON_METH_FASTCALL && likely(PyTuple_Check(kw))) { + Py_ssize_t kwsize; +#if CYTHON_ASSUME_SAFE_MACROS + kwsize = PyTuple_GET_SIZE(kw); +#else + kwsize = PyTuple_Size(kw); + if (kwsize < 0) return 0; +#endif + if (unlikely(kwsize == 0)) + return 1; + if (!kw_allowed) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, 0); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + goto invalid_keyword; + } +#if PY_VERSION_HEX < 0x03090000 + for (pos = 0; pos < kwsize; pos++) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, pos); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } +#endif + return 1; + } + while (PyDict_Next(kw, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if (!kw_allowed && unlikely(key)) + goto invalid_keyword; + return 1; 
+invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + +/* RaiseUnexpectedTypeError */ +static int +__Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) +{ + __Pyx_TypeName obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, + expected, obj_type_name); + __Pyx_DECREF_TypeName(obj_type_name); + return 0; +} + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *module = 0; + PyObject *empty_dict = 0; + PyObject *empty_list = 0; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (unlikely(!py_import)) + goto bad; + if (!from_list) { + empty_list = PyList_New(0); + if (unlikely(!empty_list)) + goto bad; + from_list = empty_list; + } + #endif + empty_dict = PyDict_New(); + if (unlikely(!empty_dict)) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.') != NULL) { + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, 1); + if (unlikely(!module)) { + if (unlikely(!PyErr_ExceptionMatches(PyExc_ImportError))) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (unlikely(!py_level)) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, __pyx_d, empty_dict, from_list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, level); + #endif + } + } +bad: + Py_XDECREF(empty_dict); + Py_XDECREF(empty_list); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + const char* module_name_str = 0; + PyObject* module_name = 0; + PyObject* module_dot = 0; + PyObject* full_name = 0; + PyErr_Clear(); + module_name_str = PyModule_GetName(module); + if (unlikely(!module_name_str)) { goto modbad; } + module_name = PyUnicode_FromString(module_name_str); + if (unlikely(!module_name)) { goto modbad; } + module_dot = PyUnicode_Concat(module_name, __pyx_kp_u__2); + if (unlikely(!module_dot)) { goto modbad; } + full_name = PyUnicode_Concat(module_dot, name); + if (unlikely(!full_name)) { goto modbad; } + #if PY_VERSION_HEX < 0x030700A1 || (CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM < 0x07030400) + { + PyObject *modules = PyImport_GetModuleDict(); + if (unlikely(!modules)) + goto modbad; + value = PyObject_GetItem(modules, full_name); + } + #else + value = PyImport_GetModule(full_name); + #endif + modbad: + Py_XDECREF(full_name); + Py_XDECREF(module_dot); + Py_XDECREF(module_name); + } + if (unlikely(!value)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return 
value; +} + +/* ImportDottedModule */ +#if PY_MAJOR_VERSION >= 3 +static PyObject *__Pyx__ImportDottedModule_Error(PyObject *name, PyObject *parts_tuple, Py_ssize_t count) { + PyObject *partial_name = NULL, *slice = NULL, *sep = NULL; + if (unlikely(PyErr_Occurred())) { + PyErr_Clear(); + } + if (likely(PyTuple_GET_SIZE(parts_tuple) == count)) { + partial_name = name; + } else { + slice = PySequence_GetSlice(parts_tuple, 0, count); + if (unlikely(!slice)) + goto bad; + sep = PyUnicode_FromStringAndSize(".", 1); + if (unlikely(!sep)) + goto bad; + partial_name = PyUnicode_Join(sep, slice); + } + PyErr_Format( +#if PY_MAJOR_VERSION < 3 + PyExc_ImportError, + "No module named '%s'", PyString_AS_STRING(partial_name)); +#else +#if PY_VERSION_HEX >= 0x030600B1 + PyExc_ModuleNotFoundError, +#else + PyExc_ImportError, +#endif + "No module named '%U'", partial_name); +#endif +bad: + Py_XDECREF(sep); + Py_XDECREF(slice); + Py_XDECREF(partial_name); + return NULL; +} +#endif +#if PY_MAJOR_VERSION >= 3 +static PyObject *__Pyx__ImportDottedModule_Lookup(PyObject *name) { + PyObject *imported_module; +#if PY_VERSION_HEX < 0x030700A1 || (CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM < 0x07030400) + PyObject *modules = PyImport_GetModuleDict(); + if (unlikely(!modules)) + return NULL; + imported_module = __Pyx_PyDict_GetItemStr(modules, name); + Py_XINCREF(imported_module); +#else + imported_module = PyImport_GetModule(name); +#endif + return imported_module; +} +#endif +#if PY_MAJOR_VERSION >= 3 +static PyObject *__Pyx_ImportDottedModule_WalkParts(PyObject *module, PyObject *name, PyObject *parts_tuple) { + Py_ssize_t i, nparts; + nparts = PyTuple_GET_SIZE(parts_tuple); + for (i=1; i < nparts && module; i++) { + PyObject *part, *submodule; +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + part = PyTuple_GET_ITEM(parts_tuple, i); +#else + part = PySequence_ITEM(parts_tuple, i); +#endif + submodule = __Pyx_PyObject_GetAttrStrNoError(module, part); +#if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_DECREF(part); +#endif + Py_DECREF(module); + module = submodule; + } + if (unlikely(!module)) { + return __Pyx__ImportDottedModule_Error(name, parts_tuple, i); + } + return module; +} +#endif +static PyObject *__Pyx__ImportDottedModule(PyObject *name, PyObject *parts_tuple) { +#if PY_MAJOR_VERSION < 3 + PyObject *module, *from_list, *star = __pyx_n_s__3; + CYTHON_UNUSED_VAR(parts_tuple); + from_list = PyList_New(1); + if (unlikely(!from_list)) + return NULL; + Py_INCREF(star); + PyList_SET_ITEM(from_list, 0, star); + module = __Pyx_Import(name, from_list, 0); + Py_DECREF(from_list); + return module; +#else + PyObject *imported_module; + PyObject *module = __Pyx_Import(name, NULL, 0); + if (!parts_tuple || unlikely(!module)) + return module; + imported_module = __Pyx__ImportDottedModule_Lookup(name); + if (likely(imported_module)) { + Py_DECREF(module); + return imported_module; + } + PyErr_Clear(); + return __Pyx_ImportDottedModule_WalkParts(module, name, parts_tuple); +#endif +} +static PyObject *__Pyx_ImportDottedModule(PyObject *name, PyObject *parts_tuple) { +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030400B1 + PyObject *module = __Pyx__ImportDottedModule_Lookup(name); + if (likely(module)) { + PyObject *spec = __Pyx_PyObject_GetAttrStrNoError(module, __pyx_n_s_spec); + if (likely(spec)) { + PyObject *unsafe = __Pyx_PyObject_GetAttrStrNoError(spec, __pyx_n_s_initializing); + if (likely(!unsafe || !__Pyx_PyObject_IsTrue(unsafe))) { + Py_DECREF(spec); + spec = NULL; 
+ } + Py_XDECREF(unsafe); + } + if (likely(!spec)) { + PyErr_Clear(); + return module; + } + Py_DECREF(spec); + Py_DECREF(module); + } else if (PyErr_Occurred()) { + PyErr_Clear(); + } +#endif + return __Pyx__ImportDottedModule(name, parts_tuple); +} + +/* FixUpExtensionType */ +#if CYTHON_USE_TYPE_SPECS +static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { +#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + CYTHON_UNUSED_VAR(spec); + CYTHON_UNUSED_VAR(type); +#else + const PyType_Slot *slot = spec->slots; + while (slot && slot->slot && slot->slot != Py_tp_members) + slot++; + if (slot && slot->slot == Py_tp_members) { + int changed = 0; +#if !(PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON) + const +#endif + PyMemberDef *memb = (PyMemberDef*) slot->pfunc; + while (memb && memb->name) { + if (memb->name[0] == '_' && memb->name[1] == '_') { +#if PY_VERSION_HEX < 0x030900b1 + if (strcmp(memb->name, "__weaklistoffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + type->tp_weaklistoffset = memb->offset; + changed = 1; + } + else if (strcmp(memb->name, "__dictoffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + type->tp_dictoffset = memb->offset; + changed = 1; + } +#if CYTHON_METH_FASTCALL + else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); +#if PY_VERSION_HEX >= 0x030800b4 + type->tp_vectorcall_offset = memb->offset; +#else + type->tp_print = (printfunc) memb->offset; +#endif + changed = 1; + } +#endif +#else + if ((0)); +#endif +#if PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON + else if (strcmp(memb->name, "__module__") == 0) { + PyObject *descr; + assert(memb->type == T_OBJECT); + assert(memb->flags == 0 || memb->flags == READONLY); + descr = PyDescr_NewMember(type, memb); + if (unlikely(!descr)) + return -1; + if (unlikely(PyDict_SetItem(type->tp_dict, PyDescr_NAME(descr), descr) < 0)) { + Py_DECREF(descr); + return -1; + } + Py_DECREF(descr); + changed = 1; + } +#endif + } + memb++; + } + if (changed) + PyType_Modified(type); + } +#endif + return 0; +} +#endif + +/* FetchSharedCythonModule */ +static PyObject *__Pyx_FetchSharedCythonABIModule(void) { + return __Pyx_PyImport_AddModuleRef((char*) __PYX_ABI_MODULE_NAME); +} + +/* FetchCommonType */ +static int __Pyx_VerifyCachedType(PyObject *cached_type, + const char *name, + Py_ssize_t basicsize, + Py_ssize_t expected_basicsize) { + if (!PyType_Check(cached_type)) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s is not a type object", name); + return -1; + } + if (basicsize != expected_basicsize) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s has the wrong size, try recompiling", + name); + return -1; + } + return 0; +} +#if !CYTHON_USE_TYPE_SPECS +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { + PyObject* abi_module; + const char* object_name; + PyTypeObject *cached_type = NULL; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + object_name = strrchr(type->tp_name, '.'); + object_name = object_name ? 
object_name+1 : type->tp_name; + cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + if (__Pyx_VerifyCachedType( + (PyObject *)cached_type, + object_name, + cached_type->tp_basicsize, + type->tp_basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + if (PyType_Ready(type) < 0) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) + goto bad; + Py_INCREF(type); + cached_type = type; +done: + Py_DECREF(abi_module); + return cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#else +static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { + PyObject *abi_module, *cached_type = NULL; + const char* object_name = strrchr(spec->name, '.'); + object_name = object_name ? object_name+1 : spec->name; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + cached_type = PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + Py_ssize_t basicsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_basicsize; + py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); + if (unlikely(!py_basicsize)) goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; +#else + basicsize = likely(PyType_Check(cached_type)) ? ((PyTypeObject*) cached_type)->tp_basicsize : -1; +#endif + if (__Pyx_VerifyCachedType( + cached_type, + object_name, + basicsize, + spec->basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + CYTHON_UNUSED_VAR(module); + cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); + if (unlikely(!cached_type)) goto bad; + if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; +done: + Py_DECREF(abi_module); + assert(cached_type == NULL || PyType_Check(cached_type)); + return (PyTypeObject *) cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#endif + +/* PyVectorcallFastCallDict */ +#if CYTHON_METH_FASTCALL +static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + PyObject *res = NULL; + PyObject *kwnames; + PyObject **newargs; + PyObject **kwvalues; + Py_ssize_t i, pos; + size_t j; + PyObject *key, *value; + unsigned long keys_are_strings; + Py_ssize_t nkw = PyDict_GET_SIZE(kw); + newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); + if (unlikely(newargs == NULL)) { + PyErr_NoMemory(); + return NULL; + } + for (j = 0; j < nargs; j++) newargs[j] = args[j]; + kwnames = PyTuple_New(nkw); + if (unlikely(kwnames == NULL)) { + PyMem_Free(newargs); + return NULL; + } + kwvalues = newargs + nargs; + pos = i = 0; + keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; + while (PyDict_Next(kw, &pos, &key, &value)) { + keys_are_strings &= Py_TYPE(key)->tp_flags; + Py_INCREF(key); + Py_INCREF(value); + PyTuple_SET_ITEM(kwnames, i, key); + kwvalues[i] = value; + i++; + } + if (unlikely(!keys_are_strings)) { + PyErr_SetString(PyExc_TypeError, "keywords must be strings"); + goto cleanup; + } + res = vc(func, newargs, nargs, kwnames); 
+cleanup: + Py_DECREF(kwnames); + for (i = 0; i < nkw; i++) + Py_DECREF(kwvalues[i]); + PyMem_Free(newargs); + return res; +} +static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { + return vc(func, args, nargs, NULL); + } + return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); +} +#endif + +/* CythonFunctionShared */ +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + if (__Pyx_CyFunction_Check(func)) { + return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc; + } else if (PyCFunction_Check(func)) { + return PyCFunction_GetFunction(func) == (PyCFunction) cfunc; + } + return 0; +} +#else +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +} +#endif +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + __Pyx_Py_XDECREF_SET( + __Pyx_CyFunction_GetClassObj(f), + ((classobj) ? __Pyx_NewRef(classobj) : NULL)); +#else + __Pyx_Py_XDECREF_SET( + ((PyCMethodObject *) (f))->mm_class, + (PyTypeObject*)((classobj) ? __Pyx_NewRef(classobj) : NULL)); +#endif +} +static PyObject * +__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) +{ + CYTHON_UNUSED_VAR(closure); + if (unlikely(op->func_doc == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); + if (unlikely(!op->func_doc)) return NULL; +#else + if (((PyCFunctionObject*)op)->m_ml->ml_doc) { +#if PY_MAJOR_VERSION >= 3 + op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#else + op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#endif + if (unlikely(op->func_doc == NULL)) + return NULL; + } else { + Py_INCREF(Py_None); + return Py_None; + } +#endif + } + Py_INCREF(op->func_doc); + return op->func_doc; +} +static int +__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (value == NULL) { + value = Py_None; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_doc, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_name == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_name = PyObject_GetAttrString(op->func, "__name__"); +#elif PY_MAJOR_VERSION >= 3 + op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#else + op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#endif + if (unlikely(op->func_name == NULL)) + return NULL; + } + Py_INCREF(op->func_name); + return op->func_name; +} +static int +__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__name__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_name, value); + 
return 0; +} +static PyObject * +__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_qualname); + return op->func_qualname; +} +static int +__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__qualname__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_qualname, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_dict == NULL)) { + op->func_dict = PyDict_New(); + if (unlikely(op->func_dict == NULL)) + return NULL; + } + Py_INCREF(op->func_dict); + return op->func_dict; +} +static int +__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(value == NULL)) { + PyErr_SetString(PyExc_TypeError, + "function's dictionary may not be deleted"); + return -1; + } + if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "setting function's dictionary to a non-dict"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_dict, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_globals); + return op->func_globals; +} +static PyObject * +__Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(op); + CYTHON_UNUSED_VAR(context); + Py_INCREF(Py_None); + return Py_None; +} +static PyObject * +__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) +{ + PyObject* result = (op->func_code) ? 
op->func_code : Py_None; + CYTHON_UNUSED_VAR(context); + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { + int result = 0; + PyObject *res = op->defaults_getter((PyObject *) op); + if (unlikely(!res)) + return -1; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + op->defaults_tuple = PyTuple_GET_ITEM(res, 0); + Py_INCREF(op->defaults_tuple); + op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); + Py_INCREF(op->defaults_kwdict); + #else + op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); + if (unlikely(!op->defaults_tuple)) result = -1; + else { + op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); + if (unlikely(!op->defaults_kwdict)) result = -1; + } + #endif + Py_DECREF(res); + return result; +} +static int +__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__defaults__ must be set to a tuple object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_tuple; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_tuple; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__kwdefaults__ must be set to a dict object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_kwdict; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_kwdict; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value || value == Py_None) { + value = NULL; + } else if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__annotations__ must be set to a dict object"); + return -1; + } + Py_XINCREF(value); + __Pyx_Py_XDECREF_SET(op->func_annotations, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->func_annotations; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + result = PyDict_New(); + if (unlikely(!result)) return NULL; + op->func_annotations = result; + } + Py_INCREF(result); + return result; +} +static PyObject * 
+__Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { + int is_coroutine; + CYTHON_UNUSED_VAR(context); + if (op->func_is_coroutine) { + return __Pyx_NewRef(op->func_is_coroutine); + } + is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; +#if PY_VERSION_HEX >= 0x03050000 + if (is_coroutine) { + PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; + fromlist = PyList_New(1); + if (unlikely(!fromlist)) return NULL; + Py_INCREF(marker); +#if CYTHON_ASSUME_SAFE_MACROS + PyList_SET_ITEM(fromlist, 0, marker); +#else + if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { + Py_DECREF(marker); + Py_DECREF(fromlist); + return NULL; + } +#endif + module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); + Py_DECREF(fromlist); + if (unlikely(!module)) goto ignore; + op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); + Py_DECREF(module); + if (likely(op->func_is_coroutine)) { + return __Pyx_NewRef(op->func_is_coroutine); + } +ignore: + PyErr_Clear(); + } +#endif + op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); + return __Pyx_NewRef(op->func_is_coroutine); +} +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject * +__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_GetAttrString(op->func, "__module__"); +} +static int +__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_SetAttrString(op->func, "__module__", value); +} +#endif +static PyGetSetDef __pyx_CyFunction_getsets[] = { + {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, + {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) "__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, + {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, + {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, +#if CYTHON_COMPILING_IN_LIMITED_API + {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, +#endif + {0, 0, 0, 0, 0} +}; +static PyMemberDef 
__pyx_CyFunction_members[] = { +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, +#endif +#if CYTHON_USE_TYPE_SPECS + {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, +#if CYTHON_METH_FASTCALL +#if CYTHON_BACKPORT_VECTORCALL + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, +#else +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, +#endif +#endif +#endif +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, +#else + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, +#endif +#endif + {0, 0, 0, 0, 0} +}; +static PyObject * +__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) +{ + CYTHON_UNUSED_VAR(args); +#if PY_MAJOR_VERSION >= 3 + Py_INCREF(m->func_qualname); + return m->func_qualname; +#else + return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); +#endif +} +static PyMethodDef __pyx_CyFunction_methods[] = { + {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, + {0, 0, 0, 0} +}; +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) +#else +#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) +#endif +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { +#if !CYTHON_COMPILING_IN_LIMITED_API + PyCFunctionObject *cf = (PyCFunctionObject*) op; +#endif + if (unlikely(op == NULL)) + return NULL; +#if CYTHON_COMPILING_IN_LIMITED_API + op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); + if (unlikely(!op->func)) return NULL; +#endif + op->flags = flags; + __Pyx_CyFunction_weakreflist(op) = NULL; +#if !CYTHON_COMPILING_IN_LIMITED_API + cf->m_ml = ml; + cf->m_self = (PyObject *) op; +#endif + Py_XINCREF(closure); + op->func_closure = closure; +#if !CYTHON_COMPILING_IN_LIMITED_API + Py_XINCREF(module); + cf->m_module = module; +#endif + op->func_dict = NULL; + op->func_name = NULL; + Py_INCREF(qualname); + op->func_qualname = qualname; + op->func_doc = NULL; +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + op->func_classobj = NULL; +#else + ((PyCMethodObject*)op)->mm_class = NULL; +#endif + op->func_globals = globals; + Py_INCREF(op->func_globals); + Py_XINCREF(code); + op->func_code = code; + op->defaults_pyobjects = 0; + op->defaults_size = 0; + op->defaults = NULL; + op->defaults_tuple = NULL; + op->defaults_kwdict = NULL; + op->defaults_getter = NULL; + op->func_annotations = NULL; + op->func_is_coroutine = NULL; +#if CYTHON_METH_FASTCALL + switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { + case METH_NOARGS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; + break; + case METH_O: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; + break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; + break; + case METH_FASTCALL 
| METH_KEYWORDS:
+        __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS;
+        break;
+    case METH_VARARGS | METH_KEYWORDS:
+        __Pyx_CyFunction_func_vectorcall(op) = NULL;
+        break;
+    default:
+        PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction");
+        Py_DECREF(op);
+        return NULL;
+    }
+#endif
+    return (PyObject *) op;
+}
+static int
+__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m)
+{
+    Py_CLEAR(m->func_closure);
+#if CYTHON_COMPILING_IN_LIMITED_API
+    Py_CLEAR(m->func);
+#else
+    Py_CLEAR(((PyCFunctionObject*)m)->m_module);
+#endif
+    Py_CLEAR(m->func_dict);
+    Py_CLEAR(m->func_name);
+    Py_CLEAR(m->func_qualname);
+    Py_CLEAR(m->func_doc);
+    Py_CLEAR(m->func_globals);
+    Py_CLEAR(m->func_code);
+#if !CYTHON_COMPILING_IN_LIMITED_API
+#if PY_VERSION_HEX < 0x030900B1
+    Py_CLEAR(__Pyx_CyFunction_GetClassObj(m));
+#else
+    {
+        PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class;
+        ((PyCMethodObject *) (m))->mm_class = NULL;
+        Py_XDECREF(cls);
+    }
+#endif
+#endif
+    Py_CLEAR(m->defaults_tuple);
+    Py_CLEAR(m->defaults_kwdict);
+    Py_CLEAR(m->func_annotations);
+    Py_CLEAR(m->func_is_coroutine);
+    if (m->defaults) {
+        PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m);
+        int i;
+        for (i = 0; i < m->defaults_pyobjects; i++)
+            Py_XDECREF(pydefaults[i]);
+        PyObject_Free(m->defaults);
+        m->defaults = NULL;
+    }
+    return 0;
+}
+static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m)
+{
+    if (__Pyx_CyFunction_weakreflist(m) != NULL)
+        PyObject_ClearWeakRefs((PyObject *) m);
+    __Pyx_CyFunction_clear(m);
+    __Pyx_PyHeapTypeObject_GC_Del(m);
+}
+static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m)
+{
+    PyObject_GC_UnTrack(m);
+    __Pyx__CyFunction_dealloc(m);
+}
+static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg)
+{
+    Py_VISIT(m->func_closure);
+#if CYTHON_COMPILING_IN_LIMITED_API
+    Py_VISIT(m->func);
+#else
+    Py_VISIT(((PyCFunctionObject*)m)->m_module);
+#endif
+    Py_VISIT(m->func_dict);
+    Py_VISIT(m->func_name);
+    Py_VISIT(m->func_qualname);
+    Py_VISIT(m->func_doc);
+    Py_VISIT(m->func_globals);
+    Py_VISIT(m->func_code);
+#if !CYTHON_COMPILING_IN_LIMITED_API
+    Py_VISIT(__Pyx_CyFunction_GetClassObj(m));
+#endif
+    Py_VISIT(m->defaults_tuple);
+    Py_VISIT(m->defaults_kwdict);
+    Py_VISIT(m->func_is_coroutine);
+    if (m->defaults) {
+        PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m);
+        int i;
+        for (i = 0; i < m->defaults_pyobjects; i++)
+            Py_VISIT(pydefaults[i]);
+    }
+    return 0;
+}
+static PyObject*
+__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op)
+{
+#if PY_MAJOR_VERSION >= 3
+    return PyUnicode_FromFormat("<cyfunction %U at %p>",
+                                op->func_qualname, (void *)op);
+#else
+    return PyString_FromFormat("<cyfunction %s at %p>",
+                               PyString_AsString(op->func_qualname), (void *)op);
+#endif
+}
+static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) {
+#if CYTHON_COMPILING_IN_LIMITED_API
+    PyObject *f = ((__pyx_CyFunctionObject*)func)->func;
+    PyObject *py_name = NULL;
+    PyCFunction meth;
+    int flags;
+    meth = PyCFunction_GetFunction(f);
+    if (unlikely(!meth)) return NULL;
+    flags = PyCFunction_GetFlags(f);
+    if (unlikely(flags < 0)) return NULL;
+#else
+    PyCFunctionObject* f = (PyCFunctionObject*)func;
+    PyCFunction meth = f->m_ml->ml_meth;
+    int flags = f->m_ml->ml_flags;
+#endif
+    Py_ssize_t size;
+    switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) {
+    case METH_VARARGS:
+        if (likely(kw == NULL || PyDict_Size(kw) == 0))
+            return (*meth)(self, arg);
+        break;
+ case METH_VARARGS | METH_KEYWORDS: + return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); + case METH_NOARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 0)) + return (*meth)(self, NULL); +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + case METH_O: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 1)) { + PyObject *result, *arg0; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + arg0 = PyTuple_GET_ITEM(arg, 0); + #else + arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; + #endif + result = (*meth)(self, arg0); + #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_DECREF(arg0); + #endif + return result; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + default: + PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); + return NULL; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", + py_name); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", + f->m_ml->ml_name); +#endif + return NULL; +} +static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *self, *result; +#if CYTHON_COMPILING_IN_LIMITED_API + self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); + if (unlikely(!self) && PyErr_Occurred()) return NULL; +#else + self = ((PyCFunctionObject*)func)->m_self; +#endif + result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); + return result; +} +static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { + PyObject *result; + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; +#if CYTHON_METH_FASTCALL + __pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); + if (vc) { +#if CYTHON_ASSUME_SAFE_MACROS + return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); +#else + (void) &__Pyx_PyVectorcall_FastCallDict; + return PyVectorcall_Call(func, args, kw); +#endif + } +#endif + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + Py_ssize_t argc; + PyObject *new_args; + PyObject *self; +#if CYTHON_ASSUME_SAFE_MACROS + 
argc = PyTuple_GET_SIZE(args); +#else + argc = PyTuple_Size(args); + if (unlikely(!argc) < 0) return NULL; +#endif + new_args = PyTuple_GetSlice(args, 1, argc); + if (unlikely(!new_args)) + return NULL; + self = PyTuple_GetItem(args, 0); + if (unlikely(!self)) { + Py_DECREF(new_args); +#if PY_MAJOR_VERSION > 2 + PyErr_Format(PyExc_TypeError, + "unbound method %.200S() needs an argument", + cyfunc->func_qualname); +#else + PyErr_SetString(PyExc_TypeError, + "unbound method needs an argument"); +#endif + return NULL; + } + result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); + Py_DECREF(new_args); + } else { + result = __Pyx_CyFunction_Call(func, args, kw); + } + return result; +} +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames) +{ + int ret = 0; + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + if (unlikely(nargs < 1)) { + PyErr_Format(PyExc_TypeError, "%.200s() needs an argument", + ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); + return -1; + } + ret = 1; + } + if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); + return -1; + } + return ret; +} +static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + if (unlikely(nargs != 0)) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + def->ml_name, nargs); + return NULL; + } + return def->ml_meth(self, NULL); +} +static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + if (unlikely(nargs != 1)) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + def->ml_name, nargs); + return NULL; + } + return def->ml_meth(self, args[0]); +} +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch 
(__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); +} +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; + PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); +} +#endif +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_CyFunctionType_slots[] = { + {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, + {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, + {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, + {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, + {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, + {Py_tp_methods, (void *)__pyx_CyFunction_methods}, + {Py_tp_members, (void *)__pyx_CyFunction_members}, + {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, + {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, + {0, 0}, +}; +static PyType_Spec __pyx_CyFunctionType_spec = { + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + __pyx_CyFunctionType_slots +}; +#else +static PyTypeObject __pyx_CyFunctionType_type = { + PyVarObject_HEAD_INIT(0, 0) + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, + (destructor) __Pyx_CyFunction_dealloc, +#if !CYTHON_METH_FASTCALL + 0, +#elif CYTHON_BACKPORT_VECTORCALL + (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), +#else + offsetof(PyCFunctionObject, vectorcall), +#endif + 0, + 0, +#if PY_MAJOR_VERSION < 3 + 0, +#else + 0, +#endif + (reprfunc) __Pyx_CyFunction_repr, + 0, + 0, + 0, + 0, + __Pyx_CyFunction_CallAsMethod, + 0, + 0, + 0, + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + 0, + (traverseproc) __Pyx_CyFunction_traverse, + (inquiry) __Pyx_CyFunction_clear, + 0, +#if PY_VERSION_HEX < 0x030500A0 + offsetof(__pyx_CyFunctionObject, func_weakreflist), +#else + offsetof(PyCFunctionObject, m_weakreflist), +#endif + 0, + 0, + __pyx_CyFunction_methods, + __pyx_CyFunction_members, + __pyx_CyFunction_getsets, + 0, + 0, + __Pyx_PyMethod_New, + 0, + offsetof(__pyx_CyFunctionObject, func_dict), + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, +#if PY_VERSION_HEX >= 0x030400a1 + 0, +#endif +#if PY_VERSION_HEX >= 0x030800b1 && 
(!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, +#endif +#if __PYX_NEED_TP_PRINT_SLOT + 0, +#endif +#if PY_VERSION_HEX >= 0x030C0000 + 0, +#endif +#if PY_VERSION_HEX >= 0x030d00A4 + 0, +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, +#endif +}; +#endif +static int __pyx_CyFunction_init(PyObject *module) { +#if CYTHON_USE_TYPE_SPECS + __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); +#else + CYTHON_UNUSED_VAR(module); + __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); +#endif + if (unlikely(__pyx_CyFunctionType == NULL)) { + return -1; + } + return 0; +} +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults = PyObject_Malloc(size); + if (unlikely(!m->defaults)) + return PyErr_NoMemory(); + memset(m->defaults, 0, size); + m->defaults_pyobjects = pyobjects; + m->defaults_size = size; + return m->defaults; +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_tuple = tuple; + Py_INCREF(tuple); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_kwdict = dict; + Py_INCREF(dict); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->func_annotations = dict; + Py_INCREF(dict); +} + +/* CythonFunction */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { + PyObject *op = __Pyx_CyFunction_Init( + PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), + ml, flags, qualname, closure, module, globals, code + ); + if (likely(op)) { + PyObject_GC_Track(op); + } + return op; +} + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + CYTHON_MAYBE_UNUSED_VAR(tstate); + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} +#endif + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, + PyObject *firstlineno, PyObject *name) { + PyObject *replace = NULL; + if 
(unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL;
+    if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL;
+    replace = PyObject_GetAttrString(code, "replace");
+    if (likely(replace)) {
+        PyObject *result;
+        result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict);
+        Py_DECREF(replace);
+        return result;
+    }
+    PyErr_Clear();
+    #if __PYX_LIMITED_VERSION_HEX < 0x030780000
+    {
+        PyObject *compiled = NULL, *result = NULL;
+        if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL;
+        if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL;
+        compiled = Py_CompileString(
+            "out = type(code)(\n"
+            "  code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n"
+            "  code.co_flags,  code.co_code, code.co_consts, code.co_names,\n"
+            "  code.co_varnames, code.co_filename, co_name, co_firstlineno,\n"
+            "  code.co_lnotab)\n", "<dummy>", Py_file_input);
+        if (!compiled) return NULL;
+        result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict);
+        Py_DECREF(compiled);
+        if (!result) PyErr_Print();
+        Py_DECREF(result);
+        result = PyDict_GetItemString(scratch_dict, "out");
+        if (result) Py_INCREF(result);
+        return result;
+    }
+    #else
+    return NULL;
+    #endif
+}
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+                               int py_line, const char *filename) {
+    PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL;
+    PyObject *replace = NULL, *getframe = NULL, *frame = NULL;
+    PyObject *exc_type, *exc_value, *exc_traceback;
+    int success = 0;
+    if (c_line) {
+        (void) __pyx_cfilenm;
+        (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line);
+    }
+    PyErr_Fetch(&exc_type, &exc_value, &exc_traceback);
+    code_object = Py_CompileString("_getframe()", filename, Py_eval_input);
+    if (unlikely(!code_object)) goto bad;
+    py_py_line = PyLong_FromLong(py_line);
+    if (unlikely(!py_py_line)) goto bad;
+    py_funcname = PyUnicode_FromString(funcname);
+    if (unlikely(!py_funcname)) goto bad;
+    dict = PyDict_New();
+    if (unlikely(!dict)) goto bad;
+    {
+        PyObject *old_code_object = code_object;
+        code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname);
+        Py_DECREF(old_code_object);
+    }
+    if (unlikely(!code_object)) goto bad;
+    getframe = PySys_GetObject("_getframe");
+    if (unlikely(!getframe)) goto bad;
+    if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad;
+    frame = PyEval_EvalCode(code_object, dict, dict);
+    if (unlikely(!frame) || frame == Py_None) goto bad;
+    success = 1;
+  bad:
+    PyErr_Restore(exc_type, exc_value, exc_traceback);
+    Py_XDECREF(code_object);
+    Py_XDECREF(py_py_line);
+    Py_XDECREF(py_funcname);
+    Py_XDECREF(dict);
+    Py_XDECREF(replace);
+    if (success) {
+        PyTraceBack_Here(
+            (struct _frame*)frame);
+    }
+    Py_XDECREF(frame);
+}
+#else
+static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
+            const char *funcname, int c_line,
+            int py_line, const char *filename) {
+    PyCodeObject *py_code = NULL;
+    PyObject *py_funcname = NULL;
+    #if PY_MAJOR_VERSION < 3
+    PyObject *py_srcfile = NULL;
+    py_srcfile = PyString_FromString(filename);
+    if (!py_srcfile) goto bad;
+    #endif
+    if (c_line) {
+        #if PY_MAJOR_VERSION < 3
+        py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+        if (!py_funcname) goto bad;
+        #else
+        py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+        if (!py_funcname) goto bad;
+        funcname = 
PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + if (!py_funcname) goto bad; + #endif + } + #if PY_MAJOR_VERSION < 3 + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject *ptype, *pvalue, *ptraceback; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) { + /* If the code object creation fails, then we should clear the + fetched exception references and propagate the new exception */ + Py_XDECREF(ptype); + Py_XDECREF(pvalue); + Py_XDECREF(ptraceback); + goto bad; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} +#endif + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + unsigned char *bytes = (unsigned char *)&value; +#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 + if (is_unsigned) { + return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); + } else { + return PyLong_FromNativeBytes(bytes, sizeof(value), -1); + } +#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 + int one = 1; int little = (int)*(unsigned char *)&one; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); +#else + int one = 1; int little = (int)*(unsigned char *)&one; + PyObject *from_bytes, *result = NULL; + PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; + from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); + if (!from_bytes) return NULL; + py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); + if (!py_bytes) goto limited_bad; + order_str = PyUnicode_FromString(little ? 
"little" : "big"); + if (!order_str) goto limited_bad; + arg_tuple = PyTuple_Pack(2, py_bytes, order_str); + if (!arg_tuple) goto limited_bad; + if (!is_unsigned) { + kwds = PyDict_New(); + if (!kwds) goto limited_bad; + if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; + } + result = PyObject_Call(from_bytes, arg_tuple, kwds); + limited_bad: + Py_XDECREF(kwds); + Py_XDECREF(arg_tuple); + Py_XDECREF(order_str); + Py_XDECREF(py_bytes); + Py_XDECREF(from_bytes); + return result; +#endif + } +} + +/* FormatTypeName */ +#if CYTHON_COMPILING_IN_LIMITED_API +static __Pyx_TypeName +__Pyx_PyType_GetName(PyTypeObject* tp) +{ + PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, + __pyx_n_s_name); + if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { + PyErr_Clear(); + Py_XDECREF(name); + name = __Pyx_NewRef(__pyx_n_s__11); + } + return name; +} +#endif + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(long) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) 
<< PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(long) <= sizeof(unsigned long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: 
+ if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } + } +#endif + if ((sizeof(long) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + long val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 
30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (long) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (long) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (long) -1; + } else { + stepval = v; + } + v = NULL; + val = (long) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((long) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((long) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (long) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(int) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | 
(unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(int) <= sizeof(unsigned long))) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, 
(((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } + } +#endif + if ((sizeof(int) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + int val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 
30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (int) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (int) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (int) -1; + } else { + stepval = v; + } + v = NULL; + val = (int) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((int) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((int) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (int) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (cls == a || cls == b) return 1; + mro = cls->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + PyObject *base = PyTuple_GET_ITEM(mro, i); + if (base == (PyObject *)a || base == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + if (exc_type1) { + return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); + } else { + return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030B00A4 + return Py_Version & ~0xFFUL; +#else + const char* rt_version = Py_GetVersion(); + unsigned long version = 0; + unsigned long factor = 0x01000000UL; + unsigned int digit = 0; + int i = 0; + while (factor) { + while ('0' <= rt_version[i] && rt_version[i] <= '9') { + digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); + ++i; + } + version += factor * digit; + if (rt_version[i] != '.') + break; + digit = 0; + factor >>= 8; + ++i; + } + return version; +#endif +} +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { + const unsigned long MAJOR_MINOR = 0xFFFF0000UL; + if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) + return 0; + if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) + return 1; + { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compile time Python version %d.%d " + "of module '%.100s' " + "%s " + "runtime version %d.%d", + (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), + __Pyx_MODULE_NAME, + (allow_newer) ? 
"was newer than" : "does not match", + (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) + ); + return PyErr_WarnEx(NULL, message, 1); + } +} + +/* InitStrings */ +#if PY_MAJOR_VERSION >= 3 +static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { + if (t.is_unicode | t.is_str) { + if (t.intern) { + *str = PyUnicode_InternFromString(t.s); + } else if (t.encoding) { + *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); + } else { + *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); + } + } else { + *str = PyBytes_FromStringAndSize(t.s, t.n - 1); + } + if (!*str) + return -1; + if (PyObject_Hash(*str) == -1) + return -1; + return 0; +} +#endif +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION >= 3 + __Pyx_InitString(*t, t->p); + #else + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + #endif + ++t; + } + return 0; +} + +#include +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { + size_t len = strlen(s); + if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { + PyErr_SetString(PyExc_OverflowError, "byte string is too long"); + return -1; + } + return (Py_ssize_t) len; +} +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return __Pyx_PyUnicode_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return PyByteArray_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if 
(!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { + __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). " + "The ability to return an instance of a strict subclass of int is deprecated, " + "and may be removed in a future version of Python.", + result_type_name)) { + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; + } + __Pyx_DECREF_TypeName(result_type_name); + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", + type_name, type_name, result_type_name); + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(__Pyx_PyLong_IsCompact(b))) { + return __Pyx_PyLong_CompactValue(b); + } else { + const digit* digits = __Pyx_PyLong_Digits(b); + const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * 
PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +/* #### Code section: utility_code_pragmas_end ### */ +#ifdef _MSC_VER +#pragma warning( pop ) +#endif + + + +/* #### Code section: end ### */ +#endif /* Py_PYTHON_H */ diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index 5e91cdf9e14..b04d6d48c28 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -265,10 +265,10 @@ def _init_global_attributes( @classmethod def create_for_statement( cls, - statement: Union[Select, FromStatement], - compiler: Optional[SQLCompiler], + statement: Executable, + compiler: SQLCompiler, **kw: Any, - ) -> AbstractORMCompileState: + ) -> CompileState: """Create a context for a statement given a :class:`.Compiler`. This method is always invoked in the context of SQLCompiler.process(). @@ -437,15 +437,30 @@ class default_compile_options(CacheableOptions): def __init__(self, *arg, **kw): raise NotImplementedError() - if TYPE_CHECKING: + @classmethod + def create_for_statement( + cls, + statement: Executable, + compiler: SQLCompiler, + **kw: Any, + ) -> ORMCompileState: + return cls._create_orm_context( + cast("Union[Select, FromStatement]", statement), + toplevel=not compiler.stack, + compiler=compiler, + **kw, + ) - @classmethod - def create_for_statement( - cls, - statement: Union[Select, FromStatement], - compiler: Optional[SQLCompiler], - **kw: Any, - ) -> ORMCompileState: ... 
+ @classmethod + def _create_orm_context( + cls, + statement: Union[Select, FromStatement], + *, + toplevel: bool, + compiler: Optional[SQLCompiler], + **kw: Any, + ) -> ORMCompileState: + raise NotImplementedError() def _append_dedupe_col_collection(self, obj, col_collection): dedupe = self.dedupe_columns @@ -755,12 +770,16 @@ class ORMFromStatementCompileState(ORMCompileState): eager_joins = _EMPTY_DICT @classmethod - def create_for_statement( + def _create_orm_context( cls, - statement_container: Union[Select, FromStatement], + statement: Union[Select, FromStatement], + *, + toplevel: bool, compiler: Optional[SQLCompiler], **kw: Any, ) -> ORMFromStatementCompileState: + statement_container = statement + assert isinstance(statement_container, FromStatement) if compiler is not None and compiler.stack: @@ -1067,21 +1086,17 @@ class ORMSelectCompileState(ORMCompileState, SelectState): _having_criteria = () @classmethod - def create_for_statement( + def _create_orm_context( cls, statement: Union[Select, FromStatement], + *, + toplevel: bool, compiler: Optional[SQLCompiler], **kw: Any, ) -> ORMSelectCompileState: - """compiler hook, we arrive here from compiler.visit_select() only.""" self = cls.__new__(cls) - if compiler is not None: - toplevel = not compiler.stack - else: - toplevel = True - select_statement = statement # if we are a select() that was never a legacy Query, we won't diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 4dbb3009b39..af496b245f4 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -3340,7 +3340,9 @@ def _compile_state( ORMCompileState._get_plugin_class_for_plugin(stmt, "orm"), ) - return compile_state_cls.create_for_statement(stmt, None) + return compile_state_cls._create_orm_context( + stmt, toplevel=True, compiler=None + ) def _compile_context(self, for_statement: bool = False) -> QueryContext: compile_state = self._compile_state(for_statement=for_statement) diff --git a/lib/sqlalchemy/sql/_util_cy.c b/lib/sqlalchemy/sql/_util_cy.c new file mode 100644 index 00000000000..70663d86b16 --- /dev/null +++ b/lib/sqlalchemy/sql/_util_cy.c @@ -0,0 +1,11241 @@ +/* Generated by Cython 3.0.11 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "name": "sqlalchemy.sql._util_cy", + "sources": [ + "lib/sqlalchemy/sql/_util_cy.py" + ] + }, + "module_name": "sqlalchemy.sql._util_cy" +} +END: Cython Metadata */ + +#ifndef PY_SSIZE_T_CLEAN +#define PY_SSIZE_T_CLEAN +#endif /* PY_SSIZE_T_CLEAN */ +#if defined(CYTHON_LIMITED_API) && 0 + #ifndef Py_LIMITED_API + #if CYTHON_LIMITED_API+0 > 0x03030000 + #define Py_LIMITED_API CYTHON_LIMITED_API + #else + #define Py_LIMITED_API 0x03030000 + #endif + #endif +#endif + +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.7+ or Python 3.3+. +#else +#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API +#define __PYX_EXTRA_ABI_MODULE_NAME "limited" +#else +#define __PYX_EXTRA_ABI_MODULE_NAME "" +#endif +#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME +#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI +#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." 
+#define CYTHON_HEX_VERSION 0x03000BF0 +#define CYTHON_FUTURE_DIVISION 1 +#include <stddef.h> +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #define HAVE_LONG_LONG +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX +#if defined(GRAALVM_PYTHON) + /* For very preliminary testing purposes. Most variables are set the same as PyPy. + The existence of this section does not imply that anything works or is even tested */ + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 1 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(PYPY_VERSION) + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif +
#undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + #endif + #if PY_VERSION_HEX < 0x03090000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(CYTHON_LIMITED_API) + #ifdef Py_LIMITED_API + #undef __PYX_LIMITED_VERSION_HEX + #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API + #endif + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 1 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_CLINE_IN_TRACEBACK + #define CYTHON_CLINE_IN_TRACEBACK 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 1 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #endif + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 1 + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef 
CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #ifndef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 1 + #endif + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 + #endif +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #ifndef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define 
CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) + #endif + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #if PY_VERSION_HEX < 0x030400a1 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #elif !defined(CYTHON_USE_TP_FINALIZE) + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #if PY_VERSION_HEX < 0x030600B1 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #elif !defined(CYTHON_USE_DICT_VERSIONS) + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) + #endif + #if PY_VERSION_HEX < 0x030700A3 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #elif !defined(CYTHON_USE_EXC_INFO_STACK) + #define CYTHON_USE_EXC_INFO_STACK 1 + #endif + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 1 + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if !defined(CYTHON_VECTORCALL) +#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) +#endif +#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(maybe_unused) + #define CYTHON_UNUSED [[maybe_unused]] + #endif + #endif + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_UNUSED_VAR +# if defined(__cplusplus) + template<class T> void CYTHON_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef
CYTHON_MAYBE_UNUSED_VAR + #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_USE_CPP_STD_MOVE + #if defined(__cplusplus) && (\ + __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) + #define CYTHON_USE_CPP_STD_MOVE 1 + #else + #define CYTHON_USE_CPP_STD_MOVE 0 + #endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned short uint16_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + #endif + #endif + #if _MSC_VER < 1300 + #ifdef _WIN64 + typedef unsigned long long __pyx_uintptr_t; + #else + typedef unsigned int __pyx_uintptr_t; + #endif + #else + #ifdef _WIN64 + typedef unsigned __int64 __pyx_uintptr_t; + #else + typedef unsigned __int32 __pyx_uintptr_t; + #endif + #endif +#else + #include <stdint.h> + typedef uintptr_t __pyx_uintptr_t; +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif +#ifdef __cplusplus + template <typename T> + struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; + #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL<type>::value) +#else + #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) +#endif +#if CYTHON_COMPILING_IN_PYPY == 1 + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) +#else + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) +#endif +#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_DefaultClassType PyClass_Type + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else +
#define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_DefaultClassType PyType_Type +#if CYTHON_COMPILING_IN_LIMITED_API + static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *exception_table = NULL; + PyObject *types_module=NULL, *code_type=NULL, *result=NULL; + #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 + PyObject *version_info; + PyObject *py_minor_version = NULL; + #endif + long minor_version = 0; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 + minor_version = 11; + #else + if (!(version_info = PySys_GetObject("version_info"))) goto end; + if (!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; + minor_version = PyLong_AsLong(py_minor_version); + Py_DECREF(py_minor_version); + if (minor_version == -1 && PyErr_Occurred()) goto end; + #endif + if (!(types_module = PyImport_ImportModule("types"))) goto end; + if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; + if (minor_version <= 7) { + (void)p; + result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else if (minor_version <= 10) { + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else { + if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); + } + end: + Py_XDECREF(code_type); + Py_XDECREF(exception_table); + Py_XDECREF(types_module); + if (type) { + PyErr_Restore(type, value, traceback); + } + return result; + } + #ifndef CO_OPTIMIZED + #define CO_OPTIMIZED 0x0001 + #endif + #ifndef CO_NEWLOCALS + #define CO_NEWLOCALS 0x0002 + #endif + #ifndef CO_VARARGS + #define CO_VARARGS 0x0004 + #endif + #ifndef CO_VARKEYWORDS + #define CO_VARKEYWORDS 0x0008 + #endif + #ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x0200 + #endif + #ifndef CO_GENERATOR + #define CO_GENERATOR 0x0020 + #endif + #ifndef CO_COROUTINE + #define CO_COROUTINE 0x0080 + #endif +#elif PY_VERSION_HEX >= 0x030B0000 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyCodeObject *result; + PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); + if (!empty_bytes) return NULL; + result = + #if PY_VERSION_HEX >= 0x030C0000 + PyUnstable_Code_NewWithPosOnlyArgs + #else + PyCode_NewWithPosOnlyArgs + #endif + (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); + Py_DECREF(empty_bytes); + return result; + } +#elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif +#endif +#if PY_VERSION_HEX >= 0x030900A4 || 
defined(Py_IS_TYPE) + #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) +#else + #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) + #define __Pyx_Py_Is(x, y) Py_Is(x, y) +#else + #define __Pyx_Py_Is(x, y) ((x) == (y)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) + #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) +#else + #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) + #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) +#else + #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) + #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) +#else + #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) +#endif +#define __Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) +#if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) +#else + #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) +#endif +#ifndef CO_COROUTINE + #define CO_COROUTINE 0x80 +#endif +#ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x200 +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef Py_TPFLAGS_SEQUENCE + #define Py_TPFLAGS_SEQUENCE 0 +#endif +#ifndef Py_TPFLAGS_MAPPING + #define Py_TPFLAGS_MAPPING 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #if PY_VERSION_HEX >= 0x030d00A4 + # define __Pyx_PyCFunctionFast PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords + #else + # define __Pyx_PyCFunctionFast _PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #endif +#endif +#if CYTHON_METH_FASTCALL + #define __Pyx_METH_FASTCALL METH_FASTCALL + #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast + #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords +#else + #define __Pyx_METH_FASTCALL METH_VARARGS + #define __Pyx_PyCFunction_FastCall PyCFunction + #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords +#endif +#if CYTHON_VECTORCALL + #define __pyx_vectorcallfunc vectorcallfunc + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET + #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) +#elif CYTHON_BACKPORT_VECTORCALL + typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, + size_t nargsf, PyObject *kwnames); + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) +#else + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) +#endif +#if PY_MAJOR_VERSION >= 0x030900B1 +#define 
__Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) +#else +#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) +#endif +#define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) +#elif !CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) +#endif +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) +static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { + return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? NULL : ((PyCFunctionObject*)func)->m_self; +} +#endif +static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) { +#if CYTHON_COMPILING_IN_LIMITED_API + return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; +#else + return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +#endif +} +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) +#if __PYX_LIMITED_VERSION_HEX < 0x030900B1 + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) + typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); +#else + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) + #define __Pyx_PyCMethod PyCMethod +#endif +#ifndef METH_METHOD + #define METH_METHOD 0x200 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyThreadState_Current PyThreadState_Get() +#elif !CYTHON_FAST_THREAD_STATE + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) +{ + void *result; + result = PyModule_GetState(op); + if (!result) + Py_FatalError("Couldn't find the module state"); + return result; +} +#endif +#define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) +#else + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = 
PyThread_create_key(); + return 0; +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif +#if PY_MAJOR_VERSION < 3 + #if CYTHON_COMPILING_IN_PYPY + #if PYPY_VERSION_NUM < 0x07030600 + #if defined(__cplusplus) && __cplusplus >= 201402L + [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] + #elif defined(__GNUC__) || defined(__clang__) + __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) + #elif defined(_MSC_VER) + __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) + #endif + static CYTHON_INLINE int PyGILState_Check(void) { + return 0; + } + #else // PYPY_VERSION_NUM < 0x07030600 + #endif // PYPY_VERSION_NUM < 0x07030600 + #else + static CYTHON_INLINE int PyGILState_Check(void) { + PyThreadState * tstate = _PyThreadState_Current; + return tstate && (tstate == PyGILState_GetThisThreadState()); + } + #endif +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { + PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); + if (res == NULL) PyErr_Clear(); + return res; +} +#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) +#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#else +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { +#if CYTHON_COMPILING_IN_PYPY + return PyDict_GetItem(dict, name); +#else + PyDictEntry *ep; + PyDictObject *mp = (PyDictObject*) dict; + long hash = ((PyStringObject *) name)->ob_shash; + assert(hash != -1); + ep = (mp->ma_lookup)(mp, name, hash); + if (ep == NULL) { + return NULL; + } + return ep->me_value; +#endif +} +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#endif +#if CYTHON_USE_TYPE_SLOTS + #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) + #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) + #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) +#else + #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) + #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) + #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) +#else + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) +#endif +#if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 +#define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ + PyTypeObject *type = Py_TYPE((PyObject*)obj);\ + assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE));\ + PyObject_GC_Del(obj);\ + Py_DECREF(type);\ +} +#else +#define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) + #define __Pyx_PyUnicode_DATA(u) ((void*)u) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) +#elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_READY(op) (0) + #else + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #endif + #define __Pyx_PyUnicode_GET_LENGTH(u) 
PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) + #else + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) + #endif + #endif +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535U : 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #if !defined(PyUnicode_DecodeUnicodeEscape) + #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) + #endif + #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) + #undef PyUnicode_Contains + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) + #endif + #if !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) + #endif + #if !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) + #endif +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#ifndef PyObject_Unicode + #define PyObject_Unicode PyObject_Str +#endif +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#if CYTHON_COMPILING_IN_CPYTHON + #define __Pyx_PySequence_ListKeepNew(obj)\ + (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? __Pyx_NewRef(obj) : PySequence_List(obj)) +#else + #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) +#endif +#if PY_VERSION_HEX >= 0x030900A4 + #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) +#else + #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) +#else + #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) + #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) +#endif +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) +#else + static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { + PyObject *module = PyImport_AddModule(name); + Py_XINCREF(module); + return module; + } +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define __Pyx_Py3Int_Check(op) PyLong_Check(op) + #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define 
PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#else + #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) + #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) + #if !defined(_USE_MATH_DEFINES) + #define _USE_MATH_DEFINES + #endif +#endif +#include <math.h> +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + +#define __PYX_MARK_ERR_POS(f_index, lineno) \ + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } +#define __PYX_ERR(f_index, lineno, Ln_error) \ + { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } + +#ifdef CYTHON_EXTERN_C + #undef __PYX_EXTERN_C + #define __PYX_EXTERN_C CYTHON_EXTERN_C +#elif defined(__PYX_EXTERN_C) + #ifdef _MSC_VER + #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") + #else + #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.
+ #endif +#else + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__sqlalchemy__sql___util_cy +#define __PYX_HAVE_API__sqlalchemy__sql___util_cy +/* Early includes */ +#ifdef _OPENMP +#include <omp.h> +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include <cstdlib> + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ?
-value : value) +#endif +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s); +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char*); +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +#define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_VERSION_HEX >= 0x030C00A7 + #ifndef _PyLong_SIGN_MASK + #define _PyLong_SIGN_MASK 3 + #endif + #ifndef _PyLong_NON_SIZE_BITS + #define _PyLong_NON_SIZE_BITS 3 + #endif + #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) + #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) + #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) + #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) + #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_SignedDigitCount(x)\ + ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) + #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) + #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) + #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) + #else + #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) + #endif + typedef Py_ssize_t __Pyx_compact_pylong; + typedef size_t __Pyx_compact_upylong; + #else + #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) + #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) + #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) + #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) + #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) + #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) + #define __Pyx_PyLong_CompactValue(x)\ + ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? 
-(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0])) + typedef sdigit __Pyx_compact_pylong; + typedef digit __Pyx_compact_upylong; + #endif + #if PY_VERSION_HEX >= 0x030C00A5 + #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit) + #else + #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit) + #endif +#endif +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +#include <string.h> +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = (char) c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#include <string.h> +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +#if !CYTHON_USE_MODULE_STATE
+static PyObject *__pyx_m = NULL; +#endif +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm = __FILE__; +static const char *__pyx_filename; + +/* #### Code section: filename_table ### */ + +static const char *__pyx_f[] = { + "", + "lib/sqlalchemy/sql/_util_cy.py", +}; +/* #### Code section: utility_code_proto_before_types ### */ +/* ForceInitThreads.proto */ +#ifndef __PYX_FORCE_INIT_THREADS + #define __PYX_FORCE_INIT_THREADS 0 +#endif + +/* #### Code section: numeric_typedefs ### */ +/* #### Code section: complex_type_declarations ### */ +/* #### Code section: type_declarations ### */ + +/*--- Type declarations ---*/ +struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map; +struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map; + +/* "sqlalchemy/sql/_util_cy.py":45 + * + * @cython.cclass + * class prefix_anon_map(Dict[str, str]): # <<<<<<<<<<<<<< + * """A map that creates new keys for missing key access. + * + */ +struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map { + PyDictObject __pyx_base; +}; + + +/* "sqlalchemy/sql/_util_cy.py":72 + * + * @cython.cclass + * class anon_map( # <<<<<<<<<<<<<< + * Dict[ + * Union[int, str, "Literal[CacheConst.NO_CACHE]"], + */ +struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map { + PyDictObject __pyx_base; + struct __pyx_vtabstruct_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_vtab; + unsigned int _index; +}; + + + +struct __pyx_vtabstruct_10sqlalchemy_3sql_8_util_cy_anon_map { + PyObject *(*_add_missing)(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *, PyObject *); +}; +static struct __pyx_vtabstruct_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_vtabptr_10sqlalchemy_3sql_8_util_cy_anon_map; +static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *, PyObject *); +/* #### Code section: utility_code_proto ### */ + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, Py_ssize_t); + void (*DECREF)(void*, PyObject*, Py_ssize_t); + void (*GOTREF)(void*, PyObject*, Py_ssize_t); + void (*GIVEREF)(void*, PyObject*, Py_ssize_t); + void* (*SetupContext)(const char*, Py_ssize_t, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + } + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)) + #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext() +#endif + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + 
__Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContextNogil() + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_Py_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; Py_XDECREF(tmp);\ + } while (0) +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#if PY_VERSION_HEX >= 0x030C00A6 +#define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) +#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? 
(PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL) +#else +#define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL) +#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type) +#endif +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL) +#define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6 +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* PyObjectGetAttrStrNoError.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* TupleAndListFromArray.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n); +static CYTHON_INLINE PyObject* __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n); +#endif + +/* IncludeStringH.proto */ +#include <string.h> + +/* BytesEquals.proto */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); + +/* UnicodeEquals.proto */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); + +/* fastcall.proto */ +#if CYTHON_AVOID_BORROWED_REFS + #define __Pyx_Arg_VARARGS(args, i) PySequence_GetItem(args, i) +#elif CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i) +#else + #define __Pyx_Arg_VARARGS(args, i) PyTuple_GetItem(args, i) +#endif +#if CYTHON_AVOID_BORROWED_REFS + #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) + #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) +#else + #define __Pyx_Arg_NewRef_VARARGS(arg) arg + #define
__Pyx_Arg_XDECREF_VARARGS(arg) +#endif +#define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) +#define __Pyx_KwValues_VARARGS(args, nargs) NULL +#define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s) +#define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw) +#if CYTHON_METH_FASTCALL + #define __Pyx_Arg_FASTCALL(args, i) args[i] + #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) + #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) + static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 + CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); + #else + #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) + #endif + #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs + to have the same reference counting */ + #define __Pyx_Arg_XDECREF_FASTCALL(arg) +#else + #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS + #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS + #define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS + #define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS + #define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS + #define __Pyx_Arg_NewRef_FASTCALL(arg) __Pyx_Arg_NewRef_VARARGS(arg) + #define __Pyx_Arg_XDECREF_FASTCALL(arg) __Pyx_Arg_XDECREF_VARARGS(arg) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS +#define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start) +#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start) +#else +#define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop) +#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop) +#endif + +/* ArgTypeTest.proto */ +#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ + ((likely(__Pyx_IS_TYPE(obj, type) | (none_allowed && (obj == Py_None)))) ? 1 :\ + __Pyx__ArgTypeTest(obj, type, name, exact)) +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* RaiseUnexpectedTypeError.proto */ +static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); + +/* dict_getitem_default.proto */ +static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value); + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* UnpackUnboundCMethod.proto */ +typedef struct { + PyObject *type; + PyObject **method_name; + PyCFunction func; + PyObject *method; + int flag; +} __Pyx_CachedCFunction; + +/* CallUnboundCMethod1.proto */ +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#else +#define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg) +#endif + +/* CallUnboundCMethod2.proto */ +static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 +static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2); +#else +#define __Pyx_CallUnboundCMethod2(cfunc, self, arg1, arg2) __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2) +#endif + +/* PyUnicode_Unicode.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Unicode(PyObject *obj); + +/* PyObjectFormatSimple.proto */ +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ + PyObject_Format(s, f)) +#elif PY_MAJOR_VERSION < 3 + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ + likely(PyString_CheckExact(s)) ? PyUnicode_FromEncodedObject(s, NULL, "strict") :\ + PyObject_Format(s, f)) +#elif CYTHON_USE_TYPE_SLOTS + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ + likely(PyLong_CheckExact(s)) ? PyLong_Type.tp_repr(s) :\ + likely(PyFloat_CheckExact(s)) ? 
PyFloat_Type.tp_repr(s) :\ + PyObject_Format(s, f)) +#else + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ + PyObject_Format(s, f)) +#endif + +/* JoinPyUnicode.proto */ +static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, + Py_UCS4 max_char); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* KeywordStringCheck.proto */ +static int __Pyx_CheckKeywordStrings(PyObject *kw, const char* function_name, int kw_allowed); + +/* GetAttr3.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); + +/* PyDictVersioning.proto */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) do {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +#define __Pyx_GetModuleGlobalNameUncached(var, name) do {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, + const char* function_name); + +/* PyDictContains.proto */ +static CYTHON_INLINE int __Pyx_PyDict_ContainsTF(PyObject* item, PyObject* dict, int eq) { + int result = PyDict_Contains(dict, item); + return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); +} + +/* DictGetItem.proto */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); +#define __Pyx_PyObject_Dict_GetItem(obj, name)\ + (likely(PyDict_CheckExact(obj)) ?\ + __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) +#else +#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) +#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) +#endif + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* PySequenceContains.proto */ +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#if !CYTHON_VECTORCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif +#if !CYTHON_VECTORCALL +#if PY_VERSION_HEX >= 0x03080000 + #include "frameobject.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif + #define __Pxy_PyFrame_Initialize_Offsets() + #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) +#else + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif +#endif +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectFastCall.proto */ +#define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); + +/* GetAttr.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); + +/* HasAttr.proto */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); + +/* IncludeStructmemberH.proto */ +#include <structmember.h> + +/* FixUpExtensionType.proto */ +#if CYTHON_USE_TYPE_SPECS +static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type); +#endif + +/* FormatTypeName.proto */ +#if CYTHON_COMPILING_IN_LIMITED_API +typedef PyObject *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%U" +static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp); +#define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) +#else +typedef const char *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%.200s" +#define __Pyx_PyType_GetName(tp) ((tp)->tp_name) +#define __Pyx_DECREF_TypeName(obj) +#endif + +/* ValidateExternBase.proto */ +static int __Pyx_validate_extern_base(PyTypeObject *base); + +/* PyObjectCallNoArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* PyObjectGetMethod.proto */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); + +/* PyObjectCallMethod0.proto */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); + +/* ValidateBasesTuple.proto */ +#if
CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS +static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases); +#endif + +/* PyType_Ready.proto */ +CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t); + +/* PyObject_GenericGetAttrNoDict.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr +#endif + +/* PyObject_GenericGetAttr.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr +#endif + +/* SetupReduce.proto */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_setup_reduce(PyObject* type_obj); +#endif + +/* SetVTable.proto */ +static int __Pyx_SetVtable(PyTypeObject* typeptr , void* vtable); + +/* GetVTable.proto */ +static void* __Pyx_GetVtable(PyTypeObject *type); + +/* MergeVTables.proto */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_MergeVtables(PyTypeObject *type); +#endif + +/* FetchSharedCythonModule.proto */ +static PyObject *__Pyx_FetchSharedCythonABIModule(void); + +/* FetchCommonType.proto */ +#if !CYTHON_USE_TYPE_SPECS +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); +#else +static PyTypeObject* __Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases); +#endif + +/* PyMethodNew.proto */ +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { + PyObject *typesModule=NULL, *methodType=NULL, *result=NULL; + CYTHON_UNUSED_VAR(typ); + if (!self) + return __Pyx_NewRef(func); + typesModule = PyImport_ImportModule("types"); + if (!typesModule) return NULL; + methodType = PyObject_GetAttrString(typesModule, "MethodType"); + Py_DECREF(typesModule); + if (!methodType) return NULL; + result = PyObject_CallFunctionObjArgs(methodType, func, self, NULL); + Py_DECREF(methodType); + return result; +} +#elif PY_MAJOR_VERSION >= 3 +static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { + CYTHON_UNUSED_VAR(typ); + if (!self) + return __Pyx_NewRef(func); + return PyMethod_New(func, self); +} +#else + #define __Pyx_PyMethod_New PyMethod_New +#endif + +/* PyVectorcallFastCallDict.proto */ +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw); +#endif + +/* CythonFunctionShared.proto */ +#define __Pyx_CyFunction_USED +#define __Pyx_CYFUNCTION_STATICMETHOD 0x01 +#define __Pyx_CYFUNCTION_CLASSMETHOD 0x02 +#define __Pyx_CYFUNCTION_CCLASS 0x04 +#define __Pyx_CYFUNCTION_COROUTINE 0x08 +#define __Pyx_CyFunction_GetClosure(f)\ + (((__pyx_CyFunctionObject *) (f))->func_closure) +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_CyFunction_GetClassObj(f)\ + (((__pyx_CyFunctionObject *) (f))->func_classobj) +#else + #define __Pyx_CyFunction_GetClassObj(f)\ + ((PyObject*) ((PyCMethodObject *) (f))->mm_class) +#endif +#define __Pyx_CyFunction_SetClassObj(f, classobj)\ + __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj)) +#define __Pyx_CyFunction_Defaults(type, f)\ + ((type 
*)(((__pyx_CyFunctionObject *) (f))->defaults)) +#define __Pyx_CyFunction_SetDefaultsGetter(f, g)\ + ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g) +typedef struct { +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject_HEAD + PyObject *func; +#elif PY_VERSION_HEX < 0x030900B1 + PyCFunctionObject func; +#else + PyCMethodObject func; +#endif +#if CYTHON_BACKPORT_VECTORCALL + __pyx_vectorcallfunc func_vectorcall; +#endif +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API + PyObject *func_weakreflist; +#endif + PyObject *func_dict; + PyObject *func_name; + PyObject *func_qualname; + PyObject *func_doc; + PyObject *func_globals; + PyObject *func_code; + PyObject *func_closure; +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + PyObject *func_classobj; +#endif + void *defaults; + int defaults_pyobjects; + size_t defaults_size; + int flags; + PyObject *defaults_tuple; + PyObject *defaults_kwdict; + PyObject *(*defaults_getter)(PyObject *); + PyObject *func_annotations; + PyObject *func_is_coroutine; +} __pyx_CyFunctionObject; +#undef __Pyx_CyOrPyCFunction_Check +#define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType) +#define __Pyx_CyOrPyCFunction_Check(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type) +#define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType) +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc); +#undef __Pyx_IsSameCFunction +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCyOrCFunction(func, cfunc) +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj); +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, + size_t size, + int pyobjects); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, + PyObject *tuple); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, + PyObject *dict); +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, + PyObject *dict); +static int __pyx_CyFunction_init(PyObject *module); +#if CYTHON_METH_FASTCALL +static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +#if CYTHON_BACKPORT_VECTORCALL +#define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall) +#else +#define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall) +#endif +#endif + +/* CythonFunction.proto */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +#if !CYTHON_COMPILING_IN_LIMITED_API +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); +#endif + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* GCCDiagnostics.proto */ +#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#define __Pyx_HAS_GCC_DIAGNOSTIC +#endif + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_PY_LONG_LONG(unsigned PY_LONG_LONG value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static unsigned long __Pyx_get_runtime_version(void); +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer); + +/* FunctionImport.proto */ +static int __Pyx_ImportFunction_3_0_11(PyObject *module, const char *funcname, void (**f)(void), const char *sig); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +/* #### Code section: module_declarations ### */ +static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key); /* proto*/ + +/* Module declarations from 
"cython" */ + +/* Module declarations from "sqlalchemy.util._collections_cy" */ +static unsigned PY_LONG_LONG (*__pyx_f_10sqlalchemy_4util_15_collections_cy__get_id)(PyObject *); /*proto*/ + +/* Module declarations from "sqlalchemy.sql._util_cy" */ +static PyObject *__pyx_f_10sqlalchemy_3sql_8_util_cy___pyx_unpickle_prefix_anon_map__set_state(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *, PyObject *); /*proto*/ +/* #### Code section: typeinfo ### */ +/* #### Code section: before_global_var ### */ +#define __Pyx_MODULE_NAME "sqlalchemy.sql._util_cy" +extern int __pyx_module_is_main_sqlalchemy__sql___util_cy; +int __pyx_module_is_main_sqlalchemy__sql___util_cy = 0; + +/* Implementation of "sqlalchemy.sql._util_cy" */ +/* #### Code section: global_var ### */ +static PyObject *__pyx_builtin_TypeError; +/* #### Code section: string_decls ### */ +static const char __pyx_k_[] = " "; +static const char __pyx_k__2[] = "_"; +static const char __pyx_k__4[] = "."; +static const char __pyx_k__5[] = "?"; +static const char __pyx_k_gc[] = "gc"; +static const char __pyx_k_get[] = "get"; +static const char __pyx_k_int[] = "int"; +static const char __pyx_k_key[] = "key"; +static const char __pyx_k_new[] = "__new__"; +static const char __pyx_k_obj[] = "obj"; +static const char __pyx_k_str[] = "str"; +static const char __pyx_k_Dict[] = "Dict"; +static const char __pyx_k_None[] = "None"; +static const char __pyx_k_bool[] = "bool"; +static const char __pyx_k_dict[] = "__dict__"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_self[] = "self"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_Tuple[] = "Tuple"; +static const char __pyx_k_Union[] = "Union"; +static const char __pyx_k_state[] = "state"; +static const char __pyx_k_value[] = "value"; +static const char __pyx_k_dict_2[] = "_dict"; +static const char __pyx_k_enable[] = "enable"; +static const char __pyx_k_idself[] = "idself"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_object[] = "object"; +static const char __pyx_k_pickle[] = "pickle"; +static const char __pyx_k_reduce[] = "__reduce__"; +static const char __pyx_k_return[] = "return"; +static const char __pyx_k_typing[] = "typing"; +static const char __pyx_k_update[] = "update"; +static const char __pyx_k_Literal[] = "Literal"; +static const char __pyx_k_derived[] = "derived"; +static const char __pyx_k_disable[] = "disable"; +static const char __pyx_k_missing[] = "__missing__"; +static const char __pyx_k_anon_map[] = "anon_map"; +static const char __pyx_k_get_anon[] = "get_anon"; +static const char __pyx_k_getstate[] = "__getstate__"; +static const char __pyx_k_pyx_type[] = "__pyx_type"; +static const char __pyx_k_setstate[] = "__setstate__"; +static const char __pyx_k_TypeError[] = "TypeError"; +static const char __pyx_k_cache_key[] = "cache_key"; +static const char __pyx_k_isenabled[] = "isenabled"; +static const char __pyx_k_pyx_state[] = "__pyx_state"; +static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; +static const char __pyx_k_self_dict[] = "self_dict"; +static const char __pyx_k_CacheConst[] = "CacheConst"; +static const char __pyx_k_pyx_result[] = "__pyx_result"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_PickleError[] = "PickleError"; +static const char __pyx_k_is_compiled[] = "_is_compiled"; +static const char __pyx_k_util_typing[] = "util.typing"; +static const char __pyx_k_is_coroutine[] = 
"_is_coroutine"; +static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; +static const char __pyx_k_stringsource[] = ""; +static const char __pyx_k_use_setstate[] = "use_setstate"; +static const char __pyx_k_TYPE_CHECKING[] = "TYPE_CHECKING"; +static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; +static const char __pyx_k_Tuple_int_bool[] = "Tuple[int, bool]"; +static const char __pyx_k_prefix_anon_map[] = "prefix_anon_map"; +static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; +static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char __pyx_k_anon_map_get_anon[] = "anon_map.get_anon"; +static const char __pyx_k_anonymous_counter[] = "anonymous_counter"; +static const char __pyx_k_anon_map___missing[] = "anon_map.__missing__"; +static const char __pyx_k_asyncio_coroutines[] = "asyncio.coroutines"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_sqlalchemy_sql__util_cy[] = "sqlalchemy.sql._util_cy"; +static const char __pyx_k_anon_map___reduce_cython[] = "anon_map.__reduce_cython__"; +static const char __pyx_k_prefix_anon_map___missing[] = "prefix_anon_map.__missing__"; +static const char __pyx_k_anon_map___setstate_cython[] = "anon_map.__setstate_cython__"; +static const char __pyx_k_pyx_unpickle_prefix_anon_map[] = "__pyx_unpickle_prefix_anon_map"; +static const char __pyx_k_lib_sqlalchemy_sql__util_cy_py[] = "lib/sqlalchemy/sql/_util_cy.py"; +static const char __pyx_k_prefix_anon_map___reduce_cython[] = "prefix_anon_map.__reduce_cython__"; +static const char __pyx_k_Incompatible_checksums_0x_x_vs_0[] = "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())"; +static const char __pyx_k_Union_int_str_Literal_CacheConst[] = "Union[int, str, 'Literal[CacheConst.NO_CACHE]']"; +static const char __pyx_k_no_default___reduce___due_to_non[] = "no default __reduce__ due to non-trivial __cinit__"; +static const char __pyx_k_prefix_anon_map___setstate_cytho[] = "prefix_anon_map.__setstate_cython__"; +/* #### Code section: decls ### */ +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map___missing__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self, PyObject *__pyx_v_key); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_2__reduce_cython__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_4__setstate_cython__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static int __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map___cinit__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_2get_anon(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_obj); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_4__getitem__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_6__missing__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key); /* proto */ +static PyObject 
*__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_8__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_10__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_2__pyx_unpickle_prefix_anon_map(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_tp_new_10sqlalchemy_3sql_8_util_cy_anon_map(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_get = {0, 0, 0, 0, 0}; +/* #### Code section: late_includes ### */ +/* #### Code section: module_state ### */ +typedef struct { + PyObject *__pyx_d; + PyObject *__pyx_b; + PyObject *__pyx_cython_runtime; + PyObject *__pyx_empty_tuple; + PyObject *__pyx_empty_bytes; + PyObject *__pyx_empty_unicode; + #ifdef __Pyx_CyFunction_USED + PyTypeObject *__pyx_CyFunctionType; + #endif + #ifdef __Pyx_FusedFunction_USED + PyTypeObject *__pyx_FusedFunctionType; + #endif + #ifdef __Pyx_Generator_USED + PyTypeObject *__pyx_GeneratorType; + #endif + #ifdef __Pyx_IterableCoroutine_USED + PyTypeObject *__pyx_IterableCoroutineType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineAwaitType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineType; + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + PyObject *__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map; + PyObject *__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map; + #endif + PyTypeObject *__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map; + PyTypeObject *__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map; + PyObject *__pyx_kp_u_; + PyObject *__pyx_n_s_CacheConst; + PyObject *__pyx_n_s_Dict; + PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0; + PyObject *__pyx_n_s_Literal; + PyObject *__pyx_kp_u_None; + PyObject *__pyx_n_s_PickleError; + PyObject *__pyx_n_s_TYPE_CHECKING; + PyObject *__pyx_n_s_Tuple; + PyObject *__pyx_kp_s_Tuple_int_bool; + PyObject *__pyx_n_s_TypeError; + PyObject *__pyx_n_s_Union; + PyObject *__pyx_kp_s_Union_int_str_Literal_CacheConst; + PyObject *__pyx_n_u__2; + PyObject *__pyx_kp_u__4; + PyObject *__pyx_n_s__5; + PyObject *__pyx_n_s_anon_map; + PyObject *__pyx_n_s_anon_map___missing; + PyObject *__pyx_n_s_anon_map___reduce_cython; + PyObject *__pyx_n_s_anon_map___setstate_cython; + PyObject *__pyx_n_s_anon_map_get_anon; + PyObject *__pyx_n_s_anonymous_counter; + PyObject *__pyx_n_s_asyncio_coroutines; + PyObject *__pyx_n_s_bool; + PyObject *__pyx_n_s_cache_key; + PyObject *__pyx_n_s_cline_in_traceback; + PyObject *__pyx_n_s_derived; + PyObject *__pyx_n_s_dict; + PyObject *__pyx_n_s_dict_2; + PyObject *__pyx_kp_u_disable; + PyObject *__pyx_kp_u_enable; + PyObject *__pyx_kp_u_gc; + PyObject *__pyx_n_s_get; + PyObject *__pyx_n_s_get_anon; + PyObject *__pyx_n_s_getstate; + PyObject *__pyx_n_s_idself; + PyObject *__pyx_n_s_import; + PyObject *__pyx_n_s_int; + PyObject *__pyx_n_s_is_compiled; + PyObject *__pyx_n_s_is_coroutine; + PyObject *__pyx_kp_u_isenabled; + PyObject *__pyx_n_s_key; + PyObject *__pyx_kp_s_lib_sqlalchemy_sql__util_cy_py; + PyObject *__pyx_n_s_main; + PyObject *__pyx_n_s_missing; + PyObject *__pyx_n_s_name; + PyObject 
*__pyx_n_s_new; + PyObject *__pyx_kp_s_no_default___reduce___due_to_non; + PyObject *__pyx_n_s_obj; + PyObject *__pyx_n_s_object; + PyObject *__pyx_n_s_pickle; + PyObject *__pyx_n_s_prefix_anon_map; + PyObject *__pyx_n_s_prefix_anon_map___missing; + PyObject *__pyx_n_s_prefix_anon_map___reduce_cython; + PyObject *__pyx_n_s_prefix_anon_map___setstate_cytho; + PyObject *__pyx_n_s_pyx_PickleError; + PyObject *__pyx_n_s_pyx_checksum; + PyObject *__pyx_n_s_pyx_result; + PyObject *__pyx_n_s_pyx_state; + PyObject *__pyx_n_s_pyx_type; + PyObject *__pyx_n_s_pyx_unpickle_prefix_anon_map; + PyObject *__pyx_n_s_pyx_vtable; + PyObject *__pyx_n_s_reduce; + PyObject *__pyx_n_s_reduce_cython; + PyObject *__pyx_n_s_reduce_ex; + PyObject *__pyx_n_s_return; + PyObject *__pyx_n_s_self; + PyObject *__pyx_n_s_self_dict; + PyObject *__pyx_n_s_setstate; + PyObject *__pyx_n_s_setstate_cython; + PyObject *__pyx_n_s_sqlalchemy_sql__util_cy; + PyObject *__pyx_n_s_state; + PyObject *__pyx_n_s_str; + PyObject *__pyx_kp_s_stringsource; + PyObject *__pyx_n_s_test; + PyObject *__pyx_n_s_typing; + PyObject *__pyx_n_s_update; + PyObject *__pyx_n_s_use_setstate; + PyObject *__pyx_n_s_util_typing; + PyObject *__pyx_n_s_value; + PyObject *__pyx_int_1; + PyObject *__pyx_int_222419149; + PyObject *__pyx_int_228825662; + PyObject *__pyx_int_238750788; + PyObject *__pyx_tuple__3; + PyObject *__pyx_tuple__7; + PyObject *__pyx_tuple__9; + PyObject *__pyx_tuple__11; + PyObject *__pyx_tuple__13; + PyObject *__pyx_tuple__15; + PyObject *__pyx_tuple__17; + PyObject *__pyx_tuple__20; + PyObject *__pyx_codeobj__6; + PyObject *__pyx_codeobj__8; + PyObject *__pyx_codeobj__10; + PyObject *__pyx_codeobj__12; + PyObject *__pyx_codeobj__14; + PyObject *__pyx_codeobj__16; + PyObject *__pyx_codeobj__18; + PyObject *__pyx_codeobj__19; + PyObject *__pyx_codeobj__21; +} __pyx_mstate; + +#if CYTHON_USE_MODULE_STATE +#ifdef __cplusplus +namespace { + extern struct PyModuleDef __pyx_moduledef; +} /* anonymous namespace */ +#else +static struct PyModuleDef __pyx_moduledef; +#endif + +#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) + +#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) + +#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) +#else +static __pyx_mstate __pyx_mstate_global_static = +#ifdef __cplusplus + {}; +#else + {0}; +#endif +static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; +#endif +/* #### Code section: module_state_clear ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_clear(PyObject *m) { + __pyx_mstate *clear_module_state = __pyx_mstate(m); + if (!clear_module_state) return 0; + Py_CLEAR(clear_module_state->__pyx_d); + Py_CLEAR(clear_module_state->__pyx_b); + Py_CLEAR(clear_module_state->__pyx_cython_runtime); + Py_CLEAR(clear_module_state->__pyx_empty_tuple); + Py_CLEAR(clear_module_state->__pyx_empty_bytes); + Py_CLEAR(clear_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_CLEAR(clear_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); + #endif + Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map); + Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map); + Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map); + Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map); + Py_CLEAR(clear_module_state->__pyx_kp_u_); + 
Py_CLEAR(clear_module_state->__pyx_n_s_CacheConst); + Py_CLEAR(clear_module_state->__pyx_n_s_Dict); + Py_CLEAR(clear_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); + Py_CLEAR(clear_module_state->__pyx_n_s_Literal); + Py_CLEAR(clear_module_state->__pyx_kp_u_None); + Py_CLEAR(clear_module_state->__pyx_n_s_PickleError); + Py_CLEAR(clear_module_state->__pyx_n_s_TYPE_CHECKING); + Py_CLEAR(clear_module_state->__pyx_n_s_Tuple); + Py_CLEAR(clear_module_state->__pyx_kp_s_Tuple_int_bool); + Py_CLEAR(clear_module_state->__pyx_n_s_TypeError); + Py_CLEAR(clear_module_state->__pyx_n_s_Union); + Py_CLEAR(clear_module_state->__pyx_kp_s_Union_int_str_Literal_CacheConst); + Py_CLEAR(clear_module_state->__pyx_n_u__2); + Py_CLEAR(clear_module_state->__pyx_kp_u__4); + Py_CLEAR(clear_module_state->__pyx_n_s__5); + Py_CLEAR(clear_module_state->__pyx_n_s_anon_map); + Py_CLEAR(clear_module_state->__pyx_n_s_anon_map___missing); + Py_CLEAR(clear_module_state->__pyx_n_s_anon_map___reduce_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_anon_map___setstate_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_anon_map_get_anon); + Py_CLEAR(clear_module_state->__pyx_n_s_anonymous_counter); + Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); + Py_CLEAR(clear_module_state->__pyx_n_s_bool); + Py_CLEAR(clear_module_state->__pyx_n_s_cache_key); + Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); + Py_CLEAR(clear_module_state->__pyx_n_s_derived); + Py_CLEAR(clear_module_state->__pyx_n_s_dict); + Py_CLEAR(clear_module_state->__pyx_n_s_dict_2); + Py_CLEAR(clear_module_state->__pyx_kp_u_disable); + Py_CLEAR(clear_module_state->__pyx_kp_u_enable); + Py_CLEAR(clear_module_state->__pyx_kp_u_gc); + Py_CLEAR(clear_module_state->__pyx_n_s_get); + Py_CLEAR(clear_module_state->__pyx_n_s_get_anon); + Py_CLEAR(clear_module_state->__pyx_n_s_getstate); + Py_CLEAR(clear_module_state->__pyx_n_s_idself); + Py_CLEAR(clear_module_state->__pyx_n_s_import); + Py_CLEAR(clear_module_state->__pyx_n_s_int); + Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); + Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); + Py_CLEAR(clear_module_state->__pyx_kp_u_isenabled); + Py_CLEAR(clear_module_state->__pyx_n_s_key); + Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_sql__util_cy_py); + Py_CLEAR(clear_module_state->__pyx_n_s_main); + Py_CLEAR(clear_module_state->__pyx_n_s_missing); + Py_CLEAR(clear_module_state->__pyx_n_s_name); + Py_CLEAR(clear_module_state->__pyx_n_s_new); + Py_CLEAR(clear_module_state->__pyx_kp_s_no_default___reduce___due_to_non); + Py_CLEAR(clear_module_state->__pyx_n_s_obj); + Py_CLEAR(clear_module_state->__pyx_n_s_object); + Py_CLEAR(clear_module_state->__pyx_n_s_pickle); + Py_CLEAR(clear_module_state->__pyx_n_s_prefix_anon_map); + Py_CLEAR(clear_module_state->__pyx_n_s_prefix_anon_map___missing); + Py_CLEAR(clear_module_state->__pyx_n_s_prefix_anon_map___reduce_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_prefix_anon_map___setstate_cytho); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_PickleError); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_checksum); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_result); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_state); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_type); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_unpickle_prefix_anon_map); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_vtable); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce_ex); 
+ Py_CLEAR(clear_module_state->__pyx_n_s_return); + Py_CLEAR(clear_module_state->__pyx_n_s_self); + Py_CLEAR(clear_module_state->__pyx_n_s_self_dict); + Py_CLEAR(clear_module_state->__pyx_n_s_setstate); + Py_CLEAR(clear_module_state->__pyx_n_s_setstate_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_sql__util_cy); + Py_CLEAR(clear_module_state->__pyx_n_s_state); + Py_CLEAR(clear_module_state->__pyx_n_s_str); + Py_CLEAR(clear_module_state->__pyx_kp_s_stringsource); + Py_CLEAR(clear_module_state->__pyx_n_s_test); + Py_CLEAR(clear_module_state->__pyx_n_s_typing); + Py_CLEAR(clear_module_state->__pyx_n_s_update); + Py_CLEAR(clear_module_state->__pyx_n_s_use_setstate); + Py_CLEAR(clear_module_state->__pyx_n_s_util_typing); + Py_CLEAR(clear_module_state->__pyx_n_s_value); + Py_CLEAR(clear_module_state->__pyx_int_1); + Py_CLEAR(clear_module_state->__pyx_int_222419149); + Py_CLEAR(clear_module_state->__pyx_int_228825662); + Py_CLEAR(clear_module_state->__pyx_int_238750788); + Py_CLEAR(clear_module_state->__pyx_tuple__3); + Py_CLEAR(clear_module_state->__pyx_tuple__7); + Py_CLEAR(clear_module_state->__pyx_tuple__9); + Py_CLEAR(clear_module_state->__pyx_tuple__11); + Py_CLEAR(clear_module_state->__pyx_tuple__13); + Py_CLEAR(clear_module_state->__pyx_tuple__15); + Py_CLEAR(clear_module_state->__pyx_tuple__17); + Py_CLEAR(clear_module_state->__pyx_tuple__20); + Py_CLEAR(clear_module_state->__pyx_codeobj__6); + Py_CLEAR(clear_module_state->__pyx_codeobj__8); + Py_CLEAR(clear_module_state->__pyx_codeobj__10); + Py_CLEAR(clear_module_state->__pyx_codeobj__12); + Py_CLEAR(clear_module_state->__pyx_codeobj__14); + Py_CLEAR(clear_module_state->__pyx_codeobj__16); + Py_CLEAR(clear_module_state->__pyx_codeobj__18); + Py_CLEAR(clear_module_state->__pyx_codeobj__19); + Py_CLEAR(clear_module_state->__pyx_codeobj__21); + return 0; +} +#endif +/* #### Code section: module_state_traverse ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { + __pyx_mstate *traverse_module_state = __pyx_mstate(m); + if (!traverse_module_state) return 0; + Py_VISIT(traverse_module_state->__pyx_d); + Py_VISIT(traverse_module_state->__pyx_b); + Py_VISIT(traverse_module_state->__pyx_cython_runtime); + Py_VISIT(traverse_module_state->__pyx_empty_tuple); + Py_VISIT(traverse_module_state->__pyx_empty_bytes); + Py_VISIT(traverse_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_VISIT(traverse_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); + #endif + Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map); + Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map); + Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map); + Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map); + Py_VISIT(traverse_module_state->__pyx_kp_u_); + Py_VISIT(traverse_module_state->__pyx_n_s_CacheConst); + Py_VISIT(traverse_module_state->__pyx_n_s_Dict); + Py_VISIT(traverse_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); + Py_VISIT(traverse_module_state->__pyx_n_s_Literal); + Py_VISIT(traverse_module_state->__pyx_kp_u_None); + Py_VISIT(traverse_module_state->__pyx_n_s_PickleError); + Py_VISIT(traverse_module_state->__pyx_n_s_TYPE_CHECKING); + Py_VISIT(traverse_module_state->__pyx_n_s_Tuple); + Py_VISIT(traverse_module_state->__pyx_kp_s_Tuple_int_bool); + 
Py_VISIT(traverse_module_state->__pyx_n_s_TypeError); + Py_VISIT(traverse_module_state->__pyx_n_s_Union); + Py_VISIT(traverse_module_state->__pyx_kp_s_Union_int_str_Literal_CacheConst); + Py_VISIT(traverse_module_state->__pyx_n_u__2); + Py_VISIT(traverse_module_state->__pyx_kp_u__4); + Py_VISIT(traverse_module_state->__pyx_n_s__5); + Py_VISIT(traverse_module_state->__pyx_n_s_anon_map); + Py_VISIT(traverse_module_state->__pyx_n_s_anon_map___missing); + Py_VISIT(traverse_module_state->__pyx_n_s_anon_map___reduce_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_anon_map___setstate_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_anon_map_get_anon); + Py_VISIT(traverse_module_state->__pyx_n_s_anonymous_counter); + Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); + Py_VISIT(traverse_module_state->__pyx_n_s_bool); + Py_VISIT(traverse_module_state->__pyx_n_s_cache_key); + Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); + Py_VISIT(traverse_module_state->__pyx_n_s_derived); + Py_VISIT(traverse_module_state->__pyx_n_s_dict); + Py_VISIT(traverse_module_state->__pyx_n_s_dict_2); + Py_VISIT(traverse_module_state->__pyx_kp_u_disable); + Py_VISIT(traverse_module_state->__pyx_kp_u_enable); + Py_VISIT(traverse_module_state->__pyx_kp_u_gc); + Py_VISIT(traverse_module_state->__pyx_n_s_get); + Py_VISIT(traverse_module_state->__pyx_n_s_get_anon); + Py_VISIT(traverse_module_state->__pyx_n_s_getstate); + Py_VISIT(traverse_module_state->__pyx_n_s_idself); + Py_VISIT(traverse_module_state->__pyx_n_s_import); + Py_VISIT(traverse_module_state->__pyx_n_s_int); + Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); + Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); + Py_VISIT(traverse_module_state->__pyx_kp_u_isenabled); + Py_VISIT(traverse_module_state->__pyx_n_s_key); + Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_sql__util_cy_py); + Py_VISIT(traverse_module_state->__pyx_n_s_main); + Py_VISIT(traverse_module_state->__pyx_n_s_missing); + Py_VISIT(traverse_module_state->__pyx_n_s_name); + Py_VISIT(traverse_module_state->__pyx_n_s_new); + Py_VISIT(traverse_module_state->__pyx_kp_s_no_default___reduce___due_to_non); + Py_VISIT(traverse_module_state->__pyx_n_s_obj); + Py_VISIT(traverse_module_state->__pyx_n_s_object); + Py_VISIT(traverse_module_state->__pyx_n_s_pickle); + Py_VISIT(traverse_module_state->__pyx_n_s_prefix_anon_map); + Py_VISIT(traverse_module_state->__pyx_n_s_prefix_anon_map___missing); + Py_VISIT(traverse_module_state->__pyx_n_s_prefix_anon_map___reduce_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_prefix_anon_map___setstate_cytho); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_PickleError); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_checksum); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_result); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_state); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_type); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_unpickle_prefix_anon_map); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_vtable); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce_ex); + Py_VISIT(traverse_module_state->__pyx_n_s_return); + Py_VISIT(traverse_module_state->__pyx_n_s_self); + Py_VISIT(traverse_module_state->__pyx_n_s_self_dict); + Py_VISIT(traverse_module_state->__pyx_n_s_setstate); + Py_VISIT(traverse_module_state->__pyx_n_s_setstate_cython); + 
Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_sql__util_cy); + Py_VISIT(traverse_module_state->__pyx_n_s_state); + Py_VISIT(traverse_module_state->__pyx_n_s_str); + Py_VISIT(traverse_module_state->__pyx_kp_s_stringsource); + Py_VISIT(traverse_module_state->__pyx_n_s_test); + Py_VISIT(traverse_module_state->__pyx_n_s_typing); + Py_VISIT(traverse_module_state->__pyx_n_s_update); + Py_VISIT(traverse_module_state->__pyx_n_s_use_setstate); + Py_VISIT(traverse_module_state->__pyx_n_s_util_typing); + Py_VISIT(traverse_module_state->__pyx_n_s_value); + Py_VISIT(traverse_module_state->__pyx_int_1); + Py_VISIT(traverse_module_state->__pyx_int_222419149); + Py_VISIT(traverse_module_state->__pyx_int_228825662); + Py_VISIT(traverse_module_state->__pyx_int_238750788); + Py_VISIT(traverse_module_state->__pyx_tuple__3); + Py_VISIT(traverse_module_state->__pyx_tuple__7); + Py_VISIT(traverse_module_state->__pyx_tuple__9); + Py_VISIT(traverse_module_state->__pyx_tuple__11); + Py_VISIT(traverse_module_state->__pyx_tuple__13); + Py_VISIT(traverse_module_state->__pyx_tuple__15); + Py_VISIT(traverse_module_state->__pyx_tuple__17); + Py_VISIT(traverse_module_state->__pyx_tuple__20); + Py_VISIT(traverse_module_state->__pyx_codeobj__6); + Py_VISIT(traverse_module_state->__pyx_codeobj__8); + Py_VISIT(traverse_module_state->__pyx_codeobj__10); + Py_VISIT(traverse_module_state->__pyx_codeobj__12); + Py_VISIT(traverse_module_state->__pyx_codeobj__14); + Py_VISIT(traverse_module_state->__pyx_codeobj__16); + Py_VISIT(traverse_module_state->__pyx_codeobj__18); + Py_VISIT(traverse_module_state->__pyx_codeobj__19); + Py_VISIT(traverse_module_state->__pyx_codeobj__21); + return 0; +} +#endif +/* #### Code section: module_state_defines ### */ +#define __pyx_d __pyx_mstate_global->__pyx_d +#define __pyx_b __pyx_mstate_global->__pyx_b +#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime +#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple +#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes +#define __pyx_empty_unicode __pyx_mstate_global->__pyx_empty_unicode +#ifdef __Pyx_CyFunction_USED +#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType +#endif +#ifdef __Pyx_FusedFunction_USED +#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType +#endif +#ifdef __Pyx_Generator_USED +#define __pyx_GeneratorType __pyx_mstate_global->__pyx_GeneratorType +#endif +#ifdef __Pyx_IterableCoroutine_USED +#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#define __pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map __pyx_mstate_global->__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map +#define __pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map __pyx_mstate_global->__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map +#endif +#define __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map __pyx_mstate_global->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map +#define __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map __pyx_mstate_global->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map +#define __pyx_kp_u_ __pyx_mstate_global->__pyx_kp_u_ +#define __pyx_n_s_CacheConst 
__pyx_mstate_global->__pyx_n_s_CacheConst +#define __pyx_n_s_Dict __pyx_mstate_global->__pyx_n_s_Dict +#define __pyx_kp_s_Incompatible_checksums_0x_x_vs_0 __pyx_mstate_global->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0 +#define __pyx_n_s_Literal __pyx_mstate_global->__pyx_n_s_Literal +#define __pyx_kp_u_None __pyx_mstate_global->__pyx_kp_u_None +#define __pyx_n_s_PickleError __pyx_mstate_global->__pyx_n_s_PickleError +#define __pyx_n_s_TYPE_CHECKING __pyx_mstate_global->__pyx_n_s_TYPE_CHECKING +#define __pyx_n_s_Tuple __pyx_mstate_global->__pyx_n_s_Tuple +#define __pyx_kp_s_Tuple_int_bool __pyx_mstate_global->__pyx_kp_s_Tuple_int_bool +#define __pyx_n_s_TypeError __pyx_mstate_global->__pyx_n_s_TypeError +#define __pyx_n_s_Union __pyx_mstate_global->__pyx_n_s_Union +#define __pyx_kp_s_Union_int_str_Literal_CacheConst __pyx_mstate_global->__pyx_kp_s_Union_int_str_Literal_CacheConst +#define __pyx_n_u__2 __pyx_mstate_global->__pyx_n_u__2 +#define __pyx_kp_u__4 __pyx_mstate_global->__pyx_kp_u__4 +#define __pyx_n_s__5 __pyx_mstate_global->__pyx_n_s__5 +#define __pyx_n_s_anon_map __pyx_mstate_global->__pyx_n_s_anon_map +#define __pyx_n_s_anon_map___missing __pyx_mstate_global->__pyx_n_s_anon_map___missing +#define __pyx_n_s_anon_map___reduce_cython __pyx_mstate_global->__pyx_n_s_anon_map___reduce_cython +#define __pyx_n_s_anon_map___setstate_cython __pyx_mstate_global->__pyx_n_s_anon_map___setstate_cython +#define __pyx_n_s_anon_map_get_anon __pyx_mstate_global->__pyx_n_s_anon_map_get_anon +#define __pyx_n_s_anonymous_counter __pyx_mstate_global->__pyx_n_s_anonymous_counter +#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines +#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool +#define __pyx_n_s_cache_key __pyx_mstate_global->__pyx_n_s_cache_key +#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback +#define __pyx_n_s_derived __pyx_mstate_global->__pyx_n_s_derived +#define __pyx_n_s_dict __pyx_mstate_global->__pyx_n_s_dict +#define __pyx_n_s_dict_2 __pyx_mstate_global->__pyx_n_s_dict_2 +#define __pyx_kp_u_disable __pyx_mstate_global->__pyx_kp_u_disable +#define __pyx_kp_u_enable __pyx_mstate_global->__pyx_kp_u_enable +#define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc +#define __pyx_n_s_get __pyx_mstate_global->__pyx_n_s_get +#define __pyx_n_s_get_anon __pyx_mstate_global->__pyx_n_s_get_anon +#define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate +#define __pyx_n_s_idself __pyx_mstate_global->__pyx_n_s_idself +#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import +#define __pyx_n_s_int __pyx_mstate_global->__pyx_n_s_int +#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled +#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine +#define __pyx_kp_u_isenabled __pyx_mstate_global->__pyx_kp_u_isenabled +#define __pyx_n_s_key __pyx_mstate_global->__pyx_n_s_key +#define __pyx_kp_s_lib_sqlalchemy_sql__util_cy_py __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_sql__util_cy_py +#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main +#define __pyx_n_s_missing __pyx_mstate_global->__pyx_n_s_missing +#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name +#define __pyx_n_s_new __pyx_mstate_global->__pyx_n_s_new +#define __pyx_kp_s_no_default___reduce___due_to_non __pyx_mstate_global->__pyx_kp_s_no_default___reduce___due_to_non +#define __pyx_n_s_obj __pyx_mstate_global->__pyx_n_s_obj +#define __pyx_n_s_object __pyx_mstate_global->__pyx_n_s_object +#define 
__pyx_n_s_pickle __pyx_mstate_global->__pyx_n_s_pickle +#define __pyx_n_s_prefix_anon_map __pyx_mstate_global->__pyx_n_s_prefix_anon_map +#define __pyx_n_s_prefix_anon_map___missing __pyx_mstate_global->__pyx_n_s_prefix_anon_map___missing +#define __pyx_n_s_prefix_anon_map___reduce_cython __pyx_mstate_global->__pyx_n_s_prefix_anon_map___reduce_cython +#define __pyx_n_s_prefix_anon_map___setstate_cytho __pyx_mstate_global->__pyx_n_s_prefix_anon_map___setstate_cytho +#define __pyx_n_s_pyx_PickleError __pyx_mstate_global->__pyx_n_s_pyx_PickleError +#define __pyx_n_s_pyx_checksum __pyx_mstate_global->__pyx_n_s_pyx_checksum +#define __pyx_n_s_pyx_result __pyx_mstate_global->__pyx_n_s_pyx_result +#define __pyx_n_s_pyx_state __pyx_mstate_global->__pyx_n_s_pyx_state +#define __pyx_n_s_pyx_type __pyx_mstate_global->__pyx_n_s_pyx_type +#define __pyx_n_s_pyx_unpickle_prefix_anon_map __pyx_mstate_global->__pyx_n_s_pyx_unpickle_prefix_anon_map +#define __pyx_n_s_pyx_vtable __pyx_mstate_global->__pyx_n_s_pyx_vtable +#define __pyx_n_s_reduce __pyx_mstate_global->__pyx_n_s_reduce +#define __pyx_n_s_reduce_cython __pyx_mstate_global->__pyx_n_s_reduce_cython +#define __pyx_n_s_reduce_ex __pyx_mstate_global->__pyx_n_s_reduce_ex +#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return +#define __pyx_n_s_self __pyx_mstate_global->__pyx_n_s_self +#define __pyx_n_s_self_dict __pyx_mstate_global->__pyx_n_s_self_dict +#define __pyx_n_s_setstate __pyx_mstate_global->__pyx_n_s_setstate +#define __pyx_n_s_setstate_cython __pyx_mstate_global->__pyx_n_s_setstate_cython +#define __pyx_n_s_sqlalchemy_sql__util_cy __pyx_mstate_global->__pyx_n_s_sqlalchemy_sql__util_cy +#define __pyx_n_s_state __pyx_mstate_global->__pyx_n_s_state +#define __pyx_n_s_str __pyx_mstate_global->__pyx_n_s_str +#define __pyx_kp_s_stringsource __pyx_mstate_global->__pyx_kp_s_stringsource +#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test +#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing +#define __pyx_n_s_update __pyx_mstate_global->__pyx_n_s_update +#define __pyx_n_s_use_setstate __pyx_mstate_global->__pyx_n_s_use_setstate +#define __pyx_n_s_util_typing __pyx_mstate_global->__pyx_n_s_util_typing +#define __pyx_n_s_value __pyx_mstate_global->__pyx_n_s_value +#define __pyx_int_1 __pyx_mstate_global->__pyx_int_1 +#define __pyx_int_222419149 __pyx_mstate_global->__pyx_int_222419149 +#define __pyx_int_228825662 __pyx_mstate_global->__pyx_int_228825662 +#define __pyx_int_238750788 __pyx_mstate_global->__pyx_int_238750788 +#define __pyx_tuple__3 __pyx_mstate_global->__pyx_tuple__3 +#define __pyx_tuple__7 __pyx_mstate_global->__pyx_tuple__7 +#define __pyx_tuple__9 __pyx_mstate_global->__pyx_tuple__9 +#define __pyx_tuple__11 __pyx_mstate_global->__pyx_tuple__11 +#define __pyx_tuple__13 __pyx_mstate_global->__pyx_tuple__13 +#define __pyx_tuple__15 __pyx_mstate_global->__pyx_tuple__15 +#define __pyx_tuple__17 __pyx_mstate_global->__pyx_tuple__17 +#define __pyx_tuple__20 __pyx_mstate_global->__pyx_tuple__20 +#define __pyx_codeobj__6 __pyx_mstate_global->__pyx_codeobj__6 +#define __pyx_codeobj__8 __pyx_mstate_global->__pyx_codeobj__8 +#define __pyx_codeobj__10 __pyx_mstate_global->__pyx_codeobj__10 +#define __pyx_codeobj__12 __pyx_mstate_global->__pyx_codeobj__12 +#define __pyx_codeobj__14 __pyx_mstate_global->__pyx_codeobj__14 +#define __pyx_codeobj__16 __pyx_mstate_global->__pyx_codeobj__16 +#define __pyx_codeobj__18 __pyx_mstate_global->__pyx_codeobj__18 +#define __pyx_codeobj__19 __pyx_mstate_global->__pyx_codeobj__19 
+#define __pyx_codeobj__21 __pyx_mstate_global->__pyx_codeobj__21 +/* #### Code section: module_code ### */ + +/* "sqlalchemy/sql/_util_cy.py":31 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +PyDoc_STRVAR(__pyx_doc_10sqlalchemy_3sql_8_util_cy__is_compiled, "Utility function to indicate if this module is compiled or not."); +static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_3sql_8_util_cy__is_compiled}; +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy__is_compiled(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled", 1); + + /* "sqlalchemy/sql/_util_cy.py":33 + * def _is_compiled() -> bool: + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "sqlalchemy/sql/_util_cy.py":31 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/sql/_util_cy.py":57 + * """ + * + * def __missing__(self, key: str, /) -> str: # <<<<<<<<<<<<<< + * derived: str + * value: str + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_1__missing__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_1__missing__ = {"__missing__", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_1__missing__, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_1__missing__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__missing__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_key), (&PyUnicode_Type), 0, "key", 1))) __PYX_ERR(1, 57, __pyx_L1_error) + __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map___missing__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *)__pyx_v_self), ((PyObject*)__pyx_v_key)); + + /* 
function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map___missing__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self, PyObject *__pyx_v_key) { + PyObject *__pyx_v_derived = 0; + PyObject *__pyx_v_value = 0; + PyObject *__pyx_v_self_dict = 0; + PyObject *__pyx_v_anonymous_counter = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + Py_ssize_t __pyx_t_3; + Py_UCS4 __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__missing__", 1); + + /* "sqlalchemy/sql/_util_cy.py":60 + * derived: str + * value: str + * self_dict: dict = self # type: ignore[type-arg] # <<<<<<<<<<<<<< + * + * derived = key.split(" ", 1)[1] + */ + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_v_self_dict = ((PyObject*)__pyx_v_self); + + /* "sqlalchemy/sql/_util_cy.py":62 + * self_dict: dict = self # type: ignore[type-arg] + * + * derived = key.split(" ", 1)[1] # <<<<<<<<<<<<<< + * + * anonymous_counter: int = self_dict.get(derived, 1) + */ + __pyx_t_1 = PyUnicode_Split(__pyx_v_key, __Pyx_NoneAsNull(__pyx_kp_u_), 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 62, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_GetItemInt_List(__pyx_t_1, 1, long, 1, __Pyx_PyInt_From_long, 1, 0, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 62, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!(likely(PyUnicode_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_t_2))) __PYX_ERR(1, 62, __pyx_L1_error) + __pyx_v_derived = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/sql/_util_cy.py":64 + * derived = key.split(" ", 1)[1] + * + * anonymous_counter: int = self_dict.get(derived, 1) # <<<<<<<<<<<<<< + * self_dict[derived] = anonymous_counter + 1 + * value = f"{derived}_{anonymous_counter}" + */ + if (unlikely(__pyx_v_self_dict == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); + __PYX_ERR(1, 64, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PyDict_GetItemDefault(__pyx_v_self_dict, __pyx_v_derived, __pyx_int_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 64, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (!(likely(__Pyx_Py3Int_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None) || __Pyx_RaiseUnexpectedTypeError("int", __pyx_t_2))) __PYX_ERR(1, 64, __pyx_L1_error) + __pyx_v_anonymous_counter = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/sql/_util_cy.py":65 + * + * anonymous_counter: int = self_dict.get(derived, 1) + * self_dict[derived] = anonymous_counter + 1 # <<<<<<<<<<<<<< + * value = f"{derived}_{anonymous_counter}" + * self_dict[key] = value + */ + __pyx_t_2 = PyNumber_Add(__pyx_v_anonymous_counter, __pyx_int_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 65, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (unlikely(__pyx_v_self_dict == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 65, __pyx_L1_error) + } + if (unlikely((PyDict_SetItem(__pyx_v_self_dict, __pyx_v_derived, __pyx_t_2) < 0))) __PYX_ERR(1, 65, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/sql/_util_cy.py":66 + * anonymous_counter: int = self_dict.get(derived, 1) + * self_dict[derived] = 
anonymous_counter + 1 + * value = f"{derived}_{anonymous_counter}" # <<<<<<<<<<<<<< + * self_dict[key] = value + * return value + */ + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = 0; + __pyx_t_4 = 127; + __pyx_t_1 = __Pyx_PyUnicode_Unicode(__pyx_v_derived); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) > __pyx_t_4) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) : __pyx_t_4; + __pyx_t_3 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_1); + __pyx_t_1 = 0; + __Pyx_INCREF(__pyx_n_u__2); + __pyx_t_3 += 1; + __Pyx_GIVEREF(__pyx_n_u__2); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_n_u__2); + __pyx_t_1 = __Pyx_PyObject_FormatSimple(__pyx_v_anonymous_counter, __pyx_empty_unicode); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) > __pyx_t_4) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) : __pyx_t_4; + __pyx_t_3 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyUnicode_Join(__pyx_t_2, 3, __pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_value = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/sql/_util_cy.py":67 + * self_dict[derived] = anonymous_counter + 1 + * value = f"{derived}_{anonymous_counter}" + * self_dict[key] = value # <<<<<<<<<<<<<< + * return value + * + */ + if (unlikely(__pyx_v_self_dict == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 67, __pyx_L1_error) + } + if (unlikely((PyDict_SetItem(__pyx_v_self_dict, __pyx_v_key, __pyx_v_value) < 0))) __PYX_ERR(1, 67, __pyx_L1_error) + + /* "sqlalchemy/sql/_util_cy.py":68 + * value = f"{derived}_{anonymous_counter}" + * self_dict[key] = value + * return value # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_value); + __pyx_r = __pyx_v_value; + goto __pyx_L0; + + /* "sqlalchemy/sql/_util_cy.py":57 + * """ + * + * def __missing__(self, key: str, /) -> str: # <<<<<<<<<<<<<< + * derived: str + * value: str + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.prefix_anon_map.__missing__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_derived); + __Pyx_XDECREF(__pyx_v_value); + __Pyx_XDECREF(__pyx_v_self_dict); + __Pyx_XDECREF(__pyx_v_anonymous_counter); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_3__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_3__reduce_cython__ = {"__reduce_cython__", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_3__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_3__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_2__reduce_cython__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_2__reduce_cython__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 1); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = () # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __Pyx_INCREF(__pyx_empty_tuple); + __pyx_v_state = __pyx_empty_tuple; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = () + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * state = () + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + if (__pyx_t_2) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict)) __PYX_ERR(0, 8, __pyx_L1_error); + __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 8, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = False + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = () + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = False # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, None), state + */ + /*else*/ { + __pyx_v_use_setstate = 0; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = False + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, None), state + * else: + */ + if (__pyx_v_use_setstate) { + + /* "(tree fragment)":13 + * use_setstate = False + * if use_setstate: + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_prefix_anon_map); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(0, 13, __pyx_L1_error); + __Pyx_INCREF(__pyx_int_238750788); + __Pyx_GIVEREF(__pyx_int_238750788); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_238750788)) __PYX_ERR(0, 13, __pyx_L1_error); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None)) __PYX_ERR(0, 13, __pyx_L1_error); + __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1)) __PYX_ERR(0, 13, __pyx_L1_error); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_state)) __PYX_ERR(0, 13, __pyx_L1_error); + __pyx_t_3 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = False + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, None), state + * else: + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_prefix_anon_map__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_prefix_anon_map); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) 
__PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(0, 15, __pyx_L1_error); + __Pyx_INCREF(__pyx_int_238750788); + __Pyx_GIVEREF(__pyx_int_238750788); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_238750788)) __PYX_ERR(0, 15, __pyx_L1_error); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state)) __PYX_ERR(0, 15, __pyx_L1_error); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4)) __PYX_ERR(0, 15, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(0, 15, __pyx_L1_error); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.prefix_anon_map.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_prefix_anon_map__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_5__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_5__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_5__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_5__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v___pyx_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; 
+ if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 16, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(0, 16, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v___pyx_state = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 16, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.prefix_anon_map.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_4__setstate_cython__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *)__pyx_v_self), __pyx_v___pyx_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_4__setstate_cython__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 1); + + /* "(tree fragment)":17 + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_prefix_anon_map__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(0, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_10sqlalchemy_3sql_8_util_cy___pyx_unpickle_prefix_anon_map__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_prefix_anon_map__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; 
__Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.prefix_anon_map.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/sql/_util_cy.py":92 + * _index: cython.uint + * + * def __cinit__(self): # type: ignore[no-untyped-def] # <<<<<<<<<<<<<< + * self._index = 0 + * + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return -1; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, __pyx_nargs); return -1;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1; + __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map___cinit__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map___cinit__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self) { + int __pyx_r; + + /* "sqlalchemy/sql/_util_cy.py":93 + * + * def __cinit__(self): # type: ignore[no-untyped-def] + * self._index = 0 # <<<<<<<<<<<<<< + * + * else: + */ + __pyx_v_self->_index = 0; + + /* "sqlalchemy/sql/_util_cy.py":92 + * _index: cython.uint + * + * def __cinit__(self): # type: ignore[no-untyped-def] # <<<<<<<<<<<<<< + * self._index = 0 + * + */ + + /* function exit code */ + __pyx_r = 0; + return __pyx_r; +} + +/* "sqlalchemy/sql/_util_cy.py":98 + * _index: int = 0 # type: ignore[no-redef] + * + * @cython.cfunc # type:ignore[misc] # <<<<<<<<<<<<<< + * @cython.inline # type:ignore[misc] + * def _add_missing( + */ + +static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key) { + PyObject *__pyx_v_val = 0; + PyObject *__pyx_v_self_dict = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_add_missing", 1); + + /* "sqlalchemy/sql/_util_cy.py":103 + * self: anon_map, key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], / + * ) -> int: + * val: int = self._index # <<<<<<<<<<<<<< + * self._index += 1 + * self_dict: dict = self # type: ignore[type-arg] + */ + __pyx_t_1 = __Pyx_PyInt_From_unsigned_int(__pyx_v_self->_index); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 103, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(__Pyx_Py3Int_CheckExact(__pyx_t_1)) || __Pyx_RaiseUnexpectedTypeError("int", __pyx_t_1))) __PYX_ERR(1, 103, __pyx_L1_error) + __pyx_v_val = 
((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/sql/_util_cy.py":104 + * ) -> int: + * val: int = self._index + * self._index += 1 # <<<<<<<<<<<<<< + * self_dict: dict = self # type: ignore[type-arg] + * self_dict[key] = val + */ + __pyx_v_self->_index = (__pyx_v_self->_index + 1); + + /* "sqlalchemy/sql/_util_cy.py":105 + * val: int = self._index + * self._index += 1 + * self_dict: dict = self # type: ignore[type-arg] # <<<<<<<<<<<<<< + * self_dict[key] = val + * return val + */ + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_v_self_dict = ((PyObject*)__pyx_v_self); + + /* "sqlalchemy/sql/_util_cy.py":106 + * self._index += 1 + * self_dict: dict = self # type: ignore[type-arg] + * self_dict[key] = val # <<<<<<<<<<<<<< + * return val + * + */ + if (unlikely((PyDict_SetItem(__pyx_v_self_dict, __pyx_v_key, __pyx_v_val) < 0))) __PYX_ERR(1, 106, __pyx_L1_error) + + /* "sqlalchemy/sql/_util_cy.py":107 + * self_dict: dict = self # type: ignore[type-arg] + * self_dict[key] = val + * return val # <<<<<<<<<<<<<< + * + * def get_anon(self: anon_map, obj: object, /) -> Tuple[int, bool]: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_val); + __pyx_r = __pyx_v_val; + goto __pyx_L0; + + /* "sqlalchemy/sql/_util_cy.py":98 + * _index: int = 0 # type: ignore[no-redef] + * + * @cython.cfunc # type:ignore[misc] # <<<<<<<<<<<<<< + * @cython.inline # type:ignore[misc] + * def _add_missing( + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map._add_missing", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_val); + __Pyx_XDECREF(__pyx_v_self_dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/sql/_util_cy.py":109 + * return val + * + * def get_anon(self: anon_map, obj: object, /) -> Tuple[int, bool]: # <<<<<<<<<<<<<< + * self_dict: dict = self # type: ignore[type-arg] + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_3get_anon(PyObject *__pyx_v_self, PyObject *__pyx_v_obj); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_3get_anon = {"get_anon", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_3get_anon, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_3get_anon(PyObject *__pyx_v_self, PyObject *__pyx_v_obj) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_anon (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { + PyErr_Format(PyExc_TypeError, "Argument '%.200s' must not be None", "self"); __PYX_ERR(1, 109, __pyx_L1_error) + } + __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_2get_anon(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self), ((PyObject *)__pyx_v_obj)); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_2get_anon(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_obj) { + PyObject *__pyx_v_self_dict = 0; + PyObject *__pyx_v_idself = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + 
unsigned PY_LONG_LONG __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_anon", 1); + + /* "sqlalchemy/sql/_util_cy.py":110 + * + * def get_anon(self: anon_map, obj: object, /) -> Tuple[int, bool]: + * self_dict: dict = self # type: ignore[type-arg] # <<<<<<<<<<<<<< + * + * idself: int = _get_id(obj) + */ + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_v_self_dict = ((PyObject*)__pyx_v_self); + + /* "sqlalchemy/sql/_util_cy.py":112 + * self_dict: dict = self # type: ignore[type-arg] + * + * idself: int = _get_id(obj) # <<<<<<<<<<<<<< + * if idself in self_dict: + * return self_dict[idself], True + */ + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_v_obj); if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 112, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 112, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (!(likely(__Pyx_Py3Int_CheckExact(__pyx_t_2)) || __Pyx_RaiseUnexpectedTypeError("int", __pyx_t_2))) __PYX_ERR(1, 112, __pyx_L1_error) + __pyx_v_idself = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/sql/_util_cy.py":113 + * + * idself: int = _get_id(obj) + * if idself in self_dict: # <<<<<<<<<<<<<< + * return self_dict[idself], True + * else: + */ + __pyx_t_3 = (__Pyx_PyDict_ContainsTF(__pyx_v_idself, __pyx_v_self_dict, Py_EQ)); if (unlikely((__pyx_t_3 < 0))) __PYX_ERR(1, 113, __pyx_L1_error) + if (__pyx_t_3) { + + /* "sqlalchemy/sql/_util_cy.py":114 + * idself: int = _get_id(obj) + * if idself in self_dict: + * return self_dict[idself], True # <<<<<<<<<<<<<< + * else: + * return self._add_missing(idself), False + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyDict_GetItem(__pyx_v_self_dict, __pyx_v_idself); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 114, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 114, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_2); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2)) __PYX_ERR(1, 114, __pyx_L1_error); + __Pyx_INCREF(Py_True); + __Pyx_GIVEREF(Py_True); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, Py_True)) __PYX_ERR(1, 114, __pyx_L1_error); + __pyx_t_2 = 0; + __pyx_r = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "sqlalchemy/sql/_util_cy.py":113 + * + * idself: int = _get_id(obj) + * if idself in self_dict: # <<<<<<<<<<<<<< + * return self_dict[idself], True + * else: + */ + } + + /* "sqlalchemy/sql/_util_cy.py":116 + * return self_dict[idself], True + * else: + * return self._add_missing(idself), False # <<<<<<<<<<<<<< + * + * if cython.compiled: + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(__pyx_v_self, __pyx_v_idself); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_4); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_4)) __PYX_ERR(1, 116, __pyx_L1_error); + __Pyx_INCREF(Py_False); + __Pyx_GIVEREF(Py_False); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 1, Py_False)) __PYX_ERR(1, 116, __pyx_L1_error); + __pyx_t_4 = 0; + __pyx_r = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + goto __pyx_L0; + } + + /* "sqlalchemy/sql/_util_cy.py":109 + * 
return val + * + * def get_anon(self: anon_map, obj: object, /) -> Tuple[int, bool]: # <<<<<<<<<<<<<< + * self_dict: dict = self # type: ignore[type-arg] + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.get_anon", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_self_dict); + __Pyx_XDECREF(__pyx_v_idself); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/sql/_util_cy.py":120 + * if cython.compiled: + * + * def __getitem__( # <<<<<<<<<<<<<< + * self: anon_map, + * key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { + PyErr_Format(PyExc_TypeError, "Argument '%.200s' must not be None", "self"); __PYX_ERR(1, 121, __pyx_L1_error) + } + __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_4__getitem__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self), ((PyObject *)__pyx_v_key)); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_4__getitem__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key) { + PyObject *__pyx_v_self_dict = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__getitem__", 1); + + /* "sqlalchemy/sql/_util_cy.py":125 + * /, + * ) -> Union[int, Literal[True]]: + * self_dict: dict = self # type: ignore[type-arg] # <<<<<<<<<<<<<< + * + * if key in self_dict: + */ + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_v_self_dict = ((PyObject*)__pyx_v_self); + + /* "sqlalchemy/sql/_util_cy.py":127 + * self_dict: dict = self # type: ignore[type-arg] + * + * if key in self_dict: # <<<<<<<<<<<<<< + * return self_dict[key] # type:ignore[no-any-return] + * else: + */ + __pyx_t_1 = (__Pyx_PyDict_ContainsTF(__pyx_v_key, __pyx_v_self_dict, Py_EQ)); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(1, 127, __pyx_L1_error) + if (__pyx_t_1) { + + /* "sqlalchemy/sql/_util_cy.py":128 + * + * if key in self_dict: + * return self_dict[key] # type:ignore[no-any-return] # <<<<<<<<<<<<<< + * else: + * return self._add_missing(key) # type:ignore[no-any-return] + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyDict_GetItem(__pyx_v_self_dict, __pyx_v_key); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 128, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/sql/_util_cy.py":127 + * self_dict: dict = self # type: ignore[type-arg] + * + * if key in self_dict: # <<<<<<<<<<<<<< + * return 
self_dict[key] # type:ignore[no-any-return] + * else: + */ + } + + /* "sqlalchemy/sql/_util_cy.py":130 + * return self_dict[key] # type:ignore[no-any-return] + * else: + * return self._add_missing(key) # type:ignore[no-any-return] # <<<<<<<<<<<<<< + * + * def __missing__( + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(__pyx_v_self, __pyx_v_key); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + } + + /* "sqlalchemy/sql/_util_cy.py":120 + * if cython.compiled: + * + * def __getitem__( # <<<<<<<<<<<<<< + * self: anon_map, + * key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_self_dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/sql/_util_cy.py":132 + * return self._add_missing(key) # type:ignore[no-any-return] + * + * def __missing__( # <<<<<<<<<<<<<< + * self: anon_map, key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], / + * ) -> int: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_7__missing__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_7__missing__ = {"__missing__", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_7__missing__, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_7__missing__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__missing__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { + PyErr_Format(PyExc_TypeError, "Argument '%.200s' must not be None", "self"); __PYX_ERR(1, 133, __pyx_L1_error) + } + __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_6__missing__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self), ((PyObject *)__pyx_v_key)); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_6__missing__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__missing__", 1); + + /* "sqlalchemy/sql/_util_cy.py":135 + * self: anon_map, key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], / + * ) -> int: + * return self._add_missing(key) # type:ignore[no-any-return] # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(__pyx_v_self, __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/sql/_util_cy.py":132 + 
* return self._add_missing(key) # type:ignore[no-any-return] + * + * def __missing__( # <<<<<<<<<<<<<< + * self: anon_map, key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], / + * ) -> int: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.__missing__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + * def __setstate_cython__(self, __pyx_state): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_9__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_9__reduce_cython__ = {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_9__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_9__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_8__reduce_cython__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_8__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 1); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + */ + __Pyx_Raise(__pyx_builtin_TypeError, __pyx_kp_s_no_default___reduce___due_to_non, 0, 0); + __PYX_ERR(0, 2, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + * def 
__setstate_cython__(self, __pyx_state): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_11__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_11__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_11__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_11__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + CYTHON_UNUSED PyObject *__pyx_v___pyx_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 3, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(0, 3, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v___pyx_state = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 3, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < 
(Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_10__setstate_cython__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self), __pyx_v___pyx_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_10__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 1); + + /* "(tree fragment)":4 + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + * def __setstate_cython__(self, __pyx_state): + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" # <<<<<<<<<<<<<< + */ + __Pyx_Raise(__pyx_builtin_TypeError, __pyx_kp_s_no_default___reduce___due_to_non, 0, 0); + __PYX_ERR(0, 4, __pyx_L1_error) + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_prefix_anon_map(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_3__pyx_unpickle_prefix_anon_map(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_3__pyx_unpickle_prefix_anon_map = {"__pyx_unpickle_prefix_anon_map", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_3__pyx_unpickle_prefix_anon_map, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_3__pyx_unpickle_prefix_anon_map(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[3] = {0,0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r 
= 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_prefix_anon_map (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_type)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_checksum)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_prefix_anon_map", 1, 3, 3, 1); __PYX_ERR(0, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_prefix_anon_map", 1, 3, 3, 2); __PYX_ERR(0, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__pyx_unpickle_prefix_anon_map") < 0)) __PYX_ERR(0, 1, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 3)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_prefix_anon_map", 1, 3, 3, __pyx_nargs); __PYX_ERR(0, 1, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.__pyx_unpickle_prefix_anon_map", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_2__pyx_unpickle_prefix_anon_map(__pyx_self, __pyx_v___pyx_type, 
__pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_2__pyx_unpickle_prefix_anon_map(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_prefix_anon_map", 1); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__3, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + * __pyx_result = prefix_anon_map.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + if (__Pyx_PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError)) __PYX_ERR(0, 5, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum # <<<<<<<<<<<<<< + * __pyx_result = prefix_anon_map.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + 
__Pyx_Raise(__pyx_v___pyx_PickleError, __pyx_t_1, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + * __pyx_result = prefix_anon_map.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map), __pyx_n_s_new); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v___pyx_type}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_v___pyx_result = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + * __pyx_result = prefix_anon_map.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_2 = (__pyx_v___pyx_state != Py_None); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = prefix_anon_map.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(0, 9, __pyx_L1_error) + __pyx_t_1 = __pyx_f_10sqlalchemy_3sql_8_util_cy___pyx_unpickle_prefix_anon_map__set_state(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + * __pyx_result = prefix_anon_map.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * 
__pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): + * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_prefix_anon_map(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.__pyx_unpickle_prefix_anon_map", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[0]) + */ + +static PyObject *__pyx_f_10sqlalchemy_3sql_8_util_cy___pyx_unpickle_prefix_anon_map__set_state(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + Py_ssize_t __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + unsigned int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_prefix_anon_map__set_state", 1); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): + * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[0]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 12, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 12, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_2 > 0); + if (__pyx_t_3) { + } else { + __pyx_t_1 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_3 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 12, __pyx_L1_error) + __pyx_t_1 = __pyx_t_3; + __pyx_L4_bool_binop_done:; + if (__pyx_t_1) { + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): + * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[0]) # <<<<<<<<<<<<<< + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 13, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 13, __pyx_L1_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = NULL; + __pyx_t_8 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_8 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_5}; + __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): + * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[0]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[0]) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("sqlalchemy.sql._util_cy.__pyx_unpickle_prefix_anon_map__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_prefix_anon_map(PyObject *o, visitproc v, void *a) { + int e; + if (!(&PyDict_Type)->tp_traverse); else { e = (&PyDict_Type)->tp_traverse(o,v,a); if (e) return e; } + return 0; +} + +static int __pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_prefix_anon_map(PyObject *o) { + if (!(&PyDict_Type)->tp_clear); else (&PyDict_Type)->tp_clear(o); + return 0; +} + +static PyMethodDef __pyx_methods_10sqlalchemy_3sql_8_util_cy_prefix_anon_map[] = { + {"__missing__", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_1__missing__, METH_O, 0}, + {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_3__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_5__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {0, 0, 0, 0} +}; +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map_slots[] = { + {Py_tp_doc, (void *)PyDoc_STR("A map that creates new keys for 
missing key access.\n\n Considers keys of the form \"<ident> <name>\" to produce\n new symbols \"<name>_<index>\", where \"index\" is an incrementing integer\n corresponding to <ident>.\n\n Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which\n is otherwise usually used for this type of operation.\n\n ")},
+ {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_prefix_anon_map},
+ {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_prefix_anon_map},
+ {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_3sql_8_util_cy_prefix_anon_map},
+ {0, 0},
+};
+static PyType_Spec __pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map_spec = {
+ "sqlalchemy.sql._util_cy.prefix_anon_map",
+ sizeof(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map),
+ 0,
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE,
+ __pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map_slots,
+};
+#else
+
+static PyTypeObject __pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map = {
+ PyVarObject_HEAD_INIT(0, 0)
+ "sqlalchemy.sql._util_cy.""prefix_anon_map", /*tp_name*/
+ sizeof(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ 0, /*tp_dealloc*/
+ #if PY_VERSION_HEX < 0x030800b4
+ 0, /*tp_print*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030800b4
+ 0, /*tp_vectorcall_offset*/
+ #endif
+ 0, /*tp_getattr*/
+ 0, /*tp_setattr*/
+ #if PY_MAJOR_VERSION < 3
+ 0, /*tp_compare*/
+ #endif
+ #if PY_MAJOR_VERSION >= 3
+ 0, /*tp_as_async*/
+ #endif
+ 0, /*tp_repr*/
+ 0, /*tp_as_number*/
+ 0, /*tp_as_sequence*/
+ 0, /*tp_as_mapping*/
+ 0, /*tp_hash*/
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ 0, /*tp_getattro*/
+ 0, /*tp_setattro*/
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/
+ PyDoc_STR("A map that creates new keys for missing key access.\n\n Considers keys of the form \"<ident> <name>\" to produce\n new symbols \"<name>_<index>\", where \"index\" is an incrementing integer\n corresponding to <ident>.\n\n Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which\n is otherwise usually used for this type of operation.\n\n "), /*tp_doc*/
+ __pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_prefix_anon_map, /*tp_traverse*/
+ __pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_prefix_anon_map, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ __pyx_methods_10sqlalchemy_3sql_8_util_cy_prefix_anon_map, /*tp_methods*/
+ 0, /*tp_members*/
+ 0, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ #if !CYTHON_USE_TYPE_SPECS
+ 0, /*tp_dictoffset*/
+ #endif
+ 0, /*tp_init*/
+ 0, /*tp_alloc*/
+ 0, /*tp_new*/
+ 0, /*tp_free*/
+ 0, /*tp_is_gc*/
+ 0, /*tp_bases*/
+ 0, /*tp_mro*/
+ 0, /*tp_cache*/
+ 0, /*tp_subclasses*/
+ 0, /*tp_weaklist*/
+ 0, /*tp_del*/
+ 0, /*tp_version_tag*/
+ #if PY_VERSION_HEX >= 0x030400a1
+ #if CYTHON_USE_TP_FINALIZE
+ 0, /*tp_finalize*/
+ #else
+ NULL, /*tp_finalize*/
+ #endif
+ #endif
+ #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
+ 0, /*tp_vectorcall*/
+ #endif
+ #if __PYX_NEED_TP_PRINT_SLOT == 1
+ 0, /*tp_print*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030C0000
+ 0, /*tp_watched*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030d00A4
+ 0, /*tp_versions_used*/
+ #endif
+ #if CYTHON_COMPILING_IN_PYPY && 
PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, /*tp_pypy_flags*/ + #endif +}; +#endif +static struct __pyx_vtabstruct_10sqlalchemy_3sql_8_util_cy_anon_map __pyx_vtable_10sqlalchemy_3sql_8_util_cy_anon_map; + +static PyObject *__pyx_tp_new_10sqlalchemy_3sql_8_util_cy_anon_map(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *p; + PyObject *o = __Pyx_PyType_GetSlot((&PyDict_Type), tp_new, newfunc)(t, a, k); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)o); + p->__pyx_vtab = __pyx_vtabptr_10sqlalchemy_3sql_8_util_cy_anon_map; + if (unlikely(__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_1__cinit__(o, __pyx_empty_tuple, NULL) < 0)) goto bad; + return o; + bad: + Py_DECREF(o); o = 0; + return NULL; +} + +static int __pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_anon_map(PyObject *o, visitproc v, void *a) { + int e; + if (!(&PyDict_Type)->tp_traverse); else { e = (&PyDict_Type)->tp_traverse(o,v,a); if (e) return e; } + return 0; +} + +static int __pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_anon_map(PyObject *o) { + if (!(&PyDict_Type)->tp_clear); else (&PyDict_Type)->tp_clear(o); + return 0; +} +static PyObject *__pyx_sq_item_10sqlalchemy_3sql_8_util_cy_anon_map(PyObject *o, Py_ssize_t i) { + PyObject *r; + PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0; + r = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, x); + Py_DECREF(x); + return r; +} + +static PyMethodDef __pyx_methods_10sqlalchemy_3sql_8_util_cy_anon_map[] = { + {"get_anon", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_3get_anon, METH_O, 0}, + {"__missing__", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_7__missing__, METH_O, 0}, + {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_9__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_11__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {0, 0, 0, 0} +}; +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map_slots[] = { + {Py_sq_item, (void *)__pyx_sq_item_10sqlalchemy_3sql_8_util_cy_anon_map}, + {Py_mp_subscript, (void *)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_5__getitem__}, + {Py_tp_doc, (void *)PyDoc_STR("A map that creates new keys for missing key access.\n\n Produces an incrementing sequence given a series of unique keys.\n\n This is similar to the compiler prefix_anon_map class although simpler.\n\n Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which\n is otherwise usually used for this type of operation.\n\n ")}, + {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_anon_map}, + {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_anon_map}, + {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_3sql_8_util_cy_anon_map}, + {Py_tp_new, (void *)__pyx_tp_new_10sqlalchemy_3sql_8_util_cy_anon_map}, + {0, 0}, +}; +static PyType_Spec __pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map_spec = { + "sqlalchemy.sql._util_cy.anon_map", + sizeof(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map), + 0, + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, + __pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map_slots, +}; +#else + 
+static PySequenceMethods __pyx_tp_as_sequence_anon_map = { + 0, /*sq_length*/ + 0, /*sq_concat*/ + 0, /*sq_repeat*/ + __pyx_sq_item_10sqlalchemy_3sql_8_util_cy_anon_map, /*sq_item*/ + 0, /*sq_slice*/ + 0, /*sq_ass_item*/ + 0, /*sq_ass_slice*/ + 0, /*sq_contains*/ + 0, /*sq_inplace_concat*/ + 0, /*sq_inplace_repeat*/ +}; + +static PyMappingMethods __pyx_tp_as_mapping_anon_map = { + 0, /*mp_length*/ + __pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_5__getitem__, /*mp_subscript*/ + 0, /*mp_ass_subscript*/ +}; + +static PyTypeObject __pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map = { + PyVarObject_HEAD_INIT(0, 0) + "sqlalchemy.sql._util_cy.""anon_map", /*tp_name*/ + sizeof(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + 0, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + &__pyx_tp_as_sequence_anon_map, /*tp_as_sequence*/ + &__pyx_tp_as_mapping_anon_map, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ + PyDoc_STR("A map that creates new keys for missing key access.\n\n Produces an incrementing sequence given a series of unique keys.\n\n This is similar to the compiler prefix_anon_map class although simpler.\n\n Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which\n is otherwise usually used for this type of operation.\n\n "), /*tp_doc*/ + __pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_anon_map, /*tp_traverse*/ + __pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_anon_map, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_10sqlalchemy_3sql_8_util_cy_anon_map, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + #if !CYTHON_USE_TYPE_SPECS + 0, /*tp_dictoffset*/ + #endif + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_10sqlalchemy_3sql_8_util_cy_anon_map, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + #if CYTHON_USE_TP_FINALIZE + 0, /*tp_finalize*/ + #else + NULL, /*tp_finalize*/ + #endif + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if __PYX_NEED_TP_PRINT_SLOT == 1 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030C0000 + 0, /*tp_watched*/ + #endif + #if PY_VERSION_HEX >= 0x030d00A4 + 0, /*tp_versions_used*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, /*tp_pypy_flags*/ + #endif +}; +#endif + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif 
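(For orientation: the two extension types defined above are the compiled forms of the small dict subclasses in lib/sqlalchemy/sql/_util_cy.py. The following is a minimal pure-Python sketch of their behavior, inferred only from the docstrings, def signatures, and local variable names embedded in the generated code; the exact bodies and attribute names in the .py source may differ, so treat this as illustrative rather than as the shipped implementation.)

from typing import Tuple


class prefix_anon_map(dict):
    """Create new symbols on missing-key access.

    A key of the form "<ident> <name>" produces "<name>_<index>",
    where <index> is an incrementing integer per <name>.
    """

    def __missing__(self, key: str) -> str:
        derived = key.split(" ", 1)[1]
        anonymous_counter = self.get(derived, 1)
        self[derived] = anonymous_counter + 1      # bump the per-name counter
        value = f"{derived}_{anonymous_counter}"
        self[key] = value                          # memoize the derived symbol
        return value


class anon_map(dict):
    """Map a series of unique keys to an incrementing integer sequence."""

    def __init__(self) -> None:
        self.index = 0                             # assumed counter attribute

    def __missing__(self, key) -> int:
        self[key] = value = self.index             # next integer for this key
        self.index += 1
        return value

    def get_anon(self, obj: object) -> Tuple[int, bool]:
        # key objects by id(); the bool reports whether obj was already seen
        idself = id(obj)
        if idself in self:
            return self[idself], True
        return self[idself], False                 # __missing__ fills the entry

Under this sketch, prefix_anon_map()["1 param"] yields "param_1" and a later distinct key naming "param" yields "param_2", while anon_map().get_anon(obj) returns the integer assigned to id(obj) together with a flag indicating whether that object had already been registered.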
+/* #### Code section: pystring_table ### */ + +static int __Pyx_CreateStringTabAndInitStrings(void) { + __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, + {&__pyx_n_s_CacheConst, __pyx_k_CacheConst, sizeof(__pyx_k_CacheConst), 0, 0, 1, 1}, + {&__pyx_n_s_Dict, __pyx_k_Dict, sizeof(__pyx_k_Dict), 0, 0, 1, 1}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0}, + {&__pyx_n_s_Literal, __pyx_k_Literal, sizeof(__pyx_k_Literal), 0, 0, 1, 1}, + {&__pyx_kp_u_None, __pyx_k_None, sizeof(__pyx_k_None), 0, 1, 0, 0}, + {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_TYPE_CHECKING, __pyx_k_TYPE_CHECKING, sizeof(__pyx_k_TYPE_CHECKING), 0, 0, 1, 1}, + {&__pyx_n_s_Tuple, __pyx_k_Tuple, sizeof(__pyx_k_Tuple), 0, 0, 1, 1}, + {&__pyx_kp_s_Tuple_int_bool, __pyx_k_Tuple_int_bool, sizeof(__pyx_k_Tuple_int_bool), 0, 0, 1, 0}, + {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, + {&__pyx_n_s_Union, __pyx_k_Union, sizeof(__pyx_k_Union), 0, 0, 1, 1}, + {&__pyx_kp_s_Union_int_str_Literal_CacheConst, __pyx_k_Union_int_str_Literal_CacheConst, sizeof(__pyx_k_Union_int_str_Literal_CacheConst), 0, 0, 1, 0}, + {&__pyx_n_u__2, __pyx_k__2, sizeof(__pyx_k__2), 0, 1, 0, 1}, + {&__pyx_kp_u__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 1, 0, 0}, + {&__pyx_n_s__5, __pyx_k__5, sizeof(__pyx_k__5), 0, 0, 1, 1}, + {&__pyx_n_s_anon_map, __pyx_k_anon_map, sizeof(__pyx_k_anon_map), 0, 0, 1, 1}, + {&__pyx_n_s_anon_map___missing, __pyx_k_anon_map___missing, sizeof(__pyx_k_anon_map___missing), 0, 0, 1, 1}, + {&__pyx_n_s_anon_map___reduce_cython, __pyx_k_anon_map___reduce_cython, sizeof(__pyx_k_anon_map___reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_anon_map___setstate_cython, __pyx_k_anon_map___setstate_cython, sizeof(__pyx_k_anon_map___setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_anon_map_get_anon, __pyx_k_anon_map_get_anon, sizeof(__pyx_k_anon_map_get_anon), 0, 0, 1, 1}, + {&__pyx_n_s_anonymous_counter, __pyx_k_anonymous_counter, sizeof(__pyx_k_anonymous_counter), 0, 0, 1, 1}, + {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, + {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, + {&__pyx_n_s_cache_key, __pyx_k_cache_key, sizeof(__pyx_k_cache_key), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_derived, __pyx_k_derived, sizeof(__pyx_k_derived), 0, 0, 1, 1}, + {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, + {&__pyx_n_s_dict_2, __pyx_k_dict_2, sizeof(__pyx_k_dict_2), 0, 0, 1, 1}, + {&__pyx_kp_u_disable, __pyx_k_disable, sizeof(__pyx_k_disable), 0, 1, 0, 0}, + {&__pyx_kp_u_enable, __pyx_k_enable, sizeof(__pyx_k_enable), 0, 1, 0, 0}, + {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, + {&__pyx_n_s_get, __pyx_k_get, sizeof(__pyx_k_get), 0, 0, 1, 1}, + {&__pyx_n_s_get_anon, __pyx_k_get_anon, sizeof(__pyx_k_get_anon), 0, 0, 1, 1}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_idself, __pyx_k_idself, sizeof(__pyx_k_idself), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_int, __pyx_k_int, sizeof(__pyx_k_int), 0, 0, 1, 1}, + {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, + 
{&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, + {&__pyx_kp_u_isenabled, __pyx_k_isenabled, sizeof(__pyx_k_isenabled), 0, 1, 0, 0}, + {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, + {&__pyx_kp_s_lib_sqlalchemy_sql__util_cy_py, __pyx_k_lib_sqlalchemy_sql__util_cy_py, sizeof(__pyx_k_lib_sqlalchemy_sql__util_cy_py), 0, 0, 1, 0}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_missing, __pyx_k_missing, sizeof(__pyx_k_missing), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_kp_s_no_default___reduce___due_to_non, __pyx_k_no_default___reduce___due_to_non, sizeof(__pyx_k_no_default___reduce___due_to_non), 0, 0, 1, 0}, + {&__pyx_n_s_obj, __pyx_k_obj, sizeof(__pyx_k_obj), 0, 0, 1, 1}, + {&__pyx_n_s_object, __pyx_k_object, sizeof(__pyx_k_object), 0, 0, 1, 1}, + {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, + {&__pyx_n_s_prefix_anon_map, __pyx_k_prefix_anon_map, sizeof(__pyx_k_prefix_anon_map), 0, 0, 1, 1}, + {&__pyx_n_s_prefix_anon_map___missing, __pyx_k_prefix_anon_map___missing, sizeof(__pyx_k_prefix_anon_map___missing), 0, 0, 1, 1}, + {&__pyx_n_s_prefix_anon_map___reduce_cython, __pyx_k_prefix_anon_map___reduce_cython, sizeof(__pyx_k_prefix_anon_map___reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_prefix_anon_map___setstate_cytho, __pyx_k_prefix_anon_map___setstate_cytho, sizeof(__pyx_k_prefix_anon_map___setstate_cytho), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_prefix_anon_map, __pyx_k_pyx_unpickle_prefix_anon_map, sizeof(__pyx_k_pyx_unpickle_prefix_anon_map), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, + {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, + {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, + {&__pyx_n_s_self_dict, __pyx_k_self_dict, sizeof(__pyx_k_self_dict), 0, 0, 1, 1}, + {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_sqlalchemy_sql__util_cy, __pyx_k_sqlalchemy_sql__util_cy, sizeof(__pyx_k_sqlalchemy_sql__util_cy), 0, 0, 1, 1}, + {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, + {&__pyx_n_s_str, __pyx_k_str, sizeof(__pyx_k_str), 0, 0, 1, 1}, + {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, + {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, + {&__pyx_n_s_use_setstate, __pyx_k_use_setstate, 
sizeof(__pyx_k_use_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_util_typing, __pyx_k_util_typing, sizeof(__pyx_k_util_typing), 0, 0, 1, 1}, + {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} + }; + return __Pyx_InitStrings(__pyx_string_tab); +} +/* #### Code section: cached_builtins ### */ +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 2, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: cached_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + */ + __pyx_tuple__3 = PyTuple_Pack(3, __pyx_int_238750788, __pyx_int_228825662, __pyx_int_222419149); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__3); + __Pyx_GIVEREF(__pyx_tuple__3); + + /* "sqlalchemy/sql/_util_cy.py":31 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + __pyx_codeobj__6 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_sql__util_cy_py, __pyx_n_s_is_compiled, 31, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__6)) __PYX_ERR(1, 31, __pyx_L1_error) + + /* "sqlalchemy/sql/_util_cy.py":57 + * """ + * + * def __missing__(self, key: str, /) -> str: # <<<<<<<<<<<<<< + * derived: str + * value: str + */ + __pyx_tuple__7 = PyTuple_Pack(6, __pyx_n_s_self, __pyx_n_s_key, __pyx_n_s_derived, __pyx_n_s_value, __pyx_n_s_self_dict, __pyx_n_s_anonymous_counter); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 57, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); + __pyx_codeobj__8 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__7, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_sql__util_cy_py, __pyx_n_s_missing, 57, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__8)) __PYX_ERR(1, 57, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + __pyx_tuple__9 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_dict_2, __pyx_n_s_use_setstate); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__9); + __Pyx_GIVEREF(__pyx_tuple__9); + __pyx_codeobj__10 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__9, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__10)) __PYX_ERR(0, 1, __pyx_L1_error) + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) 
+ * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_prefix_anon_map__set_state(self, __pyx_state) + */ + __pyx_tuple__11 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pyx_state); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__11); + __Pyx_GIVEREF(__pyx_tuple__11); + __pyx_codeobj__12 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__11, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__12)) __PYX_ERR(0, 16, __pyx_L1_error) + + /* "sqlalchemy/sql/_util_cy.py":109 + * return val + * + * def get_anon(self: anon_map, obj: object, /) -> Tuple[int, bool]: # <<<<<<<<<<<<<< + * self_dict: dict = self # type: ignore[type-arg] + * + */ + __pyx_tuple__13 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_obj, __pyx_n_s_self_dict, __pyx_n_s_idself); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(1, 109, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__13); + __Pyx_GIVEREF(__pyx_tuple__13); + __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__13, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_sql__util_cy_py, __pyx_n_s_get_anon, 109, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(1, 109, __pyx_L1_error) + + /* "sqlalchemy/sql/_util_cy.py":132 + * return self._add_missing(key) # type:ignore[no-any-return] + * + * def __missing__( # <<<<<<<<<<<<<< + * self: anon_map, key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], / + * ) -> int: + */ + __pyx_tuple__15 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_key); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(1, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__15); + __Pyx_GIVEREF(__pyx_tuple__15); + __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_sql__util_cy_py, __pyx_n_s_missing, 132, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(1, 132, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + * def __setstate_cython__(self, __pyx_state): + */ + __pyx_tuple__17 = PyTuple_Pack(1, __pyx_n_s_self); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__17); + __Pyx_GIVEREF(__pyx_tuple__17); + __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(0, 1, __pyx_L1_error) + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + */ + __pyx_codeobj__19 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__11, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, 
__pyx_n_s_setstate_cython, 3, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__19)) __PYX_ERR(0, 3, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __pyx_unpickle_prefix_anon_map(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_tuple__20 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__20)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__20); + __Pyx_GIVEREF(__pyx_tuple__20); + __pyx_codeobj__21 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_prefix_anon_map, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__21)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} +/* #### Code section: init_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { + __pyx_umethod_PyDict_Type_get.type = (PyObject*)&PyDict_Type; + __pyx_umethod_PyDict_Type_get.method_name = &__pyx_n_s_get; + if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(1, 1, __pyx_L1_error); + __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_int_222419149 = PyInt_FromLong(222419149L); if (unlikely(!__pyx_int_222419149)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_int_228825662 = PyInt_FromLong(228825662L); if (unlikely(!__pyx_int_228825662)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_int_238750788 = PyInt_FromLong(238750788L); if (unlikely(!__pyx_int_238750788)) __PYX_ERR(1, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: init_globals ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + return 0; +} +/* #### Code section: init_module ### */ + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + 
__Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + #if CYTHON_USE_TYPE_SPECS + __pyx_t_1 = PyTuple_Pack(1, (PyObject *)(&PyDict_Type)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map_spec, __pyx_t_1); + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map)) __PYX_ERR(1, 45, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map_spec, __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map) < 0) __PYX_ERR(1, 45, __pyx_L1_error) + #else + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map = &__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map; + #endif + if (sizeof(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map) != sizeof(PyDictObject)) { + if (__Pyx_validate_extern_base((&PyDict_Type)) < 0) __PYX_ERR(1, 45, __pyx_L1_error) + } + #if !CYTHON_COMPILING_IN_LIMITED_API + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_dealloc = (&PyDict_Type)->tp_dealloc; + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_base = (&PyDict_Type); + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_new = (&PyDict_Type)->tp_new; + #endif + #if !CYTHON_USE_TYPE_SPECS + if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map) < 0) __PYX_ERR(1, 45, __pyx_L1_error) + #endif + #if PY_MAJOR_VERSION < 3 + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_print = 0; + #endif + #if !CYTHON_COMPILING_IN_LIMITED_API + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_dictoffset && __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_getattro == PyObject_GenericGetAttr)) { + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + #endif + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_prefix_anon_map, (PyObject *) __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map) < 0) __PYX_ERR(1, 45, __pyx_L1_error) + #if !CYTHON_COMPILING_IN_LIMITED_API + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map) < 0) __PYX_ERR(1, 45, __pyx_L1_error) + #endif + __pyx_vtabptr_10sqlalchemy_3sql_8_util_cy_anon_map = &__pyx_vtable_10sqlalchemy_3sql_8_util_cy_anon_map; + __pyx_vtable_10sqlalchemy_3sql_8_util_cy_anon_map._add_missing = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *, PyObject *))__pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing; + #if CYTHON_USE_TYPE_SPECS + __pyx_t_1 = PyTuple_Pack(1, (PyObject *)(&PyDict_Type)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 72, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map_spec, __pyx_t_1); + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map)) __PYX_ERR(1, 72, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map_spec, __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map) < 0) __PYX_ERR(1, 72, __pyx_L1_error) + #else + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map = 
&__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map; + #endif + if (sizeof(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map) != sizeof(PyDictObject)) { + if (__Pyx_validate_extern_base((&PyDict_Type)) < 0) __PYX_ERR(1, 72, __pyx_L1_error) + } + #if !CYTHON_COMPILING_IN_LIMITED_API + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map->tp_dealloc = (&PyDict_Type)->tp_dealloc; + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map->tp_base = (&PyDict_Type); + #endif + #if !CYTHON_USE_TYPE_SPECS + if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map) < 0) __PYX_ERR(1, 72, __pyx_L1_error) + #endif + #if PY_MAJOR_VERSION < 3 + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map->tp_print = 0; + #endif + #if !CYTHON_COMPILING_IN_LIMITED_API + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map->tp_dictoffset && __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map->tp_getattro == PyObject_GenericGetAttr)) { + __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map->tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + #endif + if (__Pyx_SetVtable(__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map, __pyx_vtabptr_10sqlalchemy_3sql_8_util_cy_anon_map) < 0) __PYX_ERR(1, 72, __pyx_L1_error) + #if !CYTHON_COMPILING_IN_LIMITED_API + if (__Pyx_MergeVtables(__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map) < 0) __PYX_ERR(1, 72, __pyx_L1_error) + #endif + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_anon_map, (PyObject *) __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map) < 0) __PYX_ERR(1, 72, __pyx_L1_error) + #if !CYTHON_COMPILING_IN_LIMITED_API + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map) < 0) __PYX_ERR(1, 72, __pyx_L1_error) + #endif + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __pyx_t_1 = PyImport_ImportModule("sqlalchemy.util._collections_cy"); if (!__pyx_t_1) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (__Pyx_ImportFunction_3_0_11(__pyx_t_1, "_get_id", (void (**)(void))&__pyx_f_10sqlalchemy_4util_15_collections_cy__get_id, "unsigned PY_LONG_LONG (PyObject *)") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec__util_cy(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, 
(void*)__pyx_pymod_exec__util_cy}, + {0, NULL} +}; +#endif + +#ifdef __cplusplus +namespace { + struct PyModuleDef __pyx_moduledef = + #else + static struct PyModuleDef __pyx_moduledef = + #endif + { + PyModuleDef_HEAD_INIT, + "_util_cy", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #elif CYTHON_USE_MODULE_STATE + sizeof(__pyx_mstate), /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + #if CYTHON_USE_MODULE_STATE + __pyx_m_traverse, /* m_traverse */ + __pyx_m_clear, /* m_clear */ + NULL /* m_free */ + #else + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ + #endif + }; + #ifdef __cplusplus +} /* anonymous namespace */ +#endif +#endif + +#ifndef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#elif PY_MAJOR_VERSION < 3 +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" void +#else +#define __Pyx_PyMODINIT_FUNC void +#endif +#else +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyObject * +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC init_util_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC init_util_cy(void) +#else +__Pyx_PyMODINIT_FUNC PyInit__util_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit__util_cy(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? 
-1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) +#else +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) +#endif +{ + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { +#if CYTHON_COMPILING_IN_LIMITED_API + result = PyModule_AddObject(module, to_name, value); +#else + result = PyDict_SetItemString(moddict, to_name, value); +#endif + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + CYTHON_UNUSED_VAR(def); + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; +#if CYTHON_COMPILING_IN_LIMITED_API + moddict = module; +#else + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; +#endif + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec__util_cy(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + int stringtab_initialized = 0; + #if CYTHON_USE_MODULE_STATE + int pystate_addmodule_run = 0; + #endif + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module '_util_cy' has already been imported. 
Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_util_cy", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + if (unlikely(!__pyx_m)) __PYX_ERR(1, 1, __pyx_L1_error) + #elif CYTHON_USE_MODULE_STATE + __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 1, __pyx_L1_error) + { + int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); + __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_util_cy" pseudovariable */ + if (unlikely((add_module_result < 0))) __PYX_ERR(1, 1, __pyx_L1_error) + pystate_addmodule_run = 1; + } + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + if (unlikely(!__pyx_m)) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #endif + CYTHON_UNUSED_VAR(__pyx_t_1); + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(1, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(1, 1, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__util_cy(void)", 0); + if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(1, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + PyEval_InitThreads(); + #endif + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitConstants() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + stringtab_initialized = 1; + if (__Pyx_InitGlobals() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_sqlalchemy__sql___util_cy) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(1, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "sqlalchemy.sql._util_cy")) { + if (unlikely((PyDict_SetItemString(modules, "sqlalchemy.sql._util_cy", __pyx_m) < 0))) __PYX_ERR(1, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + if (unlikely((__Pyx_modinit_type_init_code() < 0))) __PYX_ERR(1, 1, __pyx_L1_error) + (void)__Pyx_modinit_type_import_code(); + (void)__Pyx_modinit_variable_import_code(); + if (unlikely((__Pyx_modinit_function_import_code() < 0))) __PYX_ERR(1, 1, __pyx_L1_error) + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + + /* "sqlalchemy/sql/_util_cy.py":10 + * from __future__ import annotations + * + * from typing import Dict # <<<<<<<<<<<<<< + * from typing import Tuple + * from typing import TYPE_CHECKING + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Dict); + __Pyx_GIVEREF(__pyx_n_s_Dict); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Dict)) __PYX_ERR(1, 10, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Dict); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Dict, __pyx_t_2) < 0) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/sql/_util_cy.py":11 + * + * from typing import Dict + * from typing import Tuple # <<<<<<<<<<<<<< + * from typing import TYPE_CHECKING + * from typing import Union + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Tuple); + __Pyx_GIVEREF(__pyx_n_s_Tuple); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Tuple)) __PYX_ERR(1, 11, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Tuple); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Tuple, __pyx_t_3) < 0) __PYX_ERR(1, 
11, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/sql/_util_cy.py":12 + * from typing import Dict + * from typing import Tuple + * from typing import TYPE_CHECKING # <<<<<<<<<<<<<< + * from typing import Union + * + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_TYPE_CHECKING); + __Pyx_GIVEREF(__pyx_n_s_TYPE_CHECKING); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_TYPE_CHECKING)) __PYX_ERR(1, 12, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_TYPE_CHECKING); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_TYPE_CHECKING, __pyx_t_2) < 0) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/sql/_util_cy.py":13 + * from typing import Tuple + * from typing import TYPE_CHECKING + * from typing import Union # <<<<<<<<<<<<<< + * + * from ..util.typing import Literal + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Union); + __Pyx_GIVEREF(__pyx_n_s_Union); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Union)) __PYX_ERR(1, 13, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Union); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Union, __pyx_t_3) < 0) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/sql/_util_cy.py":15 + * from typing import Union + * + * from ..util.typing import Literal # <<<<<<<<<<<<<< + * + * if TYPE_CHECKING: + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Literal); + __Pyx_GIVEREF(__pyx_n_s_Literal); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Literal)) __PYX_ERR(1, 15, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_util_typing, __pyx_t_2, 2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Literal); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Literal, __pyx_t_2) < 0) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/sql/_util_cy.py":17 + * from ..util.typing import Literal + * + * if TYPE_CHECKING: # <<<<<<<<<<<<<< + * from .cache_key import CacheConst + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_TYPE_CHECKING); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely((__pyx_t_4 < 0))) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if 
(__pyx_t_4) { + + /* "sqlalchemy/sql/_util_cy.py":18 + * + * if TYPE_CHECKING: + * from .cache_key import CacheConst # <<<<<<<<<<<<<< + * + * # START GENERATED CYTHON IMPORT + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_CacheConst); + __Pyx_GIVEREF(__pyx_n_s_CacheConst); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_CacheConst)) __PYX_ERR(1, 18, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_cache_key, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_CacheConst); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CacheConst, __pyx_t_3) < 0) __PYX_ERR(1, 18, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/sql/_util_cy.py":17 + * from ..util.typing import Literal + * + * if TYPE_CHECKING: # <<<<<<<<<<<<<< + * from .cache_key import CacheConst + * + */ + } + + /* "sqlalchemy/sql/_util_cy.py":22 + * # START GENERATED CYTHON IMPORT + * # This section is automatically generated by the script tools/cython_imports.py + * try: # <<<<<<<<<<<<<< + * # NOTE: the cython compiler needs this "import cython" in the file, it + * # can't be only "from sqlalchemy.util import cython" with the fallback + */ + { + (void)__pyx_t_1; (void)__pyx_t_5; (void)__pyx_t_6; /* mark used */ + /*try:*/ { + + /* "sqlalchemy/sql/_util_cy.py":26 + * # can't be only "from sqlalchemy.util import cython" with the fallback + * # in that module + * import cython # <<<<<<<<<<<<<< + * except ModuleNotFoundError: + * from sqlalchemy.util import cython + */ + } + } + + /* "sqlalchemy/sql/_util_cy.py":31 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 31, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_bool) < 0) __PYX_ERR(1, 31, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_3sql_8_util_cy_1_is_compiled, 0, __pyx_n_s_is_compiled, NULL, __pyx_n_s_sqlalchemy_sql__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__6)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 31, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_compiled, __pyx_t_3) < 0) __PYX_ERR(1, 31, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/sql/_util_cy.py":57 + * """ + * + * def __missing__(self, key: str, /) -> str: # <<<<<<<<<<<<<< + * derived: str + * value: str + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 57, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_key, __pyx_n_s_str) < 0) __PYX_ERR(1, 57, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_str) < 0) __PYX_ERR(1, 57, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_1__missing__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_prefix_anon_map___missing, NULL, __pyx_n_s_sqlalchemy_sql__util_cy, __pyx_d, 
((PyObject *)__pyx_codeobj__8)); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 57, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map, __pyx_n_s_missing, __pyx_t_2) < 0) __PYX_ERR(1, 57, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map); + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_3__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_prefix_anon_map___reduce_cython, NULL, __pyx_n_s_sqlalchemy_sql__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__10)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map, __pyx_n_s_reduce_cython, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map); + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_prefix_anon_map__set_state(self, __pyx_state) + */ + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_5__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_prefix_anon_map___setstate_cytho, NULL, __pyx_n_s_sqlalchemy_sql__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__12)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map, __pyx_n_s_setstate_cython, __pyx_t_2) < 0) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map); + + /* "sqlalchemy/sql/_util_cy.py":109 + * return val + * + * def get_anon(self: anon_map, obj: object, /) -> Tuple[int, bool]: # <<<<<<<<<<<<<< + * self_dict: dict = self # type: ignore[type-arg] + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 109, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_self, __pyx_n_s_anon_map) < 0) __PYX_ERR(1, 109, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_obj, __pyx_n_s_object) < 0) __PYX_ERR(1, 109, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_Tuple_int_bool) < 0) __PYX_ERR(1, 109, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_3get_anon, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_anon_map_get_anon, NULL, __pyx_n_s_sqlalchemy_sql__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__14)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 109, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map, __pyx_n_s_get_anon, __pyx_t_3) < 0) __PYX_ERR(1, 109, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map); + + /* 
"sqlalchemy/sql/_util_cy.py":132 + * return self._add_missing(key) # type:ignore[no-any-return] + * + * def __missing__( # <<<<<<<<<<<<<< + * self: anon_map, key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], / + * ) -> int: + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_self, __pyx_n_s_anon_map) < 0) __PYX_ERR(1, 132, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_key, __pyx_kp_s_Union_int_str_Literal_CacheConst) < 0) __PYX_ERR(1, 132, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_int) < 0) __PYX_ERR(1, 132, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_7__missing__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_anon_map___missing, NULL, __pyx_n_s_sqlalchemy_sql__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__16)); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map, __pyx_n_s_missing, __pyx_t_2) < 0) __PYX_ERR(1, 132, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map); + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + * def __setstate_cython__(self, __pyx_state): + */ + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_9__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_anon_map___reduce_cython, NULL, __pyx_n_s_sqlalchemy_sql__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__18)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_reduce_cython, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" + */ + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_11__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_anon_map___setstate_cython, NULL, __pyx_n_s_sqlalchemy_sql__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__19)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_setstate_cython, __pyx_t_2) < 0) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_prefix_anon_map(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_3sql_8_util_cy_3__pyx_unpickle_prefix_anon_map, 0, __pyx_n_s_pyx_unpickle_prefix_anon_map, NULL, __pyx_n_s_sqlalchemy_sql__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__21)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_prefix_anon_map, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* 
"sqlalchemy/sql/_util_cy.py":1 + * # sql/_util_cy.py # <<<<<<<<<<<<<< + * # Copyright (C) 2010-2025 the SQLAlchemy authors and contributors + * # + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + if (__pyx_m) { + if (__pyx_d && stringtab_initialized) { + __Pyx_AddTraceback("init sqlalchemy.sql._util_cy", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + #if !CYTHON_USE_MODULE_STATE + Py_CLEAR(__pyx_m); + #else + Py_DECREF(__pyx_m); + if (pystate_addmodule_run) { + PyObject *tp, *value, *tb; + PyErr_Fetch(&tp, &value, &tb); + PyState_RemoveModule(&__pyx_moduledef); + PyErr_Restore(tp, value, tb); + } + #endif + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init sqlalchemy.sql._util_cy"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} +/* #### Code section: cleanup_globals ### */ +/* #### Code section: cleanup_module ### */ +/* #### Code section: main_method ### */ +/* #### Code section: utility_code_pragmas ### */ +#ifdef _MSC_VER +#pragma warning( push ) +/* Warning 4127: conditional expression is constant + * Cython uses constant conditional expressions to allow in inline functions to be optimized at + * compile-time, so this warning is not useful + */ +#pragma warning( disable : 4127 ) +#endif + + + +/* #### Code section: utility_code_def ### */ + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030C00A6 + PyObject *current_exception = tstate->current_exception; + if (unlikely(!current_exception)) return 0; + exc_type = (PyObject*) Py_TYPE(current_exception); + if (exc_type == err) return 1; +#else + exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; +#endif + #if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(exc_type); + #endif + if (unlikely(PyTuple_Check(err))) { + result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + } else { + result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err); + } + #if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(exc_type); + #endif + return result; +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { +#if PY_VERSION_HEX >= 0x030C00A6 + PyObject *tmp_value; + assert(type == NULL || (value != NULL && type == (PyObject*) Py_TYPE(value))); + if (value) { + #if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(((PyBaseExceptionObject*) 
value)->traceback != tb)) + #endif + PyException_SetTraceback(value, tb); + } + tmp_value = tstate->current_exception; + tstate->current_exception = value; + Py_XDECREF(tmp_value); + Py_XDECREF(type); + Py_XDECREF(tb); +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#endif +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { +#if PY_VERSION_HEX >= 0x030C00A6 + PyObject* exc_value; + exc_value = tstate->current_exception; + tstate->current_exception = 0; + *value = exc_value; + *type = NULL; + *tb = NULL; + if (exc_value) { + *type = (PyObject*) Py_TYPE(exc_value); + Py_INCREF(*type); + #if CYTHON_COMPILING_IN_CPYTHON + *tb = ((PyBaseExceptionObject*) exc_value)->traceback; + Py_XINCREF(*tb); + #else + *tb = PyException_GetTraceback(exc_value); + #endif + } +#else + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#endif +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* PyObjectGetAttrStrNoError */ +#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + (void) PyObject_GetOptionalAttr(obj, attr_name, &result); + return result; +#else +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { + return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); + } +#endif + result = __Pyx_PyObject_GetAttrStr(obj, attr_name); + if (unlikely(!result)) { + __Pyx_PyObject_GetAttrStr_ClearAttributeError(); + } + return result; +#endif +} + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStrNoError(__pyx_b, name); + if (unlikely(!result) && !PyErr_Occurred()) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* TupleAndListFromArray */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE void __Pyx_copy_object_array(PyObject *const *CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { + PyObject *v; + Py_ssize_t i; + for (i = 0; i < length; i++) { + v = dest[i] = src[i]; + Py_INCREF(v); + } +} +static CYTHON_INLINE PyObject * +__Pyx_PyTuple_FromArray(PyObject *const *src, 
Py_ssize_t n) +{ + PyObject *res; + if (n <= 0) { + Py_INCREF(__pyx_empty_tuple); + return __pyx_empty_tuple; + } + res = PyTuple_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyTupleObject*)res)->ob_item, n); + return res; +} +static CYTHON_INLINE PyObject * +__Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n) +{ + PyObject *res; + if (n <= 0) { + return PyList_New(0); + } + res = PyList_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyListObject*)res)->ob_item, n); + return res; +} +#endif + +/* BytesEquals */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + ps1 = PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + return (equals == Py_EQ); + } else { + int result; +#if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) + Py_hash_t hash1, hash2; + hash1 = ((PyBytesObject*)s1)->ob_shash; + hash2 = ((PyBytesObject*)s2)->ob_shash; + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + return (equals == Py_NE); + } +#endif + result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +/* UnicodeEquals */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } +#if CYTHON_USE_UNICODE_INTERNALS + { + Py_hash_t hash1, hash2; + #if CYTHON_PEP393_ENABLED + hash1 = ((PyASCIIObject*)s1)->hash; + hash2 = ((PyASCIIObject*)s2)->hash; + #else + hash1 = ((PyUnicodeObject*)s1)->hash; + hash2 = 
((PyUnicodeObject*)s2)->hash; + #endif + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + goto return_ne; + } + } +#endif + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + +/* fastcall */ +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s) +{ + Py_ssize_t i, n = PyTuple_GET_SIZE(kwnames); + for (i = 0; i < n; i++) + { + if (s == PyTuple_GET_ITEM(kwnames, i)) return kwvalues[i]; + } + for (i = 0; i < n; i++) + { + int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); + if (unlikely(eq != 0)) { + if (unlikely(eq < 0)) return NULL; + return kwvalues[i]; + } + } + return NULL; +} +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 +CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { + Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); + PyObject *dict; + dict = PyDict_New(); + if (unlikely(!dict)) + return NULL; + for (i=0; i= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; + PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; + if (mm && mm->mp_subscript) { + PyObject *r, *key = PyInt_FromSsize_t(i); + if (unlikely(!key)) return NULL; + r = mm->mp_subscript(o, key); + Py_DECREF(key); + return r; + } + if (likely(sm && sm->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { + Py_ssize_t l = sm->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return sm->sq_item(o, i); + } + } +#else + if (is_list || !PyMapping_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* RaiseUnexpectedTypeError */ +static int +__Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) +{ + __Pyx_TypeName obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, + expected, obj_type_name); + __Pyx_DECREF_TypeName(obj_type_name); + return 0; +} + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = Py_TYPE(func)->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* UnpackUnboundCMethod */ +static PyObject *__Pyx_SelflessCall(PyObject *method, PyObject *args, PyObject *kwargs) { + PyObject *result; + PyObject *selfless_args = PyTuple_GetSlice(args, 1, PyTuple_Size(args)); + if (unlikely(!selfless_args)) return NULL; + result = PyObject_Call(method, selfless_args, kwargs); + Py_DECREF(selfless_args); + return result; +} +static PyMethodDef __Pyx_UnboundCMethod_Def = { + "CythonUnboundCMethod", + __PYX_REINTERPRET_FUNCION(PyCFunction, __Pyx_SelflessCall), + METH_VARARGS | METH_KEYWORDS, + NULL +}; +static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) { + PyObject *method; + method = __Pyx_PyObject_GetAttrStr(target->type, *target->method_name); + if (unlikely(!method)) + return -1; + target->method = method; +#if CYTHON_COMPILING_IN_CPYTHON + #if PY_MAJOR_VERSION >= 3 + if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type))) + #else + if (likely(!__Pyx_CyOrPyCFunction_Check(method))) + #endif + { + PyMethodDescrObject *descr = (PyMethodDescrObject*) method; + target->func = descr->d_method->ml_meth; + target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_STACKLESS); + } else +#endif +#if CYTHON_COMPILING_IN_PYPY +#else + if (PyCFunction_Check(method)) +#endif + { + PyObject *self; + int self_found; +#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY + self = PyObject_GetAttrString(method, "__self__"); + if (!self) { + PyErr_Clear(); + } +#else + self = PyCFunction_GET_SELF(method); 
+#endif + self_found = (self && self != Py_None); +#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY + Py_XDECREF(self); +#endif + if (self_found) { + PyObject *unbound_method = PyCFunction_New(&__Pyx_UnboundCMethod_Def, method); + if (unlikely(!unbound_method)) return -1; + Py_DECREF(method); + target->method = unbound_method; + } + } + return 0; +} + +/* CallUnboundCMethod1 */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) { + if (likely(cfunc->func)) { + int flag = cfunc->flag; + if (flag == METH_O) { + return (*(cfunc->func))(self, arg); + } else if ((PY_VERSION_HEX >= 0x030600B1) && flag == METH_FASTCALL) { + #if PY_VERSION_HEX >= 0x030700A0 + return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1); + #else + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + #endif + } else if ((PY_VERSION_HEX >= 0x030700A0) && flag == (METH_FASTCALL | METH_KEYWORDS)) { + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + } + } + return __Pyx__CallUnboundCMethod1(cfunc, self, arg); +} +#endif +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(1); + if (unlikely(!args)) goto bad; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + if (cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); + } else { + args = PyTuple_New(2); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 1, arg); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + } +#else + args = PyTuple_Pack(2, self, arg); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); +#endif +bad: + Py_XDECREF(args); + return result; +} + +/* CallUnboundCMethod2 */ +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 +static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2) { + if (likely(cfunc->func)) { + PyObject *args[2] = {arg1, arg2}; + if (cfunc->flag == METH_FASTCALL) { + #if PY_VERSION_HEX >= 0x030700A0 + return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, args, 2); + #else + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); + #endif + } + #if PY_VERSION_HEX >= 0x030700A0 + if (cfunc->flag == (METH_FASTCALL | METH_KEYWORDS)) + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); + #endif + } + return __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2); +} +#endif +static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(2); + if (unlikely(!args)) goto 
bad; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + if (cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); + } else { + args = PyTuple_New(3); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 1, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 2, arg2); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + } +#else + args = PyTuple_Pack(3, self, arg1, arg2); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); +#endif +bad: + Py_XDECREF(args); + return result; +} + +/* dict_getitem_default */ +static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) { + PyObject* value; +#if PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (unlikely(PyErr_Occurred())) + return NULL; + value = default_value; + } + Py_INCREF(value); + if ((1)); +#else + if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) { + value = PyDict_GetItem(d, key); + if (unlikely(!value)) { + value = default_value; + } + Py_INCREF(value); + } +#endif + else { + if (default_value == Py_None) + value = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyDict_Type_get, d, key); + else + value = __Pyx_CallUnboundCMethod2(&__pyx_umethod_PyDict_Type_get, d, key, default_value); + } + return value; +} + +/* PyUnicode_Unicode */ +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Unicode(PyObject *obj) { + if (unlikely(obj == Py_None)) + obj = __pyx_kp_u_None; + return __Pyx_NewRef(obj); +} + +/* JoinPyUnicode */ +static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, + Py_UCS4 max_char) { +#if CYTHON_USE_UNICODE_INTERNALS && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + PyObject *result_uval; + int result_ukind, kind_shift; + Py_ssize_t i, char_pos; + void *result_udata; + CYTHON_MAYBE_UNUSED_VAR(max_char); +#if CYTHON_PEP393_ENABLED + result_uval = PyUnicode_New(result_ulength, max_char); + if (unlikely(!result_uval)) return NULL; + result_ukind = (max_char <= 255) ? PyUnicode_1BYTE_KIND : (max_char <= 65535) ? PyUnicode_2BYTE_KIND : PyUnicode_4BYTE_KIND; + kind_shift = (result_ukind == PyUnicode_4BYTE_KIND) ? 2 : result_ukind - 1; + result_udata = PyUnicode_DATA(result_uval); +#else + result_uval = PyUnicode_FromUnicode(NULL, result_ulength); + if (unlikely(!result_uval)) return NULL; + result_ukind = sizeof(Py_UNICODE); + kind_shift = (result_ukind == 4) ? 
2 : result_ukind - 1; + result_udata = PyUnicode_AS_UNICODE(result_uval); +#endif + assert(kind_shift == 2 || kind_shift == 1 || kind_shift == 0); + char_pos = 0; + for (i=0; i < value_count; i++) { + int ukind; + Py_ssize_t ulength; + void *udata; + PyObject *uval = PyTuple_GET_ITEM(value_tuple, i); + if (unlikely(__Pyx_PyUnicode_READY(uval))) + goto bad; + ulength = __Pyx_PyUnicode_GET_LENGTH(uval); + if (unlikely(!ulength)) + continue; + if (unlikely((PY_SSIZE_T_MAX >> kind_shift) - ulength < char_pos)) + goto overflow; + ukind = __Pyx_PyUnicode_KIND(uval); + udata = __Pyx_PyUnicode_DATA(uval); + if (!CYTHON_PEP393_ENABLED || ukind == result_ukind) { + memcpy((char *)result_udata + (char_pos << kind_shift), udata, (size_t) (ulength << kind_shift)); + } else { + #if PY_VERSION_HEX >= 0x030d0000 + if (unlikely(PyUnicode_CopyCharacters(result_uval, char_pos, uval, 0, ulength) < 0)) goto bad; + #elif CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 || defined(_PyUnicode_FastCopyCharacters) + _PyUnicode_FastCopyCharacters(result_uval, char_pos, uval, 0, ulength); + #else + Py_ssize_t j; + for (j=0; j < ulength; j++) { + Py_UCS4 uchar = __Pyx_PyUnicode_READ(ukind, udata, j); + __Pyx_PyUnicode_WRITE(result_ukind, result_udata, char_pos+j, uchar); + } + #endif + } + char_pos += ulength; + } + return result_uval; +overflow: + PyErr_SetString(PyExc_OverflowError, "join() result is too long for a Python string"); +bad: + Py_DECREF(result_uval); + return NULL; +#else + CYTHON_UNUSED_VAR(max_char); + CYTHON_UNUSED_VAR(result_ulength); + CYTHON_UNUSED_VAR(value_count); + return PyUnicode_Join(__pyx_empty_unicode, value_tuple); +#endif +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* KeywordStringCheck */ +static int __Pyx_CheckKeywordStrings( + PyObject *kw, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kw, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + if (CYTHON_METH_FASTCALL && likely(PyTuple_Check(kw))) { + Py_ssize_t kwsize; +#if CYTHON_ASSUME_SAFE_MACROS + kwsize = PyTuple_GET_SIZE(kw); +#else + kwsize = PyTuple_Size(kw); + if (kwsize < 0) return 0; +#endif + if (unlikely(kwsize == 0)) + return 1; + if (!kw_allowed) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, 0); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + goto invalid_keyword; + } +#if PY_VERSION_HEX < 0x03090000 + for (pos = 0; pos < kwsize; pos++) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, pos); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } +#endif + return 1; + } + while (PyDict_Next(kw, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if (!kw_allowed && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + +/* GetAttr3 */ +#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 +static PyObject *__Pyx_GetAttr3Default(PyObject *d) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + __Pyx_PyErr_Clear(); + Py_INCREF(d); + return d; +} +#endif +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r; +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + int res = PyObject_GetOptionalAttr(o, n, &r); + return (res != 0) ? r : __Pyx_NewRef(d); +#else + #if CYTHON_USE_TYPE_SLOTS + if (likely(PyString_Check(n))) { + r = __Pyx_PyObject_GetAttrStrNoError(o, n); + if (unlikely(!r) && likely(!PyErr_Occurred())) { + r = __Pyx_NewRef(d); + } + return r; + } + #endif + r = PyObject_GetAttr(o, n); + return (likely(r)) ? r : __Pyx_GetAttr3Default(d); +#endif +} + +/* PyDictVersioning */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? 
__PYX_GET_DICT_VERSION(*dictptr) : 0; +} +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#elif CYTHON_COMPILING_IN_LIMITED_API + if (unlikely(!__pyx_m)) { + return NULL; + } + result = PyObject_GetAttr(__pyx_m, name); + if (likely(result)) { + return result; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + int kwds_is_tuple = CYTHON_METH_FASTCALL && likely(PyTuple_Check(kwds)); + while (1) { + Py_XDECREF(key); key = NULL; + Py_XDECREF(value); value = NULL; + if (kwds_is_tuple) { + Py_ssize_t size; +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(kwds); +#else + size = PyTuple_Size(kwds); + if (size < 0) goto bad; +#endif + if (pos >= size) break; +#if CYTHON_AVOID_BORROWED_REFS + key = __Pyx_PySequence_ITEM(kwds, pos); + if (!key) goto bad; +#elif CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kwds, pos); +#else + key = PyTuple_GetItem(kwds, pos); + if (!key) goto bad; +#endif + value = kwvalues[pos]; + pos++; + } + else + { + if (!PyDict_Next(kwds, &pos, &key, &value)) break; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + } + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(value); + Py_DECREF(key); +#endif + key = NULL; + value = NULL; + continue; + } +#if !CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + Py_INCREF(value); + name = first_kw_arg; 
+ #if PY_MAJOR_VERSION < 3 + if (likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = ( + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key) + ); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + Py_XDECREF(key); + Py_XDECREF(value); + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + Py_XDECREF(key); + Py_XDECREF(value); + return -1; +} + +/* DictGetItem */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { + PyObject *value; + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (!PyErr_Occurred()) { + if (unlikely(PyTuple_Check(key))) { + PyObject* args = PyTuple_Pack(1, key); + if (likely(args)) { + PyErr_SetObject(PyExc_KeyError, args); + Py_DECREF(args); + } + } else { + PyErr_SetObject(PyExc_KeyError, key); + } + } + return NULL; + } + Py_INCREF(value); + return value; +} +#endif + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + __Pyx_PyThreadState_declare + CYTHON_UNUSED_VAR(cause); + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if 
(value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { + #if PY_VERSION_HEX >= 0x030C00A6 + PyException_SetTraceback(value, tb); + #elif CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *module = 0; + PyObject *empty_dict = 0; + PyObject *empty_list = 0; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + 
py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (unlikely(!py_import)) + goto bad; + if (!from_list) { + empty_list = PyList_New(0); + if (unlikely(!empty_list)) + goto bad; + from_list = empty_list; + } + #endif + empty_dict = PyDict_New(); + if (unlikely(!empty_dict)) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.') != NULL) { + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, 1); + if (unlikely(!module)) { + if (unlikely(!PyErr_ExceptionMatches(PyExc_ImportError))) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (unlikely(!py_level)) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, __pyx_d, empty_dict, from_list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, level); + #endif + } + } +bad: + Py_XDECREF(empty_dict); + Py_XDECREF(empty_list); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + const char* module_name_str = 0; + PyObject* module_name = 0; + PyObject* module_dot = 0; + PyObject* full_name = 0; + PyErr_Clear(); + module_name_str = PyModule_GetName(module); + if (unlikely(!module_name_str)) { goto modbad; } + module_name = PyUnicode_FromString(module_name_str); + if (unlikely(!module_name)) { goto modbad; } + module_dot = PyUnicode_Concat(module_name, __pyx_kp_u__4); + if (unlikely(!module_dot)) { goto modbad; } + full_name = PyUnicode_Concat(module_dot, name); + if (unlikely(!full_name)) { goto modbad; } + #if PY_VERSION_HEX < 0x030700A1 || (CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM < 0x07030400) + { + PyObject *modules = PyImport_GetModuleDict(); + if (unlikely(!modules)) + goto modbad; + value = PyObject_GetItem(modules, full_name); + } + #else + value = PyImport_GetModule(full_name); + #endif + modbad: + Py_XDECREF(full_name); + Py_XDECREF(module_dot); + Py_XDECREF(module_name); + } + if (unlikely(!value)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL && !CYTHON_VECTORCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. 
+ */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? PyDict_Size(kwargs) : 0; + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) { + return NULL; + } + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) { + return NULL; + } + #endif + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = __Pyx_CyOrPyCFunction_GET_FUNCTION(func); + self = __Pyx_CyOrPyCFunction_GET_SELF(func); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; 
+} +#endif + +/* PyObjectFastCall */ +#if PY_VERSION_HEX < 0x03090000 || CYTHON_COMPILING_IN_LIMITED_API +static PyObject* __Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs) { + PyObject *argstuple; + PyObject *result = 0; + size_t i; + argstuple = PyTuple_New((Py_ssize_t)nargs); + if (unlikely(!argstuple)) return NULL; + for (i = 0; i < nargs; i++) { + Py_INCREF(args[i]); + if (__Pyx_PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]) < 0) goto bad; + } + result = __Pyx_PyObject_Call(func, argstuple, kwargs); + bad: + Py_DECREF(argstuple); + return result; +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t _nargs, PyObject *kwargs) { + Py_ssize_t nargs = __Pyx_PyVectorcall_NARGS(_nargs); +#if CYTHON_COMPILING_IN_CPYTHON + if (nargs == 0 && kwargs == NULL) { + if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_NOARGS)) + return __Pyx_PyObject_CallMethO(func, NULL); + } + else if (nargs == 1 && kwargs == NULL) { + if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_O)) + return __Pyx_PyObject_CallMethO(func, args[0]); + } +#endif + #if PY_VERSION_HEX < 0x030800B1 + #if CYTHON_FAST_PYCCALL + if (PyCFunction_Check(func)) { + if (kwargs) { + return _PyCFunction_FastCallDict(func, args, nargs, kwargs); + } else { + return _PyCFunction_FastCallKeywords(func, args, nargs, NULL); + } + } + #if PY_VERSION_HEX >= 0x030700A1 + if (!kwargs && __Pyx_IS_TYPE(func, &PyMethodDescr_Type)) { + return _PyMethodDescr_FastCallKeywords(func, args, nargs, NULL); + } + #endif + #endif + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs); + } + #endif + #endif + if (kwargs == NULL) { + #if CYTHON_VECTORCALL + #if PY_VERSION_HEX < 0x03090000 + vectorcallfunc f = _PyVectorcall_Function(func); + #else + vectorcallfunc f = PyVectorcall_Function(func); + #endif + if (f) { + return f(func, args, (size_t)nargs, NULL); + } + #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL + if (__Pyx_CyFunction_CheckExact(func)) { + __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func); + if (f) return f(func, args, (size_t)nargs, NULL); + } + #endif + } + if (nargs == 0) { + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, kwargs); + } + #if PY_VERSION_HEX >= 0x03090000 && !CYTHON_COMPILING_IN_LIMITED_API + return PyObject_VectorcallDict(func, args, (size_t)nargs, kwargs); + #else + return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs); + #endif +} + +/* GetAttr */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_USE_TYPE_SLOTS +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/* HasAttr */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { + PyObject *r; + if (unlikely(!__Pyx_PyBaseString_Check(n))) { + PyErr_SetString(PyExc_TypeError, + "hasattr(): attribute name must be string"); + return -1; + } + r = __Pyx_GetAttr(o, n); + if (!r) { + PyErr_Clear(); + return 0; + } else { + Py_DECREF(r); + return 1; + } +} + +/* FixUpExtensionType */ +#if CYTHON_USE_TYPE_SPECS +static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { +#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + 
CYTHON_UNUSED_VAR(spec); + CYTHON_UNUSED_VAR(type); +#else + const PyType_Slot *slot = spec->slots; + while (slot && slot->slot && slot->slot != Py_tp_members) + slot++; + if (slot && slot->slot == Py_tp_members) { + int changed = 0; +#if !(PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON) + const +#endif + PyMemberDef *memb = (PyMemberDef*) slot->pfunc; + while (memb && memb->name) { + if (memb->name[0] == '_' && memb->name[1] == '_') { +#if PY_VERSION_HEX < 0x030900b1 + if (strcmp(memb->name, "__weaklistoffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + type->tp_weaklistoffset = memb->offset; + changed = 1; + } + else if (strcmp(memb->name, "__dictoffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + type->tp_dictoffset = memb->offset; + changed = 1; + } +#if CYTHON_METH_FASTCALL + else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); +#if PY_VERSION_HEX >= 0x030800b4 + type->tp_vectorcall_offset = memb->offset; +#else + type->tp_print = (printfunc) memb->offset; +#endif + changed = 1; + } +#endif +#else + if ((0)); +#endif +#if PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON + else if (strcmp(memb->name, "__module__") == 0) { + PyObject *descr; + assert(memb->type == T_OBJECT); + assert(memb->flags == 0 || memb->flags == READONLY); + descr = PyDescr_NewMember(type, memb); + if (unlikely(!descr)) + return -1; + if (unlikely(PyDict_SetItem(type->tp_dict, PyDescr_NAME(descr), descr) < 0)) { + Py_DECREF(descr); + return -1; + } + Py_DECREF(descr); + changed = 1; + } +#endif + } + memb++; + } + if (changed) + PyType_Modified(type); + } +#endif + return 0; +} +#endif + +/* FormatTypeName */ +#if CYTHON_COMPILING_IN_LIMITED_API +static __Pyx_TypeName +__Pyx_PyType_GetName(PyTypeObject* tp) +{ + PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, + __pyx_n_s_name); + if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { + PyErr_Clear(); + Py_XDECREF(name); + name = __Pyx_NewRef(__pyx_n_s__5); + } + return name; +} +#endif + +/* ValidateExternBase */ +static int __Pyx_validate_extern_base(PyTypeObject *base) { + Py_ssize_t itemsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_itemsize; +#endif +#if !CYTHON_COMPILING_IN_LIMITED_API + itemsize = ((PyTypeObject *)base)->tp_itemsize; +#else + py_itemsize = PyObject_GetAttrString((PyObject*)base, "__itemsize__"); + if (!py_itemsize) + return -1; + itemsize = PyLong_AsSsize_t(py_itemsize); + Py_DECREF(py_itemsize); + py_itemsize = 0; + if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) + return -1; +#endif + if (itemsize) { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(base); + PyErr_Format(PyExc_TypeError, + "inheritance from PyVarObject types like '" __Pyx_FMT_TYPENAME "' not currently supported", b_name); + __Pyx_DECREF_TypeName(b_name); + return -1; + } + return 0; +} + +/* PyObjectCallNoArg */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { + PyObject *arg[2] = {NULL, NULL}; + return __Pyx_PyObject_FastCall(func, arg + 1, 0 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + +/* PyObjectCallOneArg */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *args[2] = {NULL, arg}; + return __Pyx_PyObject_FastCall(func, args+1, 1 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + +/* PyObjectGetMethod */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject 
**method) { + PyObject *attr; +#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP + __Pyx_TypeName type_name; + PyTypeObject *tp = Py_TYPE(obj); + PyObject *descr; + descrgetfunc f = NULL; + PyObject **dictptr, *dict; + int meth_found = 0; + assert (*method == NULL); + if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; + } + if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { + return 0; + } + descr = _PyType_Lookup(tp, name); + if (likely(descr != NULL)) { + Py_INCREF(descr); +#if defined(Py_TPFLAGS_METHOD_DESCRIPTOR) && Py_TPFLAGS_METHOD_DESCRIPTOR + if (__Pyx_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)) +#elif PY_MAJOR_VERSION >= 3 + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type))) + #endif +#else + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr))) + #endif +#endif + { + meth_found = 1; + } else { + f = Py_TYPE(descr)->tp_descr_get; + if (f != NULL && PyDescr_IsData(descr)) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + } + } + dictptr = _PyObject_GetDictPtr(obj); + if (dictptr != NULL && (dict = *dictptr) != NULL) { + Py_INCREF(dict); + attr = __Pyx_PyDict_GetItemStr(dict, name); + if (attr != NULL) { + Py_INCREF(attr); + Py_DECREF(dict); + Py_XDECREF(descr); + goto try_unpack; + } + Py_DECREF(dict); + } + if (meth_found) { + *method = descr; + return 1; + } + if (f != NULL) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + if (likely(descr != NULL)) { + *method = descr; + return 0; + } + type_name = __Pyx_PyType_GetName(tp); + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", + type_name, name); +#else + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", + type_name, PyString_AS_STRING(name)); +#endif + __Pyx_DECREF_TypeName(type_name); + return 0; +#else + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; +#endif +try_unpack: +#if CYTHON_UNPACK_METHODS + if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { + PyObject *function = PyMethod_GET_FUNCTION(attr); + Py_INCREF(function); + Py_DECREF(attr); + *method = function; + return 1; + } +#endif + *method = attr; + return 0; +} + +/* PyObjectCallMethod0 */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { + PyObject *method = NULL, *result = NULL; + int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); + if (likely(is_method)) { + result = __Pyx_PyObject_CallOneArg(method, obj); + Py_DECREF(method); + return result; + } + if (unlikely(!method)) goto bad; + result = __Pyx_PyObject_CallNoArg(method); + Py_DECREF(method); +bad: + return result; +} + +/* ValidateBasesTuple */ +#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS +static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases) { + Py_ssize_t i, n; +#if CYTHON_ASSUME_SAFE_MACROS + n = PyTuple_GET_SIZE(bases); +#else + n = PyTuple_Size(bases); + if (n < 0) return -1; +#endif + for (i = 1; i < n; i++) + { 
+#if CYTHON_AVOID_BORROWED_REFS + PyObject *b0 = PySequence_GetItem(bases, i); + if (!b0) return -1; +#elif CYTHON_ASSUME_SAFE_MACROS + PyObject *b0 = PyTuple_GET_ITEM(bases, i); +#else + PyObject *b0 = PyTuple_GetItem(bases, i); + if (!b0) return -1; +#endif + PyTypeObject *b; +#if PY_MAJOR_VERSION < 3 + if (PyClass_Check(b0)) + { + PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class", + PyString_AS_STRING(((PyClassObject*)b0)->cl_name)); +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } +#endif + b = (PyTypeObject*) b0; + if (!__Pyx_PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE)) + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "base class '" __Pyx_FMT_TYPENAME "' is not a heap type", b_name); + __Pyx_DECREF_TypeName(b_name); +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } + if (dictoffset == 0) + { + Py_ssize_t b_dictoffset = 0; +#if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY + b_dictoffset = b->tp_dictoffset; +#else + PyObject *py_b_dictoffset = PyObject_GetAttrString((PyObject*)b, "__dictoffset__"); + if (!py_b_dictoffset) goto dictoffset_return; + b_dictoffset = PyLong_AsSsize_t(py_b_dictoffset); + Py_DECREF(py_b_dictoffset); + if (b_dictoffset == -1 && PyErr_Occurred()) goto dictoffset_return; +#endif + if (b_dictoffset) { + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "extension type '%.200s' has no __dict__ slot, " + "but base type '" __Pyx_FMT_TYPENAME "' has: " + "either add 'cdef dict __dict__' to the extension type " + "or add '__slots__ = [...]' to the base type", + type_name, b_name); + __Pyx_DECREF_TypeName(b_name); + } +#if !(CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY) + dictoffset_return: +#endif +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } + } +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + } + return 0; +} +#endif + +/* PyType_Ready */ +static int __Pyx_PyType_Ready(PyTypeObject *t) { +#if CYTHON_USE_TYPE_SPECS || !(CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API) || defined(PYSTON_MAJOR_VERSION) + (void)__Pyx_PyObject_CallMethod0; +#if CYTHON_USE_TYPE_SPECS + (void)__Pyx_validate_bases_tuple; +#endif + return PyType_Ready(t); +#else + int r; + PyObject *bases = __Pyx_PyType_GetSlot(t, tp_bases, PyObject*); + if (bases && unlikely(__Pyx_validate_bases_tuple(t->tp_name, t->tp_dictoffset, bases) == -1)) + return -1; +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) + { + int gc_was_enabled; + #if PY_VERSION_HEX >= 0x030A00b1 + gc_was_enabled = PyGC_Disable(); + (void)__Pyx_PyObject_CallMethod0; + #else + PyObject *ret, *py_status; + PyObject *gc = NULL; + #if PY_VERSION_HEX >= 0x030700a1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM+0 >= 0x07030400) + gc = PyImport_GetModule(__pyx_kp_u_gc); + #endif + if (unlikely(!gc)) gc = PyImport_Import(__pyx_kp_u_gc); + if (unlikely(!gc)) return -1; + py_status = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_isenabled); + if (unlikely(!py_status)) { + Py_DECREF(gc); + return -1; + } + gc_was_enabled = __Pyx_PyObject_IsTrue(py_status); + Py_DECREF(py_status); + if (gc_was_enabled > 0) { + ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_disable); + if (unlikely(!ret)) { + Py_DECREF(gc); + return -1; + } + Py_DECREF(ret); + } else if (unlikely(gc_was_enabled == -1)) { + Py_DECREF(gc); + return -1; + } + #endif + t->tp_flags |= Py_TPFLAGS_HEAPTYPE; +#if PY_VERSION_HEX >= 0x030A0000 + t->tp_flags |= 
Py_TPFLAGS_IMMUTABLETYPE; +#endif +#else + (void)__Pyx_PyObject_CallMethod0; +#endif + r = PyType_Ready(t); +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) + t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; + #if PY_VERSION_HEX >= 0x030A00b1 + if (gc_was_enabled) + PyGC_Enable(); + #else + if (gc_was_enabled) { + PyObject *tp, *v, *tb; + PyErr_Fetch(&tp, &v, &tb); + ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_enable); + if (likely(ret || r == -1)) { + Py_XDECREF(ret); + PyErr_Restore(tp, v, tb); + } else { + Py_XDECREF(tp); + Py_XDECREF(v); + Py_XDECREF(tb); + r = -1; + } + } + Py_DECREF(gc); + #endif + } +#endif + return r; +#endif +} + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + __Pyx_TypeName type_name = __Pyx_PyType_GetName(tp); + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", + type_name, attr_name); +#else + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", + type_name, PyString_AS_STRING(attr_name)); +#endif + __Pyx_DECREF_TypeName(type_name); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* SetupReduce */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = __Pyx_PyObject_GetAttrStrNoError(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_getstate = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; + PyObject *getstate = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + getstate = _PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate); +#else + getstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_getstate); + if (!getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (getstate) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_getstate = _PyType_Lookup(&PyBaseObject_Type, 
__pyx_n_s_getstate); +#else + object_getstate = __Pyx_PyObject_GetAttrStrNoError((PyObject*)&PyBaseObject_Type, __pyx_n_s_getstate); + if (!object_getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (object_getstate != getstate) { + goto __PYX_GOOD; + } + } +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; + if (reduce_ex == object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); + if (likely(reduce_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (reduce == object_reduce || PyErr_Occurred()) { + goto __PYX_BAD; + } + setstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); + if (likely(setstate_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (!setstate || PyErr_Occurred()) { + goto __PYX_BAD; + } + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto __PYX_GOOD; +__PYX_BAD: + if (!PyErr_Occurred()) { + __Pyx_TypeName type_obj_name = + __Pyx_PyType_GetName((PyTypeObject*)type_obj); + PyErr_Format(PyExc_RuntimeError, + "Unable to initialize pickling for " __Pyx_FMT_TYPENAME, type_obj_name); + __Pyx_DECREF_TypeName(type_obj_name); + } + ret = -1; +__PYX_GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); + Py_XDECREF(object_getstate); + Py_XDECREF(getstate); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} +#endif + +/* SetVTable */ +static int __Pyx_SetVtable(PyTypeObject *type, void *vtable) { + PyObject *ob = PyCapsule_New(vtable, 0, 0); + if (unlikely(!ob)) + goto bad; +#if CYTHON_COMPILING_IN_LIMITED_API + if (unlikely(PyObject_SetAttr((PyObject *) type, __pyx_n_s_pyx_vtable, ob) < 0)) +#else + if (unlikely(PyDict_SetItem(type->tp_dict, __pyx_n_s_pyx_vtable, ob) < 0)) +#endif + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + +/* GetVTable */ +static void* __Pyx_GetVtable(PyTypeObject *type) { + void* ptr; +#if 
CYTHON_COMPILING_IN_LIMITED_API + PyObject *ob = PyObject_GetAttr((PyObject *)type, __pyx_n_s_pyx_vtable); +#else + PyObject *ob = PyObject_GetItem(type->tp_dict, __pyx_n_s_pyx_vtable); +#endif + if (!ob) + goto bad; + ptr = PyCapsule_GetPointer(ob, 0); + if (!ptr && !PyErr_Occurred()) + PyErr_SetString(PyExc_RuntimeError, "invalid vtable found for imported type"); + Py_DECREF(ob); + return ptr; +bad: + Py_XDECREF(ob); + return NULL; +} + +/* MergeVTables */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_MergeVtables(PyTypeObject *type) { + int i; + void** base_vtables; + __Pyx_TypeName tp_base_name; + __Pyx_TypeName base_name; + void* unknown = (void*)-1; + PyObject* bases = type->tp_bases; + int base_depth = 0; + { + PyTypeObject* base = type->tp_base; + while (base) { + base_depth += 1; + base = base->tp_base; + } + } + base_vtables = (void**) malloc(sizeof(void*) * (size_t)(base_depth + 1)); + base_vtables[0] = unknown; + for (i = 1; i < PyTuple_GET_SIZE(bases); i++) { + void* base_vtable = __Pyx_GetVtable(((PyTypeObject*)PyTuple_GET_ITEM(bases, i))); + if (base_vtable != NULL) { + int j; + PyTypeObject* base = type->tp_base; + for (j = 0; j < base_depth; j++) { + if (base_vtables[j] == unknown) { + base_vtables[j] = __Pyx_GetVtable(base); + base_vtables[j + 1] = unknown; + } + if (base_vtables[j] == base_vtable) { + break; + } else if (base_vtables[j] == NULL) { + goto bad; + } + base = base->tp_base; + } + } + } + PyErr_Clear(); + free(base_vtables); + return 0; +bad: + tp_base_name = __Pyx_PyType_GetName(type->tp_base); + base_name = __Pyx_PyType_GetName((PyTypeObject*)PyTuple_GET_ITEM(bases, i)); + PyErr_Format(PyExc_TypeError, + "multiple bases have vtable conflict: '" __Pyx_FMT_TYPENAME "' and '" __Pyx_FMT_TYPENAME "'", tp_base_name, base_name); + __Pyx_DECREF_TypeName(tp_base_name); + __Pyx_DECREF_TypeName(base_name); + free(base_vtables); + return -1; +} +#endif + +/* FetchSharedCythonModule */ +static PyObject *__Pyx_FetchSharedCythonABIModule(void) { + return __Pyx_PyImport_AddModuleRef((char*) __PYX_ABI_MODULE_NAME); +} + +/* FetchCommonType */ +static int __Pyx_VerifyCachedType(PyObject *cached_type, + const char *name, + Py_ssize_t basicsize, + Py_ssize_t expected_basicsize) { + if (!PyType_Check(cached_type)) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s is not a type object", name); + return -1; + } + if (basicsize != expected_basicsize) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s has the wrong size, try recompiling", + name); + return -1; + } + return 0; +} +#if !CYTHON_USE_TYPE_SPECS +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { + PyObject* abi_module; + const char* object_name; + PyTypeObject *cached_type = NULL; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + object_name = strrchr(type->tp_name, '.'); + object_name = object_name ? 
object_name+1 : type->tp_name; + cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + if (__Pyx_VerifyCachedType( + (PyObject *)cached_type, + object_name, + cached_type->tp_basicsize, + type->tp_basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + if (PyType_Ready(type) < 0) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) + goto bad; + Py_INCREF(type); + cached_type = type; +done: + Py_DECREF(abi_module); + return cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#else +static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { + PyObject *abi_module, *cached_type = NULL; + const char* object_name = strrchr(spec->name, '.'); + object_name = object_name ? object_name+1 : spec->name; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + cached_type = PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + Py_ssize_t basicsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_basicsize; + py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); + if (unlikely(!py_basicsize)) goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; +#else + basicsize = likely(PyType_Check(cached_type)) ? ((PyTypeObject*) cached_type)->tp_basicsize : -1; +#endif + if (__Pyx_VerifyCachedType( + cached_type, + object_name, + basicsize, + spec->basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + CYTHON_UNUSED_VAR(module); + cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); + if (unlikely(!cached_type)) goto bad; + if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; +done: + Py_DECREF(abi_module); + assert(cached_type == NULL || PyType_Check(cached_type)); + return (PyTypeObject *) cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#endif + +/* PyVectorcallFastCallDict */ +#if CYTHON_METH_FASTCALL +static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + PyObject *res = NULL; + PyObject *kwnames; + PyObject **newargs; + PyObject **kwvalues; + Py_ssize_t i, pos; + size_t j; + PyObject *key, *value; + unsigned long keys_are_strings; + Py_ssize_t nkw = PyDict_GET_SIZE(kw); + newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); + if (unlikely(newargs == NULL)) { + PyErr_NoMemory(); + return NULL; + } + for (j = 0; j < nargs; j++) newargs[j] = args[j]; + kwnames = PyTuple_New(nkw); + if (unlikely(kwnames == NULL)) { + PyMem_Free(newargs); + return NULL; + } + kwvalues = newargs + nargs; + pos = i = 0; + keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; + while (PyDict_Next(kw, &pos, &key, &value)) { + keys_are_strings &= Py_TYPE(key)->tp_flags; + Py_INCREF(key); + Py_INCREF(value); + PyTuple_SET_ITEM(kwnames, i, key); + kwvalues[i] = value; + i++; + } + if (unlikely(!keys_are_strings)) { + PyErr_SetString(PyExc_TypeError, "keywords must be strings"); + goto cleanup; + } + res = vc(func, newargs, nargs, kwnames); 
+cleanup: + Py_DECREF(kwnames); + for (i = 0; i < nkw; i++) + Py_DECREF(kwvalues[i]); + PyMem_Free(newargs); + return res; +} +static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { + return vc(func, args, nargs, NULL); + } + return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); +} +#endif + +/* CythonFunctionShared */ +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + if (__Pyx_CyFunction_Check(func)) { + return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc; + } else if (PyCFunction_Check(func)) { + return PyCFunction_GetFunction(func) == (PyCFunction) cfunc; + } + return 0; +} +#else +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +} +#endif +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + __Pyx_Py_XDECREF_SET( + __Pyx_CyFunction_GetClassObj(f), + ((classobj) ? __Pyx_NewRef(classobj) : NULL)); +#else + __Pyx_Py_XDECREF_SET( + ((PyCMethodObject *) (f))->mm_class, + (PyTypeObject*)((classobj) ? __Pyx_NewRef(classobj) : NULL)); +#endif +} +static PyObject * +__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) +{ + CYTHON_UNUSED_VAR(closure); + if (unlikely(op->func_doc == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); + if (unlikely(!op->func_doc)) return NULL; +#else + if (((PyCFunctionObject*)op)->m_ml->ml_doc) { +#if PY_MAJOR_VERSION >= 3 + op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#else + op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#endif + if (unlikely(op->func_doc == NULL)) + return NULL; + } else { + Py_INCREF(Py_None); + return Py_None; + } +#endif + } + Py_INCREF(op->func_doc); + return op->func_doc; +} +static int +__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (value == NULL) { + value = Py_None; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_doc, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_name == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_name = PyObject_GetAttrString(op->func, "__name__"); +#elif PY_MAJOR_VERSION >= 3 + op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#else + op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#endif + if (unlikely(op->func_name == NULL)) + return NULL; + } + Py_INCREF(op->func_name); + return op->func_name; +} +static int +__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__name__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_name, value); + 
return 0; +} +static PyObject * +__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_qualname); + return op->func_qualname; +} +static int +__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__qualname__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_qualname, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_dict == NULL)) { + op->func_dict = PyDict_New(); + if (unlikely(op->func_dict == NULL)) + return NULL; + } + Py_INCREF(op->func_dict); + return op->func_dict; +} +static int +__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(value == NULL)) { + PyErr_SetString(PyExc_TypeError, + "function's dictionary may not be deleted"); + return -1; + } + if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "setting function's dictionary to a non-dict"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_dict, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_globals); + return op->func_globals; +} +static PyObject * +__Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(op); + CYTHON_UNUSED_VAR(context); + Py_INCREF(Py_None); + return Py_None; +} +static PyObject * +__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) +{ + PyObject* result = (op->func_code) ? 
op->func_code : Py_None; + CYTHON_UNUSED_VAR(context); + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { + int result = 0; + PyObject *res = op->defaults_getter((PyObject *) op); + if (unlikely(!res)) + return -1; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + op->defaults_tuple = PyTuple_GET_ITEM(res, 0); + Py_INCREF(op->defaults_tuple); + op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); + Py_INCREF(op->defaults_kwdict); + #else + op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); + if (unlikely(!op->defaults_tuple)) result = -1; + else { + op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); + if (unlikely(!op->defaults_kwdict)) result = -1; + } + #endif + Py_DECREF(res); + return result; +} +static int +__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__defaults__ must be set to a tuple object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_tuple; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_tuple; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__kwdefaults__ must be set to a dict object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_kwdict; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_kwdict; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value || value == Py_None) { + value = NULL; + } else if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__annotations__ must be set to a dict object"); + return -1; + } + Py_XINCREF(value); + __Pyx_Py_XDECREF_SET(op->func_annotations, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->func_annotations; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + result = PyDict_New(); + if (unlikely(!result)) return NULL; + op->func_annotations = result; + } + Py_INCREF(result); + return result; +} +static PyObject * 
+__Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { + int is_coroutine; + CYTHON_UNUSED_VAR(context); + if (op->func_is_coroutine) { + return __Pyx_NewRef(op->func_is_coroutine); + } + is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; +#if PY_VERSION_HEX >= 0x03050000 + if (is_coroutine) { + PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; + fromlist = PyList_New(1); + if (unlikely(!fromlist)) return NULL; + Py_INCREF(marker); +#if CYTHON_ASSUME_SAFE_MACROS + PyList_SET_ITEM(fromlist, 0, marker); +#else + if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { + Py_DECREF(marker); + Py_DECREF(fromlist); + return NULL; + } +#endif + module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); + Py_DECREF(fromlist); + if (unlikely(!module)) goto ignore; + op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); + Py_DECREF(module); + if (likely(op->func_is_coroutine)) { + return __Pyx_NewRef(op->func_is_coroutine); + } +ignore: + PyErr_Clear(); + } +#endif + op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); + return __Pyx_NewRef(op->func_is_coroutine); +} +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject * +__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_GetAttrString(op->func, "__module__"); +} +static int +__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_SetAttrString(op->func, "__module__", value); +} +#endif +static PyGetSetDef __pyx_CyFunction_getsets[] = { + {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, + {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) "__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, + {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, + {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, +#if CYTHON_COMPILING_IN_LIMITED_API + {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, +#endif + {0, 0, 0, 0, 0} +}; +static PyMemberDef 
__pyx_CyFunction_members[] = { +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, +#endif +#if CYTHON_USE_TYPE_SPECS + {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, +#if CYTHON_METH_FASTCALL +#if CYTHON_BACKPORT_VECTORCALL + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, +#else +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, +#endif +#endif +#endif +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, +#else + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, +#endif +#endif + {0, 0, 0, 0, 0} +}; +static PyObject * +__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) +{ + CYTHON_UNUSED_VAR(args); +#if PY_MAJOR_VERSION >= 3 + Py_INCREF(m->func_qualname); + return m->func_qualname; +#else + return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); +#endif +} +static PyMethodDef __pyx_CyFunction_methods[] = { + {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, + {0, 0, 0, 0} +}; +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) +#else +#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) +#endif +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { +#if !CYTHON_COMPILING_IN_LIMITED_API + PyCFunctionObject *cf = (PyCFunctionObject*) op; +#endif + if (unlikely(op == NULL)) + return NULL; +#if CYTHON_COMPILING_IN_LIMITED_API + op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); + if (unlikely(!op->func)) return NULL; +#endif + op->flags = flags; + __Pyx_CyFunction_weakreflist(op) = NULL; +#if !CYTHON_COMPILING_IN_LIMITED_API + cf->m_ml = ml; + cf->m_self = (PyObject *) op; +#endif + Py_XINCREF(closure); + op->func_closure = closure; +#if !CYTHON_COMPILING_IN_LIMITED_API + Py_XINCREF(module); + cf->m_module = module; +#endif + op->func_dict = NULL; + op->func_name = NULL; + Py_INCREF(qualname); + op->func_qualname = qualname; + op->func_doc = NULL; +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + op->func_classobj = NULL; +#else + ((PyCMethodObject*)op)->mm_class = NULL; +#endif + op->func_globals = globals; + Py_INCREF(op->func_globals); + Py_XINCREF(code); + op->func_code = code; + op->defaults_pyobjects = 0; + op->defaults_size = 0; + op->defaults = NULL; + op->defaults_tuple = NULL; + op->defaults_kwdict = NULL; + op->defaults_getter = NULL; + op->func_annotations = NULL; + op->func_is_coroutine = NULL; +#if CYTHON_METH_FASTCALL + switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { + case METH_NOARGS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; + break; + case METH_O: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; + break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; + break; + case METH_FASTCALL 
| METH_KEYWORDS:
+            __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS;
+            break;
+        case METH_VARARGS | METH_KEYWORDS:
+            __Pyx_CyFunction_func_vectorcall(op) = NULL;
+            break;
+        default:
+            PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction");
+            Py_DECREF(op);
+            return NULL;
+    }
+#endif
+    return (PyObject *) op;
+}
+static int
+__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m)
+{
+    Py_CLEAR(m->func_closure);
+#if CYTHON_COMPILING_IN_LIMITED_API
+    Py_CLEAR(m->func);
+#else
+    Py_CLEAR(((PyCFunctionObject*)m)->m_module);
+#endif
+    Py_CLEAR(m->func_dict);
+    Py_CLEAR(m->func_name);
+    Py_CLEAR(m->func_qualname);
+    Py_CLEAR(m->func_doc);
+    Py_CLEAR(m->func_globals);
+    Py_CLEAR(m->func_code);
+#if !CYTHON_COMPILING_IN_LIMITED_API
+#if PY_VERSION_HEX < 0x030900B1
+    Py_CLEAR(__Pyx_CyFunction_GetClassObj(m));
+#else
+    {
+        PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class;
+        ((PyCMethodObject *) (m))->mm_class = NULL;
+        Py_XDECREF(cls);
+    }
+#endif
+#endif
+    Py_CLEAR(m->defaults_tuple);
+    Py_CLEAR(m->defaults_kwdict);
+    Py_CLEAR(m->func_annotations);
+    Py_CLEAR(m->func_is_coroutine);
+    if (m->defaults) {
+        PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m);
+        int i;
+        for (i = 0; i < m->defaults_pyobjects; i++)
+            Py_XDECREF(pydefaults[i]);
+        PyObject_Free(m->defaults);
+        m->defaults = NULL;
+    }
+    return 0;
+}
+static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m)
+{
+    if (__Pyx_CyFunction_weakreflist(m) != NULL)
+        PyObject_ClearWeakRefs((PyObject *) m);
+    __Pyx_CyFunction_clear(m);
+    __Pyx_PyHeapTypeObject_GC_Del(m);
+}
+static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m)
+{
+    PyObject_GC_UnTrack(m);
+    __Pyx__CyFunction_dealloc(m);
+}
+static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg)
+{
+    Py_VISIT(m->func_closure);
+#if CYTHON_COMPILING_IN_LIMITED_API
+    Py_VISIT(m->func);
+#else
+    Py_VISIT(((PyCFunctionObject*)m)->m_module);
+#endif
+    Py_VISIT(m->func_dict);
+    Py_VISIT(m->func_name);
+    Py_VISIT(m->func_qualname);
+    Py_VISIT(m->func_doc);
+    Py_VISIT(m->func_globals);
+    Py_VISIT(m->func_code);
+#if !CYTHON_COMPILING_IN_LIMITED_API
+    Py_VISIT(__Pyx_CyFunction_GetClassObj(m));
+#endif
+    Py_VISIT(m->defaults_tuple);
+    Py_VISIT(m->defaults_kwdict);
+    Py_VISIT(m->func_is_coroutine);
+    if (m->defaults) {
+        PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m);
+        int i;
+        for (i = 0; i < m->defaults_pyobjects; i++)
+            Py_VISIT(pydefaults[i]);
+    }
+    return 0;
+}
+static PyObject*
+__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op)
+{
+#if PY_MAJOR_VERSION >= 3
+    return PyUnicode_FromFormat("<cyfunction %U at %p>",
+                                op->func_qualname, (void *)op);
+#else
+    return PyString_FromFormat("<cyfunction %s at %p>",
+                               PyString_AsString(op->func_qualname), (void *)op);
+#endif
+}
+static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) {
+#if CYTHON_COMPILING_IN_LIMITED_API
+    PyObject *f = ((__pyx_CyFunctionObject*)func)->func;
+    PyObject *py_name = NULL;
+    PyCFunction meth;
+    int flags;
+    meth = PyCFunction_GetFunction(f);
+    if (unlikely(!meth)) return NULL;
+    flags = PyCFunction_GetFlags(f);
+    if (unlikely(flags < 0)) return NULL;
+#else
+    PyCFunctionObject* f = (PyCFunctionObject*)func;
+    PyCFunction meth = f->m_ml->ml_meth;
+    int flags = f->m_ml->ml_flags;
+#endif
+    Py_ssize_t size;
+    switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) {
+    case METH_VARARGS:
+        if (likely(kw == NULL || PyDict_Size(kw) == 0))
+            return (*meth)(self, arg);
+        break;
+ case METH_VARARGS | METH_KEYWORDS: + return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); + case METH_NOARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 0)) + return (*meth)(self, NULL); +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + case METH_O: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 1)) { + PyObject *result, *arg0; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + arg0 = PyTuple_GET_ITEM(arg, 0); + #else + arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; + #endif + result = (*meth)(self, arg0); + #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_DECREF(arg0); + #endif + return result; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + default: + PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); + return NULL; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", + py_name); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", + f->m_ml->ml_name); +#endif + return NULL; +} +static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *self, *result; +#if CYTHON_COMPILING_IN_LIMITED_API + self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); + if (unlikely(!self) && PyErr_Occurred()) return NULL; +#else + self = ((PyCFunctionObject*)func)->m_self; +#endif + result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); + return result; +} +static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { + PyObject *result; + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; +#if CYTHON_METH_FASTCALL + __pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); + if (vc) { +#if CYTHON_ASSUME_SAFE_MACROS + return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); +#else + (void) &__Pyx_PyVectorcall_FastCallDict; + return PyVectorcall_Call(func, args, kw); +#endif + } +#endif + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + Py_ssize_t argc; + PyObject *new_args; + PyObject *self; +#if CYTHON_ASSUME_SAFE_MACROS + 
argc = PyTuple_GET_SIZE(args);
+#else
+        argc = PyTuple_Size(args);
+        if (unlikely(argc < 0)) return NULL;
+#endif
+        new_args = PyTuple_GetSlice(args, 1, argc);
+        if (unlikely(!new_args))
+            return NULL;
+        self = PyTuple_GetItem(args, 0);
+        if (unlikely(!self)) {
+            Py_DECREF(new_args);
+#if PY_MAJOR_VERSION > 2
+            PyErr_Format(PyExc_TypeError,
+                         "unbound method %.200S() needs an argument",
+                         cyfunc->func_qualname);
+#else
+            PyErr_SetString(PyExc_TypeError,
+                            "unbound method needs an argument");
+#endif
+            return NULL;
+        }
+        result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw);
+        Py_DECREF(new_args);
+    } else {
+        result = __Pyx_CyFunction_Call(func, args, kw);
+    }
+    return result;
+}
+#if CYTHON_METH_FASTCALL
+static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames)
+{
+    int ret = 0;
+    if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) {
+        if (unlikely(nargs < 1)) {
+            PyErr_Format(PyExc_TypeError, "%.200s() needs an argument",
+                         ((PyCFunctionObject*)cyfunc)->m_ml->ml_name);
+            return -1;
+        }
+        ret = 1;
+    }
+    if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) {
+        PyErr_Format(PyExc_TypeError,
+                     "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name);
+        return -1;
+    }
+    return ret;
+}
+static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames)
+{
+    __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func;
+    PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml;
+#if CYTHON_BACKPORT_VECTORCALL
+    Py_ssize_t nargs = (Py_ssize_t)nargsf;
+#else
+    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
+#endif
+    PyObject *self;
+    switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) {
+    case 1:
+        self = args[0];
+        args += 1;
+        nargs -= 1;
+        break;
+    case 0:
+        self = ((PyCFunctionObject*)cyfunc)->m_self;
+        break;
+    default:
+        return NULL;
+    }
+    if (unlikely(nargs != 0)) {
+        PyErr_Format(PyExc_TypeError,
+                     "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)",
+                     def->ml_name, nargs);
+        return NULL;
+    }
+    return def->ml_meth(self, NULL);
+}
+static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames)
+{
+    __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func;
+    PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml;
+#if CYTHON_BACKPORT_VECTORCALL
+    Py_ssize_t nargs = (Py_ssize_t)nargsf;
+#else
+    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
+#endif
+    PyObject *self;
+    switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) {
+    case 1:
+        self = args[0];
+        args += 1;
+        nargs -= 1;
+        break;
+    case 0:
+        self = ((PyCFunctionObject*)cyfunc)->m_self;
+        break;
+    default:
+        return NULL;
+    }
+    if (unlikely(nargs != 1)) {
+        PyErr_Format(PyExc_TypeError,
+                     "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)",
+                     def->ml_name, nargs);
+        return NULL;
+    }
+    return def->ml_meth(self, args[0]);
+}
+static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames)
+{
+    __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func;
+    PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml;
+#if CYTHON_BACKPORT_VECTORCALL
+    Py_ssize_t nargs = (Py_ssize_t)nargsf;
+#else
+    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
+#endif
+    PyObject *self;
+    switch 
(__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); +} +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; + PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); +} +#endif +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_CyFunctionType_slots[] = { + {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, + {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, + {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, + {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, + {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, + {Py_tp_methods, (void *)__pyx_CyFunction_methods}, + {Py_tp_members, (void *)__pyx_CyFunction_members}, + {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, + {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, + {0, 0}, +}; +static PyType_Spec __pyx_CyFunctionType_spec = { + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + __pyx_CyFunctionType_slots +}; +#else +static PyTypeObject __pyx_CyFunctionType_type = { + PyVarObject_HEAD_INIT(0, 0) + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, + (destructor) __Pyx_CyFunction_dealloc, +#if !CYTHON_METH_FASTCALL + 0, +#elif CYTHON_BACKPORT_VECTORCALL + (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), +#else + offsetof(PyCFunctionObject, vectorcall), +#endif + 0, + 0, +#if PY_MAJOR_VERSION < 3 + 0, +#else + 0, +#endif + (reprfunc) __Pyx_CyFunction_repr, + 0, + 0, + 0, + 0, + __Pyx_CyFunction_CallAsMethod, + 0, + 0, + 0, + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + 0, + (traverseproc) __Pyx_CyFunction_traverse, + (inquiry) __Pyx_CyFunction_clear, + 0, +#if PY_VERSION_HEX < 0x030500A0 + offsetof(__pyx_CyFunctionObject, func_weakreflist), +#else + offsetof(PyCFunctionObject, m_weakreflist), +#endif + 0, + 0, + __pyx_CyFunction_methods, + __pyx_CyFunction_members, + __pyx_CyFunction_getsets, + 0, + 0, + __Pyx_PyMethod_New, + 0, + offsetof(__pyx_CyFunctionObject, func_dict), + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, +#if PY_VERSION_HEX >= 0x030400a1 + 0, +#endif +#if PY_VERSION_HEX >= 0x030800b1 && 
(!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, +#endif +#if __PYX_NEED_TP_PRINT_SLOT + 0, +#endif +#if PY_VERSION_HEX >= 0x030C0000 + 0, +#endif +#if PY_VERSION_HEX >= 0x030d00A4 + 0, +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, +#endif +}; +#endif +static int __pyx_CyFunction_init(PyObject *module) { +#if CYTHON_USE_TYPE_SPECS + __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); +#else + CYTHON_UNUSED_VAR(module); + __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); +#endif + if (unlikely(__pyx_CyFunctionType == NULL)) { + return -1; + } + return 0; +} +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults = PyObject_Malloc(size); + if (unlikely(!m->defaults)) + return PyErr_NoMemory(); + memset(m->defaults, 0, size); + m->defaults_pyobjects = pyobjects; + m->defaults_size = size; + return m->defaults; +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_tuple = tuple; + Py_INCREF(tuple); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_kwdict = dict; + Py_INCREF(dict); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->func_annotations = dict; + Py_INCREF(dict); +} + +/* CythonFunction */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { + PyObject *op = __Pyx_CyFunction_Init( + PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), + ml, flags, qualname, closure, module, globals, code + ); + if (likely(op)) { + PyObject_GC_Track(op); + } + return op; +} + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + CYTHON_MAYBE_UNUSED_VAR(tstate); + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} +#endif + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, + PyObject *firstlineno, PyObject *name) { + PyObject *replace = NULL; + if 
(unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; + replace = PyObject_GetAttrString(code, "replace"); + if (likely(replace)) { + PyObject *result; + result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); + Py_DECREF(replace); + return result; + } + PyErr_Clear(); + #if __PYX_LIMITED_VERSION_HEX < 0x030780000 + { + PyObject *compiled = NULL, *result = NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; + compiled = Py_CompileString( + "out = type(code)(\n" + " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" + " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" + " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" + " code.co_lnotab)\n", "", Py_file_input); + if (!compiled) return NULL; + result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); + Py_DECREF(compiled); + if (!result) PyErr_Print(); + Py_DECREF(result); + result = PyDict_GetItemString(scratch_dict, "out"); + if (result) Py_INCREF(result); + return result; + } + #else + return NULL; + #endif +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; + PyObject *replace = NULL, *getframe = NULL, *frame = NULL; + PyObject *exc_type, *exc_value, *exc_traceback; + int success = 0; + if (c_line) { + (void) __pyx_cfilenm; + (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); + } + PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); + code_object = Py_CompileString("_getframe()", filename, Py_eval_input); + if (unlikely(!code_object)) goto bad; + py_py_line = PyLong_FromLong(py_line); + if (unlikely(!py_py_line)) goto bad; + py_funcname = PyUnicode_FromString(funcname); + if (unlikely(!py_funcname)) goto bad; + dict = PyDict_New(); + if (unlikely(!dict)) goto bad; + { + PyObject *old_code_object = code_object; + code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); + Py_DECREF(old_code_object); + } + if (unlikely(!code_object)) goto bad; + getframe = PySys_GetObject("_getframe"); + if (unlikely(!getframe)) goto bad; + if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad; + frame = PyEval_EvalCode(code_object, dict, dict); + if (unlikely(!frame) || frame == Py_None) goto bad; + success = 1; + bad: + PyErr_Restore(exc_type, exc_value, exc_traceback); + Py_XDECREF(code_object); + Py_XDECREF(py_py_line); + Py_XDECREF(py_funcname); + Py_XDECREF(dict); + Py_XDECREF(replace); + if (success) { + PyTraceBack_Here( + (struct _frame*)frame); + } + Py_XDECREF(frame); +} +#else +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; + #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; + py_srcfile = PyString_FromString(filename); + if (!py_srcfile) goto bad; + #endif + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + funcname = 
PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + if (!py_funcname) goto bad; + #endif + } + #if PY_MAJOR_VERSION < 3 + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject *ptype, *pvalue, *ptraceback; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) { + /* If the code object creation fails, then we should clear the + fetched exception references and propagate the new exception */ + Py_XDECREF(ptype); + Py_XDECREF(pvalue); + Py_XDECREF(ptraceback); + goto bad; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} +#endif + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(long) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned 
long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(long) <= sizeof(unsigned long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | 
(long)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } + } +#endif + if ((sizeof(long) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + long val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (long) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (long) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (long) -1; + } else { + stepval = v; + } + v = NULL; + val = (long) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((long) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 
0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((long) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (long) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + unsigned char *bytes = (unsigned char *)&value; +#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 + if (is_unsigned) { + return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); + } else { + return PyLong_FromNativeBytes(bytes, sizeof(value), -1); + } +#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 + int one = 1; int little = (int)*(unsigned char *)&one; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); +#else + int one = 1; int little = (int)*(unsigned char *)&one; + PyObject *from_bytes, *result = NULL; + PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; + from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); + if (!from_bytes) return NULL; + py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); + if (!py_bytes) goto limited_bad; + order_str = PyUnicode_FromString(little ? 
"little" : "big"); + if (!order_str) goto limited_bad; + arg_tuple = PyTuple_Pack(2, py_bytes, order_str); + if (!arg_tuple) goto limited_bad; + if (!is_unsigned) { + kwds = PyDict_New(); + if (!kwds) goto limited_bad; + if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; + } + result = PyObject_Call(from_bytes, arg_tuple, kwds); + limited_bad: + Py_XDECREF(kwds); + Py_XDECREF(arg_tuple); + Py_XDECREF(order_str); + Py_XDECREF(py_bytes); + Py_XDECREF(from_bytes); + return result; +#endif + } +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const unsigned int neg_one = (unsigned int) -1, const_zero = (unsigned int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(unsigned int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(unsigned int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(unsigned int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + unsigned char *bytes = (unsigned char *)&value; +#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 + if (is_unsigned) { + return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); + } else { + return PyLong_FromNativeBytes(bytes, sizeof(value), -1); + } +#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 + int one = 1; int little = (int)*(unsigned char *)&one; + return _PyLong_FromByteArray(bytes, sizeof(unsigned int), + little, !is_unsigned); +#else + int one = 1; int little = (int)*(unsigned char *)&one; + PyObject *from_bytes, *result = NULL; + PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; + from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); + if (!from_bytes) return NULL; + py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(unsigned int)); + if (!py_bytes) goto limited_bad; + order_str = PyUnicode_FromString(little ? 
"little" : "big"); + if (!order_str) goto limited_bad; + arg_tuple = PyTuple_Pack(2, py_bytes, order_str); + if (!arg_tuple) goto limited_bad; + if (!is_unsigned) { + kwds = PyDict_New(); + if (!kwds) goto limited_bad; + if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; + } + result = PyObject_Call(from_bytes, arg_tuple, kwds); + limited_bad: + Py_XDECREF(kwds); + Py_XDECREF(arg_tuple); + Py_XDECREF(order_str); + Py_XDECREF(py_bytes); + Py_XDECREF(from_bytes); + return result; +#endif + } +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_PY_LONG_LONG(unsigned PY_LONG_LONG value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG) -1, const_zero = (unsigned PY_LONG_LONG) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(unsigned PY_LONG_LONG) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(unsigned PY_LONG_LONG) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + unsigned char *bytes = (unsigned char *)&value; +#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 + if (is_unsigned) { + return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); + } else { + return PyLong_FromNativeBytes(bytes, sizeof(value), -1); + } +#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 + int one = 1; int little = (int)*(unsigned char *)&one; + return _PyLong_FromByteArray(bytes, sizeof(unsigned PY_LONG_LONG), + little, !is_unsigned); +#else + int one = 1; int little = (int)*(unsigned char *)&one; + PyObject *from_bytes, *result = NULL; + PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; + from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); + if (!from_bytes) return NULL; + py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(unsigned PY_LONG_LONG)); + if (!py_bytes) goto limited_bad; + order_str = PyUnicode_FromString(little ? 
"little" : "big"); + if (!order_str) goto limited_bad; + arg_tuple = PyTuple_Pack(2, py_bytes, order_str); + if (!arg_tuple) goto limited_bad; + if (!is_unsigned) { + kwds = PyDict_New(); + if (!kwds) goto limited_bad; + if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; + } + result = PyObject_Call(from_bytes, arg_tuple, kwds); + limited_bad: + Py_XDECREF(kwds); + Py_XDECREF(arg_tuple); + Py_XDECREF(order_str); + Py_XDECREF(py_bytes); + Py_XDECREF(from_bytes); + return result; +#endif + } +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(int) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(int) <= sizeof(unsigned long))) { 
+ __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } + } +#endif + if ((sizeof(int) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + 
} else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + int val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (int) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (int) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (int) -1; + } else { + stepval = v; + } + v = NULL; + val = (int) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((int) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 
0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((int) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (int) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (cls == a || cls == b) return 1; + mro = cls->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + PyObject *base = PyTuple_GET_ITEM(mro, i); + if (base == (PyObject *)a || base == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + if (exc_type1) { + return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); + } else { + return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030B00A4 + return Py_Version & ~0xFFUL; +#else + const char* rt_version = Py_GetVersion(); + unsigned long version = 0; + unsigned long factor = 0x01000000UL; + unsigned int digit = 0; + int i = 0; + while (factor) { + while ('0' <= rt_version[i] && rt_version[i] <= '9') { + digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); + ++i; + } + version += factor * digit; + if (rt_version[i] != '.') + break; + digit = 0; + factor >>= 8; + ++i; + } + return version; +#endif +} +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { + const unsigned long MAJOR_MINOR = 0xFFFF0000UL; + if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) + return 0; + if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) + return 1; + { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compile time Python version %d.%d " + "of module '%.100s' " + "%s " + "runtime version %d.%d", + (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), + __Pyx_MODULE_NAME, + (allow_newer) ? 
"was newer than" : "does not match", + (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) + ); + return PyErr_WarnEx(NULL, message, 1); + } +} + +/* FunctionImport */ +#ifndef __PYX_HAVE_RT_ImportFunction_3_0_11 +#define __PYX_HAVE_RT_ImportFunction_3_0_11 +static int __Pyx_ImportFunction_3_0_11(PyObject *module, const char *funcname, void (**f)(void), const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + union { + void (*fp)(void); + void *p; + } tmp; + d = PyObject_GetAttrString(module, (char *)"__pyx_capi__"); + if (!d) + goto bad; + cobj = PyDict_GetItemString(d, funcname); + if (!cobj) { + PyErr_Format(PyExc_ImportError, + "%.200s does not export expected C function %.200s", + PyModule_GetName(module), funcname); + goto bad; + } + if (!PyCapsule_IsValid(cobj, sig)) { + PyErr_Format(PyExc_TypeError, + "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), funcname, sig, PyCapsule_GetName(cobj)); + goto bad; + } + tmp.p = PyCapsule_GetPointer(cobj, sig); + *f = tmp.fp; + if (!(*f)) + goto bad; + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(d); + return -1; +} +#endif + +/* InitStrings */ +#if PY_MAJOR_VERSION >= 3 +static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { + if (t.is_unicode | t.is_str) { + if (t.intern) { + *str = PyUnicode_InternFromString(t.s); + } else if (t.encoding) { + *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); + } else { + *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); + } + } else { + *str = PyBytes_FromStringAndSize(t.s, t.n - 1); + } + if (!*str) + return -1; + if (PyObject_Hash(*str) == -1) + return -1; + return 0; +} +#endif +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION >= 3 + __Pyx_InitString(*t, t->p); + #else + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + #endif + ++t; + } + return 0; +} + +#include +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { + size_t len = strlen(s); + if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { + PyErr_SetString(PyExc_OverflowError, "byte string is too long"); + return -1; + } + return (Py_ssize_t) len; +} +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return __Pyx_PyUnicode_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return PyByteArray_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + 
PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { + __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). 
" + "The ability to return an instance of a strict subclass of int is deprecated, " + "and may be removed in a future version of Python.", + result_type_name)) { + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; + } + __Pyx_DECREF_TypeName(result_type_name); + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", + type_name, type_name, result_type_name); + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(__Pyx_PyLong_IsCompact(b))) { + return __Pyx_PyLong_CompactValue(b); + } else { + const digit* digits = __Pyx_PyLong_Digits(b); + const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + 
return ival; +} +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +/* #### Code section: utility_code_pragmas_end ### */ +#ifdef _MSC_VER +#pragma warning( pop ) +#endif + + + +/* #### Code section: end ### */ +#endif /* Py_PYTHON_H */ diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 6d409a9fb7e..7ccef84e0d5 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -68,6 +68,7 @@ from ._orm_types import DMLStrategyArgument from ._orm_types import SynchronizeSessionArgument from ._typing import _CLE + from .compiler import SQLCompiler from .elements import BindParameter from .elements import ClauseList from .elements import ColumnClause # noqa @@ -657,7 +658,9 @@ class CompileState: _ambiguous_table_name_map: Optional[_AmbiguousTableNameMap] @classmethod - def create_for_statement(cls, statement, compiler, **kw): + def create_for_statement( + cls, statement: Executable, compiler: SQLCompiler, **kw: Any + ) -> CompileState: # factory construction. if statement._propagate_attrs: diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index fde503aaf9b..cd1dc34e0a1 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -298,8 +298,7 @@ def compile( if bind: dialect = bind.dialect elif self.stringify_dialect == "default": - default = util.preloaded.engine_default - dialect = default.StrCompileDialect() + dialect = self._default_dialect() else: url = util.preloaded.engine_url dialect = url.URL.create( @@ -308,6 +307,10 @@ def compile( return self._compiler(dialect, **kw) + def _default_dialect(self): + default = util.preloaded.engine_default + return default.StrCompileDialect() + def _compiler(self, dialect: Dialect, **kw: Any) -> Compiled: """Return a compiler appropriate for this ClauseElement, given a Dialect.""" @@ -404,6 +407,10 @@ def _set_propagate_attrs(self, values: Mapping[str, Any]) -> Self: self._propagate_attrs = util.immutabledict(values) return self + def _default_compiler(self) -> SQLCompiler: + dialect = self._default_dialect() + return dialect.statement_compiler(dialect, self) # type: ignore + def _clone(self, **kw: Any) -> Self: """Create a shallow copy of this ClauseElement. 
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 5db1e729e7a..d137ab504ea 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -4694,7 +4694,7 @@ def get_plugin_class( def __init__( self, statement: Select[Any], - compiler: Optional[SQLCompiler], + compiler: SQLCompiler, **kw: Any, ): self.statement = statement @@ -5742,8 +5742,9 @@ def get_final_froms(self) -> Sequence[FromClause]: :attr:`_sql.Select.columns_clause_froms` """ + compiler = self._default_compiler() - return self._compile_state_factory(self, None)._get_display_froms() + return self._compile_state_factory(self, compiler)._get_display_froms() @property @util.deprecated( diff --git a/lib/sqlalchemy/util/_collections_cy.c b/lib/sqlalchemy/util/_collections_cy.c new file mode 100644 index 00000000000..3753b20a2fe --- /dev/null +++ b/lib/sqlalchemy/util/_collections_cy.c @@ -0,0 +1,24882 @@ +/* Generated by Cython 3.0.11 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "name": "sqlalchemy.util._collections_cy", + "sources": [ + "lib/sqlalchemy/util/_collections_cy.py" + ] + }, + "module_name": "sqlalchemy.util._collections_cy" +} +END: Cython Metadata */ + +#ifndef PY_SSIZE_T_CLEAN +#define PY_SSIZE_T_CLEAN +#endif /* PY_SSIZE_T_CLEAN */ +#if defined(CYTHON_LIMITED_API) && 0 + #ifndef Py_LIMITED_API + #if CYTHON_LIMITED_API+0 > 0x03030000 + #define Py_LIMITED_API CYTHON_LIMITED_API + #else + #define Py_LIMITED_API 0x03030000 + #endif + #endif +#endif + +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.7+ or Python 3.3+. +#else +#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API +#define __PYX_EXTRA_ABI_MODULE_NAME "limited" +#else +#define __PYX_EXTRA_ABI_MODULE_NAME "" +#endif +#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME +#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI +#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." +#define CYTHON_HEX_VERSION 0x03000BF0 +#define CYTHON_FUTURE_DIVISION 1 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #define HAVE_LONG_LONG +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX +#if defined(GRAALVM_PYTHON) + /* For very preliminary testing purposes. Most variables are set the same as PyPy. 
+ The existence of this section does not imply that anything works or is even tested */ + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 1 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(PYPY_VERSION) + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + 
#endif + #if PY_VERSION_HEX < 0x03090000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(CYTHON_LIMITED_API) + #ifdef Py_LIMITED_API + #undef __PYX_LIMITED_VERSION_HEX + #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API + #endif + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 1 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_CLINE_IN_TRACEBACK + #define CYTHON_CLINE_IN_TRACEBACK 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 1 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #endif + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 1 + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #ifndef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef 
CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 1 + #endif + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 + #endif +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #ifndef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) + #endif + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #if PY_VERSION_HEX < 0x030400a1 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #elif 
!defined(CYTHON_USE_TP_FINALIZE) + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #if PY_VERSION_HEX < 0x030600B1 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #elif !defined(CYTHON_USE_DICT_VERSIONS) + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) + #endif + #if PY_VERSION_HEX < 0x030700A3 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #elif !defined(CYTHON_USE_EXC_INFO_STACK) + #define CYTHON_USE_EXC_INFO_STACK 1 + #endif + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 1 + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if !defined(CYTHON_VECTORCALL) +#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) +#endif +#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(maybe_unused) + #define CYTHON_UNUSED [[maybe_unused]] + #endif + #endif + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR + #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_USE_CPP_STD_MOVE + #if defined(__cplusplus) && (\ + __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) + #define CYTHON_USE_CPP_STD_MOVE 1 + #else + #define CYTHON_USE_CPP_STD_MOVE 0 + #endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned short uint16_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 
uint32_t; + #endif + #endif + #if _MSC_VER < 1300 + #ifdef _WIN64 + typedef unsigned long long __pyx_uintptr_t; + #else + typedef unsigned int __pyx_uintptr_t; + #endif + #else + #ifdef _WIN64 + typedef unsigned __int64 __pyx_uintptr_t; + #else + typedef unsigned __int32 __pyx_uintptr_t; + #endif + #endif +#else + #include + typedef uintptr_t __pyx_uintptr_t; +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif +#ifdef __cplusplus + template + struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; + #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) +#else + #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) +#endif +#if CYTHON_COMPILING_IN_PYPY == 1 + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) +#else + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) +#endif +#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_DefaultClassType PyClass_Type + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_DefaultClassType PyType_Type +#if CYTHON_COMPILING_IN_LIMITED_API + static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *exception_table = NULL; + PyObject *types_module=NULL, *code_type=NULL, *result=NULL; + #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 + PyObject *version_info; + PyObject *py_minor_version = NULL; + #endif + long minor_version = 0; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 + minor_version = 11; + #else + if (!(version_info = PySys_GetObject("version_info"))) goto end; + if 
(!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; + minor_version = PyLong_AsLong(py_minor_version); + Py_DECREF(py_minor_version); + if (minor_version == -1 && PyErr_Occurred()) goto end; + #endif + if (!(types_module = PyImport_ImportModule("types"))) goto end; + if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; + if (minor_version <= 7) { + (void)p; + result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else if (minor_version <= 10) { + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else { + if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); + } + end: + Py_XDECREF(code_type); + Py_XDECREF(exception_table); + Py_XDECREF(types_module); + if (type) { + PyErr_Restore(type, value, traceback); + } + return result; + } + #ifndef CO_OPTIMIZED + #define CO_OPTIMIZED 0x0001 + #endif + #ifndef CO_NEWLOCALS + #define CO_NEWLOCALS 0x0002 + #endif + #ifndef CO_VARARGS + #define CO_VARARGS 0x0004 + #endif + #ifndef CO_VARKEYWORDS + #define CO_VARKEYWORDS 0x0008 + #endif + #ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x0200 + #endif + #ifndef CO_GENERATOR + #define CO_GENERATOR 0x0020 + #endif + #ifndef CO_COROUTINE + #define CO_COROUTINE 0x0080 + #endif +#elif PY_VERSION_HEX >= 0x030B0000 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyCodeObject *result; + PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); + if (!empty_bytes) return NULL; + result = + #if PY_VERSION_HEX >= 0x030C0000 + PyUnstable_Code_NewWithPosOnlyArgs + #else + PyCode_NewWithPosOnlyArgs + #endif + (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); + Py_DECREF(empty_bytes); + return result; + } +#elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif +#endif +#if PY_VERSION_HEX >= 0x030900A4 || defined(Py_IS_TYPE) + #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) +#else + #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) + #define __Pyx_Py_Is(x, y) Py_Is(x, y) +#else + #define __Pyx_Py_Is(x, y) ((x) == (y)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) + #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) +#else + #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) + #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) +#else + #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) + #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) +#else + #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) +#endif +#define 
__Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) +#if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) +#else + #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) +#endif +#ifndef CO_COROUTINE + #define CO_COROUTINE 0x80 +#endif +#ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x200 +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef Py_TPFLAGS_SEQUENCE + #define Py_TPFLAGS_SEQUENCE 0 +#endif +#ifndef Py_TPFLAGS_MAPPING + #define Py_TPFLAGS_MAPPING 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #if PY_VERSION_HEX >= 0x030d00A4 + # define __Pyx_PyCFunctionFast PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords + #else + # define __Pyx_PyCFunctionFast _PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #endif +#endif +#if CYTHON_METH_FASTCALL + #define __Pyx_METH_FASTCALL METH_FASTCALL + #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast + #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords +#else + #define __Pyx_METH_FASTCALL METH_VARARGS + #define __Pyx_PyCFunction_FastCall PyCFunction + #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords +#endif +#if CYTHON_VECTORCALL + #define __pyx_vectorcallfunc vectorcallfunc + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET + #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) +#elif CYTHON_BACKPORT_VECTORCALL + typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, + size_t nargsf, PyObject *kwnames); + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) +#else + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) +#endif +#if PY_MAJOR_VERSION >= 0x030900B1 +#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) +#else +#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) +#endif +#define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) +#elif !CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) +#endif +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) +static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { + return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? 
NULL : ((PyCFunctionObject*)func)->m_self; +} +#endif +static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) { +#if CYTHON_COMPILING_IN_LIMITED_API + return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; +#else + return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +#endif +} +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) +#if __PYX_LIMITED_VERSION_HEX < 0x030900B1 + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) + typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); +#else + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) + #define __Pyx_PyCMethod PyCMethod +#endif +#ifndef METH_METHOD + #define METH_METHOD 0x200 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyThreadState_Current PyThreadState_Get() +#elif !CYTHON_FAST_THREAD_STATE + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) +{ + void *result; + result = PyModule_GetState(op); + if (!result) + Py_FatalError("Couldn't find the module state"); + return result; +} +#endif +#define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) +#else + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} 
+#endif +#if PY_MAJOR_VERSION < 3 + #if CYTHON_COMPILING_IN_PYPY + #if PYPY_VERSION_NUM < 0x07030600 + #if defined(__cplusplus) && __cplusplus >= 201402L + [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] + #elif defined(__GNUC__) || defined(__clang__) + __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) + #elif defined(_MSC_VER) + __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) + #endif + static CYTHON_INLINE int PyGILState_Check(void) { + return 0; + } + #else // PYPY_VERSION_NUM < 0x07030600 + #endif // PYPY_VERSION_NUM < 0x07030600 + #else + static CYTHON_INLINE int PyGILState_Check(void) { + PyThreadState * tstate = _PyThreadState_Current; + return tstate && (tstate == PyGILState_GetThisThreadState()); + } + #endif +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { + PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); + if (res == NULL) PyErr_Clear(); + return res; +} +#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) +#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#else +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { +#if CYTHON_COMPILING_IN_PYPY + return PyDict_GetItem(dict, name); +#else + PyDictEntry *ep; + PyDictObject *mp = (PyDictObject*) dict; + long hash = ((PyStringObject *) name)->ob_shash; + assert(hash != -1); + ep = (mp->ma_lookup)(mp, name, hash); + if (ep == NULL) { + return NULL; + } + return ep->me_value; +#endif +} +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#endif +#if CYTHON_USE_TYPE_SLOTS + #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) + #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) + #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) +#else + #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) + #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) + #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) +#else + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) +#endif +#if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 +#define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ + PyTypeObject *type = Py_TYPE((PyObject*)obj);\ + assert(__Pyx_PyType_HasFeature(type, 
Py_TPFLAGS_HEAPTYPE));\ + PyObject_GC_Del(obj);\ + Py_DECREF(type);\ +} +#else +#define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) + #define __Pyx_PyUnicode_DATA(u) ((void*)u) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) +#elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_READY(op) (0) + #else + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #endif + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) + #else + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) + #endif + #endif +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535U : 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #if !defined(PyUnicode_DecodeUnicodeEscape) + #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) + #endif + #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) + #undef PyUnicode_Contains + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) + #endif + #if !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) + #endif + #if !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) + #endif +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#ifndef PyObject_Unicode + #define PyObject_Unicode PyObject_Str +#endif +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#if CYTHON_COMPILING_IN_CPYTHON + #define __Pyx_PySequence_ListKeepNew(obj)\ + (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? 
__Pyx_NewRef(obj) : PySequence_List(obj)) +#else + #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) +#endif +#if PY_VERSION_HEX >= 0x030900A4 + #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) +#else + #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) +#else + #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) + #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) +#endif +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) +#else + static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { + PyObject *module = PyImport_AddModule(name); + Py_XINCREF(module); + return module; + } +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define __Pyx_Py3Int_Check(op) PyLong_Check(op) + #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#else + #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) + #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + 
#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) + #if !defined(_USE_MATH_DEFINES) + #define _USE_MATH_DEFINES + #endif +#endif +#include <math.h> +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + +#define __PYX_MARK_ERR_POS(f_index, lineno) \ + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } +#define __PYX_ERR(f_index, lineno, Ln_error) \ + { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } + +#ifdef CYTHON_EXTERN_C + #undef __PYX_EXTERN_C + #define __PYX_EXTERN_C CYTHON_EXTERN_C +#elif defined(__PYX_EXTERN_C) + #ifdef _MSC_VER + #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") + #else + #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead. + #endif +#else + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__sqlalchemy__util___collections_cy +#define __PYX_HAVE_API__sqlalchemy__util___collections_cy +/* Early includes */ +#ifdef _OPENMP +#include <omp.h> +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include <cstdlib> + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && 
__STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s); +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char*); +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +#define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_VERSION_HEX >= 0x030C00A7 + #ifndef _PyLong_SIGN_MASK + #define _PyLong_SIGN_MASK 3 + #endif + #ifndef _PyLong_NON_SIZE_BITS + #define _PyLong_NON_SIZE_BITS 3 + #endif + #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) + #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) + #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) + #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) + #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_SignedDigitCount(x)\ + ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) + #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) + #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) + #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) + #else + #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) + #endif + typedef Py_ssize_t __Pyx_compact_pylong; + typedef size_t __Pyx_compact_upylong; + #else + #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) + #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) + #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) + #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) + #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) + #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) + #define __Pyx_PyLong_CompactValue(x)\ + ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? 
-(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0])) + typedef sdigit __Pyx_compact_pylong; + typedef digit __Pyx_compact_upylong; + #endif + #if PY_VERSION_HEX >= 0x030C00A5 + #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit) + #else + #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit) + #endif +#endif +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +#include +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = (char) c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#include +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +#if !CYTHON_USE_MODULE_STATE 
+static PyObject *__pyx_m = NULL; +#endif +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm = __FILE__; +static const char *__pyx_filename; + +/* #### Code section: filename_table ### */ + +static const char *__pyx_f[] = { + "lib/sqlalchemy/util/_collections_cy.py", + "", +}; +/* #### Code section: utility_code_proto_before_types ### */ +/* ForceInitThreads.proto */ +#ifndef __PYX_FORCE_INIT_THREADS + #define __PYX_FORCE_INIT_THREADS 0 +#endif + +/* #### Code section: numeric_typedefs ### */ +/* #### Code section: complex_type_declarations ### */ +/* #### Code section: type_declarations ### */ + +/*--- Type declarations ---*/ +struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet; +struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet; + +/* "sqlalchemy/util/_collections_cy.py":80 + * + * @cython.cclass + * class OrderedSet(Set[_T]): # <<<<<<<<<<<<<< + * """A set implementation that maintains insertion order.""" + * + */ +struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet { + PySetObject __pyx_base; + struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_vtab; + PyObject *_list; +}; + + +/* "sqlalchemy/util/_collections_cy.py":274 + * + * @cython.cclass + * class IdentitySet: # <<<<<<<<<<<<<< + * """A set that considers only object id() for uniqueness. + * + */ +struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet { + PyObject_HEAD + struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_vtab; + PyObject *_members; +}; + + + +/* "sqlalchemy/util/_collections_cy.py":80 + * + * @cython.cclass + * class OrderedSet(Set[_T]): # <<<<<<<<<<<<<< + * """A set implementation that maintains insertion order.""" + * + */ + +struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_OrderedSet { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *(*_from_list)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *, PyObject *); + PyObject *(*symmetric_difference)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *, PyObject *, int __pyx_skip_dispatch); + PyObject *(*symmetric_difference_update)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *, PyObject *, int __pyx_skip_dispatch); +}; +static struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_OrderedSet; +static CYTHON_INLINE struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *, PyObject *); + + +/* "sqlalchemy/util/_collections_cy.py":274 + * + * @cython.cclass + * class IdentitySet: # <<<<<<<<<<<<<< + * """A set that considers only object id() for uniqueness. 
+ * + */ + +struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet { + PyObject *(*remove)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch); + int (*issubset)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch); + int (*issuperset)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch); + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*__pyx_union)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch); + PyObject *(*update)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch); + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*difference)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch); + PyObject *(*difference_update)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch); + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*intersection)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch); + PyObject *(*intersection_update)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch); + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*symmetric_difference)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch); + PyObject *(*symmetric_difference_update)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch); + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*copy)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, int __pyx_skip_dispatch); +}; +static struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_IdentitySet; +/* #### Code section: utility_code_proto ### */ + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, Py_ssize_t); + void (*DECREF)(void*, PyObject*, Py_ssize_t); + void (*GOTREF)(void*, PyObject*, Py_ssize_t); + void (*GIVEREF)(void*, PyObject*, Py_ssize_t); + void* (*SetupContext)(const char*, Py_ssize_t, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + } + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = 
__Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)) + #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext() +#endif + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContextNogil() + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_Py_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; Py_XDECREF(tmp);\ + } while (0) +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#if PY_VERSION_HEX >= 0x030C00A6 +#define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) +#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? 
(PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL) +#else +#define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL) +#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type) +#endif +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL) +#define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6 +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* PyObjectGetAttrStrNoError.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* TupleAndListFromArray.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n); +static CYTHON_INLINE PyObject* __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n); +#endif + +/* IncludeStringH.proto */ +#include <string.h> + +/* BytesEquals.proto */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); + +/* UnicodeEquals.proto */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); + +/* fastcall.proto */ +#if CYTHON_AVOID_BORROWED_REFS + #define __Pyx_Arg_VARARGS(args, i) PySequence_GetItem(args, i) +#elif CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i) +#else + #define __Pyx_Arg_VARARGS(args, i) PyTuple_GetItem(args, i) +#endif +#if CYTHON_AVOID_BORROWED_REFS + #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) + #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) +#else + #define __Pyx_Arg_NewRef_VARARGS(arg) arg + #define 
__Pyx_Arg_XDECREF_VARARGS(arg) +#endif +#define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) +#define __Pyx_KwValues_VARARGS(args, nargs) NULL +#define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s) +#define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw) +#if CYTHON_METH_FASTCALL + #define __Pyx_Arg_FASTCALL(args, i) args[i] + #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) + #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) + static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 + CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); + #else + #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) + #endif + #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs + to have the same reference counting */ + #define __Pyx_Arg_XDECREF_FASTCALL(arg) +#else + #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS + #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS + #define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS + #define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS + #define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS + #define __Pyx_Arg_NewRef_FASTCALL(arg) __Pyx_Arg_NewRef_VARARGS(arg) + #define __Pyx_Arg_XDECREF_FASTCALL(arg) __Pyx_Arg_XDECREF_VARARGS(arg) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS +#define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start) +#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start) +#else +#define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop) +#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop) +#endif + +/* pyfrozenset_new.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it); + +/* PySetContains.proto */ +static CYTHON_INLINE int __Pyx_PySet_ContainsTF(PyObject* key, PyObject* set, int eq); + +/* ListCompAppend.proto */ +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) { + PyListObject* L = (PyListObject*) list; + Py_ssize_t len = Py_SIZE(list); + if (likely(L->allocated > len)) { + Py_INCREF(x); + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 + L->ob_item[len] = x; + #else + PyList_SET_ITEM(list, len, x); + #endif + __Pyx_SET_SIZE(list, len + 1); + return 0; + } + return PyList_Append(list, x); +} +#else +#define __Pyx_ListComp_Append(L,x) PyList_Append(L,x) +#endif + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, + const char* function_name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#if !CYTHON_VECTORCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + 
__Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif +#if !CYTHON_VECTORCALL +#if PY_VERSION_HEX >= 0x03080000 + #include "frameobject.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif + #define __Pxy_PyFrame_Initialize_Offsets() + #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) +#else + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif +#endif +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectFastCall.proto */ +#define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); + +/* KeywordStringCheck.proto */ +static int __Pyx_CheckKeywordStrings(PyObject *kw, const char* function_name, int kw_allowed); + +/* PySequenceContains.proto */ +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/* ListAppend.proto */ +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { + PyListObject* L = (PyListObject*) list; + Py_ssize_t len = Py_SIZE(list); + if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { + Py_INCREF(x); + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 + L->ob_item[len] = x; + #else + PyList_SET_ITEM(list, len, x); + #endif + __Pyx_SET_SIZE(list, len + 1); + return 0; + } + return PyList_Append(list, x); +} +#else +#define __Pyx_PyList_Append(L,x) PyList_Append(L,x) +#endif + +/* py_set_remove.proto */ +static CYTHON_INLINE int __Pyx_PySet_Remove(PyObject *set, PyObject *key); + +/* PyObjectCallNoArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* PyObjectGetMethod.proto */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); + +/* PyObjectCallMethod0.proto */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); + +/* pop.proto */ +static CYTHON_INLINE PyObject* __Pyx__PyObject_Pop(PyObject* L); +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE PyObject* __Pyx_PyList_Pop(PyObject* L); +#define __Pyx_PyObject_Pop(L) (likely(PyList_CheckExact(L)) ?\ + __Pyx_PyList_Pop(L) : __Pyx__PyObject_Pop(L)) +#else +#define __Pyx_PyList_Pop(L) __Pyx__PyObject_Pop(L) +#define __Pyx_PyObject_Pop(L) __Pyx__PyObject_Pop(L) +#endif + +/* UnpackUnboundCMethod.proto */ +typedef struct { + PyObject *type; + PyObject **method_name; + PyCFunction func; + PyObject *method; + int flag; +} __Pyx_CachedCFunction; + +/* CallUnboundCMethod0.proto */ +static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CallUnboundCMethod0(cfunc, self)\ + (likely((cfunc)->func) ?\ + (likely((cfunc)->flag == METH_NOARGS) ? (*((cfunc)->func))(self, NULL) :\ + (PY_VERSION_HEX >= 0x030600B1 && likely((cfunc)->flag == METH_FASTCALL) ?\ + (PY_VERSION_HEX >= 0x030700A0 ?\ + (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)(cfunc)->func)(self, &__pyx_empty_tuple, 0) :\ + (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, &__pyx_empty_tuple, 0, NULL)) :\ + (PY_VERSION_HEX >= 0x030700A0 && (cfunc)->flag == (METH_FASTCALL | METH_KEYWORDS) ?\ + (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, &__pyx_empty_tuple, 0, NULL) :\ + (likely((cfunc)->flag == (METH_VARARGS | METH_KEYWORDS)) ? ((*(PyCFunctionWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, __pyx_empty_tuple, NULL)) :\ + ((cfunc)->flag == METH_VARARGS ? 
(*((cfunc)->func))(self, __pyx_empty_tuple) :\ + __Pyx__CallUnboundCMethod0(cfunc, self)))))) :\ + __Pyx__CallUnboundCMethod0(cfunc, self)) +#else +#define __Pyx_CallUnboundCMethod0(cfunc, self) __Pyx__CallUnboundCMethod0(cfunc, self) +#endif + +/* GetTopmostException.proto */ +#if CYTHON_USE_EXC_INFO_STACK && CYTHON_FAST_THREAD_STATE +static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); +#endif + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* PyObjectFormatAndDecref.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatSimpleAndDecref(PyObject* s, PyObject* f); +static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatAndDecref(PyObject* s, PyObject* f); + +/* JoinPyUnicode.proto */ +static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, + Py_UCS4 max_char); + +/* RaiseUnexpectedTypeError.proto */ +static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); + +/* PyDictVersioning.proto */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/* GetAttr.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); + +/* HasAttr.proto */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); + +/* GetAttr3.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) do {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + 
(likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +#define __Pyx_GetModuleGlobalNameUncached(var, name) do {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* PyDictContains.proto */ +static CYTHON_INLINE int __Pyx_PyDict_ContainsTF(PyObject* item, PyObject* dict, int eq) { + int result = PyDict_Contains(dict, item); + return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); +} + +/* py_dict_clear.proto */ +#define __Pyx_PyDict_Clear(d) (PyDict_Clear(d), 0) + +/* ExtTypeTest.proto */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* py_dict_keys.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyDict_Keys(PyObject* d); + +/* CallUnboundCMethod1.proto */ +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); +#else +#define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg) +#endif + +/* IterFinish.proto */ +static CYTHON_INLINE int __Pyx_IterFinish(void); + +/* RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* UnpackItemEndCheck.proto */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); + +/* RaiseNoneIterError.proto */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +/* UnpackTupleError.proto */ +static void __Pyx_UnpackTupleError(PyObject *, Py_ssize_t index); + +/* UnpackTuple2.proto */ +#define __Pyx_unpack_tuple2(tuple, value1, value2, is_tuple, has_known_size, decref_tuple)\ + (likely(is_tuple || PyTuple_Check(tuple)) ?\ + (likely(has_known_size || PyTuple_GET_SIZE(tuple) == 2) ?\ + __Pyx_unpack_tuple2_exact(tuple, value1, value2, decref_tuple) :\ + (__Pyx_UnpackTupleError(tuple, 2), -1)) :\ + __Pyx_unpack_tuple2_generic(tuple, value1, value2, has_known_size, decref_tuple)) +static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( + PyObject* tuple, PyObject** value1, PyObject** value2, int decref_tuple); +static int __Pyx_unpack_tuple2_generic( + PyObject* tuple, PyObject** value1, PyObject** value2, int has_known_size, int decref_tuple); + +/* dict_iter.proto */ +static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* dict, int is_dict, PyObject* method_name, + Py_ssize_t* p_orig_length, int* p_is_dict); +static CYTHON_INLINE int __Pyx_dict_iter_next(PyObject* dict_or_iter, Py_ssize_t orig_length, Py_ssize_t* ppos, + PyObject** pkey, PyObject** pvalue, PyObject** pitem, int is_dict); + +/* ArgTypeTest.proto */ +#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ + 
((likely(__Pyx_IS_TYPE(obj, type) | (none_allowed && (obj == Py_None)))) ? 1 :\ + __Pyx__ArgTypeTest(obj, type, name, exact)) +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); + +/* py_dict_values.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyDict_Values(PyObject* d); + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* PyTrashcan.proto */ +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03080000 +#define __Pyx_TRASHCAN_BEGIN Py_TRASHCAN_BEGIN +#define __Pyx_TRASHCAN_END Py_TRASHCAN_END +#elif CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x02070400 +#define __Pyx_TRASHCAN_BEGIN_CONDITION(op, cond)\ + do {\ + PyThreadState *_tstate = NULL;\ + if (cond) {\ + _tstate = PyThreadState_GET();\ + if (_tstate->trash_delete_nesting >= PyTrash_UNWIND_LEVEL) {\ + _PyTrash_thread_deposit_object((PyObject*)(op));\ + break;\ + }\ + ++_tstate->trash_delete_nesting;\ + } +#define __Pyx_TRASHCAN_END\ + if (_tstate) {\ + --_tstate->trash_delete_nesting;\ + if (_tstate->trash_delete_later && _tstate->trash_delete_nesting <= 0)\ + _PyTrash_thread_destroy_chain();\ + }\ + } while (0); +#define __Pyx_TRASHCAN_BEGIN(op, dealloc) __Pyx_TRASHCAN_BEGIN_CONDITION(op,\ + __Pyx_PyObject_GetSlot(op, tp_dealloc, destructor) == (destructor)(dealloc)) +#else +#define __Pyx_TRASHCAN_BEGIN(op, dealloc) +#define __Pyx_TRASHCAN_END +#endif + +/* IncludeStructmemberH.proto */ +#include <structmember.h> + +/* FixUpExtensionType.proto */ +#if CYTHON_USE_TYPE_SPECS +static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type); +#endif + +/* FormatTypeName.proto */ +#if CYTHON_COMPILING_IN_LIMITED_API +typedef PyObject *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%U" +static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp); +#define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) +#else +typedef const char *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%.200s" +#define __Pyx_PyType_GetName(tp) ((tp)->tp_name) +#define __Pyx_DECREF_TypeName(obj) +#endif + +/* ValidateExternBase.proto */ +static int __Pyx_validate_extern_base(PyTypeObject *base); + +/* ValidateBasesTuple.proto */ +#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS +static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases); +#endif + +/* PyType_Ready.proto */ +CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t); + +/* PyObject_GenericGetAttrNoDict.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr +#endif + +/* PyObject_GenericGetAttr.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr +#endif + +/* SetVTable.proto */ +static int __Pyx_SetVtable(PyTypeObject* typeptr , void* vtable); + +/* GetVTable.proto */ +static void* __Pyx_GetVtable(PyTypeObject *type); + +/* MergeVTables.proto */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_MergeVtables(PyTypeObject *type); +#endif + +/* SetupReduce.proto */ +#if 
!CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_setup_reduce(PyObject* type_obj); +#endif + +/* FetchSharedCythonModule.proto */ +static PyObject *__Pyx_FetchSharedCythonABIModule(void); + +/* FetchCommonType.proto */ +#if !CYTHON_USE_TYPE_SPECS +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); +#else +static PyTypeObject* __Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases); +#endif + +/* PyMethodNew.proto */ +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { + PyObject *typesModule=NULL, *methodType=NULL, *result=NULL; + CYTHON_UNUSED_VAR(typ); + if (!self) + return __Pyx_NewRef(func); + typesModule = PyImport_ImportModule("types"); + if (!typesModule) return NULL; + methodType = PyObject_GetAttrString(typesModule, "MethodType"); + Py_DECREF(typesModule); + if (!methodType) return NULL; + result = PyObject_CallFunctionObjArgs(methodType, func, self, NULL); + Py_DECREF(methodType); + return result; +} +#elif PY_MAJOR_VERSION >= 3 +static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { + CYTHON_UNUSED_VAR(typ); + if (!self) + return __Pyx_NewRef(func); + return PyMethod_New(func, self); +} +#else + #define __Pyx_PyMethod_New PyMethod_New +#endif + +/* PyVectorcallFastCallDict.proto */ +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw); +#endif + +/* CythonFunctionShared.proto */ +#define __Pyx_CyFunction_USED +#define __Pyx_CYFUNCTION_STATICMETHOD 0x01 +#define __Pyx_CYFUNCTION_CLASSMETHOD 0x02 +#define __Pyx_CYFUNCTION_CCLASS 0x04 +#define __Pyx_CYFUNCTION_COROUTINE 0x08 +#define __Pyx_CyFunction_GetClosure(f)\ + (((__pyx_CyFunctionObject *) (f))->func_closure) +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_CyFunction_GetClassObj(f)\ + (((__pyx_CyFunctionObject *) (f))->func_classobj) +#else + #define __Pyx_CyFunction_GetClassObj(f)\ + ((PyObject*) ((PyCMethodObject *) (f))->mm_class) +#endif +#define __Pyx_CyFunction_SetClassObj(f, classobj)\ + __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj)) +#define __Pyx_CyFunction_Defaults(type, f)\ + ((type *)(((__pyx_CyFunctionObject *) (f))->defaults)) +#define __Pyx_CyFunction_SetDefaultsGetter(f, g)\ + ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g) +typedef struct { +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject_HEAD + PyObject *func; +#elif PY_VERSION_HEX < 0x030900B1 + PyCFunctionObject func; +#else + PyCMethodObject func; +#endif +#if CYTHON_BACKPORT_VECTORCALL + __pyx_vectorcallfunc func_vectorcall; +#endif +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API + PyObject *func_weakreflist; +#endif + PyObject *func_dict; + PyObject *func_name; + PyObject *func_qualname; + PyObject *func_doc; + PyObject *func_globals; + PyObject *func_code; + PyObject *func_closure; +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + PyObject *func_classobj; +#endif + void *defaults; + int defaults_pyobjects; + size_t defaults_size; + int flags; + PyObject *defaults_tuple; + PyObject *defaults_kwdict; + PyObject *(*defaults_getter)(PyObject *); + PyObject *func_annotations; + PyObject *func_is_coroutine; +} __pyx_CyFunctionObject; +#undef __Pyx_CyOrPyCFunction_Check +#define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType) +#define 
__Pyx_CyOrPyCFunction_Check(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type) +#define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType) +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc); +#undef __Pyx_IsSameCFunction +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCyOrCFunction(func, cfunc) +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj); +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, + size_t size, + int pyobjects); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, + PyObject *tuple); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, + PyObject *dict); +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, + PyObject *dict); +static int __pyx_CyFunction_init(PyObject *module); +#if CYTHON_METH_FASTCALL +static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +#if CYTHON_BACKPORT_VECTORCALL +#define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall) +#else +#define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall) +#endif +#endif + +/* CythonFunction.proto */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); + +/* ClassMethod.proto */ +#include "descrobject.h" +CYTHON_UNUSED static PyObject* __Pyx_Method_ClassMethod(PyObject *method); + +/* GetNameInClass.proto */ +#define __Pyx_GetNameInClass(var, nmspace, name) (var) = __Pyx__GetNameInClass(nmspace, name) +static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name); + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +#if !CYTHON_COMPILING_IN_LIMITED_API +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); +#endif + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* GCCDiagnostics.proto */ +#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#define __Pyx_HAS_GCC_DIAGNOSTIC +#endif + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_PY_LONG_LONG(unsigned PY_LONG_LONG value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static unsigned long __Pyx_get_runtime_version(void); +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer); + +/* FunctionExport.proto */ +static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +/* #### Code section: module_declarations ### */ +static CYTHON_INLINE struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_new_list); /* proto*/ +static PyObject 
*__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_skip_dispatch); /* proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_skip_dispatch); /* proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_remove(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value, int __pyx_skip_dispatch); /* proto*/ +static int __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issubset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch); /* proto*/ +static int __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issuperset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch); /* proto*/ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_union(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch); /* proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch); /* proto*/ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch); /* proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch); /* proto*/ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch); /* proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch); /* proto*/ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch); /* proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch); /* proto*/ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_copy(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, int __pyx_skip_dispatch); /* proto*/ + +/* Module 
declarations from "cython" */ + +/* Module declarations from "sqlalchemy.util._collections_cy" */ +static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(PyObject *); /*proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_unique_list(PyObject *, int __pyx_skip_dispatch); /*proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_OrderedSet__set_state(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *, PyObject *); /*proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_IdentitySet__set_state(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *); /*proto*/ +/* #### Code section: typeinfo ### */ +/* #### Code section: before_global_var ### */ +#define __Pyx_MODULE_NAME "sqlalchemy.util._collections_cy" +extern int __pyx_module_is_main_sqlalchemy__util___collections_cy; +int __pyx_module_is_main_sqlalchemy__util___collections_cy = 0; + +/* Implementation of "sqlalchemy.util._collections_cy" */ +/* #### Code section: global_var ### */ +static PyObject *__pyx_builtin_IndexError; +static PyObject *__pyx_builtin_KeyError; +static PyObject *__pyx_builtin_NotImplemented; +static PyObject *__pyx_builtin_TypeError; +/* #### Code section: string_decls ### */ +static const char __pyx_k_S[] = "_S"; +static const char __pyx_k_T[] = "_T"; +static const char __pyx_k_a[] = "a"; +static const char __pyx_k_d[] = "d"; +static const char __pyx_k__2[] = "("; +static const char __pyx_k__3[] = ")"; +static const char __pyx_k__6[] = "."; +static const char __pyx_k__8[] = "?"; +static const char __pyx_k_gc[] = "gc"; +static const char __pyx_k_Any[] = "Any"; +static const char __pyx_k_Set[] = "Set"; +static const char __pyx_k_add[] = "add"; +static const char __pyx_k_cls[] = "cls"; +static const char __pyx_k_key[] = "key"; +static const char __pyx_k_len[] = "__len__"; +static const char __pyx_k_new[] = "__new__"; +static const char __pyx_k_pop[] = "pop"; +static const char __pyx_k_pos[] = "pos"; +static const char __pyx_k_seq[] = "seq"; +static const char __pyx_k_str[] = "__str__"; +static const char __pyx_k_Dict[] = "Dict"; +static const char __pyx_k_List[] = "List"; +static const char __pyx_k_None[] = "None"; +static const char __pyx_k_Self[] = "Self"; +static const char __pyx_k_bool[] = "bool"; +static const char __pyx_k_copy[] = "copy"; +static const char __pyx_k_dict[] = "__dict__"; +static const char __pyx_k_init[] = "__init__"; +static const char __pyx_k_keys[] = "keys"; +static const char __pyx_k_list[] = "_list"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_pair[] = "pair"; +static const char __pyx_k_repr[] = "__repr__"; +static const char __pyx_k_self[] = "self"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_Tuple[] = "Tuple"; +static const char __pyx_k_Union[] = "Union"; +static const char __pyx_k_class[] = "__class__"; +static const char __pyx_k_clear[] = "clear"; +static const char __pyx_k_items[] = "items"; +static const char __pyx_k_other[] = "other"; +static const char __pyx_k_slots[] = "__slots__"; +static const char __pyx_k_state[] = "state"; +static const char __pyx_k_union[] = "union"; +static const char __pyx_k_value[] = "value"; +static const char __pyx_k_copy_2[] = "__copy__"; +static const char __pyx_k_dict_2[] = "_dict"; +static const char __pyx_k_enable[] = "enable"; +static const char __pyx_k_import[] = "__import__"; +static 
const char __pyx_k_insert[] = "insert"; +static const char __pyx_k_pickle[] = "pickle"; +static const char __pyx_k_reduce[] = "__reduce__"; +static const char __pyx_k_remove[] = "remove"; +static const char __pyx_k_result[] = "result"; +static const char __pyx_k_return[] = "return"; +static const char __pyx_k_typing[] = "typing"; +static const char __pyx_k_update[] = "update"; +static const char __pyx_k_values[] = "values"; +static const char __pyx_k_TypeVar[] = "TypeVar"; +static const char __pyx_k_disable[] = "disable"; +static const char __pyx_k_discard[] = "discard"; +static const char __pyx_k_element[] = "element"; +static const char __pyx_k_members[] = "_members"; +static const char __pyx_k_popitem[] = "popitem"; +static const char __pyx_k_Hashable[] = "Hashable"; +static const char __pyx_k_Iterable[] = "Iterable"; +static const char __pyx_k_Iterator[] = "Iterator"; +static const char __pyx_k_KeyError[] = "KeyError"; +static const char __pyx_k_NoReturn[] = "NoReturn"; +static const char __pyx_k_Optional[] = "Optional"; +static const char __pyx_k_getstate[] = "__getstate__"; +static const char __pyx_k_issubset[] = "issubset"; +static const char __pyx_k_iterable[] = "iterable"; +static const char __pyx_k_pyx_type[] = "__pyx_type"; +static const char __pyx_k_setstate[] = "__setstate__"; +static const char __pyx_k_TypeError[] = "TypeError"; +static const char __pyx_k_isenabled[] = "isenabled"; +static const char __pyx_k_iterables[] = "iterables"; +static const char __pyx_k_other_set[] = "other_set"; +static const char __pyx_k_pyx_state[] = "__pyx_state"; +static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; +static const char __pyx_k_type_Self[] = "type[Self]"; +static const char __pyx_k_IndexError[] = "IndexError"; +static const char __pyx_k_OrderedSet[] = "OrderedSet"; +static const char __pyx_k_difference[] = "difference"; +static const char __pyx_k_issuperset[] = "issuperset"; +static const char __pyx_k_pyx_result[] = "__pyx_result"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_AbstractSet[] = "AbstractSet"; +static const char __pyx_k_IdentitySet[] = "IdentitySet"; +static const char __pyx_k_Iterable__S[] = "Iterable[_S]"; +static const char __pyx_k_Iterable__T[] = "Iterable[_T]"; +static const char __pyx_k_PickleError[] = "PickleError"; +static const char __pyx_k_is_compiled[] = "_is_compiled"; +static const char __pyx_k_unique_list[] = "unique_list"; +static const char __pyx_k_Iterable_Any[] = "Iterable[Any]"; +static const char __pyx_k_intersection[] = "intersection"; +static const char __pyx_k_is_coroutine[] = "_is_coroutine"; +static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; +static const char __pyx_k_stringsource[] = ""; +static const char __pyx_k_use_setstate[] = "use_setstate"; +static const char __pyx_k_OrderedSet__T[] = "OrderedSet[_T]"; +static const char __pyx_k_class_getitem[] = "__class_getitem__"; +static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; +static const char __pyx_k_NotImplemented[] = "NotImplemented"; +static const char __pyx_k_OrderedSet_add[] = "OrderedSet.add"; +static const char __pyx_k_OrderedSet_pop[] = "OrderedSet.pop"; +static const char __pyx_k_IdentitySet_add[] = "IdentitySet.add"; +static const char __pyx_k_IdentitySet_pop[] = "IdentitySet.pop"; +static const char __pyx_k_OrderedSet_copy[] = "OrderedSet.copy"; +static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; +static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char 
__pyx_k_IdentitySet_copy[] = "IdentitySet.copy"; +static const char __pyx_k_OrderedSet_clear[] = "OrderedSet.clear"; +static const char __pyx_k_OrderedSet_union[] = "OrderedSet.union"; +static const char __pyx_k_IdentitySet_clear[] = "IdentitySet.clear"; +static const char __pyx_k_IdentitySet_union[] = "IdentitySet.union"; +static const char __pyx_k_Iterable_Hashable[] = "Iterable[Hashable]"; +static const char __pyx_k_OrderedSet_insert[] = "OrderedSet.insert"; +static const char __pyx_k_OrderedSet_remove[] = "OrderedSet.remove"; +static const char __pyx_k_OrderedSet_update[] = "OrderedSet.update"; +static const char __pyx_k_cython_Py_ssize_t[] = "cython.Py_ssize_t"; +static const char __pyx_k_difference_update[] = "difference_update"; +static const char __pyx_k_IdentitySet___copy[] = "IdentitySet.__copy__"; +static const char __pyx_k_IdentitySet_remove[] = "IdentitySet.remove"; +static const char __pyx_k_IdentitySet_update[] = "IdentitySet.update"; +static const char __pyx_k_OrderedSet_discard[] = "OrderedSet.discard"; +static const char __pyx_k_asyncio_coroutines[] = "asyncio.coroutines"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_IdentitySet_discard[] = "IdentitySet.discard"; +static const char __pyx_k_intersection_update[] = "intersection_update"; +static const char __pyx_k_IdentitySet_issubset[] = "IdentitySet.issubset"; +static const char __pyx_k_symmetric_difference[] = "symmetric_difference"; +static const char __pyx_k_OrderedSet_difference[] = "OrderedSet.difference"; +static const char __pyx_k_pop_from_an_empty_set[] = "pop from an empty set"; +static const char __pyx_k_IdentitySet_difference[] = "IdentitySet.difference"; +static const char __pyx_k_IdentitySet_issuperset[] = "IdentitySet.issuperset"; +static const char __pyx_k_OrderedSet_Union__T__S[] = "OrderedSet[Union[_T, _S]]"; +static const char __pyx_k_OrderedSet_intersection[] = "OrderedSet.intersection"; +static const char __pyx_k_pyx_unpickle_OrderedSet[] = "__pyx_unpickle_OrderedSet"; +static const char __pyx_k_IdentitySet_intersection[] = "IdentitySet.intersection"; +static const char __pyx_k_pyx_unpickle_IdentitySet[] = "__pyx_unpickle_IdentitySet"; +static const char __pyx_k_OrderedSet___class_getitem[] = "OrderedSet.__class_getitem__"; +static const char __pyx_k_OrderedSet___reduce_cython[] = "OrderedSet.__reduce_cython__"; +static const char __pyx_k_set_objects_are_unhashable[] = "set objects are unhashable"; +static const char __pyx_k_IdentitySet___reduce_cython[] = "IdentitySet.__reduce_cython__"; +static const char __pyx_k_symmetric_difference_update[] = "symmetric_difference_update"; +static const char __pyx_k_OrderedSet___setstate_cython[] = "OrderedSet.__setstate_cython__"; +static const char __pyx_k_OrderedSet_difference_update[] = "OrderedSet.difference_update"; +static const char __pyx_k_IdentitySet___setstate_cython[] = "IdentitySet.__setstate_cython__"; +static const char __pyx_k_IdentitySet_difference_update[] = "IdentitySet.difference_update"; +static const char __pyx_k_OrderedSet_intersection_update[] = "OrderedSet.intersection_update"; +static const char __pyx_k_IdentitySet_intersection_update[] = "IdentitySet.intersection_update"; +static const char __pyx_k_OrderedSet_symmetric_difference[] = "OrderedSet.symmetric_difference"; +static const char __pyx_k_sqlalchemy_util__collections_cy[] = "sqlalchemy.util._collections_cy"; +static const char __pyx_k_IdentitySet_symmetric_difference[] = "IdentitySet.symmetric_difference"; +static const char 
__pyx_k_Incompatible_checksums_0x_x_vs_0[] = "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))"; +static const char __pyx_k_lib_sqlalchemy_util__collections[] = "lib/sqlalchemy/util/_collections_cy.py"; +static const char __pyx_k_OrderedSet_symmetric_difference_2[] = "OrderedSet.symmetric_difference_update"; +static const char __pyx_k_IdentitySet_symmetric_difference_2[] = "IdentitySet.symmetric_difference_update"; +static const char __pyx_k_Incompatible_checksums_0x_x_vs_0_2[] = "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))"; +/* #### Code section: decls ### */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_2unique_list(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_seq); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet___class_getitem__(PyTypeObject *__pyx_v_cls, CYTHON_UNUSED PyObject *__pyx_v_key); /* proto */ +static int __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_2__init__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_d); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_4copy(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_6add(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_element); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_8remove(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_element); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_10pop(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_12insert(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, Py_ssize_t __pyx_v_pos, PyObject *__pyx_v_element); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_14discard(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_element); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_16clear(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_18__getitem__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, Py_ssize_t __pyx_v_key); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_20__iter__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_22__add__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_24__repr__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_26update(struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_iterables); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_28__ior__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_30union(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_32__or__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_34intersection(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_36__and__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_38symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_40__xor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_42difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_44__sub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_46intersection_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_48__iand__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_50symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_52__ixor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_54difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_56__isub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_58__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_60__setstate_cython__(struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static int __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet___init__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_2add(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_4__contains__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_6remove(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_8discard(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_10pop(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_12clear(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_14__eq__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_16__ne__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_18issubset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_20__le__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_22__lt__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_24issuperset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_26__ge__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_28__gt__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_30union(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_32__or__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject 
*__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_34update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_36__ior__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_38difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_40__sub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_42difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_44__isub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_46intersection(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_48__and__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_50intersection_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_52__iand__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_54symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_56__xor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_58symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_60__ixor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_62copy(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet 
*__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_64__copy__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ +static Py_ssize_t __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_66__len__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_68__iter__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ +static Py_hash_t __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_70__hash__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_72__repr__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_74__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_76__setstate_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_4__pyx_unpickle_OrderedSet(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_6__pyx_unpickle_IdentitySet(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_keys = {0, 0, 0, 0, 0}; +static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_update = {0, 0, 0, 0, 0}; +static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_values = {0, 0, 0, 0, 0}; +static __Pyx_CachedCFunction __pyx_umethod_PyList_Type_pop = {0, 0, 0, 0, 0}; +/* #### Code section: late_includes ### */ +/* #### Code section: module_state ### */ +typedef struct { + PyObject *__pyx_d; + PyObject *__pyx_b; + PyObject *__pyx_cython_runtime; + PyObject *__pyx_empty_tuple; + PyObject *__pyx_empty_bytes; + PyObject *__pyx_empty_unicode; + #ifdef __Pyx_CyFunction_USED + PyTypeObject *__pyx_CyFunctionType; + #endif + #ifdef __Pyx_FusedFunction_USED + PyTypeObject *__pyx_FusedFunctionType; + #endif + #ifdef __Pyx_Generator_USED + PyTypeObject *__pyx_GeneratorType; + #endif + #ifdef __Pyx_IterableCoroutine_USED + PyTypeObject *__pyx_IterableCoroutineType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineAwaitType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineType; + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + PyObject *__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet; + PyObject *__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet; + #endif + PyTypeObject *__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet; + PyTypeObject *__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet; + PyObject 
*__pyx_n_s_AbstractSet; + PyObject *__pyx_n_s_Any; + PyObject *__pyx_n_s_Dict; + PyObject *__pyx_n_s_Hashable; + PyObject *__pyx_n_s_IdentitySet; + PyObject *__pyx_n_s_IdentitySet___copy; + PyObject *__pyx_n_s_IdentitySet___reduce_cython; + PyObject *__pyx_n_s_IdentitySet___setstate_cython; + PyObject *__pyx_n_s_IdentitySet_add; + PyObject *__pyx_n_s_IdentitySet_clear; + PyObject *__pyx_n_s_IdentitySet_copy; + PyObject *__pyx_n_s_IdentitySet_difference; + PyObject *__pyx_n_s_IdentitySet_difference_update; + PyObject *__pyx_n_s_IdentitySet_discard; + PyObject *__pyx_n_s_IdentitySet_intersection; + PyObject *__pyx_n_s_IdentitySet_intersection_update; + PyObject *__pyx_n_s_IdentitySet_issubset; + PyObject *__pyx_n_s_IdentitySet_issuperset; + PyObject *__pyx_n_s_IdentitySet_pop; + PyObject *__pyx_n_s_IdentitySet_remove; + PyObject *__pyx_n_s_IdentitySet_symmetric_difference; + PyObject *__pyx_n_s_IdentitySet_symmetric_difference_2; + PyObject *__pyx_n_s_IdentitySet_union; + PyObject *__pyx_n_s_IdentitySet_update; + PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0; + PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2; + PyObject *__pyx_n_s_IndexError; + PyObject *__pyx_n_s_Iterable; + PyObject *__pyx_kp_s_Iterable_Any; + PyObject *__pyx_kp_s_Iterable_Hashable; + PyObject *__pyx_kp_s_Iterable__S; + PyObject *__pyx_kp_s_Iterable__T; + PyObject *__pyx_n_s_Iterator; + PyObject *__pyx_n_s_KeyError; + PyObject *__pyx_n_s_List; + PyObject *__pyx_n_s_NoReturn; + PyObject *__pyx_n_s_None; + PyObject *__pyx_n_s_NotImplemented; + PyObject *__pyx_n_s_Optional; + PyObject *__pyx_n_s_OrderedSet; + PyObject *__pyx_kp_s_OrderedSet_Union__T__S; + PyObject *__pyx_kp_s_OrderedSet__T; + PyObject *__pyx_n_s_OrderedSet___class_getitem; + PyObject *__pyx_n_s_OrderedSet___reduce_cython; + PyObject *__pyx_n_s_OrderedSet___setstate_cython; + PyObject *__pyx_n_s_OrderedSet_add; + PyObject *__pyx_n_s_OrderedSet_clear; + PyObject *__pyx_n_s_OrderedSet_copy; + PyObject *__pyx_n_s_OrderedSet_difference; + PyObject *__pyx_n_s_OrderedSet_difference_update; + PyObject *__pyx_n_s_OrderedSet_discard; + PyObject *__pyx_n_s_OrderedSet_insert; + PyObject *__pyx_n_s_OrderedSet_intersection; + PyObject *__pyx_n_s_OrderedSet_intersection_update; + PyObject *__pyx_n_s_OrderedSet_pop; + PyObject *__pyx_n_s_OrderedSet_remove; + PyObject *__pyx_n_s_OrderedSet_symmetric_difference; + PyObject *__pyx_n_s_OrderedSet_symmetric_difference_2; + PyObject *__pyx_n_s_OrderedSet_union; + PyObject *__pyx_n_s_OrderedSet_update; + PyObject *__pyx_n_s_PickleError; + PyObject *__pyx_n_s_S; + PyObject *__pyx_n_u_S; + PyObject *__pyx_n_s_Self; + PyObject *__pyx_n_s_Set; + PyObject *__pyx_n_s_T; + PyObject *__pyx_n_u_T; + PyObject *__pyx_n_s_Tuple; + PyObject *__pyx_n_s_TypeError; + PyObject *__pyx_n_s_TypeVar; + PyObject *__pyx_n_s_Union; + PyObject *__pyx_kp_u__2; + PyObject *__pyx_kp_u__3; + PyObject *__pyx_kp_u__6; + PyObject *__pyx_n_s__8; + PyObject *__pyx_n_s_a; + PyObject *__pyx_n_s_add; + PyObject *__pyx_n_s_asyncio_coroutines; + PyObject *__pyx_n_s_bool; + PyObject *__pyx_n_s_class; + PyObject *__pyx_n_s_class_getitem; + PyObject *__pyx_n_s_clear; + PyObject *__pyx_n_s_cline_in_traceback; + PyObject *__pyx_n_s_cls; + PyObject *__pyx_n_s_copy; + PyObject *__pyx_n_s_copy_2; + PyObject *__pyx_kp_s_cython_Py_ssize_t; + PyObject *__pyx_n_s_d; + PyObject *__pyx_n_s_dict; + PyObject *__pyx_n_s_dict_2; + PyObject *__pyx_n_s_difference; + PyObject *__pyx_n_s_difference_update; + PyObject *__pyx_kp_u_disable; + PyObject *__pyx_n_s_discard; + 
PyObject *__pyx_n_s_element; + PyObject *__pyx_kp_u_enable; + PyObject *__pyx_kp_u_gc; + PyObject *__pyx_n_s_getstate; + PyObject *__pyx_n_s_import; + PyObject *__pyx_n_s_init; + PyObject *__pyx_n_s_insert; + PyObject *__pyx_n_s_intersection; + PyObject *__pyx_n_s_intersection_update; + PyObject *__pyx_n_s_is_compiled; + PyObject *__pyx_n_s_is_coroutine; + PyObject *__pyx_kp_u_isenabled; + PyObject *__pyx_n_s_issubset; + PyObject *__pyx_n_s_issuperset; + PyObject *__pyx_n_s_items; + PyObject *__pyx_n_s_iterable; + PyObject *__pyx_n_s_iterables; + PyObject *__pyx_n_s_key; + PyObject *__pyx_n_s_keys; + PyObject *__pyx_n_u_len; + PyObject *__pyx_kp_s_lib_sqlalchemy_util__collections; + PyObject *__pyx_n_u_list; + PyObject *__pyx_n_s_main; + PyObject *__pyx_n_u_members; + PyObject *__pyx_n_s_name; + PyObject *__pyx_n_s_new; + PyObject *__pyx_n_s_other; + PyObject *__pyx_n_s_other_set; + PyObject *__pyx_n_s_pair; + PyObject *__pyx_n_s_pickle; + PyObject *__pyx_n_s_pop; + PyObject *__pyx_kp_u_pop_from_an_empty_set; + PyObject *__pyx_n_s_popitem; + PyObject *__pyx_n_s_pos; + PyObject *__pyx_n_s_pyx_PickleError; + PyObject *__pyx_n_s_pyx_checksum; + PyObject *__pyx_n_s_pyx_result; + PyObject *__pyx_n_s_pyx_state; + PyObject *__pyx_n_s_pyx_type; + PyObject *__pyx_n_s_pyx_unpickle_IdentitySet; + PyObject *__pyx_n_s_pyx_unpickle_OrderedSet; + PyObject *__pyx_n_s_pyx_vtable; + PyObject *__pyx_n_s_reduce; + PyObject *__pyx_n_s_reduce_cython; + PyObject *__pyx_n_s_reduce_ex; + PyObject *__pyx_n_s_remove; + PyObject *__pyx_n_s_repr; + PyObject *__pyx_n_s_result; + PyObject *__pyx_n_s_return; + PyObject *__pyx_n_s_self; + PyObject *__pyx_n_s_seq; + PyObject *__pyx_kp_u_set_objects_are_unhashable; + PyObject *__pyx_n_s_setstate; + PyObject *__pyx_n_s_setstate_cython; + PyObject *__pyx_n_s_slots; + PyObject *__pyx_n_s_sqlalchemy_util__collections_cy; + PyObject *__pyx_n_s_state; + PyObject *__pyx_n_s_str; + PyObject *__pyx_kp_s_stringsource; + PyObject *__pyx_n_s_symmetric_difference; + PyObject *__pyx_n_s_symmetric_difference_update; + PyObject *__pyx_n_s_test; + PyObject *__pyx_kp_s_type_Self; + PyObject *__pyx_n_s_typing; + PyObject *__pyx_n_s_union; + PyObject *__pyx_n_s_unique_list; + PyObject *__pyx_n_s_update; + PyObject *__pyx_n_s_use_setstate; + PyObject *__pyx_n_s_value; + PyObject *__pyx_n_s_values; + PyObject *__pyx_int_61630440; + PyObject *__pyx_int_75814257; + PyObject *__pyx_int_143295406; + PyObject *__pyx_int_183888701; + PyObject *__pyx_int_197243545; + PyObject *__pyx_int_242532825; + PyObject *__pyx_tuple_; + PyObject *__pyx_tuple__4; + PyObject *__pyx_tuple__5; + PyObject *__pyx_tuple__7; + PyObject *__pyx_tuple__10; + PyObject *__pyx_tuple__11; + PyObject *__pyx_tuple__12; + PyObject *__pyx_tuple__14; + PyObject *__pyx_tuple__15; + PyObject *__pyx_tuple__17; + PyObject *__pyx_tuple__19; + PyObject *__pyx_tuple__22; + PyObject *__pyx_tuple__24; + PyObject *__pyx_tuple__28; + PyObject *__pyx_tuple__30; + PyObject *__pyx_tuple__32; + PyObject *__pyx_tuple__34; + PyObject *__pyx_tuple__37; + PyObject *__pyx_tuple__41; + PyObject *__pyx_tuple__43; + PyObject *__pyx_tuple__45; + PyObject *__pyx_tuple__49; + PyObject *__pyx_tuple__52; + PyObject *__pyx_tuple__67; + PyObject *__pyx_codeobj__9; + PyObject *__pyx_codeobj__13; + PyObject *__pyx_codeobj__16; + PyObject *__pyx_codeobj__18; + PyObject *__pyx_codeobj__20; + PyObject *__pyx_codeobj__21; + PyObject *__pyx_codeobj__23; + PyObject *__pyx_codeobj__25; + PyObject *__pyx_codeobj__26; + PyObject *__pyx_codeobj__27; + PyObject 
*__pyx_codeobj__29; + PyObject *__pyx_codeobj__31; + PyObject *__pyx_codeobj__33; + PyObject *__pyx_codeobj__35; + PyObject *__pyx_codeobj__36; + PyObject *__pyx_codeobj__38; + PyObject *__pyx_codeobj__39; + PyObject *__pyx_codeobj__40; + PyObject *__pyx_codeobj__42; + PyObject *__pyx_codeobj__44; + PyObject *__pyx_codeobj__46; + PyObject *__pyx_codeobj__47; + PyObject *__pyx_codeobj__48; + PyObject *__pyx_codeobj__50; + PyObject *__pyx_codeobj__51; + PyObject *__pyx_codeobj__53; + PyObject *__pyx_codeobj__54; + PyObject *__pyx_codeobj__55; + PyObject *__pyx_codeobj__56; + PyObject *__pyx_codeobj__57; + PyObject *__pyx_codeobj__58; + PyObject *__pyx_codeobj__59; + PyObject *__pyx_codeobj__60; + PyObject *__pyx_codeobj__61; + PyObject *__pyx_codeobj__62; + PyObject *__pyx_codeobj__63; + PyObject *__pyx_codeobj__64; + PyObject *__pyx_codeobj__65; + PyObject *__pyx_codeobj__66; + PyObject *__pyx_codeobj__68; + PyObject *__pyx_codeobj__69; +} __pyx_mstate; + +#if CYTHON_USE_MODULE_STATE +#ifdef __cplusplus +namespace { + extern struct PyModuleDef __pyx_moduledef; +} /* anonymous namespace */ +#else +static struct PyModuleDef __pyx_moduledef; +#endif + +#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) + +#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) + +#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) +#else +static __pyx_mstate __pyx_mstate_global_static = +#ifdef __cplusplus + {}; +#else + {0}; +#endif +static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; +#endif +/* #### Code section: module_state_clear ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_clear(PyObject *m) { + __pyx_mstate *clear_module_state = __pyx_mstate(m); + if (!clear_module_state) return 0; + Py_CLEAR(clear_module_state->__pyx_d); + Py_CLEAR(clear_module_state->__pyx_b); + Py_CLEAR(clear_module_state->__pyx_cython_runtime); + Py_CLEAR(clear_module_state->__pyx_empty_tuple); + Py_CLEAR(clear_module_state->__pyx_empty_bytes); + Py_CLEAR(clear_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_CLEAR(clear_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); + #endif + Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet); + Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet); + Py_CLEAR(clear_module_state->__pyx_n_s_AbstractSet); + Py_CLEAR(clear_module_state->__pyx_n_s_Any); + Py_CLEAR(clear_module_state->__pyx_n_s_Dict); + Py_CLEAR(clear_module_state->__pyx_n_s_Hashable); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet___copy); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet___reduce_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet___setstate_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_add); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_clear); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_copy); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_difference); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_difference_update); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_discard); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_intersection); + 
Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_intersection_update); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_issubset); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_issuperset); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_pop); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_remove); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_symmetric_difference); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_symmetric_difference_2); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_union); + Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_update); + Py_CLEAR(clear_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); + Py_CLEAR(clear_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2); + Py_CLEAR(clear_module_state->__pyx_n_s_IndexError); + Py_CLEAR(clear_module_state->__pyx_n_s_Iterable); + Py_CLEAR(clear_module_state->__pyx_kp_s_Iterable_Any); + Py_CLEAR(clear_module_state->__pyx_kp_s_Iterable_Hashable); + Py_CLEAR(clear_module_state->__pyx_kp_s_Iterable__S); + Py_CLEAR(clear_module_state->__pyx_kp_s_Iterable__T); + Py_CLEAR(clear_module_state->__pyx_n_s_Iterator); + Py_CLEAR(clear_module_state->__pyx_n_s_KeyError); + Py_CLEAR(clear_module_state->__pyx_n_s_List); + Py_CLEAR(clear_module_state->__pyx_n_s_NoReturn); + Py_CLEAR(clear_module_state->__pyx_n_s_None); + Py_CLEAR(clear_module_state->__pyx_n_s_NotImplemented); + Py_CLEAR(clear_module_state->__pyx_n_s_Optional); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet); + Py_CLEAR(clear_module_state->__pyx_kp_s_OrderedSet_Union__T__S); + Py_CLEAR(clear_module_state->__pyx_kp_s_OrderedSet__T); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet___class_getitem); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet___reduce_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet___setstate_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_add); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_clear); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_copy); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_difference); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_difference_update); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_discard); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_insert); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_intersection); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_intersection_update); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_pop); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_remove); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_symmetric_difference); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_symmetric_difference_2); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_union); + Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_update); + Py_CLEAR(clear_module_state->__pyx_n_s_PickleError); + Py_CLEAR(clear_module_state->__pyx_n_s_S); + Py_CLEAR(clear_module_state->__pyx_n_u_S); + Py_CLEAR(clear_module_state->__pyx_n_s_Self); + Py_CLEAR(clear_module_state->__pyx_n_s_Set); + Py_CLEAR(clear_module_state->__pyx_n_s_T); + Py_CLEAR(clear_module_state->__pyx_n_u_T); + Py_CLEAR(clear_module_state->__pyx_n_s_Tuple); + Py_CLEAR(clear_module_state->__pyx_n_s_TypeError); + Py_CLEAR(clear_module_state->__pyx_n_s_TypeVar); + Py_CLEAR(clear_module_state->__pyx_n_s_Union); + Py_CLEAR(clear_module_state->__pyx_kp_u__2); + Py_CLEAR(clear_module_state->__pyx_kp_u__3); + Py_CLEAR(clear_module_state->__pyx_kp_u__6); + Py_CLEAR(clear_module_state->__pyx_n_s__8); + 
Py_CLEAR(clear_module_state->__pyx_n_s_a); + Py_CLEAR(clear_module_state->__pyx_n_s_add); + Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); + Py_CLEAR(clear_module_state->__pyx_n_s_bool); + Py_CLEAR(clear_module_state->__pyx_n_s_class); + Py_CLEAR(clear_module_state->__pyx_n_s_class_getitem); + Py_CLEAR(clear_module_state->__pyx_n_s_clear); + Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); + Py_CLEAR(clear_module_state->__pyx_n_s_cls); + Py_CLEAR(clear_module_state->__pyx_n_s_copy); + Py_CLEAR(clear_module_state->__pyx_n_s_copy_2); + Py_CLEAR(clear_module_state->__pyx_kp_s_cython_Py_ssize_t); + Py_CLEAR(clear_module_state->__pyx_n_s_d); + Py_CLEAR(clear_module_state->__pyx_n_s_dict); + Py_CLEAR(clear_module_state->__pyx_n_s_dict_2); + Py_CLEAR(clear_module_state->__pyx_n_s_difference); + Py_CLEAR(clear_module_state->__pyx_n_s_difference_update); + Py_CLEAR(clear_module_state->__pyx_kp_u_disable); + Py_CLEAR(clear_module_state->__pyx_n_s_discard); + Py_CLEAR(clear_module_state->__pyx_n_s_element); + Py_CLEAR(clear_module_state->__pyx_kp_u_enable); + Py_CLEAR(clear_module_state->__pyx_kp_u_gc); + Py_CLEAR(clear_module_state->__pyx_n_s_getstate); + Py_CLEAR(clear_module_state->__pyx_n_s_import); + Py_CLEAR(clear_module_state->__pyx_n_s_init); + Py_CLEAR(clear_module_state->__pyx_n_s_insert); + Py_CLEAR(clear_module_state->__pyx_n_s_intersection); + Py_CLEAR(clear_module_state->__pyx_n_s_intersection_update); + Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); + Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); + Py_CLEAR(clear_module_state->__pyx_kp_u_isenabled); + Py_CLEAR(clear_module_state->__pyx_n_s_issubset); + Py_CLEAR(clear_module_state->__pyx_n_s_issuperset); + Py_CLEAR(clear_module_state->__pyx_n_s_items); + Py_CLEAR(clear_module_state->__pyx_n_s_iterable); + Py_CLEAR(clear_module_state->__pyx_n_s_iterables); + Py_CLEAR(clear_module_state->__pyx_n_s_key); + Py_CLEAR(clear_module_state->__pyx_n_s_keys); + Py_CLEAR(clear_module_state->__pyx_n_u_len); + Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_util__collections); + Py_CLEAR(clear_module_state->__pyx_n_u_list); + Py_CLEAR(clear_module_state->__pyx_n_s_main); + Py_CLEAR(clear_module_state->__pyx_n_u_members); + Py_CLEAR(clear_module_state->__pyx_n_s_name); + Py_CLEAR(clear_module_state->__pyx_n_s_new); + Py_CLEAR(clear_module_state->__pyx_n_s_other); + Py_CLEAR(clear_module_state->__pyx_n_s_other_set); + Py_CLEAR(clear_module_state->__pyx_n_s_pair); + Py_CLEAR(clear_module_state->__pyx_n_s_pickle); + Py_CLEAR(clear_module_state->__pyx_n_s_pop); + Py_CLEAR(clear_module_state->__pyx_kp_u_pop_from_an_empty_set); + Py_CLEAR(clear_module_state->__pyx_n_s_popitem); + Py_CLEAR(clear_module_state->__pyx_n_s_pos); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_PickleError); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_checksum); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_result); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_state); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_type); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_unpickle_IdentitySet); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_unpickle_OrderedSet); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_vtable); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce_ex); + Py_CLEAR(clear_module_state->__pyx_n_s_remove); + Py_CLEAR(clear_module_state->__pyx_n_s_repr); + Py_CLEAR(clear_module_state->__pyx_n_s_result); + 
Py_CLEAR(clear_module_state->__pyx_n_s_return); + Py_CLEAR(clear_module_state->__pyx_n_s_self); + Py_CLEAR(clear_module_state->__pyx_n_s_seq); + Py_CLEAR(clear_module_state->__pyx_kp_u_set_objects_are_unhashable); + Py_CLEAR(clear_module_state->__pyx_n_s_setstate); + Py_CLEAR(clear_module_state->__pyx_n_s_setstate_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_slots); + Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_util__collections_cy); + Py_CLEAR(clear_module_state->__pyx_n_s_state); + Py_CLEAR(clear_module_state->__pyx_n_s_str); + Py_CLEAR(clear_module_state->__pyx_kp_s_stringsource); + Py_CLEAR(clear_module_state->__pyx_n_s_symmetric_difference); + Py_CLEAR(clear_module_state->__pyx_n_s_symmetric_difference_update); + Py_CLEAR(clear_module_state->__pyx_n_s_test); + Py_CLEAR(clear_module_state->__pyx_kp_s_type_Self); + Py_CLEAR(clear_module_state->__pyx_n_s_typing); + Py_CLEAR(clear_module_state->__pyx_n_s_union); + Py_CLEAR(clear_module_state->__pyx_n_s_unique_list); + Py_CLEAR(clear_module_state->__pyx_n_s_update); + Py_CLEAR(clear_module_state->__pyx_n_s_use_setstate); + Py_CLEAR(clear_module_state->__pyx_n_s_value); + Py_CLEAR(clear_module_state->__pyx_n_s_values); + Py_CLEAR(clear_module_state->__pyx_int_61630440); + Py_CLEAR(clear_module_state->__pyx_int_75814257); + Py_CLEAR(clear_module_state->__pyx_int_143295406); + Py_CLEAR(clear_module_state->__pyx_int_183888701); + Py_CLEAR(clear_module_state->__pyx_int_197243545); + Py_CLEAR(clear_module_state->__pyx_int_242532825); + Py_CLEAR(clear_module_state->__pyx_tuple_); + Py_CLEAR(clear_module_state->__pyx_tuple__4); + Py_CLEAR(clear_module_state->__pyx_tuple__5); + Py_CLEAR(clear_module_state->__pyx_tuple__7); + Py_CLEAR(clear_module_state->__pyx_tuple__10); + Py_CLEAR(clear_module_state->__pyx_tuple__11); + Py_CLEAR(clear_module_state->__pyx_tuple__12); + Py_CLEAR(clear_module_state->__pyx_tuple__14); + Py_CLEAR(clear_module_state->__pyx_tuple__15); + Py_CLEAR(clear_module_state->__pyx_tuple__17); + Py_CLEAR(clear_module_state->__pyx_tuple__19); + Py_CLEAR(clear_module_state->__pyx_tuple__22); + Py_CLEAR(clear_module_state->__pyx_tuple__24); + Py_CLEAR(clear_module_state->__pyx_tuple__28); + Py_CLEAR(clear_module_state->__pyx_tuple__30); + Py_CLEAR(clear_module_state->__pyx_tuple__32); + Py_CLEAR(clear_module_state->__pyx_tuple__34); + Py_CLEAR(clear_module_state->__pyx_tuple__37); + Py_CLEAR(clear_module_state->__pyx_tuple__41); + Py_CLEAR(clear_module_state->__pyx_tuple__43); + Py_CLEAR(clear_module_state->__pyx_tuple__45); + Py_CLEAR(clear_module_state->__pyx_tuple__49); + Py_CLEAR(clear_module_state->__pyx_tuple__52); + Py_CLEAR(clear_module_state->__pyx_tuple__67); + Py_CLEAR(clear_module_state->__pyx_codeobj__9); + Py_CLEAR(clear_module_state->__pyx_codeobj__13); + Py_CLEAR(clear_module_state->__pyx_codeobj__16); + Py_CLEAR(clear_module_state->__pyx_codeobj__18); + Py_CLEAR(clear_module_state->__pyx_codeobj__20); + Py_CLEAR(clear_module_state->__pyx_codeobj__21); + Py_CLEAR(clear_module_state->__pyx_codeobj__23); + Py_CLEAR(clear_module_state->__pyx_codeobj__25); + Py_CLEAR(clear_module_state->__pyx_codeobj__26); + Py_CLEAR(clear_module_state->__pyx_codeobj__27); + Py_CLEAR(clear_module_state->__pyx_codeobj__29); + Py_CLEAR(clear_module_state->__pyx_codeobj__31); + Py_CLEAR(clear_module_state->__pyx_codeobj__33); + Py_CLEAR(clear_module_state->__pyx_codeobj__35); + Py_CLEAR(clear_module_state->__pyx_codeobj__36); + Py_CLEAR(clear_module_state->__pyx_codeobj__38); + Py_CLEAR(clear_module_state->__pyx_codeobj__39); + 
Py_CLEAR(clear_module_state->__pyx_codeobj__40); + Py_CLEAR(clear_module_state->__pyx_codeobj__42); + Py_CLEAR(clear_module_state->__pyx_codeobj__44); + Py_CLEAR(clear_module_state->__pyx_codeobj__46); + Py_CLEAR(clear_module_state->__pyx_codeobj__47); + Py_CLEAR(clear_module_state->__pyx_codeobj__48); + Py_CLEAR(clear_module_state->__pyx_codeobj__50); + Py_CLEAR(clear_module_state->__pyx_codeobj__51); + Py_CLEAR(clear_module_state->__pyx_codeobj__53); + Py_CLEAR(clear_module_state->__pyx_codeobj__54); + Py_CLEAR(clear_module_state->__pyx_codeobj__55); + Py_CLEAR(clear_module_state->__pyx_codeobj__56); + Py_CLEAR(clear_module_state->__pyx_codeobj__57); + Py_CLEAR(clear_module_state->__pyx_codeobj__58); + Py_CLEAR(clear_module_state->__pyx_codeobj__59); + Py_CLEAR(clear_module_state->__pyx_codeobj__60); + Py_CLEAR(clear_module_state->__pyx_codeobj__61); + Py_CLEAR(clear_module_state->__pyx_codeobj__62); + Py_CLEAR(clear_module_state->__pyx_codeobj__63); + Py_CLEAR(clear_module_state->__pyx_codeobj__64); + Py_CLEAR(clear_module_state->__pyx_codeobj__65); + Py_CLEAR(clear_module_state->__pyx_codeobj__66); + Py_CLEAR(clear_module_state->__pyx_codeobj__68); + Py_CLEAR(clear_module_state->__pyx_codeobj__69); + return 0; +} +#endif +/* #### Code section: module_state_traverse ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { + __pyx_mstate *traverse_module_state = __pyx_mstate(m); + if (!traverse_module_state) return 0; + Py_VISIT(traverse_module_state->__pyx_d); + Py_VISIT(traverse_module_state->__pyx_b); + Py_VISIT(traverse_module_state->__pyx_cython_runtime); + Py_VISIT(traverse_module_state->__pyx_empty_tuple); + Py_VISIT(traverse_module_state->__pyx_empty_bytes); + Py_VISIT(traverse_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_VISIT(traverse_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); + #endif + Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet); + Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet); + Py_VISIT(traverse_module_state->__pyx_n_s_AbstractSet); + Py_VISIT(traverse_module_state->__pyx_n_s_Any); + Py_VISIT(traverse_module_state->__pyx_n_s_Dict); + Py_VISIT(traverse_module_state->__pyx_n_s_Hashable); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet___copy); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet___reduce_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet___setstate_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_add); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_clear); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_copy); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_difference); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_difference_update); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_discard); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_intersection); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_intersection_update); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_issubset); + 
Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_issuperset); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_pop); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_remove); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_symmetric_difference); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_symmetric_difference_2); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_union); + Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_update); + Py_VISIT(traverse_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); + Py_VISIT(traverse_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2); + Py_VISIT(traverse_module_state->__pyx_n_s_IndexError); + Py_VISIT(traverse_module_state->__pyx_n_s_Iterable); + Py_VISIT(traverse_module_state->__pyx_kp_s_Iterable_Any); + Py_VISIT(traverse_module_state->__pyx_kp_s_Iterable_Hashable); + Py_VISIT(traverse_module_state->__pyx_kp_s_Iterable__S); + Py_VISIT(traverse_module_state->__pyx_kp_s_Iterable__T); + Py_VISIT(traverse_module_state->__pyx_n_s_Iterator); + Py_VISIT(traverse_module_state->__pyx_n_s_KeyError); + Py_VISIT(traverse_module_state->__pyx_n_s_List); + Py_VISIT(traverse_module_state->__pyx_n_s_NoReturn); + Py_VISIT(traverse_module_state->__pyx_n_s_None); + Py_VISIT(traverse_module_state->__pyx_n_s_NotImplemented); + Py_VISIT(traverse_module_state->__pyx_n_s_Optional); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet); + Py_VISIT(traverse_module_state->__pyx_kp_s_OrderedSet_Union__T__S); + Py_VISIT(traverse_module_state->__pyx_kp_s_OrderedSet__T); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet___class_getitem); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet___reduce_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet___setstate_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_add); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_clear); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_copy); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_difference); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_difference_update); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_discard); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_insert); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_intersection); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_intersection_update); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_pop); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_remove); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_symmetric_difference); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_symmetric_difference_2); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_union); + Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_update); + Py_VISIT(traverse_module_state->__pyx_n_s_PickleError); + Py_VISIT(traverse_module_state->__pyx_n_s_S); + Py_VISIT(traverse_module_state->__pyx_n_u_S); + Py_VISIT(traverse_module_state->__pyx_n_s_Self); + Py_VISIT(traverse_module_state->__pyx_n_s_Set); + Py_VISIT(traverse_module_state->__pyx_n_s_T); + Py_VISIT(traverse_module_state->__pyx_n_u_T); + Py_VISIT(traverse_module_state->__pyx_n_s_Tuple); + Py_VISIT(traverse_module_state->__pyx_n_s_TypeError); + Py_VISIT(traverse_module_state->__pyx_n_s_TypeVar); + Py_VISIT(traverse_module_state->__pyx_n_s_Union); + Py_VISIT(traverse_module_state->__pyx_kp_u__2); + Py_VISIT(traverse_module_state->__pyx_kp_u__3); + Py_VISIT(traverse_module_state->__pyx_kp_u__6); + 
Py_VISIT(traverse_module_state->__pyx_n_s__8); + Py_VISIT(traverse_module_state->__pyx_n_s_a); + Py_VISIT(traverse_module_state->__pyx_n_s_add); + Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); + Py_VISIT(traverse_module_state->__pyx_n_s_bool); + Py_VISIT(traverse_module_state->__pyx_n_s_class); + Py_VISIT(traverse_module_state->__pyx_n_s_class_getitem); + Py_VISIT(traverse_module_state->__pyx_n_s_clear); + Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); + Py_VISIT(traverse_module_state->__pyx_n_s_cls); + Py_VISIT(traverse_module_state->__pyx_n_s_copy); + Py_VISIT(traverse_module_state->__pyx_n_s_copy_2); + Py_VISIT(traverse_module_state->__pyx_kp_s_cython_Py_ssize_t); + Py_VISIT(traverse_module_state->__pyx_n_s_d); + Py_VISIT(traverse_module_state->__pyx_n_s_dict); + Py_VISIT(traverse_module_state->__pyx_n_s_dict_2); + Py_VISIT(traverse_module_state->__pyx_n_s_difference); + Py_VISIT(traverse_module_state->__pyx_n_s_difference_update); + Py_VISIT(traverse_module_state->__pyx_kp_u_disable); + Py_VISIT(traverse_module_state->__pyx_n_s_discard); + Py_VISIT(traverse_module_state->__pyx_n_s_element); + Py_VISIT(traverse_module_state->__pyx_kp_u_enable); + Py_VISIT(traverse_module_state->__pyx_kp_u_gc); + Py_VISIT(traverse_module_state->__pyx_n_s_getstate); + Py_VISIT(traverse_module_state->__pyx_n_s_import); + Py_VISIT(traverse_module_state->__pyx_n_s_init); + Py_VISIT(traverse_module_state->__pyx_n_s_insert); + Py_VISIT(traverse_module_state->__pyx_n_s_intersection); + Py_VISIT(traverse_module_state->__pyx_n_s_intersection_update); + Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); + Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); + Py_VISIT(traverse_module_state->__pyx_kp_u_isenabled); + Py_VISIT(traverse_module_state->__pyx_n_s_issubset); + Py_VISIT(traverse_module_state->__pyx_n_s_issuperset); + Py_VISIT(traverse_module_state->__pyx_n_s_items); + Py_VISIT(traverse_module_state->__pyx_n_s_iterable); + Py_VISIT(traverse_module_state->__pyx_n_s_iterables); + Py_VISIT(traverse_module_state->__pyx_n_s_key); + Py_VISIT(traverse_module_state->__pyx_n_s_keys); + Py_VISIT(traverse_module_state->__pyx_n_u_len); + Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_util__collections); + Py_VISIT(traverse_module_state->__pyx_n_u_list); + Py_VISIT(traverse_module_state->__pyx_n_s_main); + Py_VISIT(traverse_module_state->__pyx_n_u_members); + Py_VISIT(traverse_module_state->__pyx_n_s_name); + Py_VISIT(traverse_module_state->__pyx_n_s_new); + Py_VISIT(traverse_module_state->__pyx_n_s_other); + Py_VISIT(traverse_module_state->__pyx_n_s_other_set); + Py_VISIT(traverse_module_state->__pyx_n_s_pair); + Py_VISIT(traverse_module_state->__pyx_n_s_pickle); + Py_VISIT(traverse_module_state->__pyx_n_s_pop); + Py_VISIT(traverse_module_state->__pyx_kp_u_pop_from_an_empty_set); + Py_VISIT(traverse_module_state->__pyx_n_s_popitem); + Py_VISIT(traverse_module_state->__pyx_n_s_pos); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_PickleError); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_checksum); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_result); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_state); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_type); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_unpickle_IdentitySet); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_unpickle_OrderedSet); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_vtable); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce); + 
Py_VISIT(traverse_module_state->__pyx_n_s_reduce_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce_ex); + Py_VISIT(traverse_module_state->__pyx_n_s_remove); + Py_VISIT(traverse_module_state->__pyx_n_s_repr); + Py_VISIT(traverse_module_state->__pyx_n_s_result); + Py_VISIT(traverse_module_state->__pyx_n_s_return); + Py_VISIT(traverse_module_state->__pyx_n_s_self); + Py_VISIT(traverse_module_state->__pyx_n_s_seq); + Py_VISIT(traverse_module_state->__pyx_kp_u_set_objects_are_unhashable); + Py_VISIT(traverse_module_state->__pyx_n_s_setstate); + Py_VISIT(traverse_module_state->__pyx_n_s_setstate_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_slots); + Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_util__collections_cy); + Py_VISIT(traverse_module_state->__pyx_n_s_state); + Py_VISIT(traverse_module_state->__pyx_n_s_str); + Py_VISIT(traverse_module_state->__pyx_kp_s_stringsource); + Py_VISIT(traverse_module_state->__pyx_n_s_symmetric_difference); + Py_VISIT(traverse_module_state->__pyx_n_s_symmetric_difference_update); + Py_VISIT(traverse_module_state->__pyx_n_s_test); + Py_VISIT(traverse_module_state->__pyx_kp_s_type_Self); + Py_VISIT(traverse_module_state->__pyx_n_s_typing); + Py_VISIT(traverse_module_state->__pyx_n_s_union); + Py_VISIT(traverse_module_state->__pyx_n_s_unique_list); + Py_VISIT(traverse_module_state->__pyx_n_s_update); + Py_VISIT(traverse_module_state->__pyx_n_s_use_setstate); + Py_VISIT(traverse_module_state->__pyx_n_s_value); + Py_VISIT(traverse_module_state->__pyx_n_s_values); + Py_VISIT(traverse_module_state->__pyx_int_61630440); + Py_VISIT(traverse_module_state->__pyx_int_75814257); + Py_VISIT(traverse_module_state->__pyx_int_143295406); + Py_VISIT(traverse_module_state->__pyx_int_183888701); + Py_VISIT(traverse_module_state->__pyx_int_197243545); + Py_VISIT(traverse_module_state->__pyx_int_242532825); + Py_VISIT(traverse_module_state->__pyx_tuple_); + Py_VISIT(traverse_module_state->__pyx_tuple__4); + Py_VISIT(traverse_module_state->__pyx_tuple__5); + Py_VISIT(traverse_module_state->__pyx_tuple__7); + Py_VISIT(traverse_module_state->__pyx_tuple__10); + Py_VISIT(traverse_module_state->__pyx_tuple__11); + Py_VISIT(traverse_module_state->__pyx_tuple__12); + Py_VISIT(traverse_module_state->__pyx_tuple__14); + Py_VISIT(traverse_module_state->__pyx_tuple__15); + Py_VISIT(traverse_module_state->__pyx_tuple__17); + Py_VISIT(traverse_module_state->__pyx_tuple__19); + Py_VISIT(traverse_module_state->__pyx_tuple__22); + Py_VISIT(traverse_module_state->__pyx_tuple__24); + Py_VISIT(traverse_module_state->__pyx_tuple__28); + Py_VISIT(traverse_module_state->__pyx_tuple__30); + Py_VISIT(traverse_module_state->__pyx_tuple__32); + Py_VISIT(traverse_module_state->__pyx_tuple__34); + Py_VISIT(traverse_module_state->__pyx_tuple__37); + Py_VISIT(traverse_module_state->__pyx_tuple__41); + Py_VISIT(traverse_module_state->__pyx_tuple__43); + Py_VISIT(traverse_module_state->__pyx_tuple__45); + Py_VISIT(traverse_module_state->__pyx_tuple__49); + Py_VISIT(traverse_module_state->__pyx_tuple__52); + Py_VISIT(traverse_module_state->__pyx_tuple__67); + Py_VISIT(traverse_module_state->__pyx_codeobj__9); + Py_VISIT(traverse_module_state->__pyx_codeobj__13); + Py_VISIT(traverse_module_state->__pyx_codeobj__16); + Py_VISIT(traverse_module_state->__pyx_codeobj__18); + Py_VISIT(traverse_module_state->__pyx_codeobj__20); + Py_VISIT(traverse_module_state->__pyx_codeobj__21); + Py_VISIT(traverse_module_state->__pyx_codeobj__23); + Py_VISIT(traverse_module_state->__pyx_codeobj__25); + 
Py_VISIT(traverse_module_state->__pyx_codeobj__26); + Py_VISIT(traverse_module_state->__pyx_codeobj__27); + Py_VISIT(traverse_module_state->__pyx_codeobj__29); + Py_VISIT(traverse_module_state->__pyx_codeobj__31); + Py_VISIT(traverse_module_state->__pyx_codeobj__33); + Py_VISIT(traverse_module_state->__pyx_codeobj__35); + Py_VISIT(traverse_module_state->__pyx_codeobj__36); + Py_VISIT(traverse_module_state->__pyx_codeobj__38); + Py_VISIT(traverse_module_state->__pyx_codeobj__39); + Py_VISIT(traverse_module_state->__pyx_codeobj__40); + Py_VISIT(traverse_module_state->__pyx_codeobj__42); + Py_VISIT(traverse_module_state->__pyx_codeobj__44); + Py_VISIT(traverse_module_state->__pyx_codeobj__46); + Py_VISIT(traverse_module_state->__pyx_codeobj__47); + Py_VISIT(traverse_module_state->__pyx_codeobj__48); + Py_VISIT(traverse_module_state->__pyx_codeobj__50); + Py_VISIT(traverse_module_state->__pyx_codeobj__51); + Py_VISIT(traverse_module_state->__pyx_codeobj__53); + Py_VISIT(traverse_module_state->__pyx_codeobj__54); + Py_VISIT(traverse_module_state->__pyx_codeobj__55); + Py_VISIT(traverse_module_state->__pyx_codeobj__56); + Py_VISIT(traverse_module_state->__pyx_codeobj__57); + Py_VISIT(traverse_module_state->__pyx_codeobj__58); + Py_VISIT(traverse_module_state->__pyx_codeobj__59); + Py_VISIT(traverse_module_state->__pyx_codeobj__60); + Py_VISIT(traverse_module_state->__pyx_codeobj__61); + Py_VISIT(traverse_module_state->__pyx_codeobj__62); + Py_VISIT(traverse_module_state->__pyx_codeobj__63); + Py_VISIT(traverse_module_state->__pyx_codeobj__64); + Py_VISIT(traverse_module_state->__pyx_codeobj__65); + Py_VISIT(traverse_module_state->__pyx_codeobj__66); + Py_VISIT(traverse_module_state->__pyx_codeobj__68); + Py_VISIT(traverse_module_state->__pyx_codeobj__69); + return 0; +} +#endif +/* #### Code section: module_state_defines ### */ +#define __pyx_d __pyx_mstate_global->__pyx_d +#define __pyx_b __pyx_mstate_global->__pyx_b +#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime +#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple +#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes +#define __pyx_empty_unicode __pyx_mstate_global->__pyx_empty_unicode +#ifdef __Pyx_CyFunction_USED +#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType +#endif +#ifdef __Pyx_FusedFunction_USED +#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType +#endif +#ifdef __Pyx_Generator_USED +#define __pyx_GeneratorType __pyx_mstate_global->__pyx_GeneratorType +#endif +#ifdef __Pyx_IterableCoroutine_USED +#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#define __pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet __pyx_mstate_global->__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet +#define __pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet __pyx_mstate_global->__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet +#endif +#define __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet __pyx_mstate_global->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet +#define __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet 
__pyx_mstate_global->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet +#define __pyx_n_s_AbstractSet __pyx_mstate_global->__pyx_n_s_AbstractSet +#define __pyx_n_s_Any __pyx_mstate_global->__pyx_n_s_Any +#define __pyx_n_s_Dict __pyx_mstate_global->__pyx_n_s_Dict +#define __pyx_n_s_Hashable __pyx_mstate_global->__pyx_n_s_Hashable +#define __pyx_n_s_IdentitySet __pyx_mstate_global->__pyx_n_s_IdentitySet +#define __pyx_n_s_IdentitySet___copy __pyx_mstate_global->__pyx_n_s_IdentitySet___copy +#define __pyx_n_s_IdentitySet___reduce_cython __pyx_mstate_global->__pyx_n_s_IdentitySet___reduce_cython +#define __pyx_n_s_IdentitySet___setstate_cython __pyx_mstate_global->__pyx_n_s_IdentitySet___setstate_cython +#define __pyx_n_s_IdentitySet_add __pyx_mstate_global->__pyx_n_s_IdentitySet_add +#define __pyx_n_s_IdentitySet_clear __pyx_mstate_global->__pyx_n_s_IdentitySet_clear +#define __pyx_n_s_IdentitySet_copy __pyx_mstate_global->__pyx_n_s_IdentitySet_copy +#define __pyx_n_s_IdentitySet_difference __pyx_mstate_global->__pyx_n_s_IdentitySet_difference +#define __pyx_n_s_IdentitySet_difference_update __pyx_mstate_global->__pyx_n_s_IdentitySet_difference_update +#define __pyx_n_s_IdentitySet_discard __pyx_mstate_global->__pyx_n_s_IdentitySet_discard +#define __pyx_n_s_IdentitySet_intersection __pyx_mstate_global->__pyx_n_s_IdentitySet_intersection +#define __pyx_n_s_IdentitySet_intersection_update __pyx_mstate_global->__pyx_n_s_IdentitySet_intersection_update +#define __pyx_n_s_IdentitySet_issubset __pyx_mstate_global->__pyx_n_s_IdentitySet_issubset +#define __pyx_n_s_IdentitySet_issuperset __pyx_mstate_global->__pyx_n_s_IdentitySet_issuperset +#define __pyx_n_s_IdentitySet_pop __pyx_mstate_global->__pyx_n_s_IdentitySet_pop +#define __pyx_n_s_IdentitySet_remove __pyx_mstate_global->__pyx_n_s_IdentitySet_remove +#define __pyx_n_s_IdentitySet_symmetric_difference __pyx_mstate_global->__pyx_n_s_IdentitySet_symmetric_difference +#define __pyx_n_s_IdentitySet_symmetric_difference_2 __pyx_mstate_global->__pyx_n_s_IdentitySet_symmetric_difference_2 +#define __pyx_n_s_IdentitySet_union __pyx_mstate_global->__pyx_n_s_IdentitySet_union +#define __pyx_n_s_IdentitySet_update __pyx_mstate_global->__pyx_n_s_IdentitySet_update +#define __pyx_kp_s_Incompatible_checksums_0x_x_vs_0 __pyx_mstate_global->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0 +#define __pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2 __pyx_mstate_global->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2 +#define __pyx_n_s_IndexError __pyx_mstate_global->__pyx_n_s_IndexError +#define __pyx_n_s_Iterable __pyx_mstate_global->__pyx_n_s_Iterable +#define __pyx_kp_s_Iterable_Any __pyx_mstate_global->__pyx_kp_s_Iterable_Any +#define __pyx_kp_s_Iterable_Hashable __pyx_mstate_global->__pyx_kp_s_Iterable_Hashable +#define __pyx_kp_s_Iterable__S __pyx_mstate_global->__pyx_kp_s_Iterable__S +#define __pyx_kp_s_Iterable__T __pyx_mstate_global->__pyx_kp_s_Iterable__T +#define __pyx_n_s_Iterator __pyx_mstate_global->__pyx_n_s_Iterator +#define __pyx_n_s_KeyError __pyx_mstate_global->__pyx_n_s_KeyError +#define __pyx_n_s_List __pyx_mstate_global->__pyx_n_s_List +#define __pyx_n_s_NoReturn __pyx_mstate_global->__pyx_n_s_NoReturn +#define __pyx_n_s_None __pyx_mstate_global->__pyx_n_s_None +#define __pyx_n_s_NotImplemented __pyx_mstate_global->__pyx_n_s_NotImplemented +#define __pyx_n_s_Optional __pyx_mstate_global->__pyx_n_s_Optional +#define __pyx_n_s_OrderedSet __pyx_mstate_global->__pyx_n_s_OrderedSet +#define __pyx_kp_s_OrderedSet_Union__T__S 
__pyx_mstate_global->__pyx_kp_s_OrderedSet_Union__T__S +#define __pyx_kp_s_OrderedSet__T __pyx_mstate_global->__pyx_kp_s_OrderedSet__T +#define __pyx_n_s_OrderedSet___class_getitem __pyx_mstate_global->__pyx_n_s_OrderedSet___class_getitem +#define __pyx_n_s_OrderedSet___reduce_cython __pyx_mstate_global->__pyx_n_s_OrderedSet___reduce_cython +#define __pyx_n_s_OrderedSet___setstate_cython __pyx_mstate_global->__pyx_n_s_OrderedSet___setstate_cython +#define __pyx_n_s_OrderedSet_add __pyx_mstate_global->__pyx_n_s_OrderedSet_add +#define __pyx_n_s_OrderedSet_clear __pyx_mstate_global->__pyx_n_s_OrderedSet_clear +#define __pyx_n_s_OrderedSet_copy __pyx_mstate_global->__pyx_n_s_OrderedSet_copy +#define __pyx_n_s_OrderedSet_difference __pyx_mstate_global->__pyx_n_s_OrderedSet_difference +#define __pyx_n_s_OrderedSet_difference_update __pyx_mstate_global->__pyx_n_s_OrderedSet_difference_update +#define __pyx_n_s_OrderedSet_discard __pyx_mstate_global->__pyx_n_s_OrderedSet_discard +#define __pyx_n_s_OrderedSet_insert __pyx_mstate_global->__pyx_n_s_OrderedSet_insert +#define __pyx_n_s_OrderedSet_intersection __pyx_mstate_global->__pyx_n_s_OrderedSet_intersection +#define __pyx_n_s_OrderedSet_intersection_update __pyx_mstate_global->__pyx_n_s_OrderedSet_intersection_update +#define __pyx_n_s_OrderedSet_pop __pyx_mstate_global->__pyx_n_s_OrderedSet_pop +#define __pyx_n_s_OrderedSet_remove __pyx_mstate_global->__pyx_n_s_OrderedSet_remove +#define __pyx_n_s_OrderedSet_symmetric_difference __pyx_mstate_global->__pyx_n_s_OrderedSet_symmetric_difference +#define __pyx_n_s_OrderedSet_symmetric_difference_2 __pyx_mstate_global->__pyx_n_s_OrderedSet_symmetric_difference_2 +#define __pyx_n_s_OrderedSet_union __pyx_mstate_global->__pyx_n_s_OrderedSet_union +#define __pyx_n_s_OrderedSet_update __pyx_mstate_global->__pyx_n_s_OrderedSet_update +#define __pyx_n_s_PickleError __pyx_mstate_global->__pyx_n_s_PickleError +#define __pyx_n_s_S __pyx_mstate_global->__pyx_n_s_S +#define __pyx_n_u_S __pyx_mstate_global->__pyx_n_u_S +#define __pyx_n_s_Self __pyx_mstate_global->__pyx_n_s_Self +#define __pyx_n_s_Set __pyx_mstate_global->__pyx_n_s_Set +#define __pyx_n_s_T __pyx_mstate_global->__pyx_n_s_T +#define __pyx_n_u_T __pyx_mstate_global->__pyx_n_u_T +#define __pyx_n_s_Tuple __pyx_mstate_global->__pyx_n_s_Tuple +#define __pyx_n_s_TypeError __pyx_mstate_global->__pyx_n_s_TypeError +#define __pyx_n_s_TypeVar __pyx_mstate_global->__pyx_n_s_TypeVar +#define __pyx_n_s_Union __pyx_mstate_global->__pyx_n_s_Union +#define __pyx_kp_u__2 __pyx_mstate_global->__pyx_kp_u__2 +#define __pyx_kp_u__3 __pyx_mstate_global->__pyx_kp_u__3 +#define __pyx_kp_u__6 __pyx_mstate_global->__pyx_kp_u__6 +#define __pyx_n_s__8 __pyx_mstate_global->__pyx_n_s__8 +#define __pyx_n_s_a __pyx_mstate_global->__pyx_n_s_a +#define __pyx_n_s_add __pyx_mstate_global->__pyx_n_s_add +#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines +#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool +#define __pyx_n_s_class __pyx_mstate_global->__pyx_n_s_class +#define __pyx_n_s_class_getitem __pyx_mstate_global->__pyx_n_s_class_getitem +#define __pyx_n_s_clear __pyx_mstate_global->__pyx_n_s_clear +#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback +#define __pyx_n_s_cls __pyx_mstate_global->__pyx_n_s_cls +#define __pyx_n_s_copy __pyx_mstate_global->__pyx_n_s_copy +#define __pyx_n_s_copy_2 __pyx_mstate_global->__pyx_n_s_copy_2 +#define __pyx_kp_s_cython_Py_ssize_t 
__pyx_mstate_global->__pyx_kp_s_cython_Py_ssize_t +#define __pyx_n_s_d __pyx_mstate_global->__pyx_n_s_d +#define __pyx_n_s_dict __pyx_mstate_global->__pyx_n_s_dict +#define __pyx_n_s_dict_2 __pyx_mstate_global->__pyx_n_s_dict_2 +#define __pyx_n_s_difference __pyx_mstate_global->__pyx_n_s_difference +#define __pyx_n_s_difference_update __pyx_mstate_global->__pyx_n_s_difference_update +#define __pyx_kp_u_disable __pyx_mstate_global->__pyx_kp_u_disable +#define __pyx_n_s_discard __pyx_mstate_global->__pyx_n_s_discard +#define __pyx_n_s_element __pyx_mstate_global->__pyx_n_s_element +#define __pyx_kp_u_enable __pyx_mstate_global->__pyx_kp_u_enable +#define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc +#define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate +#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import +#define __pyx_n_s_init __pyx_mstate_global->__pyx_n_s_init +#define __pyx_n_s_insert __pyx_mstate_global->__pyx_n_s_insert +#define __pyx_n_s_intersection __pyx_mstate_global->__pyx_n_s_intersection +#define __pyx_n_s_intersection_update __pyx_mstate_global->__pyx_n_s_intersection_update +#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled +#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine +#define __pyx_kp_u_isenabled __pyx_mstate_global->__pyx_kp_u_isenabled +#define __pyx_n_s_issubset __pyx_mstate_global->__pyx_n_s_issubset +#define __pyx_n_s_issuperset __pyx_mstate_global->__pyx_n_s_issuperset +#define __pyx_n_s_items __pyx_mstate_global->__pyx_n_s_items +#define __pyx_n_s_iterable __pyx_mstate_global->__pyx_n_s_iterable +#define __pyx_n_s_iterables __pyx_mstate_global->__pyx_n_s_iterables +#define __pyx_n_s_key __pyx_mstate_global->__pyx_n_s_key +#define __pyx_n_s_keys __pyx_mstate_global->__pyx_n_s_keys +#define __pyx_n_u_len __pyx_mstate_global->__pyx_n_u_len +#define __pyx_kp_s_lib_sqlalchemy_util__collections __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_util__collections +#define __pyx_n_u_list __pyx_mstate_global->__pyx_n_u_list +#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main +#define __pyx_n_u_members __pyx_mstate_global->__pyx_n_u_members +#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name +#define __pyx_n_s_new __pyx_mstate_global->__pyx_n_s_new +#define __pyx_n_s_other __pyx_mstate_global->__pyx_n_s_other +#define __pyx_n_s_other_set __pyx_mstate_global->__pyx_n_s_other_set +#define __pyx_n_s_pair __pyx_mstate_global->__pyx_n_s_pair +#define __pyx_n_s_pickle __pyx_mstate_global->__pyx_n_s_pickle +#define __pyx_n_s_pop __pyx_mstate_global->__pyx_n_s_pop +#define __pyx_kp_u_pop_from_an_empty_set __pyx_mstate_global->__pyx_kp_u_pop_from_an_empty_set +#define __pyx_n_s_popitem __pyx_mstate_global->__pyx_n_s_popitem +#define __pyx_n_s_pos __pyx_mstate_global->__pyx_n_s_pos +#define __pyx_n_s_pyx_PickleError __pyx_mstate_global->__pyx_n_s_pyx_PickleError +#define __pyx_n_s_pyx_checksum __pyx_mstate_global->__pyx_n_s_pyx_checksum +#define __pyx_n_s_pyx_result __pyx_mstate_global->__pyx_n_s_pyx_result +#define __pyx_n_s_pyx_state __pyx_mstate_global->__pyx_n_s_pyx_state +#define __pyx_n_s_pyx_type __pyx_mstate_global->__pyx_n_s_pyx_type +#define __pyx_n_s_pyx_unpickle_IdentitySet __pyx_mstate_global->__pyx_n_s_pyx_unpickle_IdentitySet +#define __pyx_n_s_pyx_unpickle_OrderedSet __pyx_mstate_global->__pyx_n_s_pyx_unpickle_OrderedSet +#define __pyx_n_s_pyx_vtable __pyx_mstate_global->__pyx_n_s_pyx_vtable +#define __pyx_n_s_reduce __pyx_mstate_global->__pyx_n_s_reduce +#define 
__pyx_n_s_reduce_cython __pyx_mstate_global->__pyx_n_s_reduce_cython +#define __pyx_n_s_reduce_ex __pyx_mstate_global->__pyx_n_s_reduce_ex +#define __pyx_n_s_remove __pyx_mstate_global->__pyx_n_s_remove +#define __pyx_n_s_repr __pyx_mstate_global->__pyx_n_s_repr +#define __pyx_n_s_result __pyx_mstate_global->__pyx_n_s_result +#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return +#define __pyx_n_s_self __pyx_mstate_global->__pyx_n_s_self +#define __pyx_n_s_seq __pyx_mstate_global->__pyx_n_s_seq +#define __pyx_kp_u_set_objects_are_unhashable __pyx_mstate_global->__pyx_kp_u_set_objects_are_unhashable +#define __pyx_n_s_setstate __pyx_mstate_global->__pyx_n_s_setstate +#define __pyx_n_s_setstate_cython __pyx_mstate_global->__pyx_n_s_setstate_cython +#define __pyx_n_s_slots __pyx_mstate_global->__pyx_n_s_slots +#define __pyx_n_s_sqlalchemy_util__collections_cy __pyx_mstate_global->__pyx_n_s_sqlalchemy_util__collections_cy +#define __pyx_n_s_state __pyx_mstate_global->__pyx_n_s_state +#define __pyx_n_s_str __pyx_mstate_global->__pyx_n_s_str +#define __pyx_kp_s_stringsource __pyx_mstate_global->__pyx_kp_s_stringsource +#define __pyx_n_s_symmetric_difference __pyx_mstate_global->__pyx_n_s_symmetric_difference +#define __pyx_n_s_symmetric_difference_update __pyx_mstate_global->__pyx_n_s_symmetric_difference_update +#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test +#define __pyx_kp_s_type_Self __pyx_mstate_global->__pyx_kp_s_type_Self +#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing +#define __pyx_n_s_union __pyx_mstate_global->__pyx_n_s_union +#define __pyx_n_s_unique_list __pyx_mstate_global->__pyx_n_s_unique_list +#define __pyx_n_s_update __pyx_mstate_global->__pyx_n_s_update +#define __pyx_n_s_use_setstate __pyx_mstate_global->__pyx_n_s_use_setstate +#define __pyx_n_s_value __pyx_mstate_global->__pyx_n_s_value +#define __pyx_n_s_values __pyx_mstate_global->__pyx_n_s_values +#define __pyx_int_61630440 __pyx_mstate_global->__pyx_int_61630440 +#define __pyx_int_75814257 __pyx_mstate_global->__pyx_int_75814257 +#define __pyx_int_143295406 __pyx_mstate_global->__pyx_int_143295406 +#define __pyx_int_183888701 __pyx_mstate_global->__pyx_int_183888701 +#define __pyx_int_197243545 __pyx_mstate_global->__pyx_int_197243545 +#define __pyx_int_242532825 __pyx_mstate_global->__pyx_int_242532825 +#define __pyx_tuple_ __pyx_mstate_global->__pyx_tuple_ +#define __pyx_tuple__4 __pyx_mstate_global->__pyx_tuple__4 +#define __pyx_tuple__5 __pyx_mstate_global->__pyx_tuple__5 +#define __pyx_tuple__7 __pyx_mstate_global->__pyx_tuple__7 +#define __pyx_tuple__10 __pyx_mstate_global->__pyx_tuple__10 +#define __pyx_tuple__11 __pyx_mstate_global->__pyx_tuple__11 +#define __pyx_tuple__12 __pyx_mstate_global->__pyx_tuple__12 +#define __pyx_tuple__14 __pyx_mstate_global->__pyx_tuple__14 +#define __pyx_tuple__15 __pyx_mstate_global->__pyx_tuple__15 +#define __pyx_tuple__17 __pyx_mstate_global->__pyx_tuple__17 +#define __pyx_tuple__19 __pyx_mstate_global->__pyx_tuple__19 +#define __pyx_tuple__22 __pyx_mstate_global->__pyx_tuple__22 +#define __pyx_tuple__24 __pyx_mstate_global->__pyx_tuple__24 +#define __pyx_tuple__28 __pyx_mstate_global->__pyx_tuple__28 +#define __pyx_tuple__30 __pyx_mstate_global->__pyx_tuple__30 +#define __pyx_tuple__32 __pyx_mstate_global->__pyx_tuple__32 +#define __pyx_tuple__34 __pyx_mstate_global->__pyx_tuple__34 +#define __pyx_tuple__37 __pyx_mstate_global->__pyx_tuple__37 +#define __pyx_tuple__41 __pyx_mstate_global->__pyx_tuple__41 +#define __pyx_tuple__43 
__pyx_mstate_global->__pyx_tuple__43 +#define __pyx_tuple__45 __pyx_mstate_global->__pyx_tuple__45 +#define __pyx_tuple__49 __pyx_mstate_global->__pyx_tuple__49 +#define __pyx_tuple__52 __pyx_mstate_global->__pyx_tuple__52 +#define __pyx_tuple__67 __pyx_mstate_global->__pyx_tuple__67 +#define __pyx_codeobj__9 __pyx_mstate_global->__pyx_codeobj__9 +#define __pyx_codeobj__13 __pyx_mstate_global->__pyx_codeobj__13 +#define __pyx_codeobj__16 __pyx_mstate_global->__pyx_codeobj__16 +#define __pyx_codeobj__18 __pyx_mstate_global->__pyx_codeobj__18 +#define __pyx_codeobj__20 __pyx_mstate_global->__pyx_codeobj__20 +#define __pyx_codeobj__21 __pyx_mstate_global->__pyx_codeobj__21 +#define __pyx_codeobj__23 __pyx_mstate_global->__pyx_codeobj__23 +#define __pyx_codeobj__25 __pyx_mstate_global->__pyx_codeobj__25 +#define __pyx_codeobj__26 __pyx_mstate_global->__pyx_codeobj__26 +#define __pyx_codeobj__27 __pyx_mstate_global->__pyx_codeobj__27 +#define __pyx_codeobj__29 __pyx_mstate_global->__pyx_codeobj__29 +#define __pyx_codeobj__31 __pyx_mstate_global->__pyx_codeobj__31 +#define __pyx_codeobj__33 __pyx_mstate_global->__pyx_codeobj__33 +#define __pyx_codeobj__35 __pyx_mstate_global->__pyx_codeobj__35 +#define __pyx_codeobj__36 __pyx_mstate_global->__pyx_codeobj__36 +#define __pyx_codeobj__38 __pyx_mstate_global->__pyx_codeobj__38 +#define __pyx_codeobj__39 __pyx_mstate_global->__pyx_codeobj__39 +#define __pyx_codeobj__40 __pyx_mstate_global->__pyx_codeobj__40 +#define __pyx_codeobj__42 __pyx_mstate_global->__pyx_codeobj__42 +#define __pyx_codeobj__44 __pyx_mstate_global->__pyx_codeobj__44 +#define __pyx_codeobj__46 __pyx_mstate_global->__pyx_codeobj__46 +#define __pyx_codeobj__47 __pyx_mstate_global->__pyx_codeobj__47 +#define __pyx_codeobj__48 __pyx_mstate_global->__pyx_codeobj__48 +#define __pyx_codeobj__50 __pyx_mstate_global->__pyx_codeobj__50 +#define __pyx_codeobj__51 __pyx_mstate_global->__pyx_codeobj__51 +#define __pyx_codeobj__53 __pyx_mstate_global->__pyx_codeobj__53 +#define __pyx_codeobj__54 __pyx_mstate_global->__pyx_codeobj__54 +#define __pyx_codeobj__55 __pyx_mstate_global->__pyx_codeobj__55 +#define __pyx_codeobj__56 __pyx_mstate_global->__pyx_codeobj__56 +#define __pyx_codeobj__57 __pyx_mstate_global->__pyx_codeobj__57 +#define __pyx_codeobj__58 __pyx_mstate_global->__pyx_codeobj__58 +#define __pyx_codeobj__59 __pyx_mstate_global->__pyx_codeobj__59 +#define __pyx_codeobj__60 __pyx_mstate_global->__pyx_codeobj__60 +#define __pyx_codeobj__61 __pyx_mstate_global->__pyx_codeobj__61 +#define __pyx_codeobj__62 __pyx_mstate_global->__pyx_codeobj__62 +#define __pyx_codeobj__63 __pyx_mstate_global->__pyx_codeobj__63 +#define __pyx_codeobj__64 __pyx_mstate_global->__pyx_codeobj__64 +#define __pyx_codeobj__65 __pyx_mstate_global->__pyx_codeobj__65 +#define __pyx_codeobj__66 __pyx_mstate_global->__pyx_codeobj__66 +#define __pyx_codeobj__68 __pyx_mstate_global->__pyx_codeobj__68 +#define __pyx_codeobj__69 __pyx_mstate_global->__pyx_codeobj__69 +/* #### Code section: module_code ### */ + +/* "sqlalchemy/util/_collections_cy.py":38 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +PyDoc_STRVAR(__pyx_doc_10sqlalchemy_4util_15_collections_cy__is_compiled, "Utility function to indicate if this 
module is compiled or not."); +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_4util_15_collections_cy__is_compiled}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy__is_compiled(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled", 1); + + /* "sqlalchemy/util/_collections_cy.py":40 + * def _is_compiled() -> bool: + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":38 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":48 + * + * + * @cython.ccall # <<<<<<<<<<<<<< + * def unique_list(seq: Iterable[_T]) -> List[_T]: + * # this version seems somewhat faster for smaller sizes, but it's + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_3unique_list(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_unique_list(PyObject *__pyx_v_seq, CYTHON_UNUSED int __pyx_skip_dispatch) { + PyObject *__pyx_v_seen = 0; + PyObject *__pyx_7genexpr__pyx_v_x = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + Py_ssize_t __pyx_t_3; + PyObject *(*__pyx_t_4)(PyObject *); + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + int __pyx_t_7; + int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("unique_list", 1); + + /* "sqlalchemy/util/_collections_cy.py":55 + * # return PyDict_Keys(w) if cython.compiled else list(w) + * if cython.compiled: + * seen: Set[_T] = set() # <<<<<<<<<<<<<< + * return [x for x in seq if x not in seen and not set.add(seen, x)] + * else: + */ + __pyx_t_1 = PySet_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 55, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_seen = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":56 + * if cython.compiled: + * seen: Set[_T] = set() + * return [x for x in seq if x not in seen and not set.add(seen, x)] # <<<<<<<<<<<<<< + * else: + * return list(dict.fromkeys(seq)) + */ + __Pyx_XDECREF(__pyx_r); + { /* enter inner scope */ + __pyx_t_1 
= PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 56, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_1); + if (likely(PyList_CheckExact(__pyx_v_seq)) || PyTuple_CheckExact(__pyx_v_seq)) { + __pyx_t_2 = __pyx_v_seq; __Pyx_INCREF(__pyx_t_2); + __pyx_t_3 = 0; + __pyx_t_4 = NULL; + } else { + __pyx_t_3 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_seq); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 56, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 56, __pyx_L5_error) + } + for (;;) { + if (likely(!__pyx_t_4)) { + if (likely(PyList_CheckExact(__pyx_t_2))) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 56, __pyx_L5_error) + #endif + if (__pyx_t_3 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_5 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_5); __pyx_t_3++; if (unlikely((0 < 0))) __PYX_ERR(0, 56, __pyx_L5_error) + #else + __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 56, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_5); + #endif + } else { + { + Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 56, __pyx_L5_error) + #endif + if (__pyx_t_3 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_5); __pyx_t_3++; if (unlikely((0 < 0))) __PYX_ERR(0, 56, __pyx_L5_error) + #else + __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 56, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_5); + #endif + } + } else { + __pyx_t_5 = __pyx_t_4(__pyx_t_2); + if (unlikely(!__pyx_t_5)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 56, __pyx_L5_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_5); + } + __Pyx_XDECREF_SET(__pyx_7genexpr__pyx_v_x, __pyx_t_5); + __pyx_t_5 = 0; + __pyx_t_7 = (__Pyx_PySet_ContainsTF(__pyx_7genexpr__pyx_v_x, __pyx_v_seen, Py_NE)); if (unlikely((__pyx_t_7 < 0))) __PYX_ERR(0, 56, __pyx_L5_error) + if (__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L9_bool_binop_done; + } + __pyx_t_8 = PySet_Add(__pyx_v_seen, __pyx_7genexpr__pyx_v_x); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 56, __pyx_L5_error) + __pyx_t_7 = (!(__pyx_t_8 != 0)); + __pyx_t_6 = __pyx_t_7; + __pyx_L9_bool_binop_done:; + if (__pyx_t_6) { + if (unlikely(__Pyx_ListComp_Append(__pyx_t_1, (PyObject*)__pyx_7genexpr__pyx_v_x))) __PYX_ERR(0, 56, __pyx_L5_error) + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_7genexpr__pyx_v_x); __pyx_7genexpr__pyx_v_x = 0; + goto __pyx_L12_exit_scope; + __pyx_L5_error:; + __Pyx_XDECREF(__pyx_7genexpr__pyx_v_x); __pyx_7genexpr__pyx_v_x = 0; + goto __pyx_L1_error; + __pyx_L12_exit_scope:; + } /* exit inner scope */ + __pyx_r = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":48 + * + * + * @cython.ccall # <<<<<<<<<<<<<< + * def unique_list(seq: Iterable[_T]) -> List[_T]: + * # this version seems somewhat faster for smaller sizes, but it's + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + 
__Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.unique_list", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_seen); + __Pyx_XDECREF(__pyx_7genexpr__pyx_v_x); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_3unique_list(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_3unique_list = {"unique_list", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_3unique_list, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_3unique_list(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_seq = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("unique_list (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_seq,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_seq)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 48, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "unique_list") < 0)) __PYX_ERR(0, 48, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_seq = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("unique_list", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 48, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.unique_list", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = 
__pyx_pf_10sqlalchemy_4util_15_collections_cy_2unique_list(__pyx_self, __pyx_v_seq); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_2unique_list(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_seq) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("unique_list", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_unique_list(__pyx_v_seq, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.unique_list", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":86 + * _list: List[_T] + * + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__(cls, key: Any) -> type[Self]: + * return cls + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__(PyObject *__pyx_v_cls, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__ = {"__class_getitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__(PyObject *__pyx_v_cls, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + CYTHON_UNUSED PyObject *__pyx_v_key = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__class_getitem__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + 
kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 86, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__class_getitem__") < 0)) __PYX_ERR(0, 86, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_key = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__class_getitem__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 86, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__class_getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet___class_getitem__(((PyTypeObject*)__pyx_v_cls), __pyx_v_key); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet___class_getitem__(PyTypeObject *__pyx_v_cls, CYTHON_UNUSED PyObject *__pyx_v_key) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__class_getitem__", 1); + + /* "sqlalchemy/util/_collections_cy.py":88 + * @classmethod + * def __class_getitem__(cls, key: Any) -> type[Self]: + * return cls # <<<<<<<<<<<<<< + * + * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_cls); + __pyx_r = ((PyObject *)__pyx_v_cls); + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":86 + * _list: List[_T] + * + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__(cls, key: Any) -> type[Self]: + * return cls + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":90 + * return cls + * + * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: # <<<<<<<<<<<<<< + * if d is not None: + * if isinstance(d, set) or isinstance(d, dict): + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_3__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_3__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_d = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if 
(unlikely(__pyx_nargs < 0)) return -1; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_d,0}; + values[0] = __Pyx_Arg_NewRef_VARARGS(((PyObject *)Py_None)); + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_VARARGS(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (kw_args > 0) { + PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_d); + if (value) { values[0] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 90, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 90, __pyx_L3_error) + } + } else { + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_d = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 0, 0, 1, __pyx_nargs); __PYX_ERR(0, 90, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_2__init__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_d); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_2__init__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_d) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + unsigned int __pyx_t_6; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__init__", 1); + + /* "sqlalchemy/util/_collections_cy.py":91 + * + * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: + * if d is not None: # <<<<<<<<<<<<<< + * if isinstance(d, set) or isinstance(d, dict): + * self._list = list(d) + */ + __pyx_t_1 = (__pyx_v_d != Py_None); + if (__pyx_t_1) { + + /* "sqlalchemy/util/_collections_cy.py":92 + * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: + * if d is not None: + * if isinstance(d, set) or isinstance(d, dict): # <<<<<<<<<<<<<< + * self._list = list(d) + * else: + */ + __pyx_t_2 = PySet_Check(__pyx_v_d); + if (!__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L5_bool_binop_done; + } + __pyx_t_2 = PyDict_Check(__pyx_v_d); + __pyx_t_1 = __pyx_t_2; + 
__pyx_L5_bool_binop_done:; + if (__pyx_t_1) { + + /* "sqlalchemy/util/_collections_cy.py":93 + * if d is not None: + * if isinstance(d, set) or isinstance(d, dict): + * self._list = list(d) # <<<<<<<<<<<<<< + * else: + * self._list = unique_list(d) + */ + __pyx_t_3 = PySequence_List(__pyx_v_d); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 93, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_v_self->_list); + __Pyx_DECREF(__pyx_v_self->_list); + __pyx_v_self->_list = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":92 + * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: + * if d is not None: + * if isinstance(d, set) or isinstance(d, dict): # <<<<<<<<<<<<<< + * self._list = list(d) + * else: + */ + goto __pyx_L4; + } + + /* "sqlalchemy/util/_collections_cy.py":95 + * self._list = list(d) + * else: + * self._list = unique_list(d) # <<<<<<<<<<<<<< + * set.__init__(self, self._list) + * else: + */ + /*else*/ { + __pyx_t_3 = __pyx_f_10sqlalchemy_4util_15_collections_cy_unique_list(__pyx_v_d, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 95, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_v_self->_list); + __Pyx_DECREF(__pyx_v_self->_list); + __pyx_v_self->_list = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + } + __pyx_L4:; + + /* "sqlalchemy/util/_collections_cy.py":96 + * else: + * self._list = unique_list(d) + * set.__init__(self, self._list) # <<<<<<<<<<<<<< + * else: + * self._list = [] + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_init); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 96, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + __pyx_t_6 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_6 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[3] = {__pyx_t_5, ((PyObject *)__pyx_v_self), __pyx_v_self->_list}; + __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_6, 2+__pyx_t_6); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 96, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":91 + * + * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: + * if d is not None: # <<<<<<<<<<<<<< + * if isinstance(d, set) or isinstance(d, dict): + * self._list = list(d) + */ + goto __pyx_L3; + } + + /* "sqlalchemy/util/_collections_cy.py":98 + * set.__init__(self, self._list) + * else: + * self._list = [] # <<<<<<<<<<<<<< + * set.__init__(self) + * + */ + /*else*/ { + __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 98, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_v_self->_list); + __Pyx_DECREF(__pyx_v_self->_list); + __pyx_v_self->_list = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":99 + * else: + * self._list = [] + * set.__init__(self) # <<<<<<<<<<<<<< + * + * def copy(self) -> OrderedSet[_T]: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_init); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + 
__pyx_t_6 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_6 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_5, ((PyObject *)__pyx_v_self)}; + __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_6, 1+__pyx_t_6); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_L3:; + + /* "sqlalchemy/util/_collections_cy.py":90 + * return cls + * + * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: # <<<<<<<<<<<<<< + * if d is not None: + * if isinstance(d, set) or isinstance(d, dict): + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":101 + * set.__init__(self) + * + * def copy(self) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * return self._from_list(list(self._list)) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy = {"copy", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("copy (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("copy", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "copy", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_4copy(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_4copy(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { + 
PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("copy", 1); + + /* "sqlalchemy/util/_collections_cy.py":102 + * + * def copy(self) -> OrderedSet[_T]: + * return self._from_list(list(self._list)) # <<<<<<<<<<<<<< + * + * @cython.final + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PySequence_List(__pyx_v_self->_list); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 102, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(__pyx_v_self, ((PyObject*)__pyx_t_1))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 102, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":101 + * set.__init__(self) + * + * def copy(self) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * return self._from_list(list(self._list)) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.copy", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":104 + * return self._from_list(list(self._list)) + * + * @cython.final # <<<<<<<<<<<<<< + * @cython.cfunc + * @cython.inline + */ + +static CYTHON_INLINE struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_new_list) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_new = 0; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_from_list", 1); + + /* "sqlalchemy/util/_collections_cy.py":108 + * @cython.inline + * def _from_list(self, new_list: List[_T]) -> OrderedSet: # type: ignore[type-arg] # noqa: E501 + * new: OrderedSet = OrderedSet.__new__(OrderedSet) # type: ignore[type-arg] # noqa: E501 # <<<<<<<<<<<<<< + * new._list = new_list + * set.update(new, new_list) + */ + __pyx_t_1 = ((PyObject *)__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_OrderedSet(((PyTypeObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet), __pyx_empty_tuple, NULL)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 108, __pyx_L1_error) + __Pyx_GOTREF((PyObject *)__pyx_t_1); + __pyx_v_new = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":109 + * def _from_list(self, new_list: List[_T]) -> OrderedSet: # type: ignore[type-arg] # noqa: E501 + * new: OrderedSet = OrderedSet.__new__(OrderedSet) # type: ignore[type-arg] # noqa: E501 + * new._list = new_list # <<<<<<<<<<<<<< + * set.update(new, new_list) + * return new + */ + __Pyx_INCREF(__pyx_v_new_list); + __Pyx_GIVEREF(__pyx_v_new_list); + __Pyx_GOTREF(__pyx_v_new->_list); + __Pyx_DECREF(__pyx_v_new->_list); + 
__pyx_v_new->_list = __pyx_v_new_list; + + /* "sqlalchemy/util/_collections_cy.py":110 + * new: OrderedSet = OrderedSet.__new__(OrderedSet) # type: ignore[type-arg] # noqa: E501 + * new._list = new_list + * set.update(new, new_list) # <<<<<<<<<<<<<< + * return new + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[3] = {__pyx_t_3, ((PyObject *)__pyx_v_new), __pyx_v_new_list}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 2+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":111 + * new._list = new_list + * set.update(new, new_list) + * return new # <<<<<<<<<<<<<< + * + * def add(self, element: _T, /) -> None: + */ + __Pyx_XDECREF((PyObject *)__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_new); + __pyx_r = __pyx_v_new; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":104 + * return self._from_list(list(self._list)) + * + * @cython.final # <<<<<<<<<<<<<< + * @cython.cfunc + * @cython.inline + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet._from_list", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_new); + __Pyx_XGIVEREF((PyObject *)__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":113 + * return new + * + * def add(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element not in self: + * self._list.append(element) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add(PyObject *__pyx_v_self, PyObject *__pyx_v_element); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add = {"add", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add(PyObject *__pyx_v_self, PyObject *__pyx_v_element) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("add (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_6add(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_element)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_6add(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_element) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int 
__pyx_t_1; + int __pyx_t_2; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("add", 1); + + /* "sqlalchemy/util/_collections_cy.py":114 + * + * def add(self, element: _T, /) -> None: + * if element not in self: # <<<<<<<<<<<<<< + * self._list.append(element) + * set.add(self, element) + */ + __pyx_t_1 = (__Pyx_PySequence_ContainsTF(__pyx_v_element, ((PyObject *)__pyx_v_self), Py_NE)); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(0, 114, __pyx_L1_error) + if (__pyx_t_1) { + + /* "sqlalchemy/util/_collections_cy.py":115 + * def add(self, element: _T, /) -> None: + * if element not in self: + * self._list.append(element) # <<<<<<<<<<<<<< + * set.add(self, element) + * + */ + if (unlikely(__pyx_v_self->_list == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); + __PYX_ERR(0, 115, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_self->_list, __pyx_v_element); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 115, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":116 + * if element not in self: + * self._list.append(element) + * set.add(self, element) # <<<<<<<<<<<<<< + * + * def remove(self, element: _T, /) -> None: + */ + if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { + PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "add", "set"); + __PYX_ERR(0, 116, __pyx_L1_error) + } + __pyx_t_2 = PySet_Add(((PyObject*)__pyx_v_self), __pyx_v_element); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 116, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":114 + * + * def add(self, element: _T, /) -> None: + * if element not in self: # <<<<<<<<<<<<<< + * self._list.append(element) + * set.add(self, element) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":113 + * return new + * + * def add(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element not in self: + * self._list.append(element) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.add", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":118 + * set.add(self, element) + * + * def remove(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * # set.remove will raise if element is not in self + * set.remove(self, element) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove(PyObject *__pyx_v_self, PyObject *__pyx_v_element); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove = {"remove", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove(PyObject *__pyx_v_self, PyObject *__pyx_v_element) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("remove (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_8remove(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_element)); + + /* function exit code */ + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_8remove(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_element) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("remove", 1); + + /* "sqlalchemy/util/_collections_cy.py":120 + * def remove(self, element: _T, /) -> None: + * # set.remove will raise if element is not in self + * set.remove(self, element) # <<<<<<<<<<<<<< + * self._list.remove(element) + * + */ + if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { + PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "remove", "set"); + __PYX_ERR(0, 120, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PySet_Remove(((PyObject*)__pyx_v_self), __pyx_v_element); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 120, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":121 + * # set.remove will raise if element is not in self + * set.remove(self, element) + * self._list.remove(element) # <<<<<<<<<<<<<< + * + * def pop(self) -> _T: + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_list, __pyx_n_s_remove); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 121, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_element}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 121, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":118 + * set.add(self, element) + * + * def remove(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * # set.remove will raise if element is not in self + * set.remove(self, element) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.remove", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":123 + * self._list.remove(element) + * + * def pop(self) -> _T: # <<<<<<<<<<<<<< + * try: + * value = self._list.pop() + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop = {"pop", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("pop (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("pop", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "pop", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_10pop(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_10pop(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { + PyObject *__pyx_v_value = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("pop", 1); + + /* "sqlalchemy/util/_collections_cy.py":124 + * + * def pop(self) -> _T: + * try: # <<<<<<<<<<<<<< + * value = self._list.pop() + * except IndexError: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "sqlalchemy/util/_collections_cy.py":125 + * def pop(self) -> _T: + * try: + * value = self._list.pop() # <<<<<<<<<<<<<< + * except IndexError: + * raise KeyError("pop from an empty set") from None + */ + if (unlikely(__pyx_v_self->_list == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "pop"); + __PYX_ERR(0, 125, __pyx_L3_error) + } + __pyx_t_4 = __Pyx_PyList_Pop(__pyx_v_self->_list); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 125, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_v_value = __pyx_t_4; + __pyx_t_4 = 0; + + /* "sqlalchemy/util/_collections_cy.py":124 + * + * def pop(self) -> _T: + * try: # <<<<<<<<<<<<<< + * value = self._list.pop() + * except IndexError: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "sqlalchemy/util/_collections_cy.py":126 + * try: + * value = self._list.pop() + * except IndexError: # <<<<<<<<<<<<<< + * raise 
KeyError("pop from an empty set") from None + * set.remove(self, value) + */ + __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_IndexError); + if (__pyx_t_5) { + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_4, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 126, __pyx_L5_except_error) + __Pyx_XGOTREF(__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_7); + + /* "sqlalchemy/util/_collections_cy.py":127 + * value = self._list.pop() + * except IndexError: + * raise KeyError("pop from an empty set") from None # <<<<<<<<<<<<<< + * set.remove(self, value) + * return value + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_KeyError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 127, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, Py_None); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(0, 127, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + + /* "sqlalchemy/util/_collections_cy.py":124 + * + * def pop(self) -> _T: + * try: # <<<<<<<<<<<<<< + * value = self._list.pop() + * except IndexError: + */ + __pyx_L5_except_error:; + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "sqlalchemy/util/_collections_cy.py":128 + * except IndexError: + * raise KeyError("pop from an empty set") from None + * set.remove(self, value) # <<<<<<<<<<<<<< + * return value + * + */ + if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { + PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "remove", "set"); + __PYX_ERR(0, 128, __pyx_L1_error) + } + __pyx_t_9 = __Pyx_PySet_Remove(((PyObject*)__pyx_v_self), __pyx_v_value); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 128, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":129 + * raise KeyError("pop from an empty set") from None + * set.remove(self, value) + * return value # <<<<<<<<<<<<<< + * + * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_value); + __pyx_r = __pyx_v_value; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":123 + * self._list.remove(element) + * + * def pop(self) -> _T: # <<<<<<<<<<<<<< + * try: + * value = self._list.pop() + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_value); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":131 + * return value + * + * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element not in self: + * self._list.insert(pos, element) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert = {"insert", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + Py_ssize_t __pyx_v_pos; + PyObject *__pyx_v_element = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[2] = {0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("insert (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {0}; + if (__pyx_kwds && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) > 0) { + if (likely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values, 0, "insert") < 0)) __PYX_ERR(0, 131, __pyx_L3_error) + } else if (unlikely(__pyx_nargs != 2)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + } + __pyx_v_pos = __Pyx_PyIndex_AsSsize_t(values[0]); if (unlikely((__pyx_v_pos == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 131, __pyx_L3_error) + __pyx_v_element = values[1]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("insert", 1, 2, 2, __pyx_nargs); __PYX_ERR(0, 131, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_12insert(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_pos, __pyx_v_element); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_12insert(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, Py_ssize_t __pyx_v_pos, PyObject *__pyx_v_element) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("insert", 1); + + /* "sqlalchemy/util/_collections_cy.py":132 + * + * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: + * if element not in self: # <<<<<<<<<<<<<< + * self._list.insert(pos, element) + * set.add(self, element) + */ + __pyx_t_1 = 
(__Pyx_PySequence_ContainsTF(__pyx_v_element, ((PyObject *)__pyx_v_self), Py_NE)); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(0, 132, __pyx_L1_error) + if (__pyx_t_1) { + + /* "sqlalchemy/util/_collections_cy.py":133 + * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: + * if element not in self: + * self._list.insert(pos, element) # <<<<<<<<<<<<<< + * set.add(self, element) + * + */ + if (unlikely(__pyx_v_self->_list == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "insert"); + __PYX_ERR(0, 133, __pyx_L1_error) + } + __pyx_t_2 = PyList_Insert(__pyx_v_self->_list, __pyx_v_pos, __pyx_v_element); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 133, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":134 + * if element not in self: + * self._list.insert(pos, element) + * set.add(self, element) # <<<<<<<<<<<<<< + * + * def discard(self, element: _T, /) -> None: + */ + if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { + PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "add", "set"); + __PYX_ERR(0, 134, __pyx_L1_error) + } + __pyx_t_2 = PySet_Add(((PyObject*)__pyx_v_self), __pyx_v_element); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 134, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":132 + * + * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: + * if element not in self: # <<<<<<<<<<<<<< + * self._list.insert(pos, element) + * set.add(self, element) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":131 + * return value + * + * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element not in self: + * self._list.insert(pos, element) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":136 + * set.add(self, element) + * + * def discard(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element in self: + * set.remove(self, element) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard(PyObject *__pyx_v_self, PyObject *__pyx_v_element); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard = {"discard", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard(PyObject *__pyx_v_self, PyObject *__pyx_v_element) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("discard (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_14discard(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_element)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_14discard(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_element) { + PyObject *__pyx_r = NULL; + 
__Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + unsigned int __pyx_t_6; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("discard", 1); + + /* "sqlalchemy/util/_collections_cy.py":137 + * + * def discard(self, element: _T, /) -> None: + * if element in self: # <<<<<<<<<<<<<< + * set.remove(self, element) + * self._list.remove(element) + */ + __pyx_t_1 = (__Pyx_PySequence_ContainsTF(__pyx_v_element, ((PyObject *)__pyx_v_self), Py_EQ)); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(0, 137, __pyx_L1_error) + if (__pyx_t_1) { + + /* "sqlalchemy/util/_collections_cy.py":138 + * def discard(self, element: _T, /) -> None: + * if element in self: + * set.remove(self, element) # <<<<<<<<<<<<<< + * self._list.remove(element) + * + */ + if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { + PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "remove", "set"); + __PYX_ERR(0, 138, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PySet_Remove(((PyObject*)__pyx_v_self), __pyx_v_element); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 138, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":139 + * if element in self: + * set.remove(self, element) + * self._list.remove(element) # <<<<<<<<<<<<<< + * + * def clear(self) -> None: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_list, __pyx_n_s_remove); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + __pyx_t_6 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_6 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_5, __pyx_v_element}; + __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_6, 1+__pyx_t_6); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":137 + * + * def discard(self, element: _T, /) -> None: + * if element in self: # <<<<<<<<<<<<<< + * set.remove(self, element) + * self._list.remove(element) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":136 + * set.add(self, element) + * + * def discard(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element in self: + * set.remove(self, element) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.discard", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":141 + * self._list.remove(element) + * + * def clear(self) -> None: # <<<<<<<<<<<<<< + * set.clear(self) # type: ignore[arg-type] + * self._list = [] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear(PyObject *__pyx_v_self, +#if 
CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear = {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("clear (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("clear", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "clear", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_16clear(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_16clear(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("clear", 1); + + /* "sqlalchemy/util/_collections_cy.py":142 + * + * def clear(self) -> None: + * set.clear(self) # type: ignore[arg-type] # <<<<<<<<<<<<<< + * self._list = [] + * + */ + if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { + PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "clear", "set"); + __PYX_ERR(0, 142, __pyx_L1_error) + } + __pyx_t_1 = PySet_Clear(((PyObject*)__pyx_v_self)); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 142, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":143 + * def clear(self) -> None: + * set.clear(self) # type: ignore[arg-type] + * self._list = [] # <<<<<<<<<<<<<< + * + * def __getitem__(self, key: cython.Py_ssize_t) -> _T: + */ + __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 143, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_self->_list); + __Pyx_DECREF(__pyx_v_self->_list); + __pyx_v_self->_list = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":141 + * self._list.remove(element) + * + * def clear(self) -> None: # <<<<<<<<<<<<<< + * set.clear(self) # type: ignore[arg-type] + * self._list = [] + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + 
__Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":145 + * self._list = [] + * + * def __getitem__(self, key: cython.Py_ssize_t) -> _T: # <<<<<<<<<<<<<< + * return self._list[key] + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_19__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_arg_key); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_19__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_arg_key) { + Py_ssize_t __pyx_v_key; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + assert(__pyx_arg_key); { + __pyx_v_key = __Pyx_PyIndex_AsSsize_t(__pyx_arg_key); if (unlikely((__pyx_v_key == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 145, __pyx_L3_error) + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_18__getitem__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((Py_ssize_t)__pyx_v_key)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_18__getitem__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, Py_ssize_t __pyx_v_key) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__getitem__", 1); + + /* "sqlalchemy/util/_collections_cy.py":146 + * + * def __getitem__(self, key: cython.Py_ssize_t) -> _T: + * return self._list[key] # <<<<<<<<<<<<<< + * + * def __iter__(self) -> Iterator[_T]: + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_self->_list == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 146, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_List(__pyx_v_self->_list, __pyx_v_key, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":145 + * self._list = [] + * + * def __getitem__(self, key: cython.Py_ssize_t) -> _T: # <<<<<<<<<<<<<< + * return self._list[key] + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":148 + * return self._list[key] + * + * def __iter__(self) -> Iterator[_T]: # <<<<<<<<<<<<<< + * return iter(self._list) + * + */ + +/* 
Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_21__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_21__iter__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_20__iter__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_20__iter__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__iter__", 1); + + /* "sqlalchemy/util/_collections_cy.py":149 + * + * def __iter__(self) -> Iterator[_T]: + * return iter(self._list) # <<<<<<<<<<<<<< + * + * def __add__(self, other: Iterator[_T]) -> OrderedSet[_T]: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_v_self->_list; + __Pyx_INCREF(__pyx_t_1); + __pyx_t_2 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":148 + * return self._list[key] + * + * def __iter__(self) -> Iterator[_T]: # <<<<<<<<<<<<<< + * return iter(self._list) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":151 + * return iter(self._list) + * + * def __add__(self, other: Iterator[_T]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * return self.union(other) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_23__add__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_23__add__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__add__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_22__add__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_22__add__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int 
__pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__add__", 1); + + /* "sqlalchemy/util/_collections_cy.py":152 + * + * def __add__(self, other: Iterator[_T]) -> OrderedSet[_T]: + * return self.union(other) # <<<<<<<<<<<<<< + * + * def __repr__(self) -> str: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_union); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 152, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 152, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":151 + * return iter(self._list) + * + * def __add__(self, other: Iterator[_T]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * return self.union(other) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__add__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":154 + * return self.union(other) + * + * def __repr__(self) -> str: # <<<<<<<<<<<<<< + * return "%s(%r)" % (self.__class__.__name__, self._list) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_24__repr__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_24__repr__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + Py_UCS4 __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__repr__", 1); + + /* "sqlalchemy/util/_collections_cy.py":155 + * + * def __repr__(self) -> str: + * return "%s(%r)" % (self.__class__.__name__, self._list) # <<<<<<<<<<<<<< + * + * __str__ = __repr__ + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(4); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = 0; + __pyx_t_3 = 127; + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_name); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = __Pyx_PyObject_FormatSimpleAndDecref(PyObject_Unicode(__pyx_t_5), __pyx_empty_unicode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; + __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4); + __pyx_t_4 = 0; + __Pyx_INCREF(__pyx_kp_u__2); + __pyx_t_2 += 1; + __Pyx_GIVEREF(__pyx_kp_u__2); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_kp_u__2); + __pyx_t_4 = __Pyx_PyObject_FormatSimpleAndDecref(PyObject_Repr(__pyx_v_self->_list), __pyx_empty_unicode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; + __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4); + __pyx_t_4 = 0; + __Pyx_INCREF(__pyx_kp_u__3); + __pyx_t_2 += 1; + __Pyx_GIVEREF(__pyx_kp_u__3); + PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_kp_u__3); + __pyx_t_4 = __Pyx_PyUnicode_Join(__pyx_t_1, 4, __pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":154 + * return self.union(other) + * + * def __repr__(self) -> str: # <<<<<<<<<<<<<< + * return "%s(%r)" % (self.__class__.__name__, self._list) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":160 + * + * # @cython.ccall # cdef function cannot have star argument + * def update(self, *iterables: Iterable[_T]) -> None: # <<<<<<<<<<<<<< + * for iterable in iterables: + * for element in iterable: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update = {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_iterables = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + 
__Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("update (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "update", 0))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_iterables = __pyx_args; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_26update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_iterables); + + /* function exit code */ + __Pyx_DECREF(__pyx_v_iterables); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_26update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_iterables) { + PyObject *__pyx_v_iterable = NULL; + PyObject *__pyx_v_element = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + Py_ssize_t __pyx_t_4; + PyObject *(*__pyx_t_5)(PyObject *); + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("update", 1); + + /* "sqlalchemy/util/_collections_cy.py":161 + * # @cython.ccall # cdef function cannot have star argument + * def update(self, *iterables: Iterable[_T]) -> None: + * for iterable in iterables: # <<<<<<<<<<<<<< + * for element in iterable: + * # inline of add. mainly for python, since for cython we + */ + __pyx_t_1 = __pyx_v_iterables; __Pyx_INCREF(__pyx_t_1); + __pyx_t_2 = 0; + for (;;) { + { + Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_1); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 161, __pyx_L1_error) + #endif + if (__pyx_t_2 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely((0 < 0))) __PYX_ERR(0, 161, __pyx_L1_error) + #else + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 161, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_iterable, __pyx_t_3); + __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":162 + * def update(self, *iterables: Iterable[_T]) -> None: + * for iterable in iterables: + * for element in iterable: # <<<<<<<<<<<<<< + * # inline of add. 
mainly for python, since for cython we + * # could create an @cfunc @inline _add function that would + */ + if (likely(PyList_CheckExact(__pyx_v_iterable)) || PyTuple_CheckExact(__pyx_v_iterable)) { + __pyx_t_3 = __pyx_v_iterable; __Pyx_INCREF(__pyx_t_3); + __pyx_t_4 = 0; + __pyx_t_5 = NULL; + } else { + __pyx_t_4 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_iterable); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 162, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 162, __pyx_L1_error) + } + for (;;) { + if (likely(!__pyx_t_5)) { + if (likely(PyList_CheckExact(__pyx_t_3))) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 162, __pyx_L1_error) + #endif + if (__pyx_t_4 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_6 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_6); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 162, __pyx_L1_error) + #else + __pyx_t_6 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 162, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + } else { + { + Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_3); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 162, __pyx_L1_error) + #endif + if (__pyx_t_4 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_6); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 162, __pyx_L1_error) + #else + __pyx_t_6 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 162, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + } + } else { + __pyx_t_6 = __pyx_t_5(__pyx_t_3); + if (unlikely(!__pyx_t_6)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 162, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_6); + } + __Pyx_XDECREF_SET(__pyx_v_element, __pyx_t_6); + __pyx_t_6 = 0; + + /* "sqlalchemy/util/_collections_cy.py":166 + * # could create an @cfunc @inline _add function that would + * # perform the same + * if element not in self: # <<<<<<<<<<<<<< + * self._list.append(element) + * set.add(self, element) + */ + __pyx_t_7 = (__Pyx_PySequence_ContainsTF(__pyx_v_element, ((PyObject *)__pyx_v_self), Py_NE)); if (unlikely((__pyx_t_7 < 0))) __PYX_ERR(0, 166, __pyx_L1_error) + if (__pyx_t_7) { + + /* "sqlalchemy/util/_collections_cy.py":167 + * # perform the same + * if element not in self: + * self._list.append(element) # <<<<<<<<<<<<<< + * set.add(self, element) + * + */ + if (unlikely(__pyx_v_self->_list == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); + __PYX_ERR(0, 167, __pyx_L1_error) + } + __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_self->_list, __pyx_v_element); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 167, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":168 + * if element not in self: + * self._list.append(element) + * set.add(self, element) # <<<<<<<<<<<<<< + * + * def __ior__( + */ + if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { + PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "add", 
"set"); + __PYX_ERR(0, 168, __pyx_L1_error) + } + __pyx_t_8 = PySet_Add(((PyObject*)__pyx_v_self), __pyx_v_element); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 168, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":166 + * # could create an @cfunc @inline _add function that would + * # perform the same + * if element not in self: # <<<<<<<<<<<<<< + * self._list.append(element) + * set.add(self, element) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":162 + * def update(self, *iterables: Iterable[_T]) -> None: + * for iterable in iterables: + * for element in iterable: # <<<<<<<<<<<<<< + * # inline of add. mainly for python, since for cython we + * # could create an @cfunc @inline _add function that would + */ + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":161 + * # @cython.ccall # cdef function cannot have star argument + * def update(self, *iterables: Iterable[_T]) -> None: + * for iterable in iterables: # <<<<<<<<<<<<<< + * for element in iterable: + * # inline of add. mainly for python, since for cython we + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":160 + * + * # @cython.ccall # cdef function cannot have star argument + * def update(self, *iterables: Iterable[_T]) -> None: # <<<<<<<<<<<<<< + * for iterable in iterables: + * for element in iterable: + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_iterable); + __Pyx_XDECREF(__pyx_v_element); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":170 + * set.add(self, element) + * + * def __ior__( # <<<<<<<<<<<<<< + * self: OrderedSet[Union[_T, _S]], iterable: AbstractSet[_S] + * ) -> OrderedSet[Union[_T, _S]]: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_29__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_29__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__ior__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_28__ior__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_28__ior__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_iterable) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__ior__", 1); + + /* "sqlalchemy/util/_collections_cy.py":173 + * self: OrderedSet[Union[_T, _S]], iterable: AbstractSet[_S] + * ) -> 
OrderedSet[Union[_T, _S]]: + * self.update(iterable) # <<<<<<<<<<<<<< + * return self + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 173, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_iterable}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 173, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":174 + * ) -> OrderedSet[Union[_T, _S]]: + * self.update(iterable) + * return self # <<<<<<<<<<<<<< + * + * # @cython.ccall # cdef function cannot have star argument + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":170 + * set.add(self, element) + * + * def __ior__( # <<<<<<<<<<<<<< + * self: OrderedSet[Union[_T, _S]], iterable: AbstractSet[_S] + * ) -> OrderedSet[Union[_T, _S]]: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__ior__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":177 + * + * # @cython.ccall # cdef function cannot have star argument + * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< + * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) + * result.update(*other) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union = {"union", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_other = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("union (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "union", 0))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_other = __pyx_args; + __pyx_r = 
__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_30union(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_other); + + /* function exit code */ + __Pyx_DECREF(__pyx_v_other); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_30union(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_result = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("union", 1); + + /* "sqlalchemy/util/_collections_cy.py":178 + * # @cython.ccall # cdef function cannot have star argument + * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: + * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) # <<<<<<<<<<<<<< + * result.update(*other) + * return result + */ + __pyx_t_1 = PySequence_List(__pyx_v_self->_list); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 178, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(__pyx_v_self, ((PyObject*)__pyx_t_1))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 178, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":179 + * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: + * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) + * result.update(*other) # <<<<<<<<<<<<<< + * return result + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_result), __pyx_n_s_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 179, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_v_other, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 179, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":180 + * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) + * result.update(*other) + * return result # <<<<<<<<<<<<<< + * + * def __or__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_result); + __pyx_r = ((PyObject *)__pyx_v_result); + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":177 + * + * # @cython.ccall # cdef function cannot have star argument + * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< + * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) + * result.update(*other) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.union", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":182 + * return result + * + * def __or__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, 
_S]]: # <<<<<<<<<<<<<< + * return self.union(other) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_33__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_33__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__or__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_32__or__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_32__or__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__or__", 1); + + /* "sqlalchemy/util/_collections_cy.py":183 + * + * def __or__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: + * return self.union(other) # <<<<<<<<<<<<<< + * + * # @cython.ccall # cdef function cannot have star argument + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_union); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 183, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 183, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":182 + * return result + * + * def __or__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< + * return self.union(other) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__or__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":186 + * + * # @cython.ccall # cdef function cannot have star argument + * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * other_set: Set[Any] = set.intersection(self, *other) + * return self._from_list([a for a in self._list if a in other_set]) + */ + +/* Python wrapper */ +static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection = {"intersection", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_other = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("intersection (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "intersection", 0))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_other = __pyx_args; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_34intersection(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_other); + + /* function exit code */ + __Pyx_DECREF(__pyx_v_other); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_34intersection(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_v_other_set = 0; + PyObject *__pyx_8genexpr1__pyx_v_a = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + Py_ssize_t __pyx_t_4; + int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("intersection", 1); + + /* "sqlalchemy/util/_collections_cy.py":187 + * # @cython.ccall # cdef function cannot have star argument + * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: + * other_set: Set[Any] = set.intersection(self, *other) # <<<<<<<<<<<<<< + * return self._from_list([a for a in self._list if a in other_set]) + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_intersection); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __Pyx_GIVEREF((PyObject *)__pyx_v_self); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_self))) __PYX_ERR(0, 187, __pyx_L1_error); + __pyx_t_3 = PyNumber_Add(__pyx_t_2, __pyx_v_other); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(PySet_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None) || __Pyx_RaiseUnexpectedTypeError("set", __pyx_t_2))) 
__PYX_ERR(0, 187, __pyx_L1_error) + __pyx_v_other_set = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":188 + * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: + * other_set: Set[Any] = set.intersection(self, *other) + * return self._from_list([a for a in self._list if a in other_set]) # <<<<<<<<<<<<<< + * + * def __and__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: + */ + __Pyx_XDECREF(__pyx_r); + { /* enter inner scope */ + __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 188, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_2); + if (unlikely(__pyx_v_self->_list == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 188, __pyx_L5_error) + } + __pyx_t_3 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_3); + __pyx_t_4 = 0; + for (;;) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 188, __pyx_L5_error) + #endif + if (__pyx_t_4 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 188, __pyx_L5_error) + #else + __pyx_t_1 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 188, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + __Pyx_XDECREF_SET(__pyx_8genexpr1__pyx_v_a, __pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v_other_set == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 188, __pyx_L5_error) + } + __pyx_t_5 = (__Pyx_PySet_ContainsTF(__pyx_8genexpr1__pyx_v_a, __pyx_v_other_set, Py_EQ)); if (unlikely((__pyx_t_5 < 0))) __PYX_ERR(0, 188, __pyx_L5_error) + if (__pyx_t_5) { + if (unlikely(__Pyx_ListComp_Append(__pyx_t_2, (PyObject*)__pyx_8genexpr1__pyx_v_a))) __PYX_ERR(0, 188, __pyx_L5_error) + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_8genexpr1__pyx_v_a); __pyx_8genexpr1__pyx_v_a = 0; + goto __pyx_L10_exit_scope; + __pyx_L5_error:; + __Pyx_XDECREF(__pyx_8genexpr1__pyx_v_a); __pyx_8genexpr1__pyx_v_a = 0; + goto __pyx_L1_error; + __pyx_L10_exit_scope:; + } /* exit inner scope */ + __pyx_t_3 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(__pyx_v_self, ((PyObject*)__pyx_t_2))); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 188, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":186 + * + * # @cython.ccall # cdef function cannot have star argument + * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * other_set: Set[Any] = set.intersection(self, *other) + * return self._from_list([a for a in self._list if a in other_set]) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.intersection", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_other_set); + __Pyx_XDECREF(__pyx_8genexpr1__pyx_v_a); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":190 + * return self._from_list([a for a in self._list if a in other_set]) + * + * 
def __and__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * return self.intersection(other) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_37__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_37__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__and__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_36__and__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_36__and__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__and__", 1); + + /* "sqlalchemy/util/_collections_cy.py":191 + * + * def __and__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: + * return self.intersection(other) # <<<<<<<<<<<<<< + * + * @cython.ccall + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_intersection); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 191, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 191, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":190 + * return self._from_list([a for a in self._list if a in other_set]) + * + * def __and__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * return self.intersection(other) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__and__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":193 + * return self.intersection(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * @cython.annotation_typing(False) # avoid cython crash from generic return + * def symmetric_difference( + */ + +static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_skip_dispatch) { + PyObject *__pyx_v_collection = 0; + PyObject *__pyx_v_other_set = 0; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_result = NULL; + PyObject *__pyx_8genexpr2__pyx_v_a = NULL; + PyObject *__pyx_8genexpr3__pyx_v_a = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_t_6; + Py_ssize_t __pyx_t_7; + PyObject *(*__pyx_t_8)(PyObject *); + PyObject *__pyx_t_9 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("symmetric_difference", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_symmetric_difference); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 193, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference)) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_other}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 193, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } 
+ #endif + } + + /* "sqlalchemy/util/_collections_cy.py":200 + * collection: Iterable[Any] + * other_set: Set[_S] + * if isinstance(other, set): # <<<<<<<<<<<<<< + * other_set = cython.cast(set, other) + * collection = other_set + */ + __pyx_t_6 = PySet_Check(__pyx_v_other); + if (__pyx_t_6) { + + /* "sqlalchemy/util/_collections_cy.py":201 + * other_set: Set[_S] + * if isinstance(other, set): + * other_set = cython.cast(set, other) # <<<<<<<<<<<<<< + * collection = other_set + * elif hasattr(other, "__len__"): + */ + __pyx_t_1 = __pyx_v_other; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_other_set = __pyx_t_1; + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":202 + * if isinstance(other, set): + * other_set = cython.cast(set, other) + * collection = other_set # <<<<<<<<<<<<<< + * elif hasattr(other, "__len__"): + * collection = other + */ + __Pyx_INCREF(__pyx_v_other_set); + __pyx_v_collection = __pyx_v_other_set; + + /* "sqlalchemy/util/_collections_cy.py":200 + * collection: Iterable[Any] + * other_set: Set[_S] + * if isinstance(other, set): # <<<<<<<<<<<<<< + * other_set = cython.cast(set, other) + * collection = other_set + */ + goto __pyx_L3; + } + + /* "sqlalchemy/util/_collections_cy.py":203 + * other_set = cython.cast(set, other) + * collection = other_set + * elif hasattr(other, "__len__"): # <<<<<<<<<<<<<< + * collection = other + * other_set = set(other) + */ + __pyx_t_6 = __Pyx_HasAttr(__pyx_v_other, __pyx_n_u_len); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(0, 203, __pyx_L1_error) + if (__pyx_t_6) { + + /* "sqlalchemy/util/_collections_cy.py":204 + * collection = other_set + * elif hasattr(other, "__len__"): + * collection = other # <<<<<<<<<<<<<< + * other_set = set(other) + * else: + */ + __Pyx_INCREF(__pyx_v_other); + __pyx_v_collection = __pyx_v_other; + + /* "sqlalchemy/util/_collections_cy.py":205 + * elif hasattr(other, "__len__"): + * collection = other + * other_set = set(other) # <<<<<<<<<<<<<< + * else: + * collection = list(other) + */ + __pyx_t_1 = PySet_New(__pyx_v_other); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 205, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_other_set = __pyx_t_1; + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":203 + * other_set = cython.cast(set, other) + * collection = other_set + * elif hasattr(other, "__len__"): # <<<<<<<<<<<<<< + * collection = other + * other_set = set(other) + */ + goto __pyx_L3; + } + + /* "sqlalchemy/util/_collections_cy.py":207 + * other_set = set(other) + * else: + * collection = list(other) # <<<<<<<<<<<<<< + * other_set = set(collection) + * result: OrderedSet[Union[_T, _S]] = self._from_list( + */ + /*else*/ { + __pyx_t_1 = PySequence_List(__pyx_v_other); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 207, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_collection = __pyx_t_1; + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":208 + * else: + * collection = list(other) + * other_set = set(collection) # <<<<<<<<<<<<<< + * result: OrderedSet[Union[_T, _S]] = self._from_list( + * [a for a in self._list if a not in other_set] + */ + __pyx_t_1 = PySet_New(__pyx_v_collection); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 208, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_other_set = __pyx_t_1; + __pyx_t_1 = 0; + } + __pyx_L3:; + + /* "sqlalchemy/util/_collections_cy.py":209 + * collection = list(other) + * other_set = set(collection) + * result: OrderedSet[Union[_T, _S]] = self._from_list( # <<<<<<<<<<<<<< + * [a for a in self._list if a not in other_set] + * ) + */ + { /* enter 
inner scope */ + + /* "sqlalchemy/util/_collections_cy.py":210 + * other_set = set(collection) + * result: OrderedSet[Union[_T, _S]] = self._from_list( + * [a for a in self._list if a not in other_set] # <<<<<<<<<<<<<< + * ) + * result.update([a for a in collection if a not in self]) + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 210, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_1); + if (unlikely(__pyx_v_self->_list == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 210, __pyx_L6_error) + } + __pyx_t_2 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_2); + __pyx_t_7 = 0; + for (;;) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 210, __pyx_L6_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 210, __pyx_L6_error) + #else + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 210, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_8genexpr2__pyx_v_a, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_8genexpr2__pyx_v_a, __pyx_v_other_set, Py_NE)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 210, __pyx_L6_error) + if (__pyx_t_6) { + if (unlikely(__Pyx_ListComp_Append(__pyx_t_1, (PyObject*)__pyx_8genexpr2__pyx_v_a))) __PYX_ERR(0, 210, __pyx_L6_error) + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_8genexpr2__pyx_v_a); __pyx_8genexpr2__pyx_v_a = 0; + goto __pyx_L11_exit_scope; + __pyx_L6_error:; + __Pyx_XDECREF(__pyx_8genexpr2__pyx_v_a); __pyx_8genexpr2__pyx_v_a = 0; + goto __pyx_L1_error; + __pyx_L11_exit_scope:; + } /* exit inner scope */ + + /* "sqlalchemy/util/_collections_cy.py":209 + * collection = list(other) + * other_set = set(collection) + * result: OrderedSet[Union[_T, _S]] = self._from_list( # <<<<<<<<<<<<<< + * [a for a in self._list if a not in other_set] + * ) + */ + __pyx_t_2 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(__pyx_v_self, ((PyObject*)__pyx_t_1))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 209, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":212 + * [a for a in self._list if a not in other_set] + * ) + * result.update([a for a in collection if a not in self]) # <<<<<<<<<<<<<< + * return result + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_result), __pyx_n_s_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 212, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + { /* enter inner scope */ + __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 212, __pyx_L14_error) + __Pyx_GOTREF(__pyx_t_3); + if (likely(PyList_CheckExact(__pyx_v_collection)) || PyTuple_CheckExact(__pyx_v_collection)) { + __pyx_t_4 = __pyx_v_collection; __Pyx_INCREF(__pyx_t_4); + __pyx_t_7 = 0; + __pyx_t_8 = NULL; + } else { + __pyx_t_7 = -1; __pyx_t_4 = PyObject_GetIter(__pyx_v_collection); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 212, __pyx_L14_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_4); if 
(unlikely(!__pyx_t_8)) __PYX_ERR(0, 212, __pyx_L14_error) + } + for (;;) { + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_4))) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_4); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 212, __pyx_L14_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_9 = PyList_GET_ITEM(__pyx_t_4, __pyx_t_7); __Pyx_INCREF(__pyx_t_9); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 212, __pyx_L14_error) + #else + __pyx_t_9 = __Pyx_PySequence_ITEM(__pyx_t_4, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 212, __pyx_L14_error) + __Pyx_GOTREF(__pyx_t_9); + #endif + } else { + { + Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_4); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 212, __pyx_L14_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_9 = PyTuple_GET_ITEM(__pyx_t_4, __pyx_t_7); __Pyx_INCREF(__pyx_t_9); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 212, __pyx_L14_error) + #else + __pyx_t_9 = __Pyx_PySequence_ITEM(__pyx_t_4, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 212, __pyx_L14_error) + __Pyx_GOTREF(__pyx_t_9); + #endif + } + } else { + __pyx_t_9 = __pyx_t_8(__pyx_t_4); + if (unlikely(!__pyx_t_9)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 212, __pyx_L14_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_9); + } + __Pyx_XDECREF_SET(__pyx_8genexpr3__pyx_v_a, __pyx_t_9); + __pyx_t_9 = 0; + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_8genexpr3__pyx_v_a, ((PyObject *)__pyx_v_self), Py_NE)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 212, __pyx_L14_error) + if (__pyx_t_6) { + if (unlikely(__Pyx_ListComp_Append(__pyx_t_3, (PyObject*)__pyx_8genexpr3__pyx_v_a))) __PYX_ERR(0, 212, __pyx_L14_error) + } + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_8genexpr3__pyx_v_a); __pyx_8genexpr3__pyx_v_a = 0; + goto __pyx_L19_exit_scope; + __pyx_L14_error:; + __Pyx_XDECREF(__pyx_8genexpr3__pyx_v_a); __pyx_8genexpr3__pyx_v_a = 0; + goto __pyx_L1_error; + __pyx_L19_exit_scope:; + } /* exit inner scope */ + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_t_3}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_1, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 212, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":213 + * ) + * result.update([a for a in collection if a not in self]) + * return result # <<<<<<<<<<<<<< + * + * def __xor__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_result); + __pyx_r = ((PyObject *)__pyx_v_result); + goto __pyx_L0; + + /* 
"sqlalchemy/util/_collections_cy.py":193 + * return self.intersection(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * @cython.annotation_typing(False) # avoid cython crash from generic return + * def symmetric_difference( + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.symmetric_difference", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_collection); + __Pyx_XDECREF(__pyx_v_other_set); + __Pyx_XDECREF((PyObject *)__pyx_v_result); + __Pyx_XDECREF(__pyx_8genexpr2__pyx_v_a); + __Pyx_XDECREF(__pyx_8genexpr3__pyx_v_a); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference = {"symmetric_difference", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("symmetric_difference (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_38symmetric_difference(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_38symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("symmetric_difference", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference(__pyx_v_self, __pyx_v_other, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 193, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.symmetric_difference", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":215 + * return result + * + * def __xor__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< + * return self.symmetric_difference(other) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_41__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_41__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + 
CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__xor__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_40__xor__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_40__xor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__xor__", 1); + + /* "sqlalchemy/util/_collections_cy.py":216 + * + * def __xor__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: + * return self.symmetric_difference(other) # <<<<<<<<<<<<<< + * + * # @cython.ccall # cdef function cannot have star argument + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self->__pyx_vtab)->symmetric_difference(__pyx_v_self, __pyx_v_other, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 216, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":215 + * return result + * + * def __xor__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< + * return self.symmetric_difference(other) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__xor__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":219 + * + * # @cython.ccall # cdef function cannot have star argument + * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * other_set: Set[Any] = set.difference(self, *other) + * return self._from_list([a for a in self._list if a in other_set]) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference = {"difference", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_other = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("difference (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && 
unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "difference", 0))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_other = __pyx_args; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_42difference(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_other); + + /* function exit code */ + __Pyx_DECREF(__pyx_v_other); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_42difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_v_other_set = 0; + PyObject *__pyx_8genexpr4__pyx_v_a = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + Py_ssize_t __pyx_t_4; + int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("difference", 1); + + /* "sqlalchemy/util/_collections_cy.py":220 + * # @cython.ccall # cdef function cannot have star argument + * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: + * other_set: Set[Any] = set.difference(self, *other) # <<<<<<<<<<<<<< + * return self._from_list([a for a in self._list if a in other_set]) + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_difference); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 220, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 220, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __Pyx_GIVEREF((PyObject *)__pyx_v_self); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_self))) __PYX_ERR(0, 220, __pyx_L1_error); + __pyx_t_3 = PyNumber_Add(__pyx_t_2, __pyx_v_other); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 220, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 220, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(PySet_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None) || __Pyx_RaiseUnexpectedTypeError("set", __pyx_t_2))) __PYX_ERR(0, 220, __pyx_L1_error) + __pyx_v_other_set = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":221 + * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: + * other_set: Set[Any] = set.difference(self, *other) + * return self._from_list([a for a in self._list if a in other_set]) # <<<<<<<<<<<<<< + * + * def __sub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: + */ + __Pyx_XDECREF(__pyx_r); + { /* enter inner scope */ + __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 221, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_2); + if (unlikely(__pyx_v_self->_list == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 221, __pyx_L5_error) + } + __pyx_t_3 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_3); + __pyx_t_4 = 0; + for (;;) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 221, __pyx_L5_error) + #endif + if (__pyx_t_4 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = 
PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 221, __pyx_L5_error) + #else + __pyx_t_1 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 221, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + __Pyx_XDECREF_SET(__pyx_8genexpr4__pyx_v_a, __pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v_other_set == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 221, __pyx_L5_error) + } + __pyx_t_5 = (__Pyx_PySet_ContainsTF(__pyx_8genexpr4__pyx_v_a, __pyx_v_other_set, Py_EQ)); if (unlikely((__pyx_t_5 < 0))) __PYX_ERR(0, 221, __pyx_L5_error) + if (__pyx_t_5) { + if (unlikely(__Pyx_ListComp_Append(__pyx_t_2, (PyObject*)__pyx_8genexpr4__pyx_v_a))) __PYX_ERR(0, 221, __pyx_L5_error) + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_8genexpr4__pyx_v_a); __pyx_8genexpr4__pyx_v_a = 0; + goto __pyx_L10_exit_scope; + __pyx_L5_error:; + __Pyx_XDECREF(__pyx_8genexpr4__pyx_v_a); __pyx_8genexpr4__pyx_v_a = 0; + goto __pyx_L1_error; + __pyx_L10_exit_scope:; + } /* exit inner scope */ + __pyx_t_3 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(__pyx_v_self, ((PyObject*)__pyx_t_2))); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 221, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":219 + * + * # @cython.ccall # cdef function cannot have star argument + * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * other_set: Set[Any] = set.difference(self, *other) + * return self._from_list([a for a in self._list if a in other_set]) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.difference", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_other_set); + __Pyx_XDECREF(__pyx_8genexpr4__pyx_v_a); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":223 + * return self._from_list([a for a in self._list if a in other_set]) + * + * def __sub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * return self.difference(other) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_45__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_45__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__sub__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_44__sub__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_44__sub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + 
__Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__sub__", 1); + + /* "sqlalchemy/util/_collections_cy.py":224 + * + * def __sub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: + * return self.difference(other) # <<<<<<<<<<<<<< + * + * # @cython.ccall # cdef function cannot have star argument + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_difference); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 224, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 224, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":223 + * return self._from_list([a for a in self._list if a in other_set]) + * + * def __sub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * return self.difference(other) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__sub__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":227 + * + * # @cython.ccall # cdef function cannot have star argument + * def intersection_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< + * set.intersection_update(self, *other) + * self._list = [a for a in self._list if a in self] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update = {"intersection_update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_other = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("intersection_update (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if 
(unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "intersection_update", 0))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_other = __pyx_args; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_46intersection_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_other); + + /* function exit code */ + __Pyx_DECREF(__pyx_v_other); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_46intersection_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_8genexpr5__pyx_v_a = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + Py_ssize_t __pyx_t_4; + int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("intersection_update", 1); + + /* "sqlalchemy/util/_collections_cy.py":228 + * # @cython.ccall # cdef function cannot have star argument + * def intersection_update(self, *other: Iterable[Hashable]) -> None: + * set.intersection_update(self, *other) # <<<<<<<<<<<<<< + * self._list = [a for a in self._list if a in self] + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_intersection_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __Pyx_GIVEREF((PyObject *)__pyx_v_self); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_self))) __PYX_ERR(0, 228, __pyx_L1_error); + __pyx_t_3 = PyNumber_Add(__pyx_t_2, __pyx_v_other); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":229 + * def intersection_update(self, *other: Iterable[Hashable]) -> None: + * set.intersection_update(self, *other) + * self._list = [a for a in self._list if a in self] # <<<<<<<<<<<<<< + * + * def __iand__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: + */ + { /* enter inner scope */ + __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 229, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_2); + if (unlikely(__pyx_v_self->_list == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 229, __pyx_L5_error) + } + __pyx_t_3 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_3); + __pyx_t_4 = 0; + for (;;) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 229, __pyx_L5_error) + #endif + if (__pyx_t_4 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 229, __pyx_L5_error) + #else + __pyx_t_1 = __Pyx_PySequence_ITEM(__pyx_t_3, 
__pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 229, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + __Pyx_XDECREF_SET(__pyx_8genexpr5__pyx_v_a, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_5 = (__Pyx_PySequence_ContainsTF(__pyx_8genexpr5__pyx_v_a, ((PyObject *)__pyx_v_self), Py_EQ)); if (unlikely((__pyx_t_5 < 0))) __PYX_ERR(0, 229, __pyx_L5_error) + if (__pyx_t_5) { + if (unlikely(__Pyx_ListComp_Append(__pyx_t_2, (PyObject*)__pyx_8genexpr5__pyx_v_a))) __PYX_ERR(0, 229, __pyx_L5_error) + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_8genexpr5__pyx_v_a); __pyx_8genexpr5__pyx_v_a = 0; + goto __pyx_L10_exit_scope; + __pyx_L5_error:; + __Pyx_XDECREF(__pyx_8genexpr5__pyx_v_a); __pyx_8genexpr5__pyx_v_a = 0; + goto __pyx_L1_error; + __pyx_L10_exit_scope:; + } /* exit inner scope */ + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_self->_list); + __Pyx_DECREF(__pyx_v_self->_list); + __pyx_v_self->_list = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":227 + * + * # @cython.ccall # cdef function cannot have star argument + * def intersection_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< + * set.intersection_update(self, *other) + * self._list = [a for a in self._list if a in self] + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.intersection_update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_8genexpr5__pyx_v_a); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":231 + * self._list = [a for a in self._list if a in self] + * + * def __iand__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * self.intersection_update(other) + * return self + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_49__iand__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_49__iand__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iand__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_48__iand__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_48__iand__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__iand__", 1); + + /* "sqlalchemy/util/_collections_cy.py":232 + * + * def __iand__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: + * self.intersection_update(other) # <<<<<<<<<<<<<< + * return self + * + */ + 
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_intersection_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 232, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 232, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":233 + * def __iand__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: + * self.intersection_update(other) + * return self # <<<<<<<<<<<<<< + * + * @cython.ccall + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":231 + * self._list = [a for a in self._list if a in self] + * + * def __iand__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * self.intersection_update(other) + * return self + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__iand__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":235 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * @cython.annotation_typing(False) # avoid cython crash from generic return + * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_skip_dispatch) { + PyObject *__pyx_v_collection = NULL; + PyObject *__pyx_8genexpr6__pyx_v_a = NULL; + PyObject *__pyx_8genexpr7__pyx_v_a = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_t_6; + Py_ssize_t __pyx_t_7; + PyObject *(*__pyx_t_8)(PyObject *); + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("symmetric_difference_update", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = 
__PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_symmetric_difference_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 235, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update)) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_other}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 235, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":238 + * @cython.annotation_typing(False) # avoid cython crash from generic return + * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: + * collection = other if hasattr(other, "__len__") else list(other) # <<<<<<<<<<<<<< + * set.symmetric_difference_update(self, collection) + * self._list = [a for a in self._list if a in self] + */ + __pyx_t_6 = __Pyx_HasAttr(__pyx_v_other, __pyx_n_u_len); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(0, 238, __pyx_L1_error) + if (__pyx_t_6) { + __Pyx_INCREF(__pyx_v_other); + __pyx_t_1 = __pyx_v_other; + } else { + __pyx_t_2 = PySequence_List(__pyx_v_other); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 238, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __pyx_t_2; + __pyx_t_2 = 0; + } + __pyx_v_collection = __pyx_t_1; + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":239 + * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: + * collection = other if hasattr(other, "__len__") else list(other) + * set.symmetric_difference_update(self, collection) # <<<<<<<<<<<<<< + * self._list = [a for a in self._list if a in self] + * self._list += [a for a in collection if a in self] + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_symmetric_difference_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 239, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_5 = 0; + #if 
CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[3] = {__pyx_t_3, ((PyObject *)__pyx_v_self), __pyx_v_collection}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 2+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 239, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":240 + * collection = other if hasattr(other, "__len__") else list(other) + * set.symmetric_difference_update(self, collection) + * self._list = [a for a in self._list if a in self] # <<<<<<<<<<<<<< + * self._list += [a for a in collection if a in self] + * + */ + { /* enter inner scope */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 240, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_1); + if (unlikely(__pyx_v_self->_list == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 240, __pyx_L5_error) + } + __pyx_t_2 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_2); + __pyx_t_7 = 0; + for (;;) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 240, __pyx_L5_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 240, __pyx_L5_error) + #else + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 240, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_8genexpr6__pyx_v_a, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_8genexpr6__pyx_v_a, ((PyObject *)__pyx_v_self), Py_EQ)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 240, __pyx_L5_error) + if (__pyx_t_6) { + if (unlikely(__Pyx_ListComp_Append(__pyx_t_1, (PyObject*)__pyx_8genexpr6__pyx_v_a))) __PYX_ERR(0, 240, __pyx_L5_error) + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_8genexpr6__pyx_v_a); __pyx_8genexpr6__pyx_v_a = 0; + goto __pyx_L10_exit_scope; + __pyx_L5_error:; + __Pyx_XDECREF(__pyx_8genexpr6__pyx_v_a); __pyx_8genexpr6__pyx_v_a = 0; + goto __pyx_L1_error; + __pyx_L10_exit_scope:; + } /* exit inner scope */ + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_list); + __Pyx_DECREF(__pyx_v_self->_list); + __pyx_v_self->_list = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":241 + * set.symmetric_difference_update(self, collection) + * self._list = [a for a in self._list if a in self] + * self._list += [a for a in collection if a in self] # <<<<<<<<<<<<<< + * + * def __ixor__( + */ + { /* enter inner scope */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 241, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_1); + if (likely(PyList_CheckExact(__pyx_v_collection)) || PyTuple_CheckExact(__pyx_v_collection)) { + __pyx_t_2 = __pyx_v_collection; __Pyx_INCREF(__pyx_t_2); + __pyx_t_7 = 0; + __pyx_t_8 = NULL; + } else { + __pyx_t_7 = -1; __pyx_t_2 = 
PyObject_GetIter(__pyx_v_collection); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 241, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 241, __pyx_L13_error) + } + for (;;) { + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_2))) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 241, __pyx_L13_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 241, __pyx_L13_error) + #else + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 241, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } else { + { + Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 241, __pyx_L13_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 241, __pyx_L13_error) + #else + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 241, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } + } else { + __pyx_t_3 = __pyx_t_8(__pyx_t_2); + if (unlikely(!__pyx_t_3)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 241, __pyx_L13_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_3); + } + __Pyx_XDECREF_SET(__pyx_8genexpr7__pyx_v_a, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_8genexpr7__pyx_v_a, ((PyObject *)__pyx_v_self), Py_EQ)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 241, __pyx_L13_error) + if (__pyx_t_6) { + if (unlikely(__Pyx_ListComp_Append(__pyx_t_1, (PyObject*)__pyx_8genexpr7__pyx_v_a))) __PYX_ERR(0, 241, __pyx_L13_error) + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_8genexpr7__pyx_v_a); __pyx_8genexpr7__pyx_v_a = 0; + goto __pyx_L18_exit_scope; + __pyx_L13_error:; + __Pyx_XDECREF(__pyx_8genexpr7__pyx_v_a); __pyx_8genexpr7__pyx_v_a = 0; + goto __pyx_L1_error; + __pyx_L18_exit_scope:; + } /* exit inner scope */ + __pyx_t_2 = PyNumber_InPlaceAdd(__pyx_v_self->_list, __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 241, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_self->_list); + __Pyx_DECREF(__pyx_v_self->_list); + __pyx_v_self->_list = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":235 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * @cython.annotation_typing(False) # avoid cython crash from generic return + * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.symmetric_difference_update", __pyx_clineno, 
__pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_collection); + __Pyx_XDECREF(__pyx_8genexpr6__pyx_v_a); + __Pyx_XDECREF(__pyx_8genexpr7__pyx_v_a); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update = {"symmetric_difference_update", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("symmetric_difference_update (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_50symmetric_difference_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_50symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("symmetric_difference_update", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference_update(__pyx_v_self, __pyx_v_other, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 235, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.symmetric_difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":243 + * self._list += [a for a in collection if a in self] + * + * def __ixor__( # <<<<<<<<<<<<<< + * self: OrderedSet[Union[_T, _S]], other: AbstractSet[_S] + * ) -> OrderedSet[Union[_T, _S]]: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_53__ixor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_53__ixor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__ixor__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_52__ixor__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; 
+} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_52__ixor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__ixor__", 1); + + /* "sqlalchemy/util/_collections_cy.py":246 + * self: OrderedSet[Union[_T, _S]], other: AbstractSet[_S] + * ) -> OrderedSet[Union[_T, _S]]: + * self.symmetric_difference_update(other) # <<<<<<<<<<<<<< + * return self + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self->__pyx_vtab)->symmetric_difference_update(__pyx_v_self, __pyx_v_other, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 246, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":247 + * ) -> OrderedSet[Union[_T, _S]]: + * self.symmetric_difference_update(other) + * return self # <<<<<<<<<<<<<< + * + * # @cython.ccall # cdef function cannot have star argument + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":243 + * self._list += [a for a in collection if a in self] + * + * def __ixor__( # <<<<<<<<<<<<<< + * self: OrderedSet[Union[_T, _S]], other: AbstractSet[_S] + * ) -> OrderedSet[Union[_T, _S]]: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__ixor__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":250 + * + * # @cython.ccall # cdef function cannot have star argument + * def difference_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< + * set.difference_update(self, *other) + * self._list = [a for a in self._list if a in self] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update = {"difference_update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_other = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("difference_update (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "difference_update", 0))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_other = __pyx_args; + __pyx_r = 
__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_54difference_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_other); + + /* function exit code */ + __Pyx_DECREF(__pyx_v_other); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_54difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_8genexpr8__pyx_v_a = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + Py_ssize_t __pyx_t_4; + int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("difference_update", 1); + + /* "sqlalchemy/util/_collections_cy.py":251 + * # @cython.ccall # cdef function cannot have star argument + * def difference_update(self, *other: Iterable[Hashable]) -> None: + * set.difference_update(self, *other) # <<<<<<<<<<<<<< + * self._list = [a for a in self._list if a in self] + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_difference_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 251, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 251, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __Pyx_GIVEREF((PyObject *)__pyx_v_self); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_self))) __PYX_ERR(0, 251, __pyx_L1_error); + __pyx_t_3 = PyNumber_Add(__pyx_t_2, __pyx_v_other); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 251, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 251, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":252 + * def difference_update(self, *other: Iterable[Hashable]) -> None: + * set.difference_update(self, *other) + * self._list = [a for a in self._list if a in self] # <<<<<<<<<<<<<< + * + * def __isub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: + */ + { /* enter inner scope */ + __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 252, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_2); + if (unlikely(__pyx_v_self->_list == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 252, __pyx_L5_error) + } + __pyx_t_3 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_3); + __pyx_t_4 = 0; + for (;;) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 252, __pyx_L5_error) + #endif + if (__pyx_t_4 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 252, __pyx_L5_error) + #else + __pyx_t_1 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 252, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + __Pyx_XDECREF_SET(__pyx_8genexpr8__pyx_v_a, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_5 = 
(__Pyx_PySequence_ContainsTF(__pyx_8genexpr8__pyx_v_a, ((PyObject *)__pyx_v_self), Py_EQ)); if (unlikely((__pyx_t_5 < 0))) __PYX_ERR(0, 252, __pyx_L5_error) + if (__pyx_t_5) { + if (unlikely(__Pyx_ListComp_Append(__pyx_t_2, (PyObject*)__pyx_8genexpr8__pyx_v_a))) __PYX_ERR(0, 252, __pyx_L5_error) + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_8genexpr8__pyx_v_a); __pyx_8genexpr8__pyx_v_a = 0; + goto __pyx_L10_exit_scope; + __pyx_L5_error:; + __Pyx_XDECREF(__pyx_8genexpr8__pyx_v_a); __pyx_8genexpr8__pyx_v_a = 0; + goto __pyx_L1_error; + __pyx_L10_exit_scope:; + } /* exit inner scope */ + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_self->_list); + __Pyx_DECREF(__pyx_v_self->_list); + __pyx_v_self->_list = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":250 + * + * # @cython.ccall # cdef function cannot have star argument + * def difference_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< + * set.difference_update(self, *other) + * self._list = [a for a in self._list if a in self] + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_8genexpr8__pyx_v_a); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":254 + * self._list = [a for a in self._list if a in self] + * + * def __isub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * self.difference_update(other) + * return self + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_57__isub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_57__isub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__isub__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_56__isub__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_56__isub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__isub__", 1); + + /* "sqlalchemy/util/_collections_cy.py":255 + * + * def __isub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: + * self.difference_update(other) # <<<<<<<<<<<<<< + * return self + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_difference_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 255, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if 
CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 255, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":256 + * def __isub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: + * self.difference_update(other) + * return self # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":254 + * self._list = [a for a in self._list if a in self] + * + * def __isub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * self.difference_update(other) + * return self + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__isub__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__ = {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; + __pyx_r = 
__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_58__reduce_cython__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_58__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 1); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self._list,) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_self->_list); + __Pyx_GIVEREF(__pyx_v_self->_list); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->_list)) __PYX_ERR(1, 5, __pyx_L1_error); + __pyx_v_state = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self._list,) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * state = (self._list,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + if (__pyx_t_2) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict)) __PYX_ERR(1, 8, __pyx_L1_error); + __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self._list is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self._list,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self._list is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, None), state + */ + /*else*/ { + __pyx_t_2 = (__pyx_v_self->_list != ((PyObject*)Py_None)); + __pyx_v_use_setstate = __pyx_t_2; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = 
self._list is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, None), state + * else: + */ + if (__pyx_v_use_setstate) { + + /* "(tree fragment)":13 + * use_setstate = self._list is not None + * if use_setstate: + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_OrderedSet); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(__pyx_int_242532825); + __Pyx_GIVEREF(__pyx_int_242532825); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_242532825)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None)) __PYX_ERR(1, 13, __pyx_L1_error); + __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_state)) __PYX_ERR(1, 13, __pyx_L1_error); + __pyx_t_3 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._list is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, None), state + * else: + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_OrderedSet); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_INCREF(__pyx_int_242532825); + __Pyx_GIVEREF(__pyx_int_242532825); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_242532825)) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state)) __PYX_ERR(1, 15, __pyx_L1_error); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, 
__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v___pyx_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 16, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(1, 16, 
__pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v___pyx_state = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(1, 16, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_60__setstate_cython__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v___pyx_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_60__setstate_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 1); + + /* "(tree fragment)":17 + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_OrderedSet__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":261 + * if cython.compiled: + * + * @cython.cfunc # <<<<<<<<<<<<<< + * @cython.inline + * def _get_id(item: object, /) -> cython.ulonglong: + */ + +static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(PyObject *__pyx_v_item) { + unsigned PY_LONG_LONG __pyx_r; + + /* "sqlalchemy/util/_collections_cy.py":264 + * @cython.inline + * def _get_id(item: object, /) -> cython.ulonglong: + * return cython.cast( # 
<<<<<<<<<<<<<< + * cython.ulonglong, + * cython.cast(cython.pointer(cython.void), item), + */ + __pyx_r = ((unsigned PY_LONG_LONG)((void *)__pyx_v_item)); + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":261 + * if cython.compiled: + * + * @cython.cfunc # <<<<<<<<<<<<<< + * @cython.inline + * def _get_id(item: object, /) -> cython.ulonglong: + */ + + /* function exit code */ + __pyx_L0:; + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":285 + * _members: Dict[int, Any] + * + * def __init__(self, iterable: Optional[Iterable[Any]] = None): # <<<<<<<<<<<<<< + * # the code assumes this class is ordered + * self._members = {} + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_iterable = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return -1; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_iterable,0}; + values[0] = __Pyx_Arg_NewRef_VARARGS(((PyObject *)Py_None)); + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_VARARGS(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (kw_args > 0) { + PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_iterable); + if (value) { values[0] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 285, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 285, __pyx_L3_error) + } + } else { + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_iterable = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 0, 0, 1, __pyx_nargs); __PYX_ERR(0, 285, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet___init__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), __pyx_v_iterable); + + /* function 
exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet___init__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__init__", 1); + + /* "sqlalchemy/util/_collections_cy.py":287 + * def __init__(self, iterable: Optional[Iterable[Any]] = None): + * # the code assumes this class is ordered + * self._members = {} # <<<<<<<<<<<<<< + * if iterable: + * self.update(iterable) + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 287, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_members); + __Pyx_DECREF(__pyx_v_self->_members); + __pyx_v_self->_members = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":288 + * # the code assumes this class is ordered + * self._members = {} + * if iterable: # <<<<<<<<<<<<<< + * self.update(iterable) + * + */ + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_iterable); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(0, 288, __pyx_L1_error) + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":289 + * self._members = {} + * if iterable: + * self.update(iterable) # <<<<<<<<<<<<<< + * + * def add(self, value: Any, /) -> None: + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->update(__pyx_v_self, __pyx_v_iterable, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 289, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":288 + * # the code assumes this class is ordered + * self._members = {} + * if iterable: # <<<<<<<<<<<<<< + * self.update(iterable) + * + */ + } + + /* "sqlalchemy/util/_collections_cy.py":285 + * _members: Dict[int, Any] + * + * def __init__(self, iterable: Optional[Iterable[Any]] = None): # <<<<<<<<<<<<<< + * # the code assumes this class is ordered + * self._members = {} + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":291 + * self.update(iterable) + * + * def add(self, value: Any, /) -> None: # <<<<<<<<<<<<<< + * self._members[_get_id(value)] = value + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add = {"add", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("add (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_2add(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_2add(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + unsigned PY_LONG_LONG __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("add", 1); + + /* "sqlalchemy/util/_collections_cy.py":292 + * + * def add(self, value: Any, /) -> None: + * self._members[_get_id(value)] = value # <<<<<<<<<<<<<< + * + * def __contains__(self, value) -> bool: + */ + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 292, __pyx_L1_error) + } + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_v_value); if (unlikely(__pyx_t_1 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 292, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 292, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (unlikely((PyDict_SetItem(__pyx_v_self->_members, __pyx_t_2, __pyx_v_value) < 0))) __PYX_ERR(0, 292, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":291 + * self.update(iterable) + * + * def add(self, value: Any, /) -> None: # <<<<<<<<<<<<<< + * self._members[_get_id(value)] = value + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.add", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":294 + * self._members[_get_id(value)] = value + * + * def __contains__(self, value) -> bool: # <<<<<<<<<<<<<< + * return _get_id(value) in self._members + * + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_5__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_5__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__contains__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_4__contains__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_4__contains__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + unsigned PY_LONG_LONG 
__pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__contains__", 1); + + /* "sqlalchemy/util/_collections_cy.py":295 + * + * def __contains__(self, value) -> bool: + * return _get_id(value) in self._members # <<<<<<<<<<<<<< + * + * @cython.ccall + */ + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_v_value); if (unlikely(__pyx_t_1 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 295, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 295, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 295, __pyx_L1_error) + } + __pyx_t_3 = (__Pyx_PyDict_ContainsTF(__pyx_t_2, __pyx_v_self->_members, Py_EQ)); if (unlikely((__pyx_t_3 < 0))) __PYX_ERR(0, 295, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_3; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":294 + * self._members[_get_id(value)] = value + * + * def __contains__(self, value) -> bool: # <<<<<<<<<<<<<< + * return _get_id(value) in self._members + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__contains__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":297 + * return _get_id(value) in self._members + * + * @cython.ccall # <<<<<<<<<<<<<< + * def remove(self, value: Any, /): + * del self._members[_get_id(value)] + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_remove(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value, int __pyx_skip_dispatch) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + unsigned PY_LONG_LONG __pyx_t_6; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("remove", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_remove); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 297, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) 
__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove)) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_value}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 297, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":299 + * @cython.ccall + * def remove(self, value: Any, /): + * del self._members[_get_id(value)] # <<<<<<<<<<<<<< + * + * def discard(self, value, /) -> None: + */ + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 299, __pyx_L1_error) + } + __pyx_t_6 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_v_value); if (unlikely(__pyx_t_6 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 299, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 299, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (unlikely((PyDict_DelItem(__pyx_v_self->_members, __pyx_t_1) < 0))) __PYX_ERR(0, 299, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":297 + * return _get_id(value) in self._members + * + * @cython.ccall # <<<<<<<<<<<<<< + * def remove(self, value: Any, /): + * del self._members[_get_id(value)] + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.remove", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove = {"remove", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + CYTHON_UNUSED 
PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("remove (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_6remove(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_6remove(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("remove", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_remove(__pyx_v_self, __pyx_v_value, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 297, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.remove", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":301 + * del self._members[_get_id(value)] + * + * def discard(self, value, /) -> None: # <<<<<<<<<<<<<< + * try: + * self.remove(value) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard = {"discard", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("discard (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_8discard(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_8discard(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("discard", 1); + + /* "sqlalchemy/util/_collections_cy.py":302 + * + * def discard(self, value, /) -> None: + * try: # <<<<<<<<<<<<<< + * self.remove(value) + * except KeyError: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); 
+ __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "sqlalchemy/util/_collections_cy.py":303 + * def discard(self, value, /) -> None: + * try: + * self.remove(value) # <<<<<<<<<<<<<< + * except KeyError: + * pass + */ + __pyx_t_4 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->remove(__pyx_v_self, __pyx_v_value, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 303, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "sqlalchemy/util/_collections_cy.py":302 + * + * def discard(self, value, /) -> None: + * try: # <<<<<<<<<<<<<< + * self.remove(value) + * except KeyError: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "sqlalchemy/util/_collections_cy.py":304 + * try: + * self.remove(value) + * except KeyError: # <<<<<<<<<<<<<< + * pass + * + */ + __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_KeyError); + if (__pyx_t_5) { + __Pyx_ErrRestore(0,0,0); + goto __pyx_L4_exception_handled; + } + goto __pyx_L5_except_error; + + /* "sqlalchemy/util/_collections_cy.py":302 + * + * def discard(self, value, /) -> None: + * try: # <<<<<<<<<<<<<< + * self.remove(value) + * except KeyError: + */ + __pyx_L5_except_error:; + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L4_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + __pyx_L8_try_end:; + } + + /* "sqlalchemy/util/_collections_cy.py":301 + * del self._members[_get_id(value)] + * + * def discard(self, value, /) -> None: # <<<<<<<<<<<<<< + * try: + * self.remove(value) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.discard", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":307 + * pass + * + * def pop(self) -> Any: # <<<<<<<<<<<<<< + * pair: Tuple[Any, Any] + * try: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop = {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("pop (wrapper)", 0); + #if 
!CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("pop", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "pop", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_10pop(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_10pop(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { + PyObject *__pyx_v_pair = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + unsigned int __pyx_t_7; + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("pop", 1); + + /* "sqlalchemy/util/_collections_cy.py":309 + * def pop(self) -> Any: + * pair: Tuple[Any, Any] + * try: # <<<<<<<<<<<<<< + * pair = self._members.popitem() + * return pair[1] + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "sqlalchemy/util/_collections_cy.py":310 + * pair: Tuple[Any, Any] + * try: + * pair = self._members.popitem() # <<<<<<<<<<<<<< + * return pair[1] + * except KeyError: + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_members, __pyx_n_s_popitem); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 310, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + __pyx_t_7 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + __pyx_t_7 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_6, NULL}; + __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_5, __pyx_callargs+1-__pyx_t_7, 0+__pyx_t_7); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 310, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + if (!(likely(PyTuple_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_t_4))) __PYX_ERR(0, 310, __pyx_L3_error) + __pyx_v_pair = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + + /* "sqlalchemy/util/_collections_cy.py":311 + * try: + * pair = self._members.popitem() + * return pair[1] # <<<<<<<<<<<<<< + * except KeyError: + * raise KeyError("pop from an empty set") + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_pair == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 311, __pyx_L3_error) + } + __pyx_t_4 = __Pyx_GetItemInt_Tuple(__pyx_v_pair, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 
1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 311, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L7_try_return; + + /* "sqlalchemy/util/_collections_cy.py":309 + * def pop(self) -> Any: + * pair: Tuple[Any, Any] + * try: # <<<<<<<<<<<<<< + * pair = self._members.popitem() + * return pair[1] + */ + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "sqlalchemy/util/_collections_cy.py":312 + * pair = self._members.popitem() + * return pair[1] + * except KeyError: # <<<<<<<<<<<<<< + * raise KeyError("pop from an empty set") + * + */ + __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_KeyError); + if (__pyx_t_8) { + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_4, &__pyx_t_5, &__pyx_t_6) < 0) __PYX_ERR(0, 312, __pyx_L5_except_error) + __Pyx_XGOTREF(__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_6); + + /* "sqlalchemy/util/_collections_cy.py":313 + * return pair[1] + * except KeyError: + * raise KeyError("pop from an empty set") # <<<<<<<<<<<<<< + * + * def clear(self) -> None: + */ + __pyx_t_9 = __Pyx_PyObject_Call(__pyx_builtin_KeyError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 313, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_Raise(__pyx_t_9, 0, 0, 0); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __PYX_ERR(0, 313, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + + /* "sqlalchemy/util/_collections_cy.py":309 + * def pop(self) -> Any: + * pair: Tuple[Any, Any] + * try: # <<<<<<<<<<<<<< + * pair = self._members.popitem() + * return pair[1] + */ + __pyx_L5_except_error:; + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L7_try_return:; + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L0; + } + + /* "sqlalchemy/util/_collections_cy.py":307 + * pass + * + * def pop(self) -> Any: # <<<<<<<<<<<<<< + * pair: Tuple[Any, Any] + * try: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_pair); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":315 + * raise KeyError("pop from an empty set") + * + * def clear(self) -> None: # <<<<<<<<<<<<<< + * self._members.clear() + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear = {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("clear (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("clear", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "clear", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_12clear(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_12clear(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("clear", 1); + + /* "sqlalchemy/util/_collections_cy.py":316 + * + * def clear(self) -> None: + * self._members.clear() # <<<<<<<<<<<<<< + * + * def __eq__(self, other: Any) -> bool: + */ + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "clear"); + __PYX_ERR(0, 316, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PyDict_Clear(__pyx_v_self->_members); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 316, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":315 + * raise KeyError("pop from an empty set") + * + * def clear(self) -> None: # <<<<<<<<<<<<<< + * self._members.clear() + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":318 + * self._members.clear() + * + * def __eq__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * other_: IdentitySet + * if isinstance(other, IdentitySet): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_15__eq__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_15__eq__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__eq__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_14__eq__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet 
*)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_14__eq__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other_ = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__eq__", 1); + + /* "sqlalchemy/util/_collections_cy.py":320 + * def __eq__(self, other: Any) -> bool: + * other_: IdentitySet + * if isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * other_ = other + * return self._members == other_._members + */ + __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (__pyx_t_1) { + + /* "sqlalchemy/util/_collections_cy.py":321 + * other_: IdentitySet + * if isinstance(other, IdentitySet): + * other_ = other # <<<<<<<<<<<<<< + * return self._members == other_._members + * else: + */ + if (!(likely(((__pyx_v_other) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 321, __pyx_L1_error) + __pyx_t_2 = __pyx_v_other; + __Pyx_INCREF(__pyx_t_2); + __pyx_v_other_ = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":322 + * if isinstance(other, IdentitySet): + * other_ = other + * return self._members == other_._members # <<<<<<<<<<<<<< + * else: + * return False + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = PyObject_RichCompare(__pyx_v_self->_members, __pyx_v_other_->_members, Py_EQ); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 322, __pyx_L1_error) + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":320 + * def __eq__(self, other: Any) -> bool: + * other_: IdentitySet + * if isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * other_ = other + * return self._members == other_._members + */ + } + + /* "sqlalchemy/util/_collections_cy.py":324 + * return self._members == other_._members + * else: + * return False # <<<<<<<<<<<<<< + * + * def __ne__(self, other: Any) -> bool: + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + } + + /* "sqlalchemy/util/_collections_cy.py":318 + * self._members.clear() + * + * def __eq__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * other_: IdentitySet + * if isinstance(other, IdentitySet): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__eq__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_other_); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":326 + * return False + * + * def __ne__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * other_: IdentitySet + * if isinstance(other, IdentitySet): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_17__ne__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_17__ne__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__ne__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_16__ne__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_16__ne__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other_ = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__ne__", 1); + + /* "sqlalchemy/util/_collections_cy.py":328 + * def __ne__(self, other: Any) -> bool: + * other_: IdentitySet + * if isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * other_ = other + * return self._members != other_._members + */ + __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (__pyx_t_1) { + + /* "sqlalchemy/util/_collections_cy.py":329 + * other_: IdentitySet + * if isinstance(other, IdentitySet): + * other_ = other # <<<<<<<<<<<<<< + * return self._members != other_._members + * else: + */ + if (!(likely(((__pyx_v_other) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 329, __pyx_L1_error) + __pyx_t_2 = __pyx_v_other; + __Pyx_INCREF(__pyx_t_2); + __pyx_v_other_ = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":330 + * if isinstance(other, IdentitySet): + * other_ = other + * return self._members != other_._members # <<<<<<<<<<<<<< + * else: + * return True + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = PyObject_RichCompare(__pyx_v_self->_members, __pyx_v_other_->_members, Py_NE); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 330, __pyx_L1_error) + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":328 + * def __ne__(self, other: Any) -> bool: + * other_: IdentitySet + * if isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * other_ = other + * return self._members != other_._members + */ + } + + /* "sqlalchemy/util/_collections_cy.py":332 + * return self._members != other_._members + * else: + * return True # <<<<<<<<<<<<<< + * + * @cython.ccall + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + } + + /* "sqlalchemy/util/_collections_cy.py":326 + * return False + * + * def __ne__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * other_: IdentitySet + * if isinstance(other, IdentitySet): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__ne__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_other_); + __Pyx_XGIVEREF(__pyx_r); + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":334 + * return True + * + * @cython.ccall # <<<<<<<<<<<<<< + * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static int __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issubset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_t_6; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("issubset", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_issubset); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 334, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset)) { + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 334, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 334, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_6; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + 
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":337 + * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * other = iterable + * else: + */ + __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (__pyx_t_6) { + + /* "sqlalchemy/util/_collections_cy.py":338 + * other: IdentitySet + * if isinstance(iterable, IdentitySet): + * other = iterable # <<<<<<<<<<<<<< + * else: + * other = self.__class__(iterable) + */ + if (!(likely(((__pyx_v_iterable) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 338, __pyx_L1_error) + __pyx_t_1 = __pyx_v_iterable; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":337 + * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * other = iterable + * else: + */ + goto __pyx_L3; + } + + /* "sqlalchemy/util/_collections_cy.py":340 + * other = iterable + * else: + * other = self.__class__(iterable) # <<<<<<<<<<<<<< + * + * return self._members.keys() <= other._members.keys() + */ + /*else*/ { + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 340, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_iterable}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 340, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 340, __pyx_L1_error) + __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + } + __pyx_L3:; + + /* "sqlalchemy/util/_collections_cy.py":342 + * other = self.__class__(iterable) + * + * return self._members.keys() <= other._members.keys() # <<<<<<<<<<<<<< + * + * def __le__(self, other: Any) -> bool: + */ + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "keys"); + __PYX_ERR(0, 342, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PyDict_Keys(__pyx_v_self->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 342, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (unlikely(__pyx_v_other->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "keys"); + __PYX_ERR(0, 342, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PyDict_Keys(__pyx_v_other->_members); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 342, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); 
+ __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_t_2, Py_LE); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 342, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 342, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_6; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":334 + * return True + * + * @cython.ccall # <<<<<<<<<<<<<< + * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.issubset", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_other); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset = {"issubset", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("issubset (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_18issubset(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_18issubset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("issubset", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issubset(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 334, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 334, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.issubset", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":344 + * return self._members.keys() <= other._members.keys() + * + * def __le__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_21__le__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_21__le__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__le__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_20__le__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_20__le__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__le__", 1); + + /* "sqlalchemy/util/_collections_cy.py":345 + * + * def __le__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return self.issubset(other) + */ + __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":346 + * def __le__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * return self.issubset(other) + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":345 + * + * def __le__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return self.issubset(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":347 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * return self.issubset(other) # <<<<<<<<<<<<<< + * + * def __lt__(self, other: Any) -> bool: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->issubset(__pyx_v_self, __pyx_v_other, 0); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 347, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 347, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":344 + * return self._members.keys() <= other._members.keys() + * + * def __le__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__le__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":349 + * return self.issubset(other) + * + * def __lt__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * if not isinstance(other, 
IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_23__lt__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_23__lt__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__lt__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_22__lt__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_22__lt__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + Py_ssize_t __pyx_t_4; + Py_ssize_t __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__lt__", 1); + + /* "sqlalchemy/util/_collections_cy.py":350 + * + * def __lt__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return len(self) < len(other) and self.issubset(other) + */ + __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":351 + * def __lt__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * return len(self) < len(other) and self.issubset(other) + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":350 + * + * def __lt__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return len(self) < len(other) and self.issubset(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":352 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * return len(self) < len(other) and self.issubset(other) # <<<<<<<<<<<<<< + * + * @cython.ccall + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyObject_Length(((PyObject *)__pyx_v_self)); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(0, 352, __pyx_L1_error) + __pyx_t_5 = PyObject_Length(__pyx_v_other); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(0, 352, __pyx_L1_error) + __pyx_t_2 = (__pyx_t_4 < __pyx_t_5); + if (__pyx_t_2) { + } else { + __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 352, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_2 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->issubset(__pyx_v_self, __pyx_v_other, 0); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 352, __pyx_L1_error) + __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 352, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __pyx_t_6; + __pyx_t_6 = 0; + __pyx_L4_bool_binop_done:; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":349 + * return self.issubset(other) + * + * def __lt__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__lt__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":354 + * return len(self) < len(other) and self.issubset(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static int __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issuperset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_t_6; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("issuperset", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_issuperset); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 354, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset)) { + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 354, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + 
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 354, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_6; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":357 + * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * other = iterable + * else: + */ + __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (__pyx_t_6) { + + /* "sqlalchemy/util/_collections_cy.py":358 + * other: IdentitySet + * if isinstance(iterable, IdentitySet): + * other = iterable # <<<<<<<<<<<<<< + * else: + * other = self.__class__(iterable) + */ + if (!(likely(((__pyx_v_iterable) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 358, __pyx_L1_error) + __pyx_t_1 = __pyx_v_iterable; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":357 + * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * other = iterable + * else: + */ + goto __pyx_L3; + } + + /* "sqlalchemy/util/_collections_cy.py":360 + * other = iterable + * else: + * other = self.__class__(iterable) # <<<<<<<<<<<<<< + * + * return self._members.keys() >= other._members.keys() + */ + /*else*/ { + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 360, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_iterable}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 360, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 360, __pyx_L1_error) + __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + } + __pyx_L3:; + + /* "sqlalchemy/util/_collections_cy.py":362 + * other = self.__class__(iterable) + * + * return self._members.keys() >= other._members.keys() # 
<<<<<<<<<<<<<< + * + * def __ge__(self, other: Any) -> bool: + */ + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "keys"); + __PYX_ERR(0, 362, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PyDict_Keys(__pyx_v_self->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 362, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (unlikely(__pyx_v_other->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "keys"); + __PYX_ERR(0, 362, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PyDict_Keys(__pyx_v_other->_members); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 362, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_t_2, Py_GE); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 362, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 362, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_6; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":354 + * return len(self) < len(other) and self.issubset(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.issuperset", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_other); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset = {"issuperset", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("issuperset (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_24issuperset(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_24issuperset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("issuperset", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issuperset(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 354, __pyx_L1_error) + __pyx_t_2 = 
__Pyx_PyBool_FromLong(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 354, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.issuperset", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":364 + * return self._members.keys() >= other._members.keys() + * + * def __ge__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_27__ge__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_27__ge__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__ge__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_26__ge__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_26__ge__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__ge__", 1); + + /* "sqlalchemy/util/_collections_cy.py":365 + * + * def __ge__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return self.issuperset(other) + */ + __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":366 + * def __ge__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * return self.issuperset(other) + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":365 + * + * def __ge__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return self.issuperset(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":367 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * return self.issuperset(other) # <<<<<<<<<<<<<< + * + * def __gt__(self, other: Any) -> bool: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->issuperset(__pyx_v_self, __pyx_v_other, 0); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 367, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 367, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + 
__pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":364 + * return self._members.keys() >= other._members.keys() + * + * def __ge__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__ge__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":369 + * return self.issuperset(other) + * + * def __gt__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_29__gt__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_29__gt__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__gt__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_28__gt__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_28__gt__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + Py_ssize_t __pyx_t_4; + Py_ssize_t __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__gt__", 1); + + /* "sqlalchemy/util/_collections_cy.py":370 + * + * def __gt__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return len(self) > len(other) and self.issuperset(other) + */ + __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":371 + * def __gt__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * return len(self) > len(other) and self.issuperset(other) + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":370 + * + * def __gt__(self, other: Any) -> bool: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return len(self) > len(other) and self.issuperset(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":372 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * return len(self) > len(other) and self.issuperset(other) # <<<<<<<<<<<<<< + * + * @cython.ccall + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyObject_Length(((PyObject *)__pyx_v_self)); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(0, 
372, __pyx_L1_error) + __pyx_t_5 = PyObject_Length(__pyx_v_other); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(0, 372, __pyx_L1_error) + __pyx_t_2 = (__pyx_t_4 > __pyx_t_5); + if (__pyx_t_2) { + } else { + __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 372, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_2 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->issuperset(__pyx_v_self, __pyx_v_other, 0); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 372, __pyx_L1_error) + __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 372, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __pyx_t_6; + __pyx_t_6 = 0; + __pyx_L4_bool_binop_done:; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":369 + * return self.issuperset(other) + * + * def __gt__(self, other: Any) -> bool: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__gt__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":374 + * return len(self) > len(other) and self.issuperset(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def union(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__class__() + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_union(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_result = 0; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("union", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_union); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 374, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + 
if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union)) { + __Pyx_XDECREF((PyObject *)__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 374, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 374, __pyx_L1_error) + __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":376 + * @cython.ccall + * def union(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__class__() # <<<<<<<<<<<<<< + * result._members.update(self._members) + * result.update(iterable) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 376, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, NULL}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 0+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 376, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 376, __pyx_L1_error) + __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":377 + * def union(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__class__() + * result._members.update(self._members) # <<<<<<<<<<<<<< + * result.update(iterable) + * return result + */ + __pyx_t_2 = 
__Pyx_PyObject_GetAttrStr(__pyx_v_result->_members, __pyx_n_s_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 377, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_self->_members}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 377, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":378 + * result: IdentitySet = self.__class__() + * result._members.update(self._members) + * result.update(iterable) # <<<<<<<<<<<<<< + * return result + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_result->__pyx_vtab)->update(__pyx_v_result, __pyx_v_iterable, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":379 + * result._members.update(self._members) + * result.update(iterable) + * return result # <<<<<<<<<<<<<< + * + * def __or__(self, other: Any) -> IdentitySet: + */ + __Pyx_XDECREF((PyObject *)__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_result); + __pyx_r = __pyx_v_result; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":374 + * return len(self) > len(other) and self.issuperset(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def union(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__class__() + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.union", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_result); + __Pyx_XGIVEREF((PyObject *)__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union = {"union", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("union (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_30union(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_30union(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("union", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_union(__pyx_v_self, __pyx_v_iterable, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 374, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.union", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":381 + * return result + * + * def __or__(self, other: Any) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_33__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_33__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__or__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_32__or__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_32__or__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__or__", 1); + + /* "sqlalchemy/util/_collections_cy.py":382 + * + * def __or__(self, other: Any) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return self.union(other) + */ + __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":383 + * def __or__(self, other: Any) -> IdentitySet: + * if not isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * return self.union(other) + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":382 + * + * def __or__(self, other: Any) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return self.union(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":384 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * return 
self.union(other) # <<<<<<<<<<<<<< + * + * @cython.ccall + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->__pyx_union(__pyx_v_self, __pyx_v_other, 0)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 384, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":381 + * return result + * + * def __or__(self, other: Any) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__or__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":386 + * return self.union(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def update(self, iterable: Iterable[Any], /): + * members: Dict[int, Any] = self._members + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { + PyObject *__pyx_v_members = 0; + PyObject *__pyx_v_obj = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_t_6; + Py_ssize_t __pyx_t_7; + PyObject *(*__pyx_t_8)(PyObject *); + unsigned PY_LONG_LONG __pyx_t_9; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("update", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 386, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update)) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, 
__pyx_v_iterable}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 386, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":388 + * @cython.ccall + * def update(self, iterable: Iterable[Any], /): + * members: Dict[int, Any] = self._members # <<<<<<<<<<<<<< + * if isinstance(iterable, IdentitySet): + * members.update(cython.cast(IdentitySet, iterable)._members) + */ + __pyx_t_1 = __pyx_v_self->_members; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_members = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":389 + * def update(self, iterable: Iterable[Any], /): + * members: Dict[int, Any] = self._members + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * members.update(cython.cast(IdentitySet, iterable)._members) + * else: + */ + __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (__pyx_t_6) { + + /* "sqlalchemy/util/_collections_cy.py":390 + * members: Dict[int, Any] = self._members + * if isinstance(iterable, IdentitySet): + * members.update(cython.cast(IdentitySet, iterable)._members) # <<<<<<<<<<<<<< + * else: + * for obj in iterable: + */ + __pyx_t_1 = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyDict_Type_update, __pyx_v_members, ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_iterable)->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 390, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":389 + * def update(self, iterable: Iterable[Any], /): + * members: Dict[int, Any] = self._members + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * members.update(cython.cast(IdentitySet, iterable)._members) + * else: + */ + goto __pyx_L3; + } + + /* "sqlalchemy/util/_collections_cy.py":392 + * members.update(cython.cast(IdentitySet, iterable)._members) + * else: + * for obj in iterable: # <<<<<<<<<<<<<< + * members[_get_id(obj)] = obj + * + */ + /*else*/ { + if (likely(PyList_CheckExact(__pyx_v_iterable)) || PyTuple_CheckExact(__pyx_v_iterable)) { + __pyx_t_1 = __pyx_v_iterable; __Pyx_INCREF(__pyx_t_1); + __pyx_t_7 = 0; + __pyx_t_8 = NULL; + } else { + __pyx_t_7 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_iterable); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 392, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 392, __pyx_L1_error) + } + for (;;) { + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_1); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) 
__PYX_ERR(0, 392, __pyx_L1_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_2 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_2); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 392, __pyx_L1_error) + #else + __pyx_t_2 = __Pyx_PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 392, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + #endif + } else { + { + Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_1); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 392, __pyx_L1_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_2 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_2); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 392, __pyx_L1_error) + #else + __pyx_t_2 = __Pyx_PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 392, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + #endif + } + } else { + __pyx_t_2 = __pyx_t_8(__pyx_t_1); + if (unlikely(!__pyx_t_2)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 392, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_2); + } + __Pyx_XDECREF_SET(__pyx_v_obj, __pyx_t_2); + __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":393 + * else: + * for obj in iterable: + * members[_get_id(obj)] = obj # <<<<<<<<<<<<<< + * + * def __ior__(self, other: Any) -> IdentitySet: + */ + if (unlikely(__pyx_v_members == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 393, __pyx_L1_error) + } + __pyx_t_9 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_v_obj); if (unlikely(__pyx_t_9 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 393, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_9); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 393, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (unlikely((PyDict_SetItem(__pyx_v_members, __pyx_t_2, __pyx_v_obj) < 0))) __PYX_ERR(0, 393, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":392 + * members.update(cython.cast(IdentitySet, iterable)._members) + * else: + * for obj in iterable: # <<<<<<<<<<<<<< + * members[_get_id(obj)] = obj + * + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __pyx_L3:; + + /* "sqlalchemy/util/_collections_cy.py":386 + * return self.union(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def update(self, iterable: Iterable[Any], /): + * members: Dict[int, Any] = self._members + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_members); + __Pyx_XDECREF(__pyx_v_obj); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyMethodDef 
__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update = {"update", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("update (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_34update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_34update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("update", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_update(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 386, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":395 + * members[_get_id(obj)] = obj + * + * def __ior__(self, other: Any) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_37__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_37__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__ior__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_36__ior__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_36__ior__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__ior__", 1); + + /* "sqlalchemy/util/_collections_cy.py":396 + * + * def __ior__(self, other: Any) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * self.update(other) + */ + __pyx_t_1 
= __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":397 + * def __ior__(self, other: Any) -> IdentitySet: + * if not isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * self.update(other) + * return self + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":396 + * + * def __ior__(self, other: Any) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * self.update(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":398 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * self.update(other) # <<<<<<<<<<<<<< + * return self + * + */ + __pyx_t_3 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->update(__pyx_v_self, __pyx_v_other, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 398, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":399 + * return NotImplemented + * self.update(other) + * return self # <<<<<<<<<<<<<< + * + * @cython.ccall + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":395 + * members[_get_id(obj)] = obj + * + * def __ior__(self, other: Any) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__ior__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":401 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_result = 0; + PyObject *__pyx_v_other = NULL; + PyObject *__pyx_8genexpr9__pyx_v_obj = NULL; + PyObject *__pyx_9genexpr10__pyx_v_k = NULL; + PyObject *__pyx_9genexpr10__pyx_v_v = NULL; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_t_6; + Py_ssize_t __pyx_t_7; + PyObject *(*__pyx_t_8)(PyObject *); + unsigned PY_LONG_LONG __pyx_t_9; + Py_ssize_t __pyx_t_10; + int __pyx_t_11; + int __pyx_t_12; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("difference", 1); + /* Check if called by wrapper */ + if 
(unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_difference); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference)) { + __Pyx_XDECREF((PyObject *)__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 401, __pyx_L1_error) + __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":403 + * @cython.ccall + * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) # <<<<<<<<<<<<<< + * if isinstance(iterable, IdentitySet): + * other = cython.cast(IdentitySet, iterable)._members.keys() + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 403, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 403, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = 
PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_t_3}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 403, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 403, __pyx_L1_error) + __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":404 + * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * other = cython.cast(IdentitySet, iterable)._members.keys() + * else: + */ + __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (__pyx_t_6) { + + /* "sqlalchemy/util/_collections_cy.py":405 + * result: IdentitySet = self.__new__(self.__class__) + * if isinstance(iterable, IdentitySet): + * other = cython.cast(IdentitySet, iterable)._members.keys() # <<<<<<<<<<<<<< + * else: + * other = {_get_id(obj) for obj in iterable} + */ + if (unlikely(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_iterable)->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "keys"); + __PYX_ERR(0, 405, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PyDict_Keys(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_iterable)->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 405, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_other = __pyx_t_1; + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":404 + * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * other = cython.cast(IdentitySet, iterable)._members.keys() + * else: + */ + goto __pyx_L3; + } + + /* "sqlalchemy/util/_collections_cy.py":407 + * other = cython.cast(IdentitySet, iterable)._members.keys() + * else: + * other = {_get_id(obj) for obj in iterable} # <<<<<<<<<<<<<< + * + * result._members = { + */ + /*else*/ { + { /* enter inner scope */ + __pyx_t_1 = PySet_New(NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 407, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_1); + if (likely(PyList_CheckExact(__pyx_v_iterable)) || PyTuple_CheckExact(__pyx_v_iterable)) { + __pyx_t_2 = __pyx_v_iterable; __Pyx_INCREF(__pyx_t_2); + __pyx_t_7 = 0; + __pyx_t_8 = NULL; + } else { + __pyx_t_7 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_iterable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 407, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 407, __pyx_L6_error) + } + for (;;) { + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_2))) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 407, __pyx_L6_error) + #endif + if (__pyx_t_7 >= __pyx_temp) 
break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 407, __pyx_L6_error) + #else + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 407, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } else { + { + Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 407, __pyx_L6_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 407, __pyx_L6_error) + #else + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 407, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } + } else { + __pyx_t_3 = __pyx_t_8(__pyx_t_2); + if (unlikely(!__pyx_t_3)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 407, __pyx_L6_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_3); + } + __Pyx_XDECREF_SET(__pyx_8genexpr9__pyx_v_obj, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_9 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_8genexpr9__pyx_v_obj); if (unlikely(__pyx_t_9 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 407, __pyx_L6_error) + __pyx_t_3 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_9); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 407, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_3); + if (unlikely(PySet_Add(__pyx_t_1, (PyObject*)__pyx_t_3))) __PYX_ERR(0, 407, __pyx_L6_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_8genexpr9__pyx_v_obj); __pyx_8genexpr9__pyx_v_obj = 0; + goto __pyx_L10_exit_scope; + __pyx_L6_error:; + __Pyx_XDECREF(__pyx_8genexpr9__pyx_v_obj); __pyx_8genexpr9__pyx_v_obj = 0; + goto __pyx_L1_error; + __pyx_L10_exit_scope:; + } /* exit inner scope */ + __pyx_v_other = __pyx_t_1; + __pyx_t_1 = 0; + } + __pyx_L3:; + + /* "sqlalchemy/util/_collections_cy.py":409 + * other = {_get_id(obj) for obj in iterable} + * + * result._members = { # <<<<<<<<<<<<<< + * k: v for k, v in self._members.items() if k not in other + * } + */ + { /* enter inner scope */ + __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 409, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_1); + + /* "sqlalchemy/util/_collections_cy.py":410 + * + * result._members = { + * k: v for k, v in self._members.items() if k not in other # <<<<<<<<<<<<<< + * } + * return result + */ + __pyx_t_7 = 0; + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "items"); + __PYX_ERR(0, 410, __pyx_L13_error) + } + __pyx_t_3 = __Pyx_dict_iterator(__pyx_v_self->_members, 1, __pyx_n_s_items, (&__pyx_t_10), (&__pyx_t_11)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 410, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_2); + __pyx_t_2 = __pyx_t_3; + __pyx_t_3 = 0; + while (1) { + __pyx_t_12 = __Pyx_dict_iter_next(__pyx_t_2, __pyx_t_10, &__pyx_t_7, &__pyx_t_3, &__pyx_t_4, NULL, __pyx_t_11); + if (unlikely(__pyx_t_12 == 0)) break; + if (unlikely(__pyx_t_12 == -1)) __PYX_ERR(0, 410, __pyx_L13_error) + 
__Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_9genexpr10__pyx_v_k, __pyx_t_3); + __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_9genexpr10__pyx_v_v, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_9genexpr10__pyx_v_k, __pyx_v_other, Py_NE)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 410, __pyx_L13_error) + if (__pyx_t_6) { + if (unlikely(PyDict_SetItem(__pyx_t_1, (PyObject*)__pyx_9genexpr10__pyx_v_k, (PyObject*)__pyx_9genexpr10__pyx_v_v))) __PYX_ERR(0, 410, __pyx_L13_error) + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_k); __pyx_9genexpr10__pyx_v_k = 0; + __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_v); __pyx_9genexpr10__pyx_v_v = 0; + goto __pyx_L17_exit_scope; + __pyx_L13_error:; + __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_k); __pyx_9genexpr10__pyx_v_k = 0; + __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_v); __pyx_9genexpr10__pyx_v_v = 0; + goto __pyx_L1_error; + __pyx_L17_exit_scope:; + } /* exit inner scope */ + + /* "sqlalchemy/util/_collections_cy.py":409 + * other = {_get_id(obj) for obj in iterable} + * + * result._members = { # <<<<<<<<<<<<<< + * k: v for k, v in self._members.items() if k not in other + * } + */ + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->_members); + __Pyx_DECREF(__pyx_v_result->_members); + __pyx_v_result->_members = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":412 + * k: v for k, v in self._members.items() if k not in other + * } + * return result # <<<<<<<<<<<<<< + * + * def __sub__(self, other: IdentitySet) -> IdentitySet: + */ + __Pyx_XDECREF((PyObject *)__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_result); + __pyx_r = __pyx_v_result; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":401 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.difference", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_result); + __Pyx_XDECREF(__pyx_v_other); + __Pyx_XDECREF(__pyx_8genexpr9__pyx_v_obj); + __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_k); + __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_v); + __Pyx_XGIVEREF((PyObject *)__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference = {"difference", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("difference (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_38difference(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject 
*)__pyx_v_iterable)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_38difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("difference", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference(__pyx_v_self, __pyx_v_iterable, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.difference", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":414 + * return result + * + * def __sub__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_41__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_41__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__sub__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, 0, "other", 0))) __PYX_ERR(0, 414, __pyx_L1_error) + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_40__sub__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_other)); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_40__sub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__sub__", 1); + + /* "sqlalchemy/util/_collections_cy.py":415 + * + * def __sub__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return self.difference(other) + */ + __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":416 + * def __sub__(self, other: 
IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * return self.difference(other) + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":415 + * + * def __sub__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return self.difference(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":417 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * return self.difference(other) # <<<<<<<<<<<<<< + * + * # def difference_update(self, iterable: Iterable[Any]) -> None: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->difference(__pyx_v_self, ((PyObject *)__pyx_v_other), 0)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 417, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":414 + * return result + * + * def __sub__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__sub__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":420 + * + * # def difference_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.difference(iterable) + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("difference_update", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject 
*)__pyx_v_self), __pyx_n_s_difference_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 420, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update)) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 420, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":422 + * @cython.ccall + * def difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.difference(iterable) # <<<<<<<<<<<<<< + * self._members = other._members + * + */ + __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->difference(__pyx_v_self, __pyx_v_iterable, 0)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 422, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":423 + * def difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.difference(iterable) + * self._members = other._members # <<<<<<<<<<<<<< + * + * def __isub__(self, other: IdentitySet) -> IdentitySet: + */ + __pyx_t_1 = __pyx_v_other->_members; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_members); + __Pyx_DECREF(__pyx_v_self->_members); + __pyx_v_self->_members = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":420 + * + * # def difference_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.difference(iterable) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_other); + 
__Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update = {"difference_update", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("difference_update (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_42difference_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_42difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("difference_update", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference_update(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 420, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":425 + * self._members = other._members + * + * def __isub__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_45__isub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_45__isub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__isub__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, 0, "other", 0))) __PYX_ERR(0, 425, __pyx_L1_error) + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_44__isub__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_other)); + + /* function exit code */ + 
goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_44__isub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__isub__", 1); + + /* "sqlalchemy/util/_collections_cy.py":426 + * + * def __isub__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * self.difference_update(other) + */ + __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":427 + * def __isub__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * self.difference_update(other) + * return self + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":426 + * + * def __isub__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * self.difference_update(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":428 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * self.difference_update(other) # <<<<<<<<<<<<<< + * return self + * + */ + __pyx_t_3 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->difference_update(__pyx_v_self, ((PyObject *)__pyx_v_other), 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 428, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":429 + * return NotImplemented + * self.difference_update(other) + * return self # <<<<<<<<<<<<<< + * + * @cython.ccall + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":425 + * self._members = other._members + * + * def __isub__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__isub__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":431 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection(struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_result = 0; + PyObject *__pyx_v_other = NULL; + PyObject *__pyx_9genexpr11__pyx_v_obj = NULL; + PyObject *__pyx_9genexpr12__pyx_v_k = NULL; + PyObject *__pyx_9genexpr12__pyx_v_v = NULL; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_t_6; + Py_ssize_t __pyx_t_7; + PyObject *(*__pyx_t_8)(PyObject *); + unsigned PY_LONG_LONG __pyx_t_9; + Py_ssize_t __pyx_t_10; + int __pyx_t_11; + int __pyx_t_12; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("intersection", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_intersection); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection)) { + __Pyx_XDECREF((PyObject *)__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 431, __pyx_L1_error) + __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif 
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":433 + * @cython.ccall + * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) # <<<<<<<<<<<<<< + * if isinstance(iterable, IdentitySet): + * other = cython.cast(IdentitySet, iterable)._members + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_t_3}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 433, __pyx_L1_error) + __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":434 + * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * other = cython.cast(IdentitySet, iterable)._members + * else: + */ + __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (__pyx_t_6) { + + /* "sqlalchemy/util/_collections_cy.py":435 + * result: IdentitySet = self.__new__(self.__class__) + * if isinstance(iterable, IdentitySet): + * other = cython.cast(IdentitySet, iterable)._members # <<<<<<<<<<<<<< + * else: + * other = {_get_id(obj) for obj in iterable} + */ + __pyx_t_1 = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_iterable)->_members; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_other = __pyx_t_1; + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":434 + * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * other = cython.cast(IdentitySet, iterable)._members + * else: + */ + goto __pyx_L3; + } + + /* "sqlalchemy/util/_collections_cy.py":437 + * other = cython.cast(IdentitySet, iterable)._members + * else: + * other = {_get_id(obj) for obj in iterable} # <<<<<<<<<<<<<< + * result._members = { + * k: v for k, v in self._members.items() if k in other + */ + /*else*/ { + { /* enter inner scope */ + __pyx_t_1 = PySet_New(NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 437, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_1); + if (likely(PyList_CheckExact(__pyx_v_iterable)) || 
PyTuple_CheckExact(__pyx_v_iterable)) { + __pyx_t_2 = __pyx_v_iterable; __Pyx_INCREF(__pyx_t_2); + __pyx_t_7 = 0; + __pyx_t_8 = NULL; + } else { + __pyx_t_7 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_iterable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 437, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 437, __pyx_L6_error) + } + for (;;) { + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_2))) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 437, __pyx_L6_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 437, __pyx_L6_error) + #else + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 437, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } else { + { + Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 437, __pyx_L6_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 437, __pyx_L6_error) + #else + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 437, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } + } else { + __pyx_t_3 = __pyx_t_8(__pyx_t_2); + if (unlikely(!__pyx_t_3)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 437, __pyx_L6_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_3); + } + __Pyx_XDECREF_SET(__pyx_9genexpr11__pyx_v_obj, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_9 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_9genexpr11__pyx_v_obj); if (unlikely(__pyx_t_9 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 437, __pyx_L6_error) + __pyx_t_3 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_9); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 437, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_3); + if (unlikely(PySet_Add(__pyx_t_1, (PyObject*)__pyx_t_3))) __PYX_ERR(0, 437, __pyx_L6_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_9genexpr11__pyx_v_obj); __pyx_9genexpr11__pyx_v_obj = 0; + goto __pyx_L10_exit_scope; + __pyx_L6_error:; + __Pyx_XDECREF(__pyx_9genexpr11__pyx_v_obj); __pyx_9genexpr11__pyx_v_obj = 0; + goto __pyx_L1_error; + __pyx_L10_exit_scope:; + } /* exit inner scope */ + __pyx_v_other = __pyx_t_1; + __pyx_t_1 = 0; + } + __pyx_L3:; + + /* "sqlalchemy/util/_collections_cy.py":438 + * else: + * other = {_get_id(obj) for obj in iterable} + * result._members = { # <<<<<<<<<<<<<< + * k: v for k, v in self._members.items() if k in other + * } + */ + { /* enter inner scope */ + __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 438, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_1); + + /* "sqlalchemy/util/_collections_cy.py":439 + * other = {_get_id(obj) for obj in iterable} + * result._members = { + * k: v for k, v in self._members.items() if k in other # 
<<<<<<<<<<<<<< + * } + * return result + */ + __pyx_t_7 = 0; + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "items"); + __PYX_ERR(0, 439, __pyx_L13_error) + } + __pyx_t_3 = __Pyx_dict_iterator(__pyx_v_self->_members, 1, __pyx_n_s_items, (&__pyx_t_10), (&__pyx_t_11)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 439, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_2); + __pyx_t_2 = __pyx_t_3; + __pyx_t_3 = 0; + while (1) { + __pyx_t_12 = __Pyx_dict_iter_next(__pyx_t_2, __pyx_t_10, &__pyx_t_7, &__pyx_t_3, &__pyx_t_4, NULL, __pyx_t_11); + if (unlikely(__pyx_t_12 == 0)) break; + if (unlikely(__pyx_t_12 == -1)) __PYX_ERR(0, 439, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_9genexpr12__pyx_v_k, __pyx_t_3); + __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_9genexpr12__pyx_v_v, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_9genexpr12__pyx_v_k, __pyx_v_other, Py_EQ)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 439, __pyx_L13_error) + if (__pyx_t_6) { + if (unlikely(PyDict_SetItem(__pyx_t_1, (PyObject*)__pyx_9genexpr12__pyx_v_k, (PyObject*)__pyx_9genexpr12__pyx_v_v))) __PYX_ERR(0, 439, __pyx_L13_error) + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_k); __pyx_9genexpr12__pyx_v_k = 0; + __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_v); __pyx_9genexpr12__pyx_v_v = 0; + goto __pyx_L17_exit_scope; + __pyx_L13_error:; + __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_k); __pyx_9genexpr12__pyx_v_k = 0; + __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_v); __pyx_9genexpr12__pyx_v_v = 0; + goto __pyx_L1_error; + __pyx_L17_exit_scope:; + } /* exit inner scope */ + + /* "sqlalchemy/util/_collections_cy.py":438 + * else: + * other = {_get_id(obj) for obj in iterable} + * result._members = { # <<<<<<<<<<<<<< + * k: v for k, v in self._members.items() if k in other + * } + */ + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->_members); + __Pyx_DECREF(__pyx_v_result->_members); + __pyx_v_result->_members = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":441 + * k: v for k, v in self._members.items() if k in other + * } + * return result # <<<<<<<<<<<<<< + * + * def __and__(self, other): + */ + __Pyx_XDECREF((PyObject *)__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_result); + __pyx_r = __pyx_v_result; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":431 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.intersection", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_result); + __Pyx_XDECREF(__pyx_v_other); + __Pyx_XDECREF(__pyx_9genexpr11__pyx_v_obj); + __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_k); + __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_v); + __Pyx_XGIVEREF((PyObject *)__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyMethodDef 
__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection = {"intersection", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("intersection (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_46intersection(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_46intersection(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("intersection", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection(__pyx_v_self, __pyx_v_iterable, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.intersection", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":443 + * return result + * + * def __and__(self, other): # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_49__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_49__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__and__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_48__and__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_48__and__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__and__", 1); + + /* "sqlalchemy/util/_collections_cy.py":444 + * + * def __and__(self, other): + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return 
self.intersection(other) + */ + __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":445 + * def __and__(self, other): + * if not isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * return self.intersection(other) + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":444 + * + * def __and__(self, other): + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return self.intersection(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":446 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * return self.intersection(other) # <<<<<<<<<<<<<< + * + * # def intersection_update(self, iterable: Iterable[Any]) -> None: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->intersection(__pyx_v_self, __pyx_v_other, 0)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 446, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":443 + * return result + * + * def __and__(self, other): # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__and__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":449 + * + * # def intersection_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def intersection_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.intersection(iterable) + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("intersection_update", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, 
__pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_intersection_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 449, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update)) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 449, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":451 + * @cython.ccall + * def intersection_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.intersection(iterable) # <<<<<<<<<<<<<< + * self._members = other._members + * + */ + __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->intersection(__pyx_v_self, __pyx_v_iterable, 0)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 451, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":452 + * def intersection_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.intersection(iterable) + * self._members = other._members # <<<<<<<<<<<<<< + * + * def __iand__(self, other: IdentitySet) -> IdentitySet: + */ + __pyx_t_1 = __pyx_v_other->_members; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_members); + __Pyx_DECREF(__pyx_v_self->_members); + __pyx_v_self->_members = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":449 + * + * # def intersection_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def intersection_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.intersection(iterable) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + 
__Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.intersection_update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_other); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update = {"intersection_update", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("intersection_update (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_50intersection_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_50intersection_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("intersection_update", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection_update(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 449, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.intersection_update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":454 + * self._members = other._members + * + * def __iand__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_53__iand__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_53__iand__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iand__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, 0, "other", 0))) __PYX_ERR(0, 454, __pyx_L1_error) + __pyx_r = 
__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_52__iand__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_other)); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_52__iand__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__iand__", 1); + + /* "sqlalchemy/util/_collections_cy.py":455 + * + * def __iand__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * self.intersection_update(other) + */ + __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":456 + * def __iand__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * self.intersection_update(other) + * return self + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":455 + * + * def __iand__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * self.intersection_update(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":457 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * self.intersection_update(other) # <<<<<<<<<<<<<< + * return self + * + */ + __pyx_t_3 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->intersection_update(__pyx_v_self, ((PyObject *)__pyx_v_other), 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 457, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":458 + * return NotImplemented + * self.intersection_update(other) + * return self # <<<<<<<<<<<<<< + * + * @cython.ccall + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":454 + * self._members = other._members + * + * def __iand__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__iand__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":460 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def symmetric_difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + +static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_result = 0; + PyObject *__pyx_v_other = 0; + PyObject *__pyx_9genexpr13__pyx_v_obj = NULL; + PyObject *__pyx_9genexpr14__pyx_v_k = NULL; + PyObject *__pyx_9genexpr14__pyx_v_v = NULL; + PyObject *__pyx_9genexpr15__pyx_v_k = NULL; + PyObject *__pyx_9genexpr15__pyx_v_v = NULL; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_t_6; + Py_ssize_t __pyx_t_7; + PyObject *(*__pyx_t_8)(PyObject *); + unsigned PY_LONG_LONG __pyx_t_9; + Py_ssize_t __pyx_t_10; + int __pyx_t_11; + int __pyx_t_12; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("symmetric_difference", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_symmetric_difference); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 460, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference)) { + __Pyx_XDECREF((PyObject *)__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 460, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 460, __pyx_L1_error) + __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); + 
__pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":462 + * @cython.ccall + * def symmetric_difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) # <<<<<<<<<<<<<< + * other: Dict[int, Any] + * if isinstance(iterable, IdentitySet): + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 462, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 462, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_t_3}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 462, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 462, __pyx_L1_error) + __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":464 + * result: IdentitySet = self.__new__(self.__class__) + * other: Dict[int, Any] + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * other = cython.cast(IdentitySet, iterable)._members + * else: + */ + __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (__pyx_t_6) { + + /* "sqlalchemy/util/_collections_cy.py":465 + * other: Dict[int, Any] + * if isinstance(iterable, IdentitySet): + * other = cython.cast(IdentitySet, iterable)._members # <<<<<<<<<<<<<< + * else: + * other = {_get_id(obj): obj for obj in iterable} + */ + __pyx_t_1 = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_iterable)->_members; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_other = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":464 + * result: IdentitySet = self.__new__(self.__class__) + * other: Dict[int, Any] + * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< + * other = cython.cast(IdentitySet, iterable)._members + * else: + */ + goto __pyx_L3; + } + + /* "sqlalchemy/util/_collections_cy.py":467 + * other = cython.cast(IdentitySet, iterable)._members + * else: + * other = {_get_id(obj): 
obj for obj in iterable} # <<<<<<<<<<<<<< + * result._members = { + * k: v for k, v in self._members.items() if k not in other + */ + /*else*/ { + { /* enter inner scope */ + __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 467, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_1); + if (likely(PyList_CheckExact(__pyx_v_iterable)) || PyTuple_CheckExact(__pyx_v_iterable)) { + __pyx_t_2 = __pyx_v_iterable; __Pyx_INCREF(__pyx_t_2); + __pyx_t_7 = 0; + __pyx_t_8 = NULL; + } else { + __pyx_t_7 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_iterable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 467, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 467, __pyx_L6_error) + } + for (;;) { + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_2))) { + { + Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 467, __pyx_L6_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 467, __pyx_L6_error) + #else + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 467, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } else { + { + Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_2); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 467, __pyx_L6_error) + #endif + if (__pyx_t_7 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 467, __pyx_L6_error) + #else + __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 467, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } + } else { + __pyx_t_3 = __pyx_t_8(__pyx_t_2); + if (unlikely(!__pyx_t_3)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 467, __pyx_L6_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_3); + } + __Pyx_XDECREF_SET(__pyx_9genexpr13__pyx_v_obj, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_9 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_9genexpr13__pyx_v_obj); if (unlikely(__pyx_t_9 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 467, __pyx_L6_error) + __pyx_t_3 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_9); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 467, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_3); + if (unlikely(PyDict_SetItem(__pyx_t_1, (PyObject*)__pyx_t_3, (PyObject*)__pyx_9genexpr13__pyx_v_obj))) __PYX_ERR(0, 467, __pyx_L6_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_9genexpr13__pyx_v_obj); __pyx_9genexpr13__pyx_v_obj = 0; + goto __pyx_L10_exit_scope; + __pyx_L6_error:; + __Pyx_XDECREF(__pyx_9genexpr13__pyx_v_obj); __pyx_9genexpr13__pyx_v_obj = 0; + goto __pyx_L1_error; + __pyx_L10_exit_scope:; + } /* exit inner scope */ + __pyx_v_other = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + } + __pyx_L3:; + + /* "sqlalchemy/util/_collections_cy.py":468 + * else: + * other = {_get_id(obj): obj for obj in iterable} + * result._members = { # 
<<<<<<<<<<<<<< + * k: v for k, v in self._members.items() if k not in other + * } + */ + { /* enter inner scope */ + __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 468, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_1); + + /* "sqlalchemy/util/_collections_cy.py":469 + * other = {_get_id(obj): obj for obj in iterable} + * result._members = { + * k: v for k, v in self._members.items() if k not in other # <<<<<<<<<<<<<< + * } + * result._members.update( + */ + __pyx_t_7 = 0; + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "items"); + __PYX_ERR(0, 469, __pyx_L13_error) + } + __pyx_t_3 = __Pyx_dict_iterator(__pyx_v_self->_members, 1, __pyx_n_s_items, (&__pyx_t_10), (&__pyx_t_11)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 469, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_2); + __pyx_t_2 = __pyx_t_3; + __pyx_t_3 = 0; + while (1) { + __pyx_t_12 = __Pyx_dict_iter_next(__pyx_t_2, __pyx_t_10, &__pyx_t_7, &__pyx_t_3, &__pyx_t_4, NULL, __pyx_t_11); + if (unlikely(__pyx_t_12 == 0)) break; + if (unlikely(__pyx_t_12 == -1)) __PYX_ERR(0, 469, __pyx_L13_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_9genexpr14__pyx_v_k, __pyx_t_3); + __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_9genexpr14__pyx_v_v, __pyx_t_4); + __pyx_t_4 = 0; + if (unlikely(__pyx_v_other == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 469, __pyx_L13_error) + } + __pyx_t_6 = (__Pyx_PyDict_ContainsTF(__pyx_9genexpr14__pyx_v_k, __pyx_v_other, Py_NE)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 469, __pyx_L13_error) + if (__pyx_t_6) { + if (unlikely(PyDict_SetItem(__pyx_t_1, (PyObject*)__pyx_9genexpr14__pyx_v_k, (PyObject*)__pyx_9genexpr14__pyx_v_v))) __PYX_ERR(0, 469, __pyx_L13_error) + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_k); __pyx_9genexpr14__pyx_v_k = 0; + __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_v); __pyx_9genexpr14__pyx_v_v = 0; + goto __pyx_L17_exit_scope; + __pyx_L13_error:; + __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_k); __pyx_9genexpr14__pyx_v_k = 0; + __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_v); __pyx_9genexpr14__pyx_v_v = 0; + goto __pyx_L1_error; + __pyx_L17_exit_scope:; + } /* exit inner scope */ + + /* "sqlalchemy/util/_collections_cy.py":468 + * else: + * other = {_get_id(obj): obj for obj in iterable} + * result._members = { # <<<<<<<<<<<<<< + * k: v for k, v in self._members.items() if k not in other + * } + */ + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->_members); + __Pyx_DECREF(__pyx_v_result->_members); + __pyx_v_result->_members = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":471 + * k: v for k, v in self._members.items() if k not in other + * } + * result._members.update( # <<<<<<<<<<<<<< + * [(k, v) for k, v in other.items() if k not in self._members] + * ) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_result->_members, __pyx_n_s_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 471, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + { /* enter inner scope */ + + /* "sqlalchemy/util/_collections_cy.py":472 + * } + * result._members.update( + * [(k, v) for k, v in other.items() if k not in self._members] # <<<<<<<<<<<<<< + * ) + * return result + */ + __pyx_t_4 = PyList_New(0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 472, __pyx_L20_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_10 = 0; + if (unlikely(__pyx_v_other == 
Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "items"); + __PYX_ERR(0, 472, __pyx_L20_error) + } + __pyx_t_13 = __Pyx_dict_iterator(__pyx_v_other, 1, __pyx_n_s_items, (&__pyx_t_7), (&__pyx_t_11)); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 472, __pyx_L20_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_XDECREF(__pyx_t_3); + __pyx_t_3 = __pyx_t_13; + __pyx_t_13 = 0; + while (1) { + __pyx_t_12 = __Pyx_dict_iter_next(__pyx_t_3, __pyx_t_7, &__pyx_t_10, &__pyx_t_13, &__pyx_t_14, NULL, __pyx_t_11); + if (unlikely(__pyx_t_12 == 0)) break; + if (unlikely(__pyx_t_12 == -1)) __PYX_ERR(0, 472, __pyx_L20_error) + __Pyx_GOTREF(__pyx_t_13); + __Pyx_GOTREF(__pyx_t_14); + __Pyx_XDECREF_SET(__pyx_9genexpr15__pyx_v_k, __pyx_t_13); + __pyx_t_13 = 0; + __Pyx_XDECREF_SET(__pyx_9genexpr15__pyx_v_v, __pyx_t_14); + __pyx_t_14 = 0; + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 472, __pyx_L20_error) + } + __pyx_t_6 = (__Pyx_PyDict_ContainsTF(__pyx_9genexpr15__pyx_v_k, __pyx_v_self->_members, Py_NE)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 472, __pyx_L20_error) + if (__pyx_t_6) { + __pyx_t_14 = PyTuple_New(2); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 472, __pyx_L20_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_INCREF(__pyx_9genexpr15__pyx_v_k); + __Pyx_GIVEREF(__pyx_9genexpr15__pyx_v_k); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_9genexpr15__pyx_v_k)) __PYX_ERR(0, 472, __pyx_L20_error); + __Pyx_INCREF(__pyx_9genexpr15__pyx_v_v); + __Pyx_GIVEREF(__pyx_9genexpr15__pyx_v_v); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_14, 1, __pyx_9genexpr15__pyx_v_v)) __PYX_ERR(0, 472, __pyx_L20_error); + if (unlikely(__Pyx_ListComp_Append(__pyx_t_4, (PyObject*)__pyx_t_14))) __PYX_ERR(0, 472, __pyx_L20_error) + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_k); __pyx_9genexpr15__pyx_v_k = 0; + __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_v); __pyx_9genexpr15__pyx_v_v = 0; + goto __pyx_L24_exit_scope; + __pyx_L20_error:; + __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_k); __pyx_9genexpr15__pyx_v_k = 0; + __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_v); __pyx_9genexpr15__pyx_v_v = 0; + goto __pyx_L1_error; + __pyx_L24_exit_scope:; + } /* exit inner scope */ + __pyx_t_3 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_t_4}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 471, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":474 + * [(k, v) for k, v in other.items() if k not in self._members] + * ) + * return result # <<<<<<<<<<<<<< + * + * def __xor__(self, other: IdentitySet) -> IdentitySet: + */ + __Pyx_XDECREF((PyObject *)__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_result); + __pyx_r = __pyx_v_result; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":460 + * return self + * + * 
@cython.ccall # <<<<<<<<<<<<<< + * def symmetric_difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_13); + __Pyx_XDECREF(__pyx_t_14); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.symmetric_difference", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_result); + __Pyx_XDECREF(__pyx_v_other); + __Pyx_XDECREF(__pyx_9genexpr13__pyx_v_obj); + __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_k); + __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_v); + __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_k); + __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_v); + __Pyx_XGIVEREF((PyObject *)__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference = {"symmetric_difference", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("symmetric_difference (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_54symmetric_difference(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_54symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("symmetric_difference", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference(__pyx_v_self, __pyx_v_iterable, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 460, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.symmetric_difference", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":476 + * return result + * + * def __xor__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_57__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_57__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__xor__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, 0, "other", 0))) __PYX_ERR(0, 476, __pyx_L1_error) + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_56__xor__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_other)); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_56__xor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__xor__", 1); + + /* "sqlalchemy/util/_collections_cy.py":477 + * + * def __xor__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return self.symmetric_difference(other) + */ + __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":478 + * def __xor__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * return self.symmetric_difference(other) + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":477 + * + * def __xor__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * return self.symmetric_difference(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":479 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * return self.symmetric_difference(other) # <<<<<<<<<<<<<< + * + * # def symmetric_difference_update(self, iterable: Iterable[Any]) -> None: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->symmetric_difference(__pyx_v_self, ((PyObject *)__pyx_v_other), 0)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 479, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":476 + * return result + * + * def __xor__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + 
__Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__xor__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":482 + * + * # def symmetric_difference_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def symmetric_difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.symmetric_difference(iterable) + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("symmetric_difference_update", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_symmetric_difference_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 482, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update)) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 482, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = 
__Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":484 + * @cython.ccall + * def symmetric_difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.symmetric_difference(iterable) # <<<<<<<<<<<<<< + * self._members = other._members + * + */ + __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->symmetric_difference(__pyx_v_self, __pyx_v_iterable, 0)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 484, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":485 + * def symmetric_difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.symmetric_difference(iterable) + * self._members = other._members # <<<<<<<<<<<<<< + * + * def __ixor__(self, other: IdentitySet) -> IdentitySet: + */ + __pyx_t_1 = __pyx_v_other->_members; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_members); + __Pyx_DECREF(__pyx_v_self->_members); + __pyx_v_self->_members = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":482 + * + * # def symmetric_difference_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def symmetric_difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.symmetric_difference(iterable) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.symmetric_difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_other); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update = {"symmetric_difference_update", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update, METH_O, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("symmetric_difference_update (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_58symmetric_difference_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_58symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("symmetric_difference_update", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference_update(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 482, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.symmetric_difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":487 + * self._members = other._members + * + * def __ixor__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_61__ixor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_61__ixor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__ixor__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, 0, "other", 0))) __PYX_ERR(0, 487, __pyx_L1_error) + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_60__ixor__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_other)); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_60__ixor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__ixor__", 1); + + /* "sqlalchemy/util/_collections_cy.py":488 + * + * def __ixor__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * self.symmetric_difference(other) + */ + __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_collections_cy.py":489 + * def __ixor__(self, other: IdentitySet) -> IdentitySet: + * if not 
isinstance(other, IdentitySet): + * return NotImplemented # <<<<<<<<<<<<<< + * self.symmetric_difference(other) + * return self + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":488 + * + * def __ixor__(self, other: IdentitySet) -> IdentitySet: + * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< + * return NotImplemented + * self.symmetric_difference(other) + */ + } + + /* "sqlalchemy/util/_collections_cy.py":490 + * if not isinstance(other, IdentitySet): + * return NotImplemented + * self.symmetric_difference(other) # <<<<<<<<<<<<<< + * return self + * + */ + __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->symmetric_difference(__pyx_v_self, ((PyObject *)__pyx_v_other), 0)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 490, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":491 + * return NotImplemented + * self.symmetric_difference(other) + * return self # <<<<<<<<<<<<<< + * + * @cython.ccall + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":487 + * self._members = other._members + * + * def __ixor__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< + * if not isinstance(other, IdentitySet): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__ixor__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":493 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def copy(self) -> IdentitySet: + * cp: IdentitySet = self.__new__(self.__class__) + */ + +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_copy(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, int __pyx_skip_dispatch) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_cp = 0; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("copy", 1); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, 
__pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { + PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + #endif + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_copy); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 493, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy)) { + __Pyx_XDECREF((PyObject *)__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, NULL}; + __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 0+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 493, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 493, __pyx_L1_error) + __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); + __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); + if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { + __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS + } + #endif + } + + /* "sqlalchemy/util/_collections_cy.py":495 + * @cython.ccall + * def copy(self) -> IdentitySet: + * cp: IdentitySet = self.__new__(self.__class__) # <<<<<<<<<<<<<< + * cp._members = self._members.copy() + * return cp + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 495, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 495, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_t_3}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 495, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + 
if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 495, __pyx_L1_error) + __pyx_v_cp = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":496 + * def copy(self) -> IdentitySet: + * cp: IdentitySet = self.__new__(self.__class__) + * cp._members = self._members.copy() # <<<<<<<<<<<<<< + * return cp + * + */ + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "copy"); + __PYX_ERR(0, 496, __pyx_L1_error) + } + __pyx_t_1 = PyDict_Copy(__pyx_v_self->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 496, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_cp->_members); + __Pyx_DECREF(__pyx_v_cp->_members); + __pyx_v_cp->_members = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "sqlalchemy/util/_collections_cy.py":497 + * cp: IdentitySet = self.__new__(self.__class__) + * cp._members = self._members.copy() + * return cp # <<<<<<<<<<<<<< + * + * def __copy__(self) -> IdentitySet: + */ + __Pyx_XDECREF((PyObject *)__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_cp); + __pyx_r = __pyx_v_cp; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":493 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def copy(self) -> IdentitySet: + * cp: IdentitySet = self.__new__(self.__class__) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.copy", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_cp); + __Pyx_XGIVEREF((PyObject *)__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy = {"copy", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("copy (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("copy", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && 
unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "copy", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_62copy(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_62copy(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("copy", 1); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_copy(__pyx_v_self, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 493, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.copy", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":499 + * return cp + * + * def __copy__(self) -> IdentitySet: # <<<<<<<<<<<<<< + * return self.copy() + * + */ + +/* Python wrapper */ +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__ = {"__copy__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__copy__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("__copy__", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__copy__", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_64__copy__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_64__copy__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__copy__", 1); + + /* "sqlalchemy/util/_collections_cy.py":500 + * + * def __copy__(self) -> IdentitySet: + * return self.copy() # <<<<<<<<<<<<<< + * + * def __len__(self) -> int: + */ + __Pyx_XDECREF((PyObject *)__pyx_r); + __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->copy(__pyx_v_self, 0)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 500, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":499 + * return cp + * + * def __copy__(self) -> IdentitySet: # <<<<<<<<<<<<<< + * return self.copy() + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__copy__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF((PyObject *)__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":502 + * return self.copy() + * + * def __len__(self) -> int: # <<<<<<<<<<<<<< + * return len(self._members) + * + */ + +/* Python wrapper */ +static Py_ssize_t __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__(PyObject *__pyx_v_self); /*proto*/ +static Py_ssize_t __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + Py_ssize_t __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_66__len__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static Py_ssize_t __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_66__len__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { + Py_ssize_t __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__len__", 1); + + /* "sqlalchemy/util/_collections_cy.py":503 + * + * def __len__(self) -> int: + * return len(self._members) # <<<<<<<<<<<<<< + * + * def __iter__(self) -> Iterator[Any]: + */ + __pyx_t_1 = __pyx_v_self->_members; + __Pyx_INCREF(__pyx_t_1); + if (unlikely(__pyx_t_1 == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 503, __pyx_L1_error) + } + __pyx_t_2 = PyDict_Size(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 503, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":502 + * return self.copy() + * + * def __len__(self) -> int: 
# <<<<<<<<<<<<<< + * return len(self._members) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__len__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":505 + * return len(self._members) + * + * def __iter__(self) -> Iterator[Any]: # <<<<<<<<<<<<<< + * return iter(self._members.values()) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_69__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_69__iter__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_68__iter__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_68__iter__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__iter__", 1); + + /* "sqlalchemy/util/_collections_cy.py":506 + * + * def __iter__(self) -> Iterator[Any]: + * return iter(self._members.values()) # <<<<<<<<<<<<<< + * + * def __hash__(self) -> NoReturn: + */ + __Pyx_XDECREF(__pyx_r); + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "values"); + __PYX_ERR(0, 506, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PyDict_Values(__pyx_v_self->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 506, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 506, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":505 + * return len(self._members) + * + * def __iter__(self) -> Iterator[Any]: # <<<<<<<<<<<<<< + * return iter(self._members.values()) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":508 + * return iter(self._members.values()) + * + * def __hash__(self) -> NoReturn: # <<<<<<<<<<<<<< + * raise TypeError("set objects are unhashable") + * + */ + +/* Python wrapper */ +static Py_hash_t __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_71__hash__(PyObject *__pyx_v_self); /*proto*/ +static Py_hash_t __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_71__hash__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + Py_hash_t __pyx_r; + 
__Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__hash__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_70__hash__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static Py_hash_t __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_70__hash__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { + Py_hash_t __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__hash__", 1); + + /* "sqlalchemy/util/_collections_cy.py":509 + * + * def __hash__(self) -> NoReturn: + * raise TypeError("set objects are unhashable") # <<<<<<<<<<<<<< + * + * def __repr__(self) -> str: + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 509, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 509, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":508 + * return iter(self._members.values()) + * + * def __hash__(self) -> NoReturn: # <<<<<<<<<<<<<< + * raise TypeError("set objects are unhashable") + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__hash__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + if (unlikely(__pyx_r == -1) && !PyErr_Occurred()) __pyx_r = -2; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_collections_cy.py":511 + * raise TypeError("set objects are unhashable") + * + * def __repr__(self) -> str: # <<<<<<<<<<<<<< + * return "%s(%r)" % ( + * self.__class__.__name__, + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_72__repr__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_72__repr__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + Py_UCS4 __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__repr__", 1); + + /* "sqlalchemy/util/_collections_cy.py":512 + * + * def __repr__(self) -> str: + * return "%s(%r)" % ( # <<<<<<<<<<<<<< + * self.__class__.__name__, + * list(self._members.values()), + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 512, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = 0; + __pyx_t_3 = 127; + + /* "sqlalchemy/util/_collections_cy.py":513 + * def __repr__(self) -> str: + * return "%s(%r)" % ( + * self.__class__.__name__, # <<<<<<<<<<<<<< + * list(self._members.values()), + * ) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 513, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_name); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 513, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = __Pyx_PyObject_FormatSimpleAndDecref(PyObject_Unicode(__pyx_t_5), __pyx_empty_unicode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 513, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; + __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4); + __pyx_t_4 = 0; + __Pyx_INCREF(__pyx_kp_u__2); + __pyx_t_2 += 1; + __Pyx_GIVEREF(__pyx_kp_u__2); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_kp_u__2); + + /* "sqlalchemy/util/_collections_cy.py":514 + * return "%s(%r)" % ( + * self.__class__.__name__, + * list(self._members.values()), # <<<<<<<<<<<<<< + * ) + */ + if (unlikely(__pyx_v_self->_members == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "values"); + __PYX_ERR(0, 514, __pyx_L1_error) + } + __pyx_t_4 = __Pyx_PyDict_Values(__pyx_v_self->_members); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 514, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PySequence_ListKeepNew(__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 514, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = __Pyx_PyObject_FormatSimpleAndDecref(PyObject_Repr(__pyx_t_5), __pyx_empty_unicode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 514, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; + __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4); + __pyx_t_4 = 0; + __Pyx_INCREF(__pyx_kp_u__3); + __pyx_t_2 += 1; + __Pyx_GIVEREF(__pyx_kp_u__3); + PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_kp_u__3); + + /* "sqlalchemy/util/_collections_cy.py":512 + * + * def __repr__(self) -> str: + * return "%s(%r)" % ( # <<<<<<<<<<<<<< + * self.__class__.__name__, + * list(self._members.values()), + */ + __pyx_t_4 = __Pyx_PyUnicode_Join(__pyx_t_1, 4, __pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 512, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_collections_cy.py":511 + * raise TypeError("set objects are unhashable") + * + * def __repr__(self) -> str: # <<<<<<<<<<<<<< + * return "%s(%r)" % ( + * self.__class__.__name__, + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__ = {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_74__reduce_cython__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_74__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 1); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = (self._members,) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_self->_members); + __Pyx_GIVEREF(__pyx_v_self->_members); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->_members)) __PYX_ERR(1, 5, __pyx_L1_error); + __pyx_v_state = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = (self._members,) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * state = (self._members,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + if (__pyx_t_2) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict)) __PYX_ERR(1, 8, __pyx_L1_error); + __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self._members is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = (self._members,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = self._members is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, None), state + */ + /*else*/ { + __pyx_t_2 = (__pyx_v_self->_members != ((PyObject*)Py_None)); + __pyx_v_use_setstate = __pyx_t_2; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._members is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, None), state + * else: + */ + if (__pyx_v_use_setstate) { + + /* "(tree fragment)":13 + * use_setstate = 
self._members is not None + * if use_setstate: + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_IdentitySet); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(__pyx_int_183888701); + __Pyx_GIVEREF(__pyx_int_183888701); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_183888701)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None)) __PYX_ERR(1, 13, __pyx_L1_error); + __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_state)) __PYX_ERR(1, 13, __pyx_L1_error); + __pyx_t_3 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = self._members is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, None), state + * else: + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_IdentitySet); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_INCREF(__pyx_int_183888701); + __Pyx_GIVEREF(__pyx_int_183888701); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_183888701)) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state)) __PYX_ERR(1, 15, __pyx_L1_error); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_3; + 
__pyx_t_3 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v___pyx_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 16, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(1, 16, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v___pyx_state = values[0]; + } + goto __pyx_L6_skip; + 
__pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(1, 16, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_76__setstate_cython__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), __pyx_v___pyx_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_76__setstate_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 1); + + /* "(tree fragment)":17 + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_IdentitySet__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_OrderedSet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_5__pyx_unpickle_OrderedSet(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_5__pyx_unpickle_OrderedSet = {"__pyx_unpickle_OrderedSet", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_5__pyx_unpickle_OrderedSet, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_5__pyx_unpickle_OrderedSet(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[3] = {0,0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_OrderedSet (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_type)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_checksum)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_OrderedSet", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_OrderedSet", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__pyx_unpickle_OrderedSet") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 3)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + 
__pyx_v___pyx_state = values[2]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_OrderedSet", 1, 3, 3, __pyx_nargs); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_OrderedSet", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_4__pyx_unpickle_OrderedSet(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_4__pyx_unpickle_OrderedSet(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_OrderedSet", 1); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xe74c1d9, 0x484d571, 0xbc1b299): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__5, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0xe74c1d9, 0x484d571, 0xbc1b299): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum + * __pyx_result = OrderedSet.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + if (__Pyx_PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError)) __PYX_ERR(1, 5, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); 
__pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0xe74c1d9, 0x484d571, 0xbc1b299): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum # <<<<<<<<<<<<<< + * __pyx_result = OrderedSet.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_v___pyx_PickleError, __pyx_t_1, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xe74c1d9, 0x484d571, 0xbc1b299): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum + * __pyx_result = OrderedSet.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet), __pyx_n_s_new); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v___pyx_type}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_v___pyx_result = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum + * __pyx_result = OrderedSet.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_2 = (__pyx_v___pyx_state != Py_None); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = OrderedSet.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 9, 
__pyx_L1_error) + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_OrderedSet__set_state(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum + * __pyx_result = OrderedSet.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): + * __pyx_result._list = __pyx_state[0] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_OrderedSet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_OrderedSet", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._list = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_OrderedSet__set_state(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + unsigned int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_OrderedSet__set_state", 1); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): + * __pyx_result._list = __pyx_state[0] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyList_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || 
__Pyx_RaiseUnexpectedTypeError("list", __pyx_t_1))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->_list); + __Pyx_DECREF(__pyx_v___pyx_result->_list); + __pyx_v___pyx_result->_list = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): + * __pyx_result._list = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_4 = (__pyx_t_3 > 1); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_2 = __pyx_t_4; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result._list = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) # <<<<<<<<<<<<<< + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 14, __pyx_L1_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = NULL; + __pyx_t_8 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_8 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_5}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): + * __pyx_result._list = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._list = __pyx_state[0] + * if len(__pyx_state) > 1 and 
hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_OrderedSet__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_IdentitySet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_7__pyx_unpickle_IdentitySet(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_7__pyx_unpickle_IdentitySet = {"__pyx_unpickle_IdentitySet", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_7__pyx_unpickle_IdentitySet, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_7__pyx_unpickle_IdentitySet(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[3] = {0,0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_IdentitySet (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_type)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_checksum)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_IdentitySet", 1, 3, 
3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_IdentitySet", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__pyx_unpickle_IdentitySet") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 3)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_IdentitySet", 1, 3, 3, __pyx_nargs); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_IdentitySet", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_6__pyx_unpickle_IdentitySet(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_6__pyx_unpickle_IdentitySet(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_IdentitySet", 1); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xaf5eb3d, 0x88a83ae, 0x3ac67e8): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__7, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) + 
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0xaf5eb3d, 0x88a83ae, 0x3ac67e8): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum + * __pyx_result = IdentitySet.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + if (__Pyx_PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError)) __PYX_ERR(1, 5, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0xaf5eb3d, 0x88a83ae, 0x3ac67e8): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum # <<<<<<<<<<<<<< + * __pyx_result = IdentitySet.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_v___pyx_PickleError, __pyx_t_1, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xaf5eb3d, 0x88a83ae, 0x3ac67e8): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum + * __pyx_result = IdentitySet.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet), __pyx_n_s_new); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v___pyx_type}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, 
__pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_v___pyx_result = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum + * __pyx_result = IdentitySet.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_2 = (__pyx_v___pyx_state != Py_None); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = IdentitySet.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_IdentitySet__set_state(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum + * __pyx_result = IdentitySet.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): + * __pyx_result._members = __pyx_state[0] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_IdentitySet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_IdentitySet", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._members = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + +static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_IdentitySet__set_state(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v___pyx_result, PyObject 
*__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + unsigned int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_IdentitySet__set_state", 1); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): + * __pyx_result._members = __pyx_state[0] # <<<<<<<<<<<<<< + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyDict_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || __Pyx_RaiseUnexpectedTypeError("dict", __pyx_t_1))) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v___pyx_result->_members); + __Pyx_DECREF(__pyx_v___pyx_result->_members); + __pyx_v___pyx_result->_members = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): + * __pyx_result._members = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_4 = (__pyx_t_3 > 1); + if (__pyx_t_4) { + } else { + __pyx_t_2 = __pyx_t_4; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) + __pyx_t_2 = __pyx_t_4; + __pyx_L4_bool_binop_done:; + if (__pyx_t_2) { + + /* "(tree fragment)":14 + * __pyx_result._members = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[1]) # <<<<<<<<<<<<<< + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 14, __pyx_L1_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = NULL; + __pyx_t_8 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if 
(likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_8 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_5}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): + * __pyx_result._members = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[1]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._members = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_IdentitySet__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} +static struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_OrderedSet __pyx_vtable_10sqlalchemy_4util_15_collections_cy_OrderedSet; + +static PyObject *__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *p; + PyObject *o = __Pyx_PyType_GetSlot((&PySet_Type), tp_new, newfunc)(t, a, k); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)o); + p->__pyx_vtab = __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_OrderedSet; + p->_list = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *o) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *p = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely((PY_VERSION_HEX >= 0x03080000 || __Pyx_PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE)) && __Pyx_PyObject_GetSlot(o, tp_finalize, destructor)) && !__Pyx_PyObject_GC_IsFinalized(o)) { + if (__Pyx_PyObject_GetSlot(o, tp_dealloc, destructor) == __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_OrderedSet) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + } + #endif + PyObject_GC_UnTrack(o); + __Pyx_TRASHCAN_BEGIN(o, __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_OrderedSet) + Py_CLEAR(p->_list); + PyObject_GC_Track(o); + __Pyx_PyType_GetSlot((&PySet_Type), tp_dealloc, destructor)(o); + __Pyx_TRASHCAN_END +} + +static int __pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *p = (struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)o; + if (!(&PySet_Type)->tp_traverse); else { e = (&PySet_Type)->tp_traverse(o,v,a); if (e) return e; } + if (p->_list) { + e = (*v)(p->_list, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *p = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)o; + if (!(&PySet_Type)->tp_clear); else (&PySet_Type)->tp_clear(o); + tmp = ((PyObject*)p->_list); + p->_list = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} +static PyObject *__pyx_sq_item_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *o, Py_ssize_t i) { + PyObject *r; + PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0; + r = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, x); + Py_DECREF(x); + return r; +} + +static CYTHON_INLINE PyObject *__pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { + binaryfunc slot; +#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + slot = type->tp_as_number ? type->tp_as_number->nb_add : NULL; +#else + slot = (binaryfunc) PyType_GetSlot(type, Py_nb_add); +#endif + return slot ? slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); +} +static PyObject *__pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *left, PyObject *right ) { + int maybe_self_is_left, maybe_self_is_right = 0; + maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_add == &__pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet) +#endif + || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + if (maybe_self_is_left) { + PyObject *res; + res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_23__add__(left, right); + if (res != Py_NotImplemented) return res; + Py_DECREF(res); + } + maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_add == &__pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet) +#endif + || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + if (maybe_self_is_right) { + return __pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, tp_base, PyTypeObject*), left, right ); + } + return __Pyx_NewRef(Py_NotImplemented); +} + + + +static CYTHON_INLINE PyObject *__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { + binaryfunc slot; +#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + slot = type->tp_as_number ? type->tp_as_number->nb_subtract : NULL; +#else + slot = (binaryfunc) PyType_GetSlot(type, Py_nb_subtract); +#endif + return slot ? 
slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); +} +static PyObject *__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *left, PyObject *right ) { + int maybe_self_is_left, maybe_self_is_right = 0; + maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_subtract == &__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet) +#endif + || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + if (maybe_self_is_left) { + PyObject *res; + res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_45__sub__(left, right); + if (res != Py_NotImplemented) return res; + Py_DECREF(res); + } + maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_subtract == &__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet) +#endif + || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + if (maybe_self_is_right) { + return __pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, tp_base, PyTypeObject*), left, right ); + } + return __Pyx_NewRef(Py_NotImplemented); +} + + + +static CYTHON_INLINE PyObject *__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { + binaryfunc slot; +#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + slot = type->tp_as_number ? type->tp_as_number->nb_and : NULL; +#else + slot = (binaryfunc) PyType_GetSlot(type, Py_nb_and); +#endif + return slot ? slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); +} +static PyObject *__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *left, PyObject *right ) { + int maybe_self_is_left, maybe_self_is_right = 0; + maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_and == &__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet) +#endif + || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + if (maybe_self_is_left) { + PyObject *res; + res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_37__and__(left, right); + if (res != Py_NotImplemented) return res; + Py_DECREF(res); + } + maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_and == &__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet) +#endif + || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + if (maybe_self_is_right) { + return __pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, tp_base, PyTypeObject*), left, right ); + } + return __Pyx_NewRef(Py_NotImplemented); +} + + + +static CYTHON_INLINE PyObject *__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { + binaryfunc slot; +#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + slot = type->tp_as_number ? 
type->tp_as_number->nb_xor : NULL; +#else + slot = (binaryfunc) PyType_GetSlot(type, Py_nb_xor); +#endif + return slot ? slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); +} +static PyObject *__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *left, PyObject *right ) { + int maybe_self_is_left, maybe_self_is_right = 0; + maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_xor == &__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet) +#endif + || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + if (maybe_self_is_left) { + PyObject *res; + res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_41__xor__(left, right); + if (res != Py_NotImplemented) return res; + Py_DECREF(res); + } + maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_xor == &__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet) +#endif + || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + if (maybe_self_is_right) { + return __pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, tp_base, PyTypeObject*), left, right ); + } + return __Pyx_NewRef(Py_NotImplemented); +} + + + +static CYTHON_INLINE PyObject *__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { + binaryfunc slot; +#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + slot = type->tp_as_number ? type->tp_as_number->nb_or : NULL; +#else + slot = (binaryfunc) PyType_GetSlot(type, Py_nb_or); +#endif + return slot ? 
slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); +} +static PyObject *__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *left, PyObject *right ) { + int maybe_self_is_left, maybe_self_is_right = 0; + maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet) +#endif + || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + if (maybe_self_is_left) { + PyObject *res; + res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_33__or__(left, right); + if (res != Py_NotImplemented) return res; + Py_DECREF(res); + } + maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet) +#endif + || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + if (maybe_self_is_right) { + return __pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, tp_base, PyTypeObject*), left, right ); + } + return __Pyx_NewRef(Py_NotImplemented); +} + + + +static PyObject *__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__(PyObject *self, CYTHON_UNUSED PyObject *arg) { + return __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__(self); +} + +static PyMethodDef __pyx_methods_10sqlalchemy_4util_15_collections_cy_OrderedSet[] = { + {"__class_getitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"copy", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"add", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add, METH_O, 0}, + {"remove", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove, METH_O, 0}, + {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"insert", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"discard", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard, METH_O, 0}, + {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__repr__", (PyCFunction)__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__, METH_NOARGS|METH_COEXIST, 0}, + {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update, METH_VARARGS|METH_KEYWORDS, 0}, + {"union", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union, METH_VARARGS|METH_KEYWORDS, 0}, + {"intersection", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection, METH_VARARGS|METH_KEYWORDS, 
0}, + {"difference", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference, METH_VARARGS|METH_KEYWORDS, 0}, + {"intersection_update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update, METH_VARARGS|METH_KEYWORDS, 0}, + {"difference_update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update, METH_VARARGS|METH_KEYWORDS, 0}, + {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {0, 0, 0, 0} +}; +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet_slots[] = { + {Py_tp_dealloc, (void *)__pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_OrderedSet}, + {Py_tp_repr, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__}, + {Py_nb_add, (void *)__pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet}, + {Py_nb_subtract, (void *)__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet}, + {Py_nb_and, (void *)__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet}, + {Py_nb_xor, (void *)__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet}, + {Py_nb_or, (void *)__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet}, + {Py_nb_inplace_subtract, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_57__isub__}, + {Py_nb_inplace_and, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_49__iand__}, + {Py_nb_inplace_xor, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_53__ixor__}, + {Py_nb_inplace_or, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_29__ior__}, + {Py_sq_item, (void *)__pyx_sq_item_10sqlalchemy_4util_15_collections_cy_OrderedSet}, + {Py_mp_subscript, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_19__getitem__}, + {Py_tp_doc, (void *)PyDoc_STR("A set implementation that maintains insertion order.")}, + {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_OrderedSet}, + {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_OrderedSet}, + {Py_tp_iter, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_21__iter__}, + {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_4util_15_collections_cy_OrderedSet}, + {Py_tp_init, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_3__init__}, + {Py_tp_new, (void *)__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_OrderedSet}, + {0, 0}, +}; +static PyType_Spec __pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet_spec = { + "sqlalchemy.util._collections_cy.OrderedSet", + sizeof(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet), + 0, + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, + __pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet_slots, +}; +#else + +static PyNumberMethods __pyx_tp_as_number_OrderedSet = { + __pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*nb_add*/ + 
__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*nb_subtract*/ + 0, /*nb_multiply*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_divide*/ + #endif + 0, /*nb_remainder*/ + 0, /*nb_divmod*/ + 0, /*nb_power*/ + 0, /*nb_negative*/ + 0, /*nb_positive*/ + 0, /*nb_absolute*/ + 0, /*nb_bool*/ + 0, /*nb_invert*/ + 0, /*nb_lshift*/ + 0, /*nb_rshift*/ + __pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*nb_and*/ + __pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*nb_xor*/ + __pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*nb_or*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_coerce*/ + #endif + 0, /*nb_int*/ + #if PY_MAJOR_VERSION < 3 + 0, /*nb_long*/ + #else + 0, /*reserved*/ + #endif + 0, /*nb_float*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_oct*/ + #endif + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_hex*/ + #endif + 0, /*nb_inplace_add*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_57__isub__, /*nb_inplace_subtract*/ + 0, /*nb_inplace_multiply*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_inplace_divide*/ + #endif + 0, /*nb_inplace_remainder*/ + 0, /*nb_inplace_power*/ + 0, /*nb_inplace_lshift*/ + 0, /*nb_inplace_rshift*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_49__iand__, /*nb_inplace_and*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_53__ixor__, /*nb_inplace_xor*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_29__ior__, /*nb_inplace_or*/ + 0, /*nb_floor_divide*/ + 0, /*nb_true_divide*/ + 0, /*nb_inplace_floor_divide*/ + 0, /*nb_inplace_true_divide*/ + 0, /*nb_index*/ + #if PY_VERSION_HEX >= 0x03050000 + 0, /*nb_matrix_multiply*/ + #endif + #if PY_VERSION_HEX >= 0x03050000 + 0, /*nb_inplace_matrix_multiply*/ + #endif +}; + +static PySequenceMethods __pyx_tp_as_sequence_OrderedSet = { + 0, /*sq_length*/ + 0, /*sq_concat*/ + 0, /*sq_repeat*/ + __pyx_sq_item_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*sq_item*/ + 0, /*sq_slice*/ + 0, /*sq_ass_item*/ + 0, /*sq_ass_slice*/ + 0, /*sq_contains*/ + 0, /*sq_inplace_concat*/ + 0, /*sq_inplace_repeat*/ +}; + +static PyMappingMethods __pyx_tp_as_mapping_OrderedSet = { + 0, /*mp_length*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_19__getitem__, /*mp_subscript*/ + 0, /*mp_ass_subscript*/ +}; + +static PyTypeObject __pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet = { + PyVarObject_HEAD_INIT(0, 0) + "sqlalchemy.util._collections_cy.""OrderedSet", /*tp_name*/ + sizeof(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__, /*tp_repr*/ + &__pyx_tp_as_number_OrderedSet, /*tp_as_number*/ + &__pyx_tp_as_sequence_OrderedSet, /*tp_as_sequence*/ + &__pyx_tp_as_mapping_OrderedSet, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, 
/*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ + PyDoc_STR("A set implementation that maintains insertion order."), /*tp_doc*/ + __pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*tp_traverse*/ + __pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_21__iter__, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + #if !CYTHON_USE_TYPE_SPECS + 0, /*tp_dictoffset*/ + #endif + __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_3__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + #if CYTHON_USE_TP_FINALIZE + 0, /*tp_finalize*/ + #else + NULL, /*tp_finalize*/ + #endif + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if __PYX_NEED_TP_PRINT_SLOT == 1 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030C0000 + 0, /*tp_watched*/ + #endif + #if PY_VERSION_HEX >= 0x030d00A4 + 0, /*tp_versions_used*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, /*tp_pypy_flags*/ + #endif +}; +#endif +static struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet; + +static PyObject *__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *p; + PyObject *o; + #if CYTHON_COMPILING_IN_LIMITED_API + allocfunc alloc_func = (allocfunc)PyType_GetSlot(t, Py_tp_alloc); + o = alloc_func(t, 0); + #else + if (likely(!__Pyx_PyType_HasFeature(t, Py_TPFLAGS_IS_ABSTRACT))) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + #endif + p = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)o); + p->__pyx_vtab = __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_IdentitySet; + p->_members = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *o) { + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *p = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely((PY_VERSION_HEX >= 0x03080000 || __Pyx_PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE)) && __Pyx_PyObject_GetSlot(o, tp_finalize, destructor)) && !__Pyx_PyObject_GC_IsFinalized(o)) { + if (__Pyx_PyObject_GetSlot(o, tp_dealloc, destructor) == __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_IdentitySet) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_members); + #if 
CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY + (*Py_TYPE(o)->tp_free)(o); + #else + { + freefunc tp_free = (freefunc)PyType_GetSlot(Py_TYPE(o), Py_tp_free); + if (tp_free) tp_free(o); + } + #endif +} + +static int __pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *p = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)o; + if (p->_members) { + e = (*v)(p->_members, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *p = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)o; + tmp = ((PyObject*)p->_members); + p->_members = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_tp_richcompare_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *o1, PyObject *o2, int op) { + switch (op) { + case Py_EQ: { + return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_15__eq__(o1, o2); + } + case Py_NE: { + return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_17__ne__(o1, o2); + } + case Py_LT: { + return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_23__lt__(o1, o2); + } + case Py_GT: { + return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_29__gt__(o1, o2); + } + case Py_LE: { + return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_21__le__(o1, o2); + } + case Py_GE: { + return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_27__ge__(o1, o2); + } + default: { + return __Pyx_NewRef(Py_NotImplemented); + } + } +} + +static CYTHON_INLINE PyObject *__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { + binaryfunc slot; +#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + slot = type->tp_as_number ? type->tp_as_number->nb_subtract : NULL; +#else + slot = (binaryfunc) PyType_GetSlot(type, Py_nb_subtract); +#endif + return slot ? 
slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); +} +static PyObject *__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *left, PyObject *right ) { + int maybe_self_is_left, maybe_self_is_right = 0; + maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_subtract == &__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet) +#endif + || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (maybe_self_is_left) { + PyObject *res; + res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_41__sub__(left, right); + if (res != Py_NotImplemented) return res; + Py_DECREF(res); + } + maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_subtract == &__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet) +#endif + || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (maybe_self_is_right) { + return __pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, tp_base, PyTypeObject*), left, right ); + } + return __Pyx_NewRef(Py_NotImplemented); +} + + + +static CYTHON_INLINE PyObject *__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { + binaryfunc slot; +#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + slot = type->tp_as_number ? type->tp_as_number->nb_and : NULL; +#else + slot = (binaryfunc) PyType_GetSlot(type, Py_nb_and); +#endif + return slot ? slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); +} +static PyObject *__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *left, PyObject *right ) { + int maybe_self_is_left, maybe_self_is_right = 0; + maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_and == &__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet) +#endif + || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (maybe_self_is_left) { + PyObject *res; + res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_49__and__(left, right); + if (res != Py_NotImplemented) return res; + Py_DECREF(res); + } + maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_and == &__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet) +#endif + || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (maybe_self_is_right) { + return __pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, tp_base, PyTypeObject*), left, right ); + } + return __Pyx_NewRef(Py_NotImplemented); +} + + + +static CYTHON_INLINE PyObject *__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { + binaryfunc slot; +#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + slot = type->tp_as_number ? 
type->tp_as_number->nb_xor : NULL; +#else + slot = (binaryfunc) PyType_GetSlot(type, Py_nb_xor); +#endif + return slot ? slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); +} +static PyObject *__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *left, PyObject *right ) { + int maybe_self_is_left, maybe_self_is_right = 0; + maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_xor == &__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet) +#endif + || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (maybe_self_is_left) { + PyObject *res; + res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_57__xor__(left, right); + if (res != Py_NotImplemented) return res; + Py_DECREF(res); + } + maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_xor == &__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet) +#endif + || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (maybe_self_is_right) { + return __pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, tp_base, PyTypeObject*), left, right ); + } + return __Pyx_NewRef(Py_NotImplemented); +} + + + +static CYTHON_INLINE PyObject *__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { + binaryfunc slot; +#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + slot = type->tp_as_number ? type->tp_as_number->nb_or : NULL; +#else + slot = (binaryfunc) PyType_GetSlot(type, Py_nb_or); +#endif + return slot ? 
slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); +} +static PyObject *__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *left, PyObject *right ) { + int maybe_self_is_left, maybe_self_is_right = 0; + maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet) +#endif + || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (maybe_self_is_left) { + PyObject *res; + res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_33__or__(left, right); + if (res != Py_NotImplemented) return res; + Py_DECREF(res); + } + maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet) +#endif + || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + if (maybe_self_is_right) { + return __pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, tp_base, PyTypeObject*), left, right ); + } + return __Pyx_NewRef(Py_NotImplemented); +} + + + +static PyObject *__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__(PyObject *self, CYTHON_UNUSED PyObject *arg) { + return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__(self); +} + +static PyMethodDef __pyx_methods_10sqlalchemy_4util_15_collections_cy_IdentitySet[] = { + {"add", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add, METH_O, 0}, + {"discard", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard, METH_O, 0}, + {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__copy__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__repr__", (PyCFunction)__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__, METH_NOARGS|METH_COEXIST, 0}, + {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {0, 0, 0, 0} +}; +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet_slots[] = { + {Py_tp_dealloc, (void *)__pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_IdentitySet}, + {Py_tp_repr, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__}, + {Py_nb_subtract, (void *)__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet}, + {Py_nb_and, (void *)__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet}, + {Py_nb_xor, (void 
*)__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet}, + {Py_nb_or, (void *)__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet}, + {Py_nb_inplace_subtract, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_45__isub__}, + {Py_nb_inplace_and, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_53__iand__}, + {Py_nb_inplace_xor, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_61__ixor__}, + {Py_nb_inplace_or, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_37__ior__}, + {Py_sq_length, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__}, + {Py_sq_contains, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_5__contains__}, + {Py_mp_length, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__}, + {Py_tp_hash, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_71__hash__}, + {Py_tp_doc, (void *)PyDoc_STR("A set that considers only object id() for uniqueness.\n\n This strategy has edge cases for builtin types- it's possible to have\n two 'foo' strings in one of these sets, for example. Use sparingly.\n\n ")}, + {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_IdentitySet}, + {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_IdentitySet}, + {Py_tp_richcompare, (void *)__pyx_tp_richcompare_10sqlalchemy_4util_15_collections_cy_IdentitySet}, + {Py_tp_iter, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_69__iter__}, + {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_4util_15_collections_cy_IdentitySet}, + {Py_tp_init, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_1__init__}, + {Py_tp_new, (void *)__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_IdentitySet}, + {0, 0}, +}; +static PyType_Spec __pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet_spec = { + "sqlalchemy.util._collections_cy.IdentitySet", + sizeof(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet), + 0, + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, + __pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet_slots, +}; +#else + +static PyNumberMethods __pyx_tp_as_number_IdentitySet = { + 0, /*nb_add*/ + __pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*nb_subtract*/ + 0, /*nb_multiply*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_divide*/ + #endif + 0, /*nb_remainder*/ + 0, /*nb_divmod*/ + 0, /*nb_power*/ + 0, /*nb_negative*/ + 0, /*nb_positive*/ + 0, /*nb_absolute*/ + 0, /*nb_bool*/ + 0, /*nb_invert*/ + 0, /*nb_lshift*/ + 0, /*nb_rshift*/ + __pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*nb_and*/ + __pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*nb_xor*/ + __pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*nb_or*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_coerce*/ + #endif + 0, /*nb_int*/ + #if PY_MAJOR_VERSION < 3 + 0, /*nb_long*/ + #else + 0, /*reserved*/ + #endif + 0, /*nb_float*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_oct*/ + #endif + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_hex*/ + #endif + 0, /*nb_inplace_add*/ + 
__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_45__isub__, /*nb_inplace_subtract*/ + 0, /*nb_inplace_multiply*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_inplace_divide*/ + #endif + 0, /*nb_inplace_remainder*/ + 0, /*nb_inplace_power*/ + 0, /*nb_inplace_lshift*/ + 0, /*nb_inplace_rshift*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_53__iand__, /*nb_inplace_and*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_61__ixor__, /*nb_inplace_xor*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_37__ior__, /*nb_inplace_or*/ + 0, /*nb_floor_divide*/ + 0, /*nb_true_divide*/ + 0, /*nb_inplace_floor_divide*/ + 0, /*nb_inplace_true_divide*/ + 0, /*nb_index*/ + #if PY_VERSION_HEX >= 0x03050000 + 0, /*nb_matrix_multiply*/ + #endif + #if PY_VERSION_HEX >= 0x03050000 + 0, /*nb_inplace_matrix_multiply*/ + #endif +}; + +static PySequenceMethods __pyx_tp_as_sequence_IdentitySet = { + __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__, /*sq_length*/ + 0, /*sq_concat*/ + 0, /*sq_repeat*/ + 0, /*sq_item*/ + 0, /*sq_slice*/ + 0, /*sq_ass_item*/ + 0, /*sq_ass_slice*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_5__contains__, /*sq_contains*/ + 0, /*sq_inplace_concat*/ + 0, /*sq_inplace_repeat*/ +}; + +static PyMappingMethods __pyx_tp_as_mapping_IdentitySet = { + __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__, /*mp_length*/ + 0, /*mp_subscript*/ + 0, /*mp_ass_subscript*/ +}; + +static PyTypeObject __pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet = { + PyVarObject_HEAD_INIT(0, 0) + "sqlalchemy.util._collections_cy.""IdentitySet", /*tp_name*/ + sizeof(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__, /*tp_repr*/ + &__pyx_tp_as_number_IdentitySet, /*tp_as_number*/ + &__pyx_tp_as_sequence_IdentitySet, /*tp_as_sequence*/ + &__pyx_tp_as_mapping_IdentitySet, /*tp_as_mapping*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_71__hash__, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + PyDoc_STR("A set that considers only object id() for uniqueness.\n\n This strategy has edge cases for builtin types- it's possible to have\n two 'foo' strings in one of these sets, for example. 
Use sparingly.\n\n "), /*tp_doc*/ + __pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_traverse*/ + __pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_clear*/ + __pyx_tp_richcompare_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_69__iter__, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + #if !CYTHON_USE_TYPE_SPECS + 0, /*tp_dictoffset*/ + #endif + __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + #if CYTHON_USE_TP_FINALIZE + 0, /*tp_finalize*/ + #else + NULL, /*tp_finalize*/ + #endif + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if __PYX_NEED_TP_PRINT_SLOT == 1 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030C0000 + 0, /*tp_watched*/ + #endif + #if PY_VERSION_HEX >= 0x030d00A4 + 0, /*tp_versions_used*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, /*tp_pypy_flags*/ + #endif +}; +#endif + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif +/* #### Code section: pystring_table ### */ + +static int __Pyx_CreateStringTabAndInitStrings(void) { + __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_n_s_AbstractSet, __pyx_k_AbstractSet, sizeof(__pyx_k_AbstractSet), 0, 0, 1, 1}, + {&__pyx_n_s_Any, __pyx_k_Any, sizeof(__pyx_k_Any), 0, 0, 1, 1}, + {&__pyx_n_s_Dict, __pyx_k_Dict, sizeof(__pyx_k_Dict), 0, 0, 1, 1}, + {&__pyx_n_s_Hashable, __pyx_k_Hashable, sizeof(__pyx_k_Hashable), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet, __pyx_k_IdentitySet, sizeof(__pyx_k_IdentitySet), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet___copy, __pyx_k_IdentitySet___copy, sizeof(__pyx_k_IdentitySet___copy), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet___reduce_cython, __pyx_k_IdentitySet___reduce_cython, sizeof(__pyx_k_IdentitySet___reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet___setstate_cython, __pyx_k_IdentitySet___setstate_cython, sizeof(__pyx_k_IdentitySet___setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_add, __pyx_k_IdentitySet_add, sizeof(__pyx_k_IdentitySet_add), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_clear, __pyx_k_IdentitySet_clear, sizeof(__pyx_k_IdentitySet_clear), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_copy, __pyx_k_IdentitySet_copy, sizeof(__pyx_k_IdentitySet_copy), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_difference, __pyx_k_IdentitySet_difference, sizeof(__pyx_k_IdentitySet_difference), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_difference_update, __pyx_k_IdentitySet_difference_update, sizeof(__pyx_k_IdentitySet_difference_update), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_discard, __pyx_k_IdentitySet_discard, 
sizeof(__pyx_k_IdentitySet_discard), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_intersection, __pyx_k_IdentitySet_intersection, sizeof(__pyx_k_IdentitySet_intersection), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_intersection_update, __pyx_k_IdentitySet_intersection_update, sizeof(__pyx_k_IdentitySet_intersection_update), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_issubset, __pyx_k_IdentitySet_issubset, sizeof(__pyx_k_IdentitySet_issubset), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_issuperset, __pyx_k_IdentitySet_issuperset, sizeof(__pyx_k_IdentitySet_issuperset), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_pop, __pyx_k_IdentitySet_pop, sizeof(__pyx_k_IdentitySet_pop), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_remove, __pyx_k_IdentitySet_remove, sizeof(__pyx_k_IdentitySet_remove), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_symmetric_difference, __pyx_k_IdentitySet_symmetric_difference, sizeof(__pyx_k_IdentitySet_symmetric_difference), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_symmetric_difference_2, __pyx_k_IdentitySet_symmetric_difference_2, sizeof(__pyx_k_IdentitySet_symmetric_difference_2), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_union, __pyx_k_IdentitySet_union, sizeof(__pyx_k_IdentitySet_union), 0, 0, 1, 1}, + {&__pyx_n_s_IdentitySet_update, __pyx_k_IdentitySet_update, sizeof(__pyx_k_IdentitySet_update), 0, 0, 1, 1}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2, __pyx_k_Incompatible_checksums_0x_x_vs_0_2, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0_2), 0, 0, 1, 0}, + {&__pyx_n_s_IndexError, __pyx_k_IndexError, sizeof(__pyx_k_IndexError), 0, 0, 1, 1}, + {&__pyx_n_s_Iterable, __pyx_k_Iterable, sizeof(__pyx_k_Iterable), 0, 0, 1, 1}, + {&__pyx_kp_s_Iterable_Any, __pyx_k_Iterable_Any, sizeof(__pyx_k_Iterable_Any), 0, 0, 1, 0}, + {&__pyx_kp_s_Iterable_Hashable, __pyx_k_Iterable_Hashable, sizeof(__pyx_k_Iterable_Hashable), 0, 0, 1, 0}, + {&__pyx_kp_s_Iterable__S, __pyx_k_Iterable__S, sizeof(__pyx_k_Iterable__S), 0, 0, 1, 0}, + {&__pyx_kp_s_Iterable__T, __pyx_k_Iterable__T, sizeof(__pyx_k_Iterable__T), 0, 0, 1, 0}, + {&__pyx_n_s_Iterator, __pyx_k_Iterator, sizeof(__pyx_k_Iterator), 0, 0, 1, 1}, + {&__pyx_n_s_KeyError, __pyx_k_KeyError, sizeof(__pyx_k_KeyError), 0, 0, 1, 1}, + {&__pyx_n_s_List, __pyx_k_List, sizeof(__pyx_k_List), 0, 0, 1, 1}, + {&__pyx_n_s_NoReturn, __pyx_k_NoReturn, sizeof(__pyx_k_NoReturn), 0, 0, 1, 1}, + {&__pyx_n_s_None, __pyx_k_None, sizeof(__pyx_k_None), 0, 0, 1, 1}, + {&__pyx_n_s_NotImplemented, __pyx_k_NotImplemented, sizeof(__pyx_k_NotImplemented), 0, 0, 1, 1}, + {&__pyx_n_s_Optional, __pyx_k_Optional, sizeof(__pyx_k_Optional), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet, __pyx_k_OrderedSet, sizeof(__pyx_k_OrderedSet), 0, 0, 1, 1}, + {&__pyx_kp_s_OrderedSet_Union__T__S, __pyx_k_OrderedSet_Union__T__S, sizeof(__pyx_k_OrderedSet_Union__T__S), 0, 0, 1, 0}, + {&__pyx_kp_s_OrderedSet__T, __pyx_k_OrderedSet__T, sizeof(__pyx_k_OrderedSet__T), 0, 0, 1, 0}, + {&__pyx_n_s_OrderedSet___class_getitem, __pyx_k_OrderedSet___class_getitem, sizeof(__pyx_k_OrderedSet___class_getitem), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet___reduce_cython, __pyx_k_OrderedSet___reduce_cython, sizeof(__pyx_k_OrderedSet___reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet___setstate_cython, __pyx_k_OrderedSet___setstate_cython, sizeof(__pyx_k_OrderedSet___setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_add, __pyx_k_OrderedSet_add, sizeof(__pyx_k_OrderedSet_add), 0, 0, 1, 
1}, + {&__pyx_n_s_OrderedSet_clear, __pyx_k_OrderedSet_clear, sizeof(__pyx_k_OrderedSet_clear), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_copy, __pyx_k_OrderedSet_copy, sizeof(__pyx_k_OrderedSet_copy), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_difference, __pyx_k_OrderedSet_difference, sizeof(__pyx_k_OrderedSet_difference), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_difference_update, __pyx_k_OrderedSet_difference_update, sizeof(__pyx_k_OrderedSet_difference_update), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_discard, __pyx_k_OrderedSet_discard, sizeof(__pyx_k_OrderedSet_discard), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_insert, __pyx_k_OrderedSet_insert, sizeof(__pyx_k_OrderedSet_insert), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_intersection, __pyx_k_OrderedSet_intersection, sizeof(__pyx_k_OrderedSet_intersection), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_intersection_update, __pyx_k_OrderedSet_intersection_update, sizeof(__pyx_k_OrderedSet_intersection_update), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_pop, __pyx_k_OrderedSet_pop, sizeof(__pyx_k_OrderedSet_pop), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_remove, __pyx_k_OrderedSet_remove, sizeof(__pyx_k_OrderedSet_remove), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_symmetric_difference, __pyx_k_OrderedSet_symmetric_difference, sizeof(__pyx_k_OrderedSet_symmetric_difference), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_symmetric_difference_2, __pyx_k_OrderedSet_symmetric_difference_2, sizeof(__pyx_k_OrderedSet_symmetric_difference_2), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_union, __pyx_k_OrderedSet_union, sizeof(__pyx_k_OrderedSet_union), 0, 0, 1, 1}, + {&__pyx_n_s_OrderedSet_update, __pyx_k_OrderedSet_update, sizeof(__pyx_k_OrderedSet_update), 0, 0, 1, 1}, + {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_S, __pyx_k_S, sizeof(__pyx_k_S), 0, 0, 1, 1}, + {&__pyx_n_u_S, __pyx_k_S, sizeof(__pyx_k_S), 0, 1, 0, 1}, + {&__pyx_n_s_Self, __pyx_k_Self, sizeof(__pyx_k_Self), 0, 0, 1, 1}, + {&__pyx_n_s_Set, __pyx_k_Set, sizeof(__pyx_k_Set), 0, 0, 1, 1}, + {&__pyx_n_s_T, __pyx_k_T, sizeof(__pyx_k_T), 0, 0, 1, 1}, + {&__pyx_n_u_T, __pyx_k_T, sizeof(__pyx_k_T), 0, 1, 0, 1}, + {&__pyx_n_s_Tuple, __pyx_k_Tuple, sizeof(__pyx_k_Tuple), 0, 0, 1, 1}, + {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, + {&__pyx_n_s_TypeVar, __pyx_k_TypeVar, sizeof(__pyx_k_TypeVar), 0, 0, 1, 1}, + {&__pyx_n_s_Union, __pyx_k_Union, sizeof(__pyx_k_Union), 0, 0, 1, 1}, + {&__pyx_kp_u__2, __pyx_k__2, sizeof(__pyx_k__2), 0, 1, 0, 0}, + {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0}, + {&__pyx_kp_u__6, __pyx_k__6, sizeof(__pyx_k__6), 0, 1, 0, 0}, + {&__pyx_n_s__8, __pyx_k__8, sizeof(__pyx_k__8), 0, 0, 1, 1}, + {&__pyx_n_s_a, __pyx_k_a, sizeof(__pyx_k_a), 0, 0, 1, 1}, + {&__pyx_n_s_add, __pyx_k_add, sizeof(__pyx_k_add), 0, 0, 1, 1}, + {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, + {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, + {&__pyx_n_s_class, __pyx_k_class, sizeof(__pyx_k_class), 0, 0, 1, 1}, + {&__pyx_n_s_class_getitem, __pyx_k_class_getitem, sizeof(__pyx_k_class_getitem), 0, 0, 1, 1}, + {&__pyx_n_s_clear, __pyx_k_clear, sizeof(__pyx_k_clear), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_cls, __pyx_k_cls, sizeof(__pyx_k_cls), 0, 0, 1, 1}, + {&__pyx_n_s_copy, __pyx_k_copy, sizeof(__pyx_k_copy), 0, 0, 1, 1}, + {&__pyx_n_s_copy_2, __pyx_k_copy_2, 
sizeof(__pyx_k_copy_2), 0, 0, 1, 1}, + {&__pyx_kp_s_cython_Py_ssize_t, __pyx_k_cython_Py_ssize_t, sizeof(__pyx_k_cython_Py_ssize_t), 0, 0, 1, 0}, + {&__pyx_n_s_d, __pyx_k_d, sizeof(__pyx_k_d), 0, 0, 1, 1}, + {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, + {&__pyx_n_s_dict_2, __pyx_k_dict_2, sizeof(__pyx_k_dict_2), 0, 0, 1, 1}, + {&__pyx_n_s_difference, __pyx_k_difference, sizeof(__pyx_k_difference), 0, 0, 1, 1}, + {&__pyx_n_s_difference_update, __pyx_k_difference_update, sizeof(__pyx_k_difference_update), 0, 0, 1, 1}, + {&__pyx_kp_u_disable, __pyx_k_disable, sizeof(__pyx_k_disable), 0, 1, 0, 0}, + {&__pyx_n_s_discard, __pyx_k_discard, sizeof(__pyx_k_discard), 0, 0, 1, 1}, + {&__pyx_n_s_element, __pyx_k_element, sizeof(__pyx_k_element), 0, 0, 1, 1}, + {&__pyx_kp_u_enable, __pyx_k_enable, sizeof(__pyx_k_enable), 0, 1, 0, 0}, + {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_init, __pyx_k_init, sizeof(__pyx_k_init), 0, 0, 1, 1}, + {&__pyx_n_s_insert, __pyx_k_insert, sizeof(__pyx_k_insert), 0, 0, 1, 1}, + {&__pyx_n_s_intersection, __pyx_k_intersection, sizeof(__pyx_k_intersection), 0, 0, 1, 1}, + {&__pyx_n_s_intersection_update, __pyx_k_intersection_update, sizeof(__pyx_k_intersection_update), 0, 0, 1, 1}, + {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, + {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, + {&__pyx_kp_u_isenabled, __pyx_k_isenabled, sizeof(__pyx_k_isenabled), 0, 1, 0, 0}, + {&__pyx_n_s_issubset, __pyx_k_issubset, sizeof(__pyx_k_issubset), 0, 0, 1, 1}, + {&__pyx_n_s_issuperset, __pyx_k_issuperset, sizeof(__pyx_k_issuperset), 0, 0, 1, 1}, + {&__pyx_n_s_items, __pyx_k_items, sizeof(__pyx_k_items), 0, 0, 1, 1}, + {&__pyx_n_s_iterable, __pyx_k_iterable, sizeof(__pyx_k_iterable), 0, 0, 1, 1}, + {&__pyx_n_s_iterables, __pyx_k_iterables, sizeof(__pyx_k_iterables), 0, 0, 1, 1}, + {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, + {&__pyx_n_s_keys, __pyx_k_keys, sizeof(__pyx_k_keys), 0, 0, 1, 1}, + {&__pyx_n_u_len, __pyx_k_len, sizeof(__pyx_k_len), 0, 1, 0, 1}, + {&__pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_k_lib_sqlalchemy_util__collections, sizeof(__pyx_k_lib_sqlalchemy_util__collections), 0, 0, 1, 0}, + {&__pyx_n_u_list, __pyx_k_list, sizeof(__pyx_k_list), 0, 1, 0, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_u_members, __pyx_k_members, sizeof(__pyx_k_members), 0, 1, 0, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_n_s_other, __pyx_k_other, sizeof(__pyx_k_other), 0, 0, 1, 1}, + {&__pyx_n_s_other_set, __pyx_k_other_set, sizeof(__pyx_k_other_set), 0, 0, 1, 1}, + {&__pyx_n_s_pair, __pyx_k_pair, sizeof(__pyx_k_pair), 0, 0, 1, 1}, + {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, + {&__pyx_n_s_pop, __pyx_k_pop, sizeof(__pyx_k_pop), 0, 0, 1, 1}, + {&__pyx_kp_u_pop_from_an_empty_set, __pyx_k_pop_from_an_empty_set, sizeof(__pyx_k_pop_from_an_empty_set), 0, 1, 0, 0}, + {&__pyx_n_s_popitem, __pyx_k_popitem, sizeof(__pyx_k_popitem), 0, 0, 1, 1}, + {&__pyx_n_s_pos, __pyx_k_pos, sizeof(__pyx_k_pos), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, + 
{&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_IdentitySet, __pyx_k_pyx_unpickle_IdentitySet, sizeof(__pyx_k_pyx_unpickle_IdentitySet), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_OrderedSet, __pyx_k_pyx_unpickle_OrderedSet, sizeof(__pyx_k_pyx_unpickle_OrderedSet), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, + {&__pyx_n_s_remove, __pyx_k_remove, sizeof(__pyx_k_remove), 0, 0, 1, 1}, + {&__pyx_n_s_repr, __pyx_k_repr, sizeof(__pyx_k_repr), 0, 0, 1, 1}, + {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, + {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, + {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, + {&__pyx_n_s_seq, __pyx_k_seq, sizeof(__pyx_k_seq), 0, 0, 1, 1}, + {&__pyx_kp_u_set_objects_are_unhashable, __pyx_k_set_objects_are_unhashable, sizeof(__pyx_k_set_objects_are_unhashable), 0, 1, 0, 0}, + {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_slots, __pyx_k_slots, sizeof(__pyx_k_slots), 0, 0, 1, 1}, + {&__pyx_n_s_sqlalchemy_util__collections_cy, __pyx_k_sqlalchemy_util__collections_cy, sizeof(__pyx_k_sqlalchemy_util__collections_cy), 0, 0, 1, 1}, + {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, + {&__pyx_n_s_str, __pyx_k_str, sizeof(__pyx_k_str), 0, 0, 1, 1}, + {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, + {&__pyx_n_s_symmetric_difference, __pyx_k_symmetric_difference, sizeof(__pyx_k_symmetric_difference), 0, 0, 1, 1}, + {&__pyx_n_s_symmetric_difference_update, __pyx_k_symmetric_difference_update, sizeof(__pyx_k_symmetric_difference_update), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_kp_s_type_Self, __pyx_k_type_Self, sizeof(__pyx_k_type_Self), 0, 0, 1, 0}, + {&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, + {&__pyx_n_s_union, __pyx_k_union, sizeof(__pyx_k_union), 0, 0, 1, 1}, + {&__pyx_n_s_unique_list, __pyx_k_unique_list, sizeof(__pyx_k_unique_list), 0, 0, 1, 1}, + {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, + {&__pyx_n_s_use_setstate, __pyx_k_use_setstate, sizeof(__pyx_k_use_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1}, + {&__pyx_n_s_values, __pyx_k_values, sizeof(__pyx_k_values), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} + }; + return __Pyx_InitStrings(__pyx_string_tab); +} +/* #### Code section: cached_builtins ### */ +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_IndexError = __Pyx_GetBuiltinName(__pyx_n_s_IndexError); if (!__pyx_builtin_IndexError) __PYX_ERR(0, 126, __pyx_L1_error) + __pyx_builtin_KeyError = __Pyx_GetBuiltinName(__pyx_n_s_KeyError); if (!__pyx_builtin_KeyError) __PYX_ERR(0, 127, 
__pyx_L1_error) + __pyx_builtin_NotImplemented = __Pyx_GetBuiltinName(__pyx_n_s_NotImplemented); if (!__pyx_builtin_NotImplemented) __PYX_ERR(0, 346, __pyx_L1_error) + __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 509, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: cached_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "sqlalchemy/util/_collections_cy.py":127 + * value = self._list.pop() + * except IndexError: + * raise KeyError("pop from an empty set") from None # <<<<<<<<<<<<<< + * set.remove(self, value) + * return value + */ + __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_u_pop_from_an_empty_set); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + + /* "sqlalchemy/util/_collections_cy.py":509 + * + * def __hash__(self) -> NoReturn: + * raise TypeError("set objects are unhashable") # <<<<<<<<<<<<<< + * + * def __repr__(self) -> str: + */ + __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_u_set_objects_are_unhashable); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 509, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xe74c1d9, 0x484d571, 0xbc1b299): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum + */ + __pyx_tuple__5 = PyTuple_Pack(3, __pyx_int_242532825, __pyx_int_75814257, __pyx_int_197243545); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); + __pyx_tuple__7 = PyTuple_Pack(3, __pyx_int_183888701, __pyx_int_143295406, __pyx_int_61630440); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); + + /* "sqlalchemy/util/_collections_cy.py":38 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + __pyx_codeobj__9 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_is_compiled, 38, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__9)) __PYX_ERR(0, 38, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":44 + * + * # END GENERATED CYTHON IMPORT + * _T = TypeVar("_T") # <<<<<<<<<<<<<< + * _S = TypeVar("_S") + * + */ + __pyx_tuple__10 = PyTuple_Pack(1, __pyx_n_u_T); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(0, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__10); + __Pyx_GIVEREF(__pyx_tuple__10); + + /* "sqlalchemy/util/_collections_cy.py":45 + * # END GENERATED CYTHON IMPORT + * _T = TypeVar("_T") + * _S = TypeVar("_S") # <<<<<<<<<<<<<< + * + * + */ + __pyx_tuple__11 = PyTuple_Pack(1, __pyx_n_u_S); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__11); + __Pyx_GIVEREF(__pyx_tuple__11); + + /* "sqlalchemy/util/_collections_cy.py":48 + * + * + * @cython.ccall # <<<<<<<<<<<<<< + * def 
unique_list(seq: Iterable[_T]) -> List[_T]: + * # this version seems somewhat faster for smaller sizes, but it's + */ + __pyx_tuple__12 = PyTuple_Pack(1, __pyx_n_s_seq); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__12); + __Pyx_GIVEREF(__pyx_tuple__12); + __pyx_codeobj__13 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__12, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_unique_list, 48, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__13)) __PYX_ERR(0, 48, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":83 + * """A set implementation that maintains insertion order.""" + * + * __slots__ = ("_list",) # <<<<<<<<<<<<<< + * _list: List[_T] + * + */ + __pyx_tuple__14 = PyTuple_Pack(1, __pyx_n_u_list); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(0, 83, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__14); + __Pyx_GIVEREF(__pyx_tuple__14); + + /* "sqlalchemy/util/_collections_cy.py":86 + * _list: List[_T] + * + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__(cls, key: Any) -> type[Self]: + * return cls + */ + __pyx_tuple__15 = PyTuple_Pack(2, __pyx_n_s_cls, __pyx_n_s_key); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__15); + __Pyx_GIVEREF(__pyx_tuple__15); + __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_class_getitem, 86, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(0, 86, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":101 + * set.__init__(self) + * + * def copy(self) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * return self._from_list(list(self._list)) + * + */ + __pyx_tuple__17 = PyTuple_Pack(1, __pyx_n_s_self); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__17); + __Pyx_GIVEREF(__pyx_tuple__17); + __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_copy, 101, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(0, 101, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":113 + * return new + * + * def add(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element not in self: + * self._list.append(element) + */ + __pyx_tuple__19 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_element); if (unlikely(!__pyx_tuple__19)) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__19); + __Pyx_GIVEREF(__pyx_tuple__19); + __pyx_codeobj__20 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_add, 113, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__20)) __PYX_ERR(0, 113, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":118 + * set.add(self, element) + * + * def remove(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * # set.remove will raise if element is not in self + * set.remove(self, element) + */ + __pyx_codeobj__21 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 
2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_remove, 118, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__21)) __PYX_ERR(0, 118, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":123 + * self._list.remove(element) + * + * def pop(self) -> _T: # <<<<<<<<<<<<<< + * try: + * value = self._list.pop() + */ + __pyx_tuple__22 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_value); if (unlikely(!__pyx_tuple__22)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__22); + __Pyx_GIVEREF(__pyx_tuple__22); + __pyx_codeobj__23 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_pop, 123, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__23)) __PYX_ERR(0, 123, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":131 + * return value + * + * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element not in self: + * self._list.insert(pos, element) + */ + __pyx_tuple__24 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_pos, __pyx_n_s_element); if (unlikely(!__pyx_tuple__24)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__24); + __Pyx_GIVEREF(__pyx_tuple__24); + __pyx_codeobj__25 = (PyObject*)__Pyx_PyCode_New(3, 3, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__24, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_insert, 131, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__25)) __PYX_ERR(0, 131, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":136 + * set.add(self, element) + * + * def discard(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element in self: + * set.remove(self, element) + */ + __pyx_codeobj__26 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_discard, 136, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__26)) __PYX_ERR(0, 136, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":141 + * self._list.remove(element) + * + * def clear(self) -> None: # <<<<<<<<<<<<<< + * set.clear(self) # type: ignore[arg-type] + * self._list = [] + */ + __pyx_codeobj__27 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_clear, 141, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__27)) __PYX_ERR(0, 141, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":160 + * + * # @cython.ccall # cdef function cannot have star argument + * def update(self, *iterables: Iterable[_T]) -> None: # <<<<<<<<<<<<<< + * for iterable in iterables: + * for element in iterable: + */ + __pyx_tuple__28 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_iterables, __pyx_n_s_iterable, __pyx_n_s_element); if (unlikely(!__pyx_tuple__28)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__28); + __Pyx_GIVEREF(__pyx_tuple__28); + __pyx_codeobj__29 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, 
CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__28, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_update, 160, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__29)) __PYX_ERR(0, 160, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":177 + * + * # @cython.ccall # cdef function cannot have star argument + * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< + * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) + * result.update(*other) + */ + __pyx_tuple__30 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_other, __pyx_n_s_result); if (unlikely(!__pyx_tuple__30)) __PYX_ERR(0, 177, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__30); + __Pyx_GIVEREF(__pyx_tuple__30); + __pyx_codeobj__31 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__30, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_union, 177, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__31)) __PYX_ERR(0, 177, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":186 + * + * # @cython.ccall # cdef function cannot have star argument + * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * other_set: Set[Any] = set.intersection(self, *other) + * return self._from_list([a for a in self._list if a in other_set]) + */ + __pyx_tuple__32 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_other, __pyx_n_s_other_set, __pyx_n_s_a); if (unlikely(!__pyx_tuple__32)) __PYX_ERR(0, 186, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__32); + __Pyx_GIVEREF(__pyx_tuple__32); + __pyx_codeobj__33 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__32, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_intersection, 186, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__33)) __PYX_ERR(0, 186, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":193 + * return self.intersection(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * @cython.annotation_typing(False) # avoid cython crash from generic return + * def symmetric_difference( + */ + __pyx_tuple__34 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_other); if (unlikely(!__pyx_tuple__34)) __PYX_ERR(0, 193, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__34); + __Pyx_GIVEREF(__pyx_tuple__34); + __pyx_codeobj__35 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__34, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_symmetric_difference, 193, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__35)) __PYX_ERR(0, 193, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":219 + * + * # @cython.ccall # cdef function cannot have star argument + * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * other_set: Set[Any] = set.difference(self, *other) + * return self._from_list([a for a in self._list if a in other_set]) + */ + __pyx_codeobj__36 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__32, __pyx_empty_tuple, __pyx_empty_tuple, 
__pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_difference, 219, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__36)) __PYX_ERR(0, 219, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":227 + * + * # @cython.ccall # cdef function cannot have star argument + * def intersection_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< + * set.intersection_update(self, *other) + * self._list = [a for a in self._list if a in self] + */ + __pyx_tuple__37 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_other, __pyx_n_s_a); if (unlikely(!__pyx_tuple__37)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__37); + __Pyx_GIVEREF(__pyx_tuple__37); + __pyx_codeobj__38 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__37, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_intersection_update, 227, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__38)) __PYX_ERR(0, 227, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":235 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * @cython.annotation_typing(False) # avoid cython crash from generic return + * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: + */ + __pyx_codeobj__39 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__34, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_symmetric_difference_update, 235, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__39)) __PYX_ERR(0, 235, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":250 + * + * # @cython.ccall # cdef function cannot have star argument + * def difference_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< + * set.difference_update(self, *other) + * self._list = [a for a in self._list if a in self] + */ + __pyx_codeobj__40 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__37, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_difference_update, 250, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__40)) __PYX_ERR(0, 250, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + __pyx_tuple__41 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_dict_2, __pyx_n_s_use_setstate); if (unlikely(!__pyx_tuple__41)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__41); + __Pyx_GIVEREF(__pyx_tuple__41); + __pyx_codeobj__42 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__41, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__42)) __PYX_ERR(1, 1, __pyx_L1_error) + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) + */ + __pyx_tuple__43 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pyx_state); if (unlikely(!__pyx_tuple__43)) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__43); + __Pyx_GIVEREF(__pyx_tuple__43); + 
__pyx_codeobj__44 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__43, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__44)) __PYX_ERR(1, 16, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":282 + * """ + * + * __slots__ = ("_members",) # <<<<<<<<<<<<<< + * _members: Dict[int, Any] + * + */ + __pyx_tuple__45 = PyTuple_Pack(1, __pyx_n_u_members); if (unlikely(!__pyx_tuple__45)) __PYX_ERR(0, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__45); + __Pyx_GIVEREF(__pyx_tuple__45); + + /* "sqlalchemy/util/_collections_cy.py":291 + * self.update(iterable) + * + * def add(self, value: Any, /) -> None: # <<<<<<<<<<<<<< + * self._members[_get_id(value)] = value + * + */ + __pyx_codeobj__46 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_add, 291, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__46)) __PYX_ERR(0, 291, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":297 + * return _get_id(value) in self._members + * + * @cython.ccall # <<<<<<<<<<<<<< + * def remove(self, value: Any, /): + * del self._members[_get_id(value)] + */ + __pyx_codeobj__47 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_remove, 297, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__47)) __PYX_ERR(0, 297, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":301 + * del self._members[_get_id(value)] + * + * def discard(self, value, /) -> None: # <<<<<<<<<<<<<< + * try: + * self.remove(value) + */ + __pyx_codeobj__48 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_discard, 301, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__48)) __PYX_ERR(0, 301, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":307 + * pass + * + * def pop(self) -> Any: # <<<<<<<<<<<<<< + * pair: Tuple[Any, Any] + * try: + */ + __pyx_tuple__49 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pair); if (unlikely(!__pyx_tuple__49)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__49); + __Pyx_GIVEREF(__pyx_tuple__49); + __pyx_codeobj__50 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__49, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_pop, 307, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__50)) __PYX_ERR(0, 307, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":315 + * raise KeyError("pop from an empty set") + * + * def clear(self) -> None: # <<<<<<<<<<<<<< + * self._members.clear() + * + */ + __pyx_codeobj__51 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_clear, 315, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__51)) 
__PYX_ERR(0, 315, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":334 + * return True + * + * @cython.ccall # <<<<<<<<<<<<<< + * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + */ + __pyx_tuple__52 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_iterable); if (unlikely(!__pyx_tuple__52)) __PYX_ERR(0, 334, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__52); + __Pyx_GIVEREF(__pyx_tuple__52); + __pyx_codeobj__53 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_issubset, 334, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__53)) __PYX_ERR(0, 334, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":354 + * return len(self) < len(other) and self.issubset(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + */ + __pyx_codeobj__54 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_issuperset, 354, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__54)) __PYX_ERR(0, 354, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":374 + * return len(self) > len(other) and self.issuperset(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def union(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__class__() + */ + __pyx_codeobj__55 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_union, 374, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__55)) __PYX_ERR(0, 374, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":386 + * return self.union(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def update(self, iterable: Iterable[Any], /): + * members: Dict[int, Any] = self._members + */ + __pyx_codeobj__56 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_update, 386, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__56)) __PYX_ERR(0, 386, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":401 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + __pyx_codeobj__57 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_difference, 401, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__57)) __PYX_ERR(0, 401, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":420 + * + * # def difference_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.difference(iterable) + */ + __pyx_codeobj__58 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, 
CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_difference_update, 420, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__58)) __PYX_ERR(0, 420, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":431 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + __pyx_codeobj__59 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_intersection, 431, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__59)) __PYX_ERR(0, 431, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":449 + * + * # def intersection_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def intersection_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.intersection(iterable) + */ + __pyx_codeobj__60 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_intersection_update, 449, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__60)) __PYX_ERR(0, 449, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":460 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def symmetric_difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + __pyx_codeobj__61 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_symmetric_difference, 460, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__61)) __PYX_ERR(0, 460, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":482 + * + * # def symmetric_difference_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def symmetric_difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.symmetric_difference(iterable) + */ + __pyx_codeobj__62 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_symmetric_difference_update, 482, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__62)) __PYX_ERR(0, 482, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":493 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def copy(self) -> IdentitySet: + * cp: IdentitySet = self.__new__(self.__class__) + */ + __pyx_codeobj__63 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_copy, 493, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__63)) __PYX_ERR(0, 493, __pyx_L1_error) + + /* "sqlalchemy/util/_collections_cy.py":499 + * return cp + * + * def __copy__(self) -> IdentitySet: # <<<<<<<<<<<<<< + * return self.copy() + 
* + */ + __pyx_codeobj__64 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_copy_2, 499, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__64)) __PYX_ERR(0, 499, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + __pyx_codeobj__65 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__41, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__65)) __PYX_ERR(1, 1, __pyx_L1_error) + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) + */ + __pyx_codeobj__66 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__43, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__66)) __PYX_ERR(1, 16, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __pyx_unpickle_OrderedSet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_tuple__67 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__67)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__67); + __Pyx_GIVEREF(__pyx_tuple__67); + __pyx_codeobj__68 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__67, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_OrderedSet, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__68)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_codeobj__69 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__67, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_IdentitySet, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__69)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} +/* #### Code section: init_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { + __pyx_umethod_PyDict_Type_keys.type = (PyObject*)&PyDict_Type; + __pyx_umethod_PyDict_Type_keys.method_name = &__pyx_n_s_keys; + __pyx_umethod_PyDict_Type_update.type = (PyObject*)&PyDict_Type; + __pyx_umethod_PyDict_Type_update.method_name = &__pyx_n_s_update; + __pyx_umethod_PyDict_Type_values.type = (PyObject*)&PyDict_Type; + __pyx_umethod_PyDict_Type_values.method_name = &__pyx_n_s_values; + __pyx_umethod_PyList_Type_pop.type = (PyObject*)&PyList_Type; + __pyx_umethod_PyList_Type_pop.method_name = &__pyx_n_s_pop; + if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_61630440 = PyInt_FromLong(61630440L); if (unlikely(!__pyx_int_61630440)) __PYX_ERR(0, 1, __pyx_L1_error) + 
__pyx_int_75814257 = PyInt_FromLong(75814257L); if (unlikely(!__pyx_int_75814257)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_143295406 = PyInt_FromLong(143295406L); if (unlikely(!__pyx_int_143295406)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_183888701 = PyInt_FromLong(183888701L); if (unlikely(!__pyx_int_183888701)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_197243545 = PyInt_FromLong(197243545L); if (unlikely(!__pyx_int_197243545)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_242532825 = PyInt_FromLong(242532825L); if (unlikely(!__pyx_int_242532825)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: init_globals ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + return 0; +} +/* #### Code section: init_module ### */ + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + if (__Pyx_ExportFunction("_get_id", (void (*)(void))__pyx_f_10sqlalchemy_4util_15_collections_cy__get_id, "unsigned PY_LONG_LONG (PyObject *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_OrderedSet = &__pyx_vtable_10sqlalchemy_4util_15_collections_cy_OrderedSet; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_OrderedSet._from_list = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *, PyObject *))__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_OrderedSet.symmetric_difference = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_OrderedSet.symmetric_difference_update = (PyObject *(*)(struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference_update; + #if CYTHON_USE_TYPE_SPECS + __pyx_t_1 = PyTuple_Pack(1, (PyObject *)(&PySet_Type)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 80, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet_spec, __pyx_t_1); + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet)) __PYX_ERR(0, 80, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet_spec, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) + #else + __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet = &__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet; + #endif + if (sizeof(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet) != sizeof(PySetObject)) { + if (__Pyx_validate_extern_base((&PySet_Type)) < 0) __PYX_ERR(0, 80, __pyx_L1_error) + } + #if !CYTHON_COMPILING_IN_LIMITED_API + __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet->tp_base = (&PySet_Type); + #endif + #if !CYTHON_USE_TYPE_SPECS + if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) + #endif + #if PY_MAJOR_VERSION < 3 + __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet->tp_print = 0; + #endif + #if !CYTHON_COMPILING_IN_LIMITED_API + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet->tp_dictoffset && __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet->tp_getattro == PyObject_GenericGetAttr)) { + __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet->tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + #endif + if (__Pyx_SetVtable(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) + #if !CYTHON_COMPILING_IN_LIMITED_API + if (__Pyx_MergeVtables(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) + #endif + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_OrderedSet, (PyObject *) __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) + #if !CYTHON_COMPILING_IN_LIMITED_API + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) + #endif + __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_IdentitySet = &__pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.remove = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_remove; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.issubset = (int (*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issubset; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.issuperset = (int (*)(struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issuperset; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.__pyx_union = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_union; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.update = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_update; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.difference = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.difference_update = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference_update; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.intersection = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.intersection_update = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection_update; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.symmetric_difference = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.symmetric_difference_update = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference_update; + __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.copy = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_copy; + #if CYTHON_USE_TYPE_SPECS + __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet_spec, NULL); if (unlikely(!__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet)) __PYX_ERR(0, 274, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet_spec, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) + #else + __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet = 
&__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet; + #endif + #if !CYTHON_COMPILING_IN_LIMITED_API + #endif + #if !CYTHON_USE_TYPE_SPECS + if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) + #endif + #if PY_MAJOR_VERSION < 3 + __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet->tp_print = 0; + #endif + #if !CYTHON_COMPILING_IN_LIMITED_API + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet->tp_dictoffset && __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet->tp_getattro == PyObject_GenericGetAttr)) { + __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet->tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + #endif + if (__Pyx_SetVtable(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) + #if !CYTHON_COMPILING_IN_LIMITED_API + if (__Pyx_MergeVtables(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) + #endif + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_IdentitySet, (PyObject *) __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) + #if !CYTHON_COMPILING_IN_LIMITED_API + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) + #endif + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec__collections_cy(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec__collections_cy}, + {0, NULL} +}; +#endif + +#ifdef __cplusplus +namespace { + struct PyModuleDef __pyx_moduledef = + #else + static struct PyModuleDef __pyx_moduledef = + #endif + { + PyModuleDef_HEAD_INIT, + "_collections_cy", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #elif CYTHON_USE_MODULE_STATE + sizeof(__pyx_mstate), /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + #if CYTHON_USE_MODULE_STATE + __pyx_m_traverse, /* m_traverse */ + __pyx_m_clear, /* m_clear */ + NULL /* m_free */ + #else + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ + #endif + }; + #ifdef __cplusplus +} /* anonymous namespace */ 
+#endif +#endif + +#ifndef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#elif PY_MAJOR_VERSION < 3 +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" void +#else +#define __Pyx_PyMODINIT_FUNC void +#endif +#else +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyObject * +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC init_collections_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC init_collections_cy(void) +#else +__Pyx_PyMODINIT_FUNC PyInit__collections_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit__collections_cy(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? -1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) +#else +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) +#endif +{ + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { +#if CYTHON_COMPILING_IN_LIMITED_API + result = PyModule_AddObject(module, to_name, value); +#else + result = PyDict_SetItemString(moddict, to_name, value); +#endif + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + CYTHON_UNUSED_VAR(def); + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; +#if CYTHON_COMPILING_IN_LIMITED_API + moddict = module; +#else + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; +#endif + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int 
__pyx_pymod_exec__collections_cy(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + int stringtab_initialized = 0; + #if CYTHON_USE_MODULE_STATE + int pystate_addmodule_run = 0; + #endif + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module '_collections_cy' has already been imported. Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_collections_cy", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #elif CYTHON_USE_MODULE_STATE + __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + { + int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); + __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_collections_cy" pseudovariable */ + if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + pystate_addmodule_run = 1; + } + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #endif + CYTHON_UNUSED_VAR(__pyx_t_1); + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__collections_cy(void)", 0); + if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if 
(__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + PyEval_InitThreads(); + #endif + /*--- Initialize various global constants etc. ---*/ + if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + stringtab_initialized = 1; + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_sqlalchemy__util___collections_cy) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "sqlalchemy.util._collections_cy")) { + if (unlikely((PyDict_SetItemString(modules, "sqlalchemy.util._collections_cy", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + if (unlikely((__Pyx_modinit_function_export_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + if (unlikely((__Pyx_modinit_type_init_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + (void)__Pyx_modinit_type_import_code(); + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "sqlalchemy/util/_collections_cy.py":11 + * from __future__ import annotations + * + * from typing import AbstractSet # <<<<<<<<<<<<<< + * from typing import Any + * from typing import Dict + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_AbstractSet); + __Pyx_GIVEREF(__pyx_n_s_AbstractSet); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_AbstractSet)) __PYX_ERR(0, 11, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_AbstractSet); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_AbstractSet, __pyx_t_2) < 0) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":12 + * + * from typing import AbstractSet + * from typing import Any # <<<<<<<<<<<<<< + * from typing import Dict + * from typing import Hashable + */ + 
__pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Any); + __Pyx_GIVEREF(__pyx_n_s_Any); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Any)) __PYX_ERR(0, 12, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Any); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Any, __pyx_t_3) < 0) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":13 + * from typing import AbstractSet + * from typing import Any + * from typing import Dict # <<<<<<<<<<<<<< + * from typing import Hashable + * from typing import Iterable + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Dict); + __Pyx_GIVEREF(__pyx_n_s_Dict); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Dict)) __PYX_ERR(0, 13, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Dict); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Dict, __pyx_t_2) < 0) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":14 + * from typing import Any + * from typing import Dict + * from typing import Hashable # <<<<<<<<<<<<<< + * from typing import Iterable + * from typing import Iterator + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Hashable); + __Pyx_GIVEREF(__pyx_n_s_Hashable); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Hashable)) __PYX_ERR(0, 14, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Hashable); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Hashable, __pyx_t_3) < 0) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":15 + * from typing import Dict + * from typing import Hashable + * from typing import Iterable # <<<<<<<<<<<<<< + * from typing import Iterator + * from typing import List + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Iterable); + __Pyx_GIVEREF(__pyx_n_s_Iterable); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Iterable)) __PYX_ERR(0, 15, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, 
__pyx_n_s_Iterable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Iterable, __pyx_t_2) < 0) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":16 + * from typing import Hashable + * from typing import Iterable + * from typing import Iterator # <<<<<<<<<<<<<< + * from typing import List + * from typing import NoReturn + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Iterator); + __Pyx_GIVEREF(__pyx_n_s_Iterator); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Iterator)) __PYX_ERR(0, 16, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Iterator); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Iterator, __pyx_t_3) < 0) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":17 + * from typing import Iterable + * from typing import Iterator + * from typing import List # <<<<<<<<<<<<<< + * from typing import NoReturn + * from typing import Optional + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_List); + __Pyx_GIVEREF(__pyx_n_s_List); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_List)) __PYX_ERR(0, 17, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_List); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_List, __pyx_t_2) < 0) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":18 + * from typing import Iterator + * from typing import List + * from typing import NoReturn # <<<<<<<<<<<<<< + * from typing import Optional + * from typing import Set + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_NoReturn); + __Pyx_GIVEREF(__pyx_n_s_NoReturn); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_NoReturn)) __PYX_ERR(0, 18, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_NoReturn); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_NoReturn, __pyx_t_3) < 0) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":19 + * from typing import List + * from typing import NoReturn + * from typing import Optional # <<<<<<<<<<<<<< + * from typing import Set + * from typing 
import Tuple + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Optional); + __Pyx_GIVEREF(__pyx_n_s_Optional); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Optional)) __PYX_ERR(0, 19, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Optional); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Optional, __pyx_t_2) < 0) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":20 + * from typing import NoReturn + * from typing import Optional + * from typing import Set # <<<<<<<<<<<<<< + * from typing import Tuple + * from typing import TypeVar + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 20, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Set); + __Pyx_GIVEREF(__pyx_n_s_Set); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Set)) __PYX_ERR(0, 20, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 20, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Set); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 20, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Set, __pyx_t_3) < 0) __PYX_ERR(0, 20, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":21 + * from typing import Optional + * from typing import Set + * from typing import Tuple # <<<<<<<<<<<<<< + * from typing import TypeVar + * from typing import Union + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Tuple); + __Pyx_GIVEREF(__pyx_n_s_Tuple); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Tuple)) __PYX_ERR(0, 21, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Tuple); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Tuple, __pyx_t_2) < 0) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":22 + * from typing import Set + * from typing import Tuple + * from typing import TypeVar # <<<<<<<<<<<<<< + * from typing import Union + * + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_TypeVar); + __Pyx_GIVEREF(__pyx_n_s_TypeVar); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_TypeVar)) __PYX_ERR(0, 22, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_TypeVar); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_TypeVar, __pyx_t_3) < 0) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":23 + * from typing import Tuple + * from typing import TypeVar + * from typing import Union # <<<<<<<<<<<<<< + * + * from .typing import Self + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Union); + __Pyx_GIVEREF(__pyx_n_s_Union); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Union)) __PYX_ERR(0, 23, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Union); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Union, __pyx_t_2) < 0) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":25 + * from typing import Union + * + * from .typing import Self # <<<<<<<<<<<<<< + * + * # START GENERATED CYTHON IMPORT + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Self); + __Pyx_GIVEREF(__pyx_n_s_Self); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Self)) __PYX_ERR(0, 25, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Self, __pyx_t_3) < 0) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":29 + * # START GENERATED CYTHON IMPORT + * # This section is automatically generated by the script tools/cython_imports.py + * try: # <<<<<<<<<<<<<< + * # NOTE: the cython compiler needs this "import cython" in the file, it + * # can't be only "from sqlalchemy.util import cython" with the fallback + */ + { + (void)__pyx_t_1; (void)__pyx_t_4; (void)__pyx_t_5; /* mark used */ + /*try:*/ { + + /* "sqlalchemy/util/_collections_cy.py":33 + * # can't be only "from sqlalchemy.util import cython" with the fallback + * # in that module + * import cython # <<<<<<<<<<<<<< + * except ModuleNotFoundError: + * from sqlalchemy.util import cython + */ + } + } + + /* "sqlalchemy/util/_collections_cy.py":38 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 38, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_bool) < 0) __PYX_ERR(0, 38, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_1_is_compiled, 0, __pyx_n_s_is_compiled, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, 
((PyObject *)__pyx_codeobj__9)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 38, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_compiled, __pyx_t_3) < 0) __PYX_ERR(0, 38, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":44 + * + * # END GENERATED CYTHON IMPORT + * _T = TypeVar("_T") # <<<<<<<<<<<<<< + * _S = TypeVar("_S") + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_TypeVar); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_T, __pyx_t_2) < 0) __PYX_ERR(0, 44, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":45 + * # END GENERATED CYTHON IMPORT + * _T = TypeVar("_T") + * _S = TypeVar("_S") # <<<<<<<<<<<<<< + * + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_TypeVar); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_S, __pyx_t_3) < 0) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":48 + * + * + * @cython.ccall # <<<<<<<<<<<<<< + * def unique_list(seq: Iterable[_T]) -> List[_T]: + * # this version seems somewhat faster for smaller sizes, but it's + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_seq, __pyx_kp_s_Iterable__T) < 0) __PYX_ERR(0, 48, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_3unique_list, 0, __pyx_n_s_unique_list, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__13)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_unique_list, __pyx_t_2) < 0) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_collections_cy.py":83 + * """A set implementation that maintains insertion order.""" + * + * __slots__ = ("_list",) # <<<<<<<<<<<<<< + * _list: List[_T] + * + */ + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_slots, __pyx_tuple__14) < 0) __PYX_ERR(0, 83, __pyx_L1_error) + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":86 + * _list: List[_T] + * + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__(cls, key: Any) -> type[Self]: + * return cls + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 86, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, 
__pyx_kp_s_type_Self) < 0) __PYX_ERR(0, 86, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__, __Pyx_CYFUNCTION_CLASSMETHOD | __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet___class_getitem, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__16)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_class_getitem, __pyx_t_3) < 0) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + __Pyx_GetNameInClass(__pyx_t_3, (PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_class_getitem); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = __Pyx_Method_ClassMethod(__pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_class_getitem, __pyx_t_2) < 0) __PYX_ERR(0, 86, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":101 + * set.__init__(self) + * + * def copy(self) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * return self._from_list(list(self._list)) + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_OrderedSet__T) < 0) __PYX_ERR(0, 101, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_copy, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__18)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_copy, __pyx_t_3) < 0) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":113 + * return new + * + * def add(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element not in self: + * self._list.append(element) + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_element, __pyx_n_s_T) < 0) __PYX_ERR(0, 113, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 113, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_add, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__20)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 113, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_add, __pyx_t_2) < 0) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":118 + * set.add(self, element) + * + * def remove(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * # set.remove will raise if element is not in self + * set.remove(self, element) + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 118, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_element, __pyx_n_s_T) < 0) __PYX_ERR(0, 118, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 118, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_remove, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__21)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 118, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_remove, __pyx_t_3) < 0) __PYX_ERR(0, 118, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":123 + * self._list.remove(element) + * + * def pop(self) -> _T: # <<<<<<<<<<<<<< + * try: + * value = self._list.pop() + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_T) < 0) __PYX_ERR(0, 123, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_pop, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__23)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_pop, __pyx_t_2) < 0) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":131 + * return value + * + * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element not in self: + * self._list.insert(pos, element) + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_pos, __pyx_kp_s_cython_Py_ssize_t) < 0) __PYX_ERR(0, 131, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_element, __pyx_n_s_T) < 0) __PYX_ERR(0, 131, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 131, __pyx_L1_error) + __pyx_t_3 = 
__Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_insert, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__25)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_insert, __pyx_t_3) < 0) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":136 + * set.add(self, element) + * + * def discard(self, element: _T, /) -> None: # <<<<<<<<<<<<<< + * if element in self: + * set.remove(self, element) + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_element, __pyx_n_s_T) < 0) __PYX_ERR(0, 136, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 136, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_discard, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__26)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_discard, __pyx_t_2) < 0) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":141 + * self._list.remove(element) + * + * def clear(self) -> None: # <<<<<<<<<<<<<< + * set.clear(self) # type: ignore[arg-type] + * self._list = [] + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 141, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_clear, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__27)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_clear, __pyx_t_3) < 0) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":157 + * return "%s(%r)" % (self.__class__.__name__, self._list) + * + * __str__ = __repr__ # <<<<<<<<<<<<<< + * + * # @cython.ccall # cdef function cannot have star argument + */ + __Pyx_GetNameInClass(__pyx_t_3, (PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_repr); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 
157, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_str, __pyx_t_3) < 0) __PYX_ERR(0, 157, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":160 + * + * # @cython.ccall # cdef function cannot have star argument + * def update(self, *iterables: Iterable[_T]) -> None: # <<<<<<<<<<<<<< + * for iterable in iterables: + * for element in iterable: + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterables, __pyx_kp_s_Iterable__T) < 0) __PYX_ERR(0, 160, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 160, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__29)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_update, __pyx_t_2) < 0) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":177 + * + * # @cython.ccall # cdef function cannot have star argument + * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< + * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) + * result.update(*other) + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 177, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_other, __pyx_kp_s_Iterable__S) < 0) __PYX_ERR(0, 177, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_OrderedSet_Union__T__S) < 0) __PYX_ERR(0, 177, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_union, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__31)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 177, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_union, __pyx_t_3) < 0) __PYX_ERR(0, 177, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":186 + * + * # @cython.ccall # cdef function cannot have star argument + * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * other_set: Set[Any] = set.intersection(self, *other) + * return self._from_list([a for a in self._list if a in other_set]) + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 186, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + 
if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_other, __pyx_kp_s_Iterable_Hashable) < 0) __PYX_ERR(0, 186, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_OrderedSet__T) < 0) __PYX_ERR(0, 186, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_intersection, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__33)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 186, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_intersection, __pyx_t_2) < 0) __PYX_ERR(0, 186, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":193 + * return self.intersection(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * @cython.annotation_typing(False) # avoid cython crash from generic return + * def symmetric_difference( + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 193, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_other, __pyx_kp_s_Iterable__S) < 0) __PYX_ERR(0, 193, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_symmetric_difference, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__35)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 193, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_symmetric_difference, __pyx_t_3) < 0) __PYX_ERR(0, 193, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":219 + * + * # @cython.ccall # cdef function cannot have star argument + * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< + * other_set: Set[Any] = set.difference(self, *other) + * return self._from_list([a for a in self._list if a in other_set]) + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 219, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_other, __pyx_kp_s_Iterable_Hashable) < 0) __PYX_ERR(0, 219, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_OrderedSet__T) < 0) __PYX_ERR(0, 219, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_difference, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__36)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 219, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_difference, __pyx_t_2) < 0) __PYX_ERR(0, 219, 
__pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":227 + * + * # @cython.ccall # cdef function cannot have star argument + * def intersection_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< + * set.intersection_update(self, *other) + * self._list = [a for a in self._list if a in self] + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_other, __pyx_kp_s_Iterable_Hashable) < 0) __PYX_ERR(0, 227, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 227, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_intersection_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__38)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_intersection_update, __pyx_t_3) < 0) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":235 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * @cython.annotation_typing(False) # avoid cython crash from generic return + * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 235, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_other, __pyx_kp_s_Iterable__T) < 0) __PYX_ERR(0, 235, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_symmetric_difference_2, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__39)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 235, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_symmetric_difference_update, __pyx_t_2) < 0) __PYX_ERR(0, 235, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":250 + * + * # @cython.ccall # cdef function cannot have star argument + * def difference_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< + * set.difference_update(self, *other) + * self._list = [a for a in self._list if a in self] + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 250, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_other, __pyx_kp_s_Iterable_Hashable) < 0) __PYX_ERR(0, 250, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 250, __pyx_L1_error) + __pyx_t_3 = 
__Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_difference_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__40)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 250, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_difference_update, __pyx_t_3) < 0) __PYX_ERR(0, 250, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet___reduce_cython, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__42)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_reduce_cython, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) + */ + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet___setstate_cython, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__44)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_setstate_cython, __pyx_t_3) < 0) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); + + /* "sqlalchemy/util/_collections_cy.py":282 + * """ + * + * __slots__ = ("_members",) # <<<<<<<<<<<<<< + * _members: Dict[int, Any] + * + */ + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_slots, __pyx_tuple__45) < 0) __PYX_ERR(0, 282, __pyx_L1_error) + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":291 + * self.update(iterable) + * + * def add(self, value: Any, /) -> None: # <<<<<<<<<<<<<< + * self._members[_get_id(value)] = value + * + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 291, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 291, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 291, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_add, NULL, 
__pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__46)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 291, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_add, __pyx_t_2) < 0) __PYX_ERR(0, 291, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":297 + * return _get_id(value) in self._members + * + * @cython.ccall # <<<<<<<<<<<<<< + * def remove(self, value: Any, /): + * del self._members[_get_id(value)] + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 297, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 297, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_remove, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__47)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 297, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_remove, __pyx_t_3) < 0) __PYX_ERR(0, 297, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":301 + * del self._members[_get_id(value)] + * + * def discard(self, value, /) -> None: # <<<<<<<<<<<<<< + * try: + * self.remove(value) + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 301, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 301, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_discard, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__48)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 301, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_discard, __pyx_t_2) < 0) __PYX_ERR(0, 301, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":307 + * pass + * + * def pop(self) -> Any: # <<<<<<<<<<<<<< + * pair: Tuple[Any, Any] + * try: + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_Any) < 0) __PYX_ERR(0, 307, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_pop, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__50)); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_pop, __pyx_t_3) < 0) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":315 + * raise KeyError("pop from an empty set") + * + * def clear(self) -> None: # <<<<<<<<<<<<<< + * self._members.clear() + * + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 315, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 315, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_clear, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__51)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 315, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_clear, __pyx_t_2) < 0) __PYX_ERR(0, 315, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":334 + * return True + * + * @cython.ccall # <<<<<<<<<<<<<< + * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 334, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 334, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_issubset, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__53)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 334, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_issubset, __pyx_t_3) < 0) __PYX_ERR(0, 334, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":354 + * return len(self) < len(other) and self.issubset(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: + * other: IdentitySet + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 354, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 354, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_issuperset, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject 
*)__pyx_codeobj__54)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 354, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_issuperset, __pyx_t_2) < 0) __PYX_ERR(0, 354, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":374 + * return len(self) > len(other) and self.issuperset(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def union(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__class__() + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 374, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 374, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_union, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__55)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 374, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_union, __pyx_t_3) < 0) __PYX_ERR(0, 374, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":386 + * return self.union(other) + * + * @cython.ccall # <<<<<<<<<<<<<< + * def update(self, iterable: Iterable[Any], /): + * members: Dict[int, Any] = self._members + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 386, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 386, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__56)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 386, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_update, __pyx_t_2) < 0) __PYX_ERR(0, 386, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":401 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 401, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference, 
__Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_difference, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__57)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_difference, __pyx_t_3) < 0) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":420 + * + * # def difference_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.difference(iterable) + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 420, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 420, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_difference_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__58)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 420, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_difference_update, __pyx_t_2) < 0) __PYX_ERR(0, 420, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":431 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 431, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_intersection, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__59)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_intersection, __pyx_t_3) < 0) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":449 + * + * # def intersection_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def intersection_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.intersection(iterable) + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 449, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 449, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_intersection_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__60)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 449, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_intersection_update, __pyx_t_2) < 0) __PYX_ERR(0, 449, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":460 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def symmetric_difference(self, iterable: Iterable[Any], /) -> IdentitySet: + * result: IdentitySet = self.__new__(self.__class__) + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 460, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 460, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_symmetric_difference, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__61)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 460, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_symmetric_difference, __pyx_t_3) < 0) __PYX_ERR(0, 460, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":482 + * + * # def symmetric_difference_update(self, iterable: Iterable[Any]) -> None: + * @cython.ccall # <<<<<<<<<<<<<< + * def symmetric_difference_update(self, iterable: Iterable[Any], /): + * other: IdentitySet = self.symmetric_difference(iterable) + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 482, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 482, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_symmetric_difference_2, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__62)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 482, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_symmetric_difference_update, __pyx_t_2) < 0) __PYX_ERR(0, 482, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* 
"sqlalchemy/util/_collections_cy.py":493 + * return self + * + * @cython.ccall # <<<<<<<<<<<<<< + * def copy(self) -> IdentitySet: + * cp: IdentitySet = self.__new__(self.__class__) + */ + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_copy, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__63)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 493, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_copy, __pyx_t_2) < 0) __PYX_ERR(0, 493, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "sqlalchemy/util/_collections_cy.py":499 + * return cp + * + * def __copy__(self) -> IdentitySet: # <<<<<<<<<<<<<< + * return self.copy() + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 499, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_IdentitySet) < 0) __PYX_ERR(0, 499, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet___copy, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__64)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 499, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_copy_2, __pyx_t_3) < 0) __PYX_ERR(0, 499, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet___reduce_cython, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__65)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_reduce_cython, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) + */ + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet___setstate_cython, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__66)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_setstate_cython, __pyx_t_3) < 0) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); 
__pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); + + /* "(tree fragment)":1 + * def __pyx_unpickle_OrderedSet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_5__pyx_unpickle_OrderedSet, 0, __pyx_n_s_pyx_unpickle_OrderedSet, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__68)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_OrderedSet, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":11 + * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * __pyx_result._list = __pyx_state[0] + * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): + */ + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_7__pyx_unpickle_IdentitySet, 0, __pyx_n_s_pyx_unpickle_IdentitySet, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__69)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_IdentitySet, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_collections_cy.py":1 + * # util/_collections_cy.py # <<<<<<<<<<<<<< + * # Copyright (C) 2010-2025 the SQLAlchemy authors and contributors + * # + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_3) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + if (__pyx_m) { + if (__pyx_d && stringtab_initialized) { + __Pyx_AddTraceback("init sqlalchemy.util._collections_cy", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + #if !CYTHON_USE_MODULE_STATE + Py_CLEAR(__pyx_m); + #else + Py_DECREF(__pyx_m); + if (pystate_addmodule_run) { + PyObject *tp, *value, *tb; + PyErr_Fetch(&tp, &value, &tb); + PyState_RemoveModule(&__pyx_moduledef); + PyErr_Restore(tp, value, tb); + } + #endif + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init sqlalchemy.util._collections_cy"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 
0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} +/* #### Code section: cleanup_globals ### */ +/* #### Code section: cleanup_module ### */ +/* #### Code section: main_method ### */ +/* #### Code section: utility_code_pragmas ### */ +#ifdef _MSC_VER +#pragma warning( push ) +/* Warning 4127: conditional expression is constant + * Cython uses constant conditional expressions to allow in inline functions to be optimized at + * compile-time, so this warning is not useful + */ +#pragma warning( disable : 4127 ) +#endif + + + +/* #### Code section: utility_code_def ### */ + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030C00A6 + PyObject *current_exception = tstate->current_exception; + if (unlikely(!current_exception)) return 0; + exc_type = (PyObject*) Py_TYPE(current_exception); + if (exc_type == err) return 1; +#else + exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; +#endif + #if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(exc_type); + #endif + if (unlikely(PyTuple_Check(err))) { + result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + } else { + result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err); + } + #if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(exc_type); + #endif + return result; +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { +#if PY_VERSION_HEX >= 0x030C00A6 + PyObject *tmp_value; + assert(type == NULL || (value != NULL && type == (PyObject*) Py_TYPE(value))); + if (value) { + #if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(((PyBaseExceptionObject*) value)->traceback != tb)) + #endif + PyException_SetTraceback(value, tb); + } + tmp_value = tstate->current_exception; + tstate->current_exception = value; + Py_XDECREF(tmp_value); + Py_XDECREF(type); + Py_XDECREF(tb); +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#endif +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { +#if PY_VERSION_HEX >= 0x030C00A6 + PyObject* exc_value; + exc_value = tstate->current_exception; + tstate->current_exception = 0; + *value = exc_value; + *type = NULL; + *tb = NULL; + if (exc_value) { + *type = (PyObject*) Py_TYPE(exc_value); + Py_INCREF(*type); + #if CYTHON_COMPILING_IN_CPYTHON + *tb = ((PyBaseExceptionObject*) exc_value)->traceback; + Py_XINCREF(*tb); + #else + *tb = PyException_GetTraceback(exc_value); + #endif + } +#else + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb 
= tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#endif +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* PyObjectGetAttrStrNoError */ +#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + (void) PyObject_GetOptionalAttr(obj, attr_name, &result); + return result; +#else +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { + return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); + } +#endif + result = __Pyx_PyObject_GetAttrStr(obj, attr_name); + if (unlikely(!result)) { + __Pyx_PyObject_GetAttrStr_ClearAttributeError(); + } + return result; +#endif +} + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStrNoError(__pyx_b, name); + if (unlikely(!result) && !PyErr_Occurred()) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* TupleAndListFromArray */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE void __Pyx_copy_object_array(PyObject *const *CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { + PyObject *v; + Py_ssize_t i; + for (i = 0; i < length; i++) { + v = dest[i] = src[i]; + Py_INCREF(v); + } +} +static CYTHON_INLINE PyObject * +__Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) +{ + PyObject *res; + if (n <= 0) { + Py_INCREF(__pyx_empty_tuple); + return __pyx_empty_tuple; + } + res = PyTuple_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyTupleObject*)res)->ob_item, n); + return res; +} +static CYTHON_INLINE PyObject * +__Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n) +{ + PyObject *res; + if (n <= 0) { + return PyList_New(0); + } + res = PyList_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyListObject*)res)->ob_item, n); + return res; +} +#endif + +/* BytesEquals */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + ps1 = PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + 
return (equals == Py_EQ); + } else { + int result; +#if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) + Py_hash_t hash1, hash2; + hash1 = ((PyBytesObject*)s1)->ob_shash; + hash2 = ((PyBytesObject*)s2)->ob_shash; + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + return (equals == Py_NE); + } +#endif + result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +/* UnicodeEquals */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } +#if CYTHON_USE_UNICODE_INTERNALS + { + Py_hash_t hash1, hash2; + #if CYTHON_PEP393_ENABLED + hash1 = ((PyASCIIObject*)s1)->hash; + hash2 = ((PyASCIIObject*)s2)->hash; + #else + hash1 = ((PyUnicodeObject*)s1)->hash; + hash2 = ((PyUnicodeObject*)s2)->hash; + #endif + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + goto return_ne; + } + } +#endif + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ) ? 
(result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + +/* fastcall */ +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s) +{ + Py_ssize_t i, n = PyTuple_GET_SIZE(kwnames); + for (i = 0; i < n; i++) + { + if (s == PyTuple_GET_ITEM(kwnames, i)) return kwvalues[i]; + } + for (i = 0; i < n; i++) + { + int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); + if (unlikely(eq != 0)) { + if (unlikely(eq < 0)) return NULL; + return kwvalues[i]; + } + } + return NULL; +} +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 +CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { + Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); + PyObject *dict; + dict = PyDict_New(); + if (unlikely(!dict)) + return NULL; + for (i=0; i= 0x031000A1) || likely(PySet_GET_SIZE(result))) + return result; + Py_DECREF(result); +#endif + } +#if CYTHON_USE_TYPE_SLOTS + return PyFrozenSet_Type.tp_new(&PyFrozenSet_Type, __pyx_empty_tuple, NULL); +#else + return PyObject_Call((PyObject*)&PyFrozenSet_Type, __pyx_empty_tuple, NULL); +#endif +} + +/* PySetContains */ +static int __Pyx_PySet_ContainsUnhashable(PyObject *set, PyObject *key) { + int result = -1; + if (PySet_Check(key) && PyErr_ExceptionMatches(PyExc_TypeError)) { + PyObject *tmpkey; + PyErr_Clear(); + tmpkey = __Pyx_PyFrozenSet_New(key); + if (tmpkey != NULL) { + result = PySet_Contains(set, tmpkey); + Py_DECREF(tmpkey); + } + } + return result; +} +static CYTHON_INLINE int __Pyx_PySet_ContainsTF(PyObject* key, PyObject* set, int eq) { + int result = PySet_Contains(set, key); + if (unlikely(result < 0)) { + result = __Pyx_PySet_ContainsUnhashable(set, key); + } + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + int kwds_is_tuple = CYTHON_METH_FASTCALL && likely(PyTuple_Check(kwds)); + while (1) { + Py_XDECREF(key); key = NULL; + Py_XDECREF(value); value = NULL; + if (kwds_is_tuple) { + Py_ssize_t size; +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(kwds); +#else + size = PyTuple_Size(kwds); + if (size < 0) goto bad; +#endif + if (pos >= size) break; +#if CYTHON_AVOID_BORROWED_REFS + key = __Pyx_PySequence_ITEM(kwds, pos); + if (!key) goto bad; +#elif CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kwds, pos); +#else + key = PyTuple_GetItem(kwds, pos); + if (!key) goto bad; +#endif + value = kwvalues[pos]; + pos++; + } + else + { + if (!PyDict_Next(kwds, &pos, &key, &value)) break; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + } + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(value); + Py_DECREF(key); +#endif + key = NULL; + value = NULL; + continue; + } +#if !CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + Py_INCREF(value); + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = ( + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key) + ); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + Py_XDECREF(key); + Py_XDECREF(value); + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + Py_XDECREF(key); + Py_XDECREF(value); + return -1; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? "" : "s", num_found); +} + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL && !CYTHON_VECTORCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) { + return NULL; + } + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) { + return NULL; + } + #endif + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = Py_TYPE(func)->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = __Pyx_CyOrPyCFunction_GET_FUNCTION(func); + self = __Pyx_CyOrPyCFunction_GET_SELF(func); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectFastCall */ +#if PY_VERSION_HEX < 0x03090000 || CYTHON_COMPILING_IN_LIMITED_API +static PyObject* 
__Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs) { + PyObject *argstuple; + PyObject *result = 0; + size_t i; + argstuple = PyTuple_New((Py_ssize_t)nargs); + if (unlikely(!argstuple)) return NULL; + for (i = 0; i < nargs; i++) { + Py_INCREF(args[i]); + if (__Pyx_PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]) < 0) goto bad; + } + result = __Pyx_PyObject_Call(func, argstuple, kwargs); + bad: + Py_DECREF(argstuple); + return result; +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t _nargs, PyObject *kwargs) { + Py_ssize_t nargs = __Pyx_PyVectorcall_NARGS(_nargs); +#if CYTHON_COMPILING_IN_CPYTHON + if (nargs == 0 && kwargs == NULL) { + if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_NOARGS)) + return __Pyx_PyObject_CallMethO(func, NULL); + } + else if (nargs == 1 && kwargs == NULL) { + if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_O)) + return __Pyx_PyObject_CallMethO(func, args[0]); + } +#endif + #if PY_VERSION_HEX < 0x030800B1 + #if CYTHON_FAST_PYCCALL + if (PyCFunction_Check(func)) { + if (kwargs) { + return _PyCFunction_FastCallDict(func, args, nargs, kwargs); + } else { + return _PyCFunction_FastCallKeywords(func, args, nargs, NULL); + } + } + #if PY_VERSION_HEX >= 0x030700A1 + if (!kwargs && __Pyx_IS_TYPE(func, &PyMethodDescr_Type)) { + return _PyMethodDescr_FastCallKeywords(func, args, nargs, NULL); + } + #endif + #endif + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs); + } + #endif + #endif + if (kwargs == NULL) { + #if CYTHON_VECTORCALL + #if PY_VERSION_HEX < 0x03090000 + vectorcallfunc f = _PyVectorcall_Function(func); + #else + vectorcallfunc f = PyVectorcall_Function(func); + #endif + if (f) { + return f(func, args, (size_t)nargs, NULL); + } + #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL + if (__Pyx_CyFunction_CheckExact(func)) { + __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func); + if (f) return f(func, args, (size_t)nargs, NULL); + } + #endif + } + if (nargs == 0) { + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, kwargs); + } + #if PY_VERSION_HEX >= 0x03090000 && !CYTHON_COMPILING_IN_LIMITED_API + return PyObject_VectorcallDict(func, args, (size_t)nargs, kwargs); + #else + return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs); + #endif +} + +/* KeywordStringCheck */ +static int __Pyx_CheckKeywordStrings( + PyObject *kw, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kw, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + if (CYTHON_METH_FASTCALL && likely(PyTuple_Check(kw))) { + Py_ssize_t kwsize; +#if CYTHON_ASSUME_SAFE_MACROS + kwsize = PyTuple_GET_SIZE(kw); +#else + kwsize = PyTuple_Size(kw); + if (kwsize < 0) return 0; +#endif + if (unlikely(kwsize == 0)) + return 1; + if (!kw_allowed) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, 0); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + goto invalid_keyword; + } +#if PY_VERSION_HEX < 0x03090000 + for (pos = 0; pos < kwsize; pos++) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, pos); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; 
+ } +#endif + return 1; + } + while (PyDict_Next(kw, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if (!kw_allowed && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + +/* py_set_discard_unhashable */ +static int __Pyx_PySet_DiscardUnhashable(PyObject *set, PyObject *key) { + PyObject *tmpkey; + int rv; + if (likely(!PySet_Check(key) || !PyErr_ExceptionMatches(PyExc_TypeError))) + return -1; + PyErr_Clear(); + tmpkey = __Pyx_PyFrozenSet_New(key); + if (tmpkey == NULL) + return -1; + rv = PySet_Discard(set, tmpkey); + Py_DECREF(tmpkey); + return rv; +} + +/* py_set_remove */ +static int __Pyx_PySet_RemoveNotFound(PyObject *set, PyObject *key, int found) { + if (unlikely(found < 0)) { + found = __Pyx_PySet_DiscardUnhashable(set, key); + } + if (likely(found == 0)) { + PyObject *tup; + tup = PyTuple_Pack(1, key); + if (!tup) + return -1; + PyErr_SetObject(PyExc_KeyError, tup); + Py_DECREF(tup); + return -1; + } + return found; +} +static CYTHON_INLINE int __Pyx_PySet_Remove(PyObject *set, PyObject *key) { + int found = PySet_Discard(set, key); + if (unlikely(found != 1)) { + return __Pyx_PySet_RemoveNotFound(set, key, found); + } + return 0; +} + +/* PyObjectCallNoArg */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { + PyObject *arg[2] = {NULL, NULL}; + return __Pyx_PyObject_FastCall(func, arg + 1, 0 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + +/* PyObjectCallOneArg */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *args[2] = {NULL, arg}; + return __Pyx_PyObject_FastCall(func, args+1, 1 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + +/* PyObjectGetMethod */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { + PyObject *attr; +#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP + __Pyx_TypeName type_name; + PyTypeObject *tp = Py_TYPE(obj); + PyObject *descr; + descrgetfunc f = NULL; + PyObject **dictptr, *dict; + int meth_found = 0; + assert (*method == NULL); + if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; + } + if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { + return 0; + } + descr = _PyType_Lookup(tp, name); + if (likely(descr != NULL)) { + Py_INCREF(descr); +#if defined(Py_TPFLAGS_METHOD_DESCRIPTOR) && Py_TPFLAGS_METHOD_DESCRIPTOR + if (__Pyx_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)) +#elif PY_MAJOR_VERSION >= 3 + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type))) + #endif +#else + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr))) + #endif +#endif + { + meth_found = 1; + } else 
{ + f = Py_TYPE(descr)->tp_descr_get; + if (f != NULL && PyDescr_IsData(descr)) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + } + } + dictptr = _PyObject_GetDictPtr(obj); + if (dictptr != NULL && (dict = *dictptr) != NULL) { + Py_INCREF(dict); + attr = __Pyx_PyDict_GetItemStr(dict, name); + if (attr != NULL) { + Py_INCREF(attr); + Py_DECREF(dict); + Py_XDECREF(descr); + goto try_unpack; + } + Py_DECREF(dict); + } + if (meth_found) { + *method = descr; + return 1; + } + if (f != NULL) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + if (likely(descr != NULL)) { + *method = descr; + return 0; + } + type_name = __Pyx_PyType_GetName(tp); + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", + type_name, name); +#else + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", + type_name, PyString_AS_STRING(name)); +#endif + __Pyx_DECREF_TypeName(type_name); + return 0; +#else + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; +#endif +try_unpack: +#if CYTHON_UNPACK_METHODS + if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { + PyObject *function = PyMethod_GET_FUNCTION(attr); + Py_INCREF(function); + Py_DECREF(attr); + *method = function; + return 1; + } +#endif + *method = attr; + return 0; +} + +/* PyObjectCallMethod0 */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { + PyObject *method = NULL, *result = NULL; + int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); + if (likely(is_method)) { + result = __Pyx_PyObject_CallOneArg(method, obj); + Py_DECREF(method); + return result; + } + if (unlikely(!method)) goto bad; + result = __Pyx_PyObject_CallNoArg(method); + Py_DECREF(method); +bad: + return result; +} + +/* UnpackUnboundCMethod */ +static PyObject *__Pyx_SelflessCall(PyObject *method, PyObject *args, PyObject *kwargs) { + PyObject *result; + PyObject *selfless_args = PyTuple_GetSlice(args, 1, PyTuple_Size(args)); + if (unlikely(!selfless_args)) return NULL; + result = PyObject_Call(method, selfless_args, kwargs); + Py_DECREF(selfless_args); + return result; +} +static PyMethodDef __Pyx_UnboundCMethod_Def = { + "CythonUnboundCMethod", + __PYX_REINTERPRET_FUNCION(PyCFunction, __Pyx_SelflessCall), + METH_VARARGS | METH_KEYWORDS, + NULL +}; +static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) { + PyObject *method; + method = __Pyx_PyObject_GetAttrStr(target->type, *target->method_name); + if (unlikely(!method)) + return -1; + target->method = method; +#if CYTHON_COMPILING_IN_CPYTHON + #if PY_MAJOR_VERSION >= 3 + if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type))) + #else + if (likely(!__Pyx_CyOrPyCFunction_Check(method))) + #endif + { + PyMethodDescrObject *descr = (PyMethodDescrObject*) method; + target->func = descr->d_method->ml_meth; + target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_STACKLESS); + } else +#endif +#if CYTHON_COMPILING_IN_PYPY +#else + if (PyCFunction_Check(method)) +#endif + { + PyObject *self; + int self_found; +#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY + self = PyObject_GetAttrString(method, "__self__"); + if (!self) { + PyErr_Clear(); + } +#else + self = PyCFunction_GET_SELF(method); +#endif + self_found = (self && self != Py_None); +#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY + 
Py_XDECREF(self); +#endif + if (self_found) { + PyObject *unbound_method = PyCFunction_New(&__Pyx_UnboundCMethod_Def, method); + if (unlikely(!unbound_method)) return -1; + Py_DECREF(method); + target->method = unbound_method; + } + } + return 0; +} + +/* CallUnboundCMethod0 */ +static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self) { + PyObject *args, *result = NULL; + if (unlikely(!cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_ASSUME_SAFE_MACROS + args = PyTuple_New(1); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); +#else + args = PyTuple_Pack(1, self); + if (unlikely(!args)) goto bad; +#endif + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + Py_DECREF(args); +bad: + return result; +} + +/* pop */ +static CYTHON_INLINE PyObject* __Pyx__PyObject_Pop(PyObject* L) { + if (__Pyx_IS_TYPE(L, &PySet_Type)) { + return PySet_Pop(L); + } + return __Pyx_PyObject_CallMethod0(L, __pyx_n_s_pop); +} +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE PyObject* __Pyx_PyList_Pop(PyObject* L) { + if (likely(PyList_GET_SIZE(L) > (((PyListObject*)L)->allocated >> 1))) { + __Pyx_SET_SIZE(L, Py_SIZE(L) - 1); + return PyList_GET_ITEM(L, PyList_GET_SIZE(L)); + } + return __Pyx_CallUnboundCMethod0(&__pyx_umethod_PyList_Type_pop, L); +} +#endif + +/* GetTopmostException */ +#if CYTHON_USE_EXC_INFO_STACK && CYTHON_FAST_THREAD_STATE +static _PyErr_StackItem * +__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) +{ + _PyErr_StackItem *exc_info = tstate->exc_info; + while ((exc_info->exc_value == NULL || exc_info->exc_value == Py_None) && + exc_info->previous_item != NULL) + { + exc_info = exc_info->previous_item; + } + return exc_info; +} +#endif + +/* SaveResetException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + #if CYTHON_USE_EXC_INFO_STACK && PY_VERSION_HEX >= 0x030B00a4 + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + PyObject *exc_value = exc_info->exc_value; + if (exc_value == NULL || exc_value == Py_None) { + *value = NULL; + *type = NULL; + *tb = NULL; + } else { + *value = exc_value; + Py_INCREF(*value); + *type = (PyObject*) Py_TYPE(exc_value); + Py_INCREF(*type); + *tb = PyException_GetTraceback(exc_value); + } + #elif CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + *type = exc_info->exc_type; + *value = exc_info->exc_value; + *tb = exc_info->exc_traceback; + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); + #else + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); + #endif +} +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + #if CYTHON_USE_EXC_INFO_STACK && PY_VERSION_HEX >= 0x030B00a4 + _PyErr_StackItem *exc_info = tstate->exc_info; + PyObject *tmp_value = exc_info->exc_value; + exc_info->exc_value = value; + Py_XDECREF(tmp_value); + Py_XDECREF(type); + Py_XDECREF(tb); + #else + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = type; + exc_info->exc_value = value; + 
exc_info->exc_traceback = tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); + #endif +} +#endif + +/* GetException */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) +#endif +{ + PyObject *local_type = NULL, *local_value, *local_tb = NULL; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if PY_VERSION_HEX >= 0x030C00A6 + local_value = tstate->current_exception; + tstate->current_exception = 0; + if (likely(local_value)) { + local_type = (PyObject*) Py_TYPE(local_value); + Py_INCREF(local_type); + local_tb = PyException_GetTraceback(local_value); + } + #else + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + #endif +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE && PY_VERSION_HEX >= 0x030C00A6 + if (unlikely(tstate->current_exception)) +#elif CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if CYTHON_USE_EXC_INFO_STACK + { + _PyErr_StackItem *exc_info = tstate->exc_info; + #if PY_VERSION_HEX >= 0x030B00a4 + tmp_value = exc_info->exc_value; + exc_info->exc_value = local_value; + tmp_type = NULL; + tmp_tb = NULL; + Py_XDECREF(local_type); + Py_XDECREF(local_tb); + #else + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = local_type; + exc_info->exc_value = local_value; + exc_info->exc_traceback = local_tb; + #endif + } + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + __Pyx_PyThreadState_declare + CYTHON_UNUSED_VAR(cause); + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + 
PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { + #if PY_VERSION_HEX >= 0x030C00A6 + PyException_SetTraceback(value, tb); + #elif CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* GetItemInt */ +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (unlikely(!j)) return NULL; + r = PyObject_GetItem(o, j); + 
Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; + PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; + if (mm && mm->mp_subscript) { + PyObject *r, *key = PyInt_FromSsize_t(i); + if (unlikely(!key)) return NULL; + r = mm->mp_subscript(o, key); + Py_DECREF(key); + return r; + } + if (likely(sm && sm->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { + Py_ssize_t l = sm->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return sm->sq_item(o, i); + } + } +#else + if (is_list || !PyMapping_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* PyObjectFormatAndDecref */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatSimpleAndDecref(PyObject* s, PyObject* f) { + if (unlikely(!s)) return NULL; + if (likely(PyUnicode_CheckExact(s))) return s; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(s))) { + PyObject *result = PyUnicode_FromEncodedObject(s, NULL, "strict"); + Py_DECREF(s); + return result; + } + #endif + return __Pyx_PyObject_FormatAndDecref(s, f); +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatAndDecref(PyObject* s, PyObject* f) { + PyObject *result; + if (unlikely(!s)) return NULL; + result = PyObject_Format(s, f); + Py_DECREF(s); + return result; +} + +/* JoinPyUnicode */ +static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, + Py_UCS4 max_char) { +#if CYTHON_USE_UNICODE_INTERNALS && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + PyObject *result_uval; + int result_ukind, kind_shift; + Py_ssize_t i, char_pos; + void *result_udata; + CYTHON_MAYBE_UNUSED_VAR(max_char); +#if CYTHON_PEP393_ENABLED + result_uval = PyUnicode_New(result_ulength, max_char); + if (unlikely(!result_uval)) return NULL; + result_ukind = (max_char <= 255) ? PyUnicode_1BYTE_KIND : (max_char <= 65535) ? PyUnicode_2BYTE_KIND : PyUnicode_4BYTE_KIND; + kind_shift = (result_ukind == PyUnicode_4BYTE_KIND) ? 2 : result_ukind - 1; + result_udata = PyUnicode_DATA(result_uval); +#else + result_uval = PyUnicode_FromUnicode(NULL, result_ulength); + if (unlikely(!result_uval)) return NULL; + result_ukind = sizeof(Py_UNICODE); + kind_shift = (result_ukind == 4) ? 
2 : result_ukind - 1; + result_udata = PyUnicode_AS_UNICODE(result_uval); +#endif + assert(kind_shift == 2 || kind_shift == 1 || kind_shift == 0); + char_pos = 0; + for (i=0; i < value_count; i++) { + int ukind; + Py_ssize_t ulength; + void *udata; + PyObject *uval = PyTuple_GET_ITEM(value_tuple, i); + if (unlikely(__Pyx_PyUnicode_READY(uval))) + goto bad; + ulength = __Pyx_PyUnicode_GET_LENGTH(uval); + if (unlikely(!ulength)) + continue; + if (unlikely((PY_SSIZE_T_MAX >> kind_shift) - ulength < char_pos)) + goto overflow; + ukind = __Pyx_PyUnicode_KIND(uval); + udata = __Pyx_PyUnicode_DATA(uval); + if (!CYTHON_PEP393_ENABLED || ukind == result_ukind) { + memcpy((char *)result_udata + (char_pos << kind_shift), udata, (size_t) (ulength << kind_shift)); + } else { + #if PY_VERSION_HEX >= 0x030d0000 + if (unlikely(PyUnicode_CopyCharacters(result_uval, char_pos, uval, 0, ulength) < 0)) goto bad; + #elif CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 || defined(_PyUnicode_FastCopyCharacters) + _PyUnicode_FastCopyCharacters(result_uval, char_pos, uval, 0, ulength); + #else + Py_ssize_t j; + for (j=0; j < ulength; j++) { + Py_UCS4 uchar = __Pyx_PyUnicode_READ(ukind, udata, j); + __Pyx_PyUnicode_WRITE(result_ukind, result_udata, char_pos+j, uchar); + } + #endif + } + char_pos += ulength; + } + return result_uval; +overflow: + PyErr_SetString(PyExc_OverflowError, "join() result is too long for a Python string"); +bad: + Py_DECREF(result_uval); + return NULL; +#else + CYTHON_UNUSED_VAR(max_char); + CYTHON_UNUSED_VAR(result_ulength); + CYTHON_UNUSED_VAR(value_count); + return PyUnicode_Join(__pyx_empty_unicode, value_tuple); +#endif +} + +/* RaiseUnexpectedTypeError */ +static int +__Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) +{ + __Pyx_TypeName obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, + expected, obj_type_name); + __Pyx_DECREF_TypeName(obj_type_name); + return 0; +} + +/* PyDictVersioning */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? 
__PYX_GET_DICT_VERSION(*dictptr) : 0; +} +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif + +/* GetAttr */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_USE_TYPE_SLOTS +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/* HasAttr */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { + PyObject *r; + if (unlikely(!__Pyx_PyBaseString_Check(n))) { + PyErr_SetString(PyExc_TypeError, + "hasattr(): attribute name must be string"); + return -1; + } + r = __Pyx_GetAttr(o, n); + if (!r) { + PyErr_Clear(); + return 0; + } else { + Py_DECREF(r); + return 1; + } +} + +/* GetAttr3 */ +#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 +static PyObject *__Pyx_GetAttr3Default(PyObject *d) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + __Pyx_PyErr_Clear(); + Py_INCREF(d); + return d; +} +#endif +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r; +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + int res = PyObject_GetOptionalAttr(o, n, &r); + return (res != 0) ? r : __Pyx_NewRef(d); +#else + #if CYTHON_USE_TYPE_SLOTS + if (likely(PyString_Check(n))) { + r = __Pyx_PyObject_GetAttrStrNoError(o, n); + if (unlikely(!r) && likely(!PyErr_Occurred())) { + r = __Pyx_NewRef(d); + } + return r; + } + #endif + r = PyObject_GetAttr(o, n); + return (likely(r)) ? 
r : __Pyx_GetAttr3Default(d); +#endif +} + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#elif CYTHON_COMPILING_IN_LIMITED_API + if (unlikely(!__pyx_m)) { + return NULL; + } + result = PyObject_GetAttr(__pyx_m, name); + if (likely(result)) { + return result; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* ExtTypeTest */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + __Pyx_TypeName obj_type_name; + __Pyx_TypeName type_name; + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(__Pyx_TypeCheck(obj, type))) + return 1; + obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + type_name = __Pyx_PyType_GetName(type); + PyErr_Format(PyExc_TypeError, + "Cannot convert " __Pyx_FMT_TYPENAME " to " __Pyx_FMT_TYPENAME, + obj_type_name, type_name); + __Pyx_DECREF_TypeName(obj_type_name); + __Pyx_DECREF_TypeName(type_name); + return 0; +} + +/* py_dict_keys */ +static CYTHON_INLINE PyObject* __Pyx_PyDict_Keys(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return __Pyx_CallUnboundCMethod0(&__pyx_umethod_PyDict_Type_keys, d); + else + return PyDict_Keys(d); +} + +/* CallUnboundCMethod1 */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) { + if (likely(cfunc->func)) { + int flag = cfunc->flag; + if (flag == METH_O) { + return (*(cfunc->func))(self, arg); + } else if ((PY_VERSION_HEX >= 0x030600B1) && flag == METH_FASTCALL) { + #if PY_VERSION_HEX >= 0x030700A0 + return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1); + #else + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + #endif + } else if ((PY_VERSION_HEX >= 0x030700A0) && flag == (METH_FASTCALL | METH_KEYWORDS)) { + return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); + } + } + return __Pyx__CallUnboundCMethod1(cfunc, self, arg); +} +#endif +static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){ + PyObject *args, *result = NULL; + if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_COMPILING_IN_CPYTHON + if (cfunc->func && (cfunc->flag & METH_VARARGS)) { + args = PyTuple_New(1); + if (unlikely(!args)) goto bad; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + if 
(cfunc->flag & METH_KEYWORDS) + result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); + else + result = (*cfunc->func)(self, args); + } else { + args = PyTuple_New(2); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 1, arg); + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + } +#else + args = PyTuple_Pack(2, self, arg); + if (unlikely(!args)) goto bad; + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); +#endif +bad: + Py_XDECREF(args); + return result; +} + +/* IterFinish */ +static CYTHON_INLINE int __Pyx_IterFinish(void) { + PyObject* exc_type; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + exc_type = __Pyx_PyErr_CurrentExceptionType(); + if (unlikely(exc_type)) { + if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) + return -1; + __Pyx_PyErr_Clear(); + return 0; + } + return 0; +} + +/* RaiseNeedMoreValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? "" : "s"); +} + +/* RaiseTooManyValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +/* UnpackItemEndCheck */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } + return __Pyx_IterFinish(); +} + +/* RaiseNoneIterError */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); +} + +/* UnpackTupleError */ +static void __Pyx_UnpackTupleError(PyObject *t, Py_ssize_t index) { + if (t == Py_None) { + __Pyx_RaiseNoneNotIterableError(); + } else if (PyTuple_GET_SIZE(t) < index) { + __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(t)); + } else { + __Pyx_RaiseTooManyValuesError(index); + } +} + +/* UnpackTuple2 */ +static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( + PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, int decref_tuple) { + PyObject *value1 = NULL, *value2 = NULL; +#if CYTHON_COMPILING_IN_PYPY + value1 = PySequence_ITEM(tuple, 0); if (unlikely(!value1)) goto bad; + value2 = PySequence_ITEM(tuple, 1); if (unlikely(!value2)) goto bad; +#else + value1 = PyTuple_GET_ITEM(tuple, 0); Py_INCREF(value1); + value2 = PyTuple_GET_ITEM(tuple, 1); Py_INCREF(value2); +#endif + if (decref_tuple) { + Py_DECREF(tuple); + } + *pvalue1 = value1; + *pvalue2 = value2; + return 0; +#if CYTHON_COMPILING_IN_PYPY +bad: + Py_XDECREF(value1); + Py_XDECREF(value2); + if (decref_tuple) { Py_XDECREF(tuple); } + return -1; +#endif +} +static int __Pyx_unpack_tuple2_generic(PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, + int has_known_size, int decref_tuple) { + Py_ssize_t index; + PyObject *value1 = NULL, *value2 = NULL, *iter = NULL; + iternextfunc iternext; + iter = PyObject_GetIter(tuple); + if (unlikely(!iter)) goto bad; + if (decref_tuple) { Py_DECREF(tuple); tuple = NULL; } + iternext = __Pyx_PyObject_GetIterNextFunc(iter); + value1 = iternext(iter); if (unlikely(!value1)) { index = 0; goto unpacking_failed; } + value2 = iternext(iter); if (unlikely(!value2)) { index = 1; goto unpacking_failed; } + if (!has_known_size && 
unlikely(__Pyx_IternextUnpackEndCheck(iternext(iter), 2))) goto bad; + Py_DECREF(iter); + *pvalue1 = value1; + *pvalue2 = value2; + return 0; +unpacking_failed: + if (!has_known_size && __Pyx_IterFinish() == 0) + __Pyx_RaiseNeedMoreValuesError(index); +bad: + Py_XDECREF(iter); + Py_XDECREF(value1); + Py_XDECREF(value2); + if (decref_tuple) { Py_XDECREF(tuple); } + return -1; +} + +/* dict_iter */ +#if CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 +#include <string.h> +#endif +static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* iterable, int is_dict, PyObject* method_name, + Py_ssize_t* p_orig_length, int* p_source_is_dict) { + is_dict = is_dict || likely(PyDict_CheckExact(iterable)); + *p_source_is_dict = is_dict; + if (is_dict) { +#if !CYTHON_COMPILING_IN_PYPY + *p_orig_length = PyDict_Size(iterable); + Py_INCREF(iterable); + return iterable; +#elif PY_MAJOR_VERSION >= 3 + static PyObject *py_items = NULL, *py_keys = NULL, *py_values = NULL; + PyObject **pp = NULL; + if (method_name) { + const char *name = PyUnicode_AsUTF8(method_name); + if (strcmp(name, "iteritems") == 0) pp = &py_items; + else if (strcmp(name, "iterkeys") == 0) pp = &py_keys; + else if (strcmp(name, "itervalues") == 0) pp = &py_values; + if (pp) { + if (!*pp) { + *pp = PyUnicode_FromString(name + 4); + if (!*pp) + return NULL; + } + method_name = *pp; + } + } +#endif + } + *p_orig_length = 0; + if (method_name) { + PyObject* iter; + iterable = __Pyx_PyObject_CallMethod0(iterable, method_name); + if (!iterable) + return NULL; +#if !CYTHON_COMPILING_IN_PYPY + if (PyTuple_CheckExact(iterable) || PyList_CheckExact(iterable)) + return iterable; +#endif + iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + return iter; + } + return PyObject_GetIter(iterable); +} +static CYTHON_INLINE int __Pyx_dict_iter_next( + PyObject* iter_obj, CYTHON_NCP_UNUSED Py_ssize_t orig_length, CYTHON_NCP_UNUSED Py_ssize_t* ppos, + PyObject** pkey, PyObject** pvalue, PyObject** pitem, int source_is_dict) { + PyObject* next_item; +#if !CYTHON_COMPILING_IN_PYPY + if (source_is_dict) { + PyObject *key, *value; + if (unlikely(orig_length != PyDict_Size(iter_obj))) { + PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration"); + return -1; + } + if (unlikely(!PyDict_Next(iter_obj, ppos, &key, &value))) { + return 0; + } + if (pitem) { + PyObject* tuple = PyTuple_New(2); + if (unlikely(!tuple)) { + return -1; + } + Py_INCREF(key); + Py_INCREF(value); + PyTuple_SET_ITEM(tuple, 0, key); + PyTuple_SET_ITEM(tuple, 1, value); + *pitem = tuple; + } else { + if (pkey) { + Py_INCREF(key); + *pkey = key; + } + if (pvalue) { + Py_INCREF(value); + *pvalue = value; + } + } + return 1; + } else if (PyTuple_CheckExact(iter_obj)) { + Py_ssize_t pos = *ppos; + if (unlikely(pos >= PyTuple_GET_SIZE(iter_obj))) return 0; + *ppos = pos + 1; + next_item = PyTuple_GET_ITEM(iter_obj, pos); + Py_INCREF(next_item); + } else if (PyList_CheckExact(iter_obj)) { + Py_ssize_t pos = *ppos; + if (unlikely(pos >= PyList_GET_SIZE(iter_obj))) return 0; + *ppos = pos + 1; + next_item = PyList_GET_ITEM(iter_obj, pos); + Py_INCREF(next_item); + } else +#endif + { + next_item = PyIter_Next(iter_obj); + if (unlikely(!next_item)) { + return __Pyx_IterFinish(); + } + } + if (pitem) { + *pitem = next_item; + } else if (pkey && pvalue) { + if (__Pyx_unpack_tuple2(next_item, pkey, pvalue, source_is_dict, source_is_dict, 1)) + return -1; + } else if (pkey) { + *pkey = next_item; + } else { + *pvalue = next_item; + } + return 1; +} + +/* ArgTypeTest */ +static int 
__Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) +{ + __Pyx_TypeName type_name; + __Pyx_TypeName obj_type_name; + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + else if (exact) { + #if PY_MAJOR_VERSION == 2 + if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(__Pyx_TypeCheck(obj, type))) return 1; + } + type_name = __Pyx_PyType_GetName(type); + obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected " __Pyx_FMT_TYPENAME + ", got " __Pyx_FMT_TYPENAME ")", name, type_name, obj_type_name); + __Pyx_DECREF_TypeName(type_name); + __Pyx_DECREF_TypeName(obj_type_name); + return 0; +} + +/* py_dict_values */ +static CYTHON_INLINE PyObject* __Pyx_PyDict_Values(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return __Pyx_CallUnboundCMethod0(&__pyx_umethod_PyDict_Type_values, d); + else + return PyDict_Values(d); +} + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *module = 0; + PyObject *empty_dict = 0; + PyObject *empty_list = 0; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (unlikely(!py_import)) + goto bad; + if (!from_list) { + empty_list = PyList_New(0); + if (unlikely(!empty_list)) + goto bad; + from_list = empty_list; + } + #endif + empty_dict = PyDict_New(); + if (unlikely(!empty_dict)) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.') != NULL) { + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, 1); + if (unlikely(!module)) { + if (unlikely(!PyErr_ExceptionMatches(PyExc_ImportError))) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (unlikely(!py_level)) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, __pyx_d, empty_dict, from_list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, level); + #endif + } + } +bad: + Py_XDECREF(empty_dict); + Py_XDECREF(empty_list); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + const char* module_name_str = 0; + PyObject* module_name = 0; + PyObject* module_dot = 0; + PyObject* full_name = 0; + PyErr_Clear(); + module_name_str = PyModule_GetName(module); + if (unlikely(!module_name_str)) { goto modbad; } + module_name = PyUnicode_FromString(module_name_str); + if (unlikely(!module_name)) { goto modbad; } + module_dot = PyUnicode_Concat(module_name, __pyx_kp_u__6); + if (unlikely(!module_dot)) { goto modbad; } + full_name = PyUnicode_Concat(module_dot, name); + if (unlikely(!full_name)) { goto modbad; } + #if PY_VERSION_HEX < 0x030700A1 || (CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM < 0x07030400) + { + PyObject *modules = PyImport_GetModuleDict(); + if (unlikely(!modules)) + goto modbad; + value = PyObject_GetItem(modules, full_name); + } + #else + value = PyImport_GetModule(full_name); + #endif + modbad: + 
Py_XDECREF(full_name); + Py_XDECREF(module_dot); + Py_XDECREF(module_name); + } + if (unlikely(!value)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* FixUpExtensionType */ +#if CYTHON_USE_TYPE_SPECS +static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { +#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + CYTHON_UNUSED_VAR(spec); + CYTHON_UNUSED_VAR(type); +#else + const PyType_Slot *slot = spec->slots; + while (slot && slot->slot && slot->slot != Py_tp_members) + slot++; + if (slot && slot->slot == Py_tp_members) { + int changed = 0; +#if !(PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON) + const +#endif + PyMemberDef *memb = (PyMemberDef*) slot->pfunc; + while (memb && memb->name) { + if (memb->name[0] == '_' && memb->name[1] == '_') { +#if PY_VERSION_HEX < 0x030900b1 + if (strcmp(memb->name, "__weaklistoffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + type->tp_weaklistoffset = memb->offset; + changed = 1; + } + else if (strcmp(memb->name, "__dictoffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + type->tp_dictoffset = memb->offset; + changed = 1; + } +#if CYTHON_METH_FASTCALL + else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); +#if PY_VERSION_HEX >= 0x030800b4 + type->tp_vectorcall_offset = memb->offset; +#else + type->tp_print = (printfunc) memb->offset; +#endif + changed = 1; + } +#endif +#else + if ((0)); +#endif +#if PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON + else if (strcmp(memb->name, "__module__") == 0) { + PyObject *descr; + assert(memb->type == T_OBJECT); + assert(memb->flags == 0 || memb->flags == READONLY); + descr = PyDescr_NewMember(type, memb); + if (unlikely(!descr)) + return -1; + if (unlikely(PyDict_SetItem(type->tp_dict, PyDescr_NAME(descr), descr) < 0)) { + Py_DECREF(descr); + return -1; + } + Py_DECREF(descr); + changed = 1; + } +#endif + } + memb++; + } + if (changed) + PyType_Modified(type); + } +#endif + return 0; +} +#endif + +/* FormatTypeName */ +#if CYTHON_COMPILING_IN_LIMITED_API +static __Pyx_TypeName +__Pyx_PyType_GetName(PyTypeObject* tp) +{ + PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, + __pyx_n_s_name); + if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { + PyErr_Clear(); + Py_XDECREF(name); + name = __Pyx_NewRef(__pyx_n_s__8); + } + return name; +} +#endif + +/* ValidateExternBase */ +static int __Pyx_validate_extern_base(PyTypeObject *base) { + Py_ssize_t itemsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_itemsize; +#endif +#if !CYTHON_COMPILING_IN_LIMITED_API + itemsize = ((PyTypeObject *)base)->tp_itemsize; +#else + py_itemsize = PyObject_GetAttrString((PyObject*)base, "__itemsize__"); + if (!py_itemsize) + return -1; + itemsize = PyLong_AsSsize_t(py_itemsize); + Py_DECREF(py_itemsize); + py_itemsize = 0; + if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) + return -1; +#endif + if (itemsize) { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(base); + PyErr_Format(PyExc_TypeError, + "inheritance from PyVarObject types like '" __Pyx_FMT_TYPENAME "' not currently supported", b_name); + __Pyx_DECREF_TypeName(b_name); + return -1; + } + return 0; +} + +/* ValidateBasesTuple */ +#if CYTHON_COMPILING_IN_CPYTHON || 
CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS +static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases) { + Py_ssize_t i, n; +#if CYTHON_ASSUME_SAFE_MACROS + n = PyTuple_GET_SIZE(bases); +#else + n = PyTuple_Size(bases); + if (n < 0) return -1; +#endif + for (i = 1; i < n; i++) + { +#if CYTHON_AVOID_BORROWED_REFS + PyObject *b0 = PySequence_GetItem(bases, i); + if (!b0) return -1; +#elif CYTHON_ASSUME_SAFE_MACROS + PyObject *b0 = PyTuple_GET_ITEM(bases, i); +#else + PyObject *b0 = PyTuple_GetItem(bases, i); + if (!b0) return -1; +#endif + PyTypeObject *b; +#if PY_MAJOR_VERSION < 3 + if (PyClass_Check(b0)) + { + PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class", + PyString_AS_STRING(((PyClassObject*)b0)->cl_name)); +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } +#endif + b = (PyTypeObject*) b0; + if (!__Pyx_PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE)) + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "base class '" __Pyx_FMT_TYPENAME "' is not a heap type", b_name); + __Pyx_DECREF_TypeName(b_name); +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } + if (dictoffset == 0) + { + Py_ssize_t b_dictoffset = 0; +#if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY + b_dictoffset = b->tp_dictoffset; +#else + PyObject *py_b_dictoffset = PyObject_GetAttrString((PyObject*)b, "__dictoffset__"); + if (!py_b_dictoffset) goto dictoffset_return; + b_dictoffset = PyLong_AsSsize_t(py_b_dictoffset); + Py_DECREF(py_b_dictoffset); + if (b_dictoffset == -1 && PyErr_Occurred()) goto dictoffset_return; +#endif + if (b_dictoffset) { + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "extension type '%.200s' has no __dict__ slot, " + "but base type '" __Pyx_FMT_TYPENAME "' has: " + "either add 'cdef dict __dict__' to the extension type " + "or add '__slots__ = [...]' to the base type", + type_name, b_name); + __Pyx_DECREF_TypeName(b_name); + } +#if !(CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY) + dictoffset_return: +#endif +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } + } +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + } + return 0; +} +#endif + +/* PyType_Ready */ +static int __Pyx_PyType_Ready(PyTypeObject *t) { +#if CYTHON_USE_TYPE_SPECS || !(CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API) || defined(PYSTON_MAJOR_VERSION) + (void)__Pyx_PyObject_CallMethod0; +#if CYTHON_USE_TYPE_SPECS + (void)__Pyx_validate_bases_tuple; +#endif + return PyType_Ready(t); +#else + int r; + PyObject *bases = __Pyx_PyType_GetSlot(t, tp_bases, PyObject*); + if (bases && unlikely(__Pyx_validate_bases_tuple(t->tp_name, t->tp_dictoffset, bases) == -1)) + return -1; +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) + { + int gc_was_enabled; + #if PY_VERSION_HEX >= 0x030A00b1 + gc_was_enabled = PyGC_Disable(); + (void)__Pyx_PyObject_CallMethod0; + #else + PyObject *ret, *py_status; + PyObject *gc = NULL; + #if PY_VERSION_HEX >= 0x030700a1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM+0 >= 0x07030400) + gc = PyImport_GetModule(__pyx_kp_u_gc); + #endif + if (unlikely(!gc)) gc = PyImport_Import(__pyx_kp_u_gc); + if (unlikely(!gc)) return -1; + py_status = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_isenabled); + if (unlikely(!py_status)) { + Py_DECREF(gc); + return -1; + } + gc_was_enabled = __Pyx_PyObject_IsTrue(py_status); + Py_DECREF(py_status); + if 
(gc_was_enabled > 0) { + ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_disable); + if (unlikely(!ret)) { + Py_DECREF(gc); + return -1; + } + Py_DECREF(ret); + } else if (unlikely(gc_was_enabled == -1)) { + Py_DECREF(gc); + return -1; + } + #endif + t->tp_flags |= Py_TPFLAGS_HEAPTYPE; +#if PY_VERSION_HEX >= 0x030A0000 + t->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; +#endif +#else + (void)__Pyx_PyObject_CallMethod0; +#endif + r = PyType_Ready(t); +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) + t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; + #if PY_VERSION_HEX >= 0x030A00b1 + if (gc_was_enabled) + PyGC_Enable(); + #else + if (gc_was_enabled) { + PyObject *tp, *v, *tb; + PyErr_Fetch(&tp, &v, &tb); + ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_enable); + if (likely(ret || r == -1)) { + Py_XDECREF(ret); + PyErr_Restore(tp, v, tb); + } else { + Py_XDECREF(tp); + Py_XDECREF(v); + Py_XDECREF(tb); + r = -1; + } + } + Py_DECREF(gc); + #endif + } +#endif + return r; +#endif +} + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + __Pyx_TypeName type_name = __Pyx_PyType_GetName(tp); + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", + type_name, attr_name); +#else + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", + type_name, PyString_AS_STRING(attr_name)); +#endif + __Pyx_DECREF_TypeName(type_name); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* SetVTable */ +static int __Pyx_SetVtable(PyTypeObject *type, void *vtable) { + PyObject *ob = PyCapsule_New(vtable, 0, 0); + if (unlikely(!ob)) + goto bad; +#if CYTHON_COMPILING_IN_LIMITED_API + if (unlikely(PyObject_SetAttr((PyObject *) type, __pyx_n_s_pyx_vtable, ob) < 0)) +#else + if (unlikely(PyDict_SetItem(type->tp_dict, __pyx_n_s_pyx_vtable, ob) < 0)) +#endif + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + +/* GetVTable */ +static void* __Pyx_GetVtable(PyTypeObject *type) { + void* ptr; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *ob = PyObject_GetAttr((PyObject *)type, __pyx_n_s_pyx_vtable); +#else + PyObject *ob = PyObject_GetItem(type->tp_dict, __pyx_n_s_pyx_vtable); +#endif + if (!ob) + goto bad; + ptr = PyCapsule_GetPointer(ob, 0); + if (!ptr && !PyErr_Occurred()) + 
PyErr_SetString(PyExc_RuntimeError, "invalid vtable found for imported type"); + Py_DECREF(ob); + return ptr; +bad: + Py_XDECREF(ob); + return NULL; +} + +/* MergeVTables */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_MergeVtables(PyTypeObject *type) { + int i; + void** base_vtables; + __Pyx_TypeName tp_base_name; + __Pyx_TypeName base_name; + void* unknown = (void*)-1; + PyObject* bases = type->tp_bases; + int base_depth = 0; + { + PyTypeObject* base = type->tp_base; + while (base) { + base_depth += 1; + base = base->tp_base; + } + } + base_vtables = (void**) malloc(sizeof(void*) * (size_t)(base_depth + 1)); + base_vtables[0] = unknown; + for (i = 1; i < PyTuple_GET_SIZE(bases); i++) { + void* base_vtable = __Pyx_GetVtable(((PyTypeObject*)PyTuple_GET_ITEM(bases, i))); + if (base_vtable != NULL) { + int j; + PyTypeObject* base = type->tp_base; + for (j = 0; j < base_depth; j++) { + if (base_vtables[j] == unknown) { + base_vtables[j] = __Pyx_GetVtable(base); + base_vtables[j + 1] = unknown; + } + if (base_vtables[j] == base_vtable) { + break; + } else if (base_vtables[j] == NULL) { + goto bad; + } + base = base->tp_base; + } + } + } + PyErr_Clear(); + free(base_vtables); + return 0; +bad: + tp_base_name = __Pyx_PyType_GetName(type->tp_base); + base_name = __Pyx_PyType_GetName((PyTypeObject*)PyTuple_GET_ITEM(bases, i)); + PyErr_Format(PyExc_TypeError, + "multiple bases have vtable conflict: '" __Pyx_FMT_TYPENAME "' and '" __Pyx_FMT_TYPENAME "'", tp_base_name, base_name); + __Pyx_DECREF_TypeName(tp_base_name); + __Pyx_DECREF_TypeName(base_name); + free(base_vtables); + return -1; +} +#endif + +/* SetupReduce */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = __Pyx_PyObject_GetAttrStrNoError(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_getstate = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; + PyObject *getstate = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + getstate = _PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate); +#else + getstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_getstate); + if (!getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (getstate) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_getstate = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_getstate); +#else + object_getstate = __Pyx_PyObject_GetAttrStrNoError((PyObject*)&PyBaseObject_Type, __pyx_n_s_getstate); + if (!object_getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (object_getstate != getstate) { + goto __PYX_GOOD; + } + } +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; + if (reduce_ex == object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP 
+ object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); + if (likely(reduce_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (reduce == object_reduce || PyErr_Occurred()) { + goto __PYX_BAD; + } + setstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); + if (likely(setstate_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (!setstate || PyErr_Occurred()) { + goto __PYX_BAD; + } + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto __PYX_GOOD; +__PYX_BAD: + if (!PyErr_Occurred()) { + __Pyx_TypeName type_obj_name = + __Pyx_PyType_GetName((PyTypeObject*)type_obj); + PyErr_Format(PyExc_RuntimeError, + "Unable to initialize pickling for " __Pyx_FMT_TYPENAME, type_obj_name); + __Pyx_DECREF_TypeName(type_obj_name); + } + ret = -1; +__PYX_GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); + Py_XDECREF(object_getstate); + Py_XDECREF(getstate); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} +#endif + +/* FetchSharedCythonModule */ +static PyObject *__Pyx_FetchSharedCythonABIModule(void) { + return __Pyx_PyImport_AddModuleRef((char*) __PYX_ABI_MODULE_NAME); +} + +/* FetchCommonType */ +static int __Pyx_VerifyCachedType(PyObject *cached_type, + const char *name, + Py_ssize_t basicsize, + Py_ssize_t expected_basicsize) { + if (!PyType_Check(cached_type)) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s is not a type object", name); + return -1; + } + if (basicsize != expected_basicsize) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s has the wrong size, try recompiling", + name); + return -1; + } + return 0; +} +#if !CYTHON_USE_TYPE_SPECS +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { + PyObject* abi_module; + const char* object_name; + PyTypeObject *cached_type = NULL; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + object_name = strrchr(type->tp_name, '.'); + object_name = object_name ? 
object_name+1 : type->tp_name; + cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + if (__Pyx_VerifyCachedType( + (PyObject *)cached_type, + object_name, + cached_type->tp_basicsize, + type->tp_basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + if (PyType_Ready(type) < 0) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) + goto bad; + Py_INCREF(type); + cached_type = type; +done: + Py_DECREF(abi_module); + return cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#else +static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { + PyObject *abi_module, *cached_type = NULL; + const char* object_name = strrchr(spec->name, '.'); + object_name = object_name ? object_name+1 : spec->name; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + cached_type = PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + Py_ssize_t basicsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_basicsize; + py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); + if (unlikely(!py_basicsize)) goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; +#else + basicsize = likely(PyType_Check(cached_type)) ? ((PyTypeObject*) cached_type)->tp_basicsize : -1; +#endif + if (__Pyx_VerifyCachedType( + cached_type, + object_name, + basicsize, + spec->basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + CYTHON_UNUSED_VAR(module); + cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); + if (unlikely(!cached_type)) goto bad; + if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; +done: + Py_DECREF(abi_module); + assert(cached_type == NULL || PyType_Check(cached_type)); + return (PyTypeObject *) cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#endif + +/* PyVectorcallFastCallDict */ +#if CYTHON_METH_FASTCALL +static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + PyObject *res = NULL; + PyObject *kwnames; + PyObject **newargs; + PyObject **kwvalues; + Py_ssize_t i, pos; + size_t j; + PyObject *key, *value; + unsigned long keys_are_strings; + Py_ssize_t nkw = PyDict_GET_SIZE(kw); + newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); + if (unlikely(newargs == NULL)) { + PyErr_NoMemory(); + return NULL; + } + for (j = 0; j < nargs; j++) newargs[j] = args[j]; + kwnames = PyTuple_New(nkw); + if (unlikely(kwnames == NULL)) { + PyMem_Free(newargs); + return NULL; + } + kwvalues = newargs + nargs; + pos = i = 0; + keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; + while (PyDict_Next(kw, &pos, &key, &value)) { + keys_are_strings &= Py_TYPE(key)->tp_flags; + Py_INCREF(key); + Py_INCREF(value); + PyTuple_SET_ITEM(kwnames, i, key); + kwvalues[i] = value; + i++; + } + if (unlikely(!keys_are_strings)) { + PyErr_SetString(PyExc_TypeError, "keywords must be strings"); + goto cleanup; + } + res = vc(func, newargs, nargs, kwnames); 
+cleanup: + Py_DECREF(kwnames); + for (i = 0; i < nkw; i++) + Py_DECREF(kwvalues[i]); + PyMem_Free(newargs); + return res; +} +static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { + return vc(func, args, nargs, NULL); + } + return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); +} +#endif + +/* CythonFunctionShared */ +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + if (__Pyx_CyFunction_Check(func)) { + return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc; + } else if (PyCFunction_Check(func)) { + return PyCFunction_GetFunction(func) == (PyCFunction) cfunc; + } + return 0; +} +#else +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +} +#endif +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + __Pyx_Py_XDECREF_SET( + __Pyx_CyFunction_GetClassObj(f), + ((classobj) ? __Pyx_NewRef(classobj) : NULL)); +#else + __Pyx_Py_XDECREF_SET( + ((PyCMethodObject *) (f))->mm_class, + (PyTypeObject*)((classobj) ? __Pyx_NewRef(classobj) : NULL)); +#endif +} +static PyObject * +__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) +{ + CYTHON_UNUSED_VAR(closure); + if (unlikely(op->func_doc == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); + if (unlikely(!op->func_doc)) return NULL; +#else + if (((PyCFunctionObject*)op)->m_ml->ml_doc) { +#if PY_MAJOR_VERSION >= 3 + op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#else + op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#endif + if (unlikely(op->func_doc == NULL)) + return NULL; + } else { + Py_INCREF(Py_None); + return Py_None; + } +#endif + } + Py_INCREF(op->func_doc); + return op->func_doc; +} +static int +__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (value == NULL) { + value = Py_None; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_doc, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_name == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_name = PyObject_GetAttrString(op->func, "__name__"); +#elif PY_MAJOR_VERSION >= 3 + op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#else + op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#endif + if (unlikely(op->func_name == NULL)) + return NULL; + } + Py_INCREF(op->func_name); + return op->func_name; +} +static int +__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__name__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_name, value); + 
return 0; +} +static PyObject * +__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_qualname); + return op->func_qualname; +} +static int +__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__qualname__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_qualname, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_dict == NULL)) { + op->func_dict = PyDict_New(); + if (unlikely(op->func_dict == NULL)) + return NULL; + } + Py_INCREF(op->func_dict); + return op->func_dict; +} +static int +__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(value == NULL)) { + PyErr_SetString(PyExc_TypeError, + "function's dictionary may not be deleted"); + return -1; + } + if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "setting function's dictionary to a non-dict"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_dict, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_globals); + return op->func_globals; +} +static PyObject * +__Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(op); + CYTHON_UNUSED_VAR(context); + Py_INCREF(Py_None); + return Py_None; +} +static PyObject * +__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) +{ + PyObject* result = (op->func_code) ? 
op->func_code : Py_None; + CYTHON_UNUSED_VAR(context); + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { + int result = 0; + PyObject *res = op->defaults_getter((PyObject *) op); + if (unlikely(!res)) + return -1; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + op->defaults_tuple = PyTuple_GET_ITEM(res, 0); + Py_INCREF(op->defaults_tuple); + op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); + Py_INCREF(op->defaults_kwdict); + #else + op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); + if (unlikely(!op->defaults_tuple)) result = -1; + else { + op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); + if (unlikely(!op->defaults_kwdict)) result = -1; + } + #endif + Py_DECREF(res); + return result; +} +static int +__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__defaults__ must be set to a tuple object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_tuple; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_tuple; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__kwdefaults__ must be set to a dict object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_kwdict; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_kwdict; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value || value == Py_None) { + value = NULL; + } else if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__annotations__ must be set to a dict object"); + return -1; + } + Py_XINCREF(value); + __Pyx_Py_XDECREF_SET(op->func_annotations, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->func_annotations; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + result = PyDict_New(); + if (unlikely(!result)) return NULL; + op->func_annotations = result; + } + Py_INCREF(result); + return result; +} +static PyObject * 
+__Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { + int is_coroutine; + CYTHON_UNUSED_VAR(context); + if (op->func_is_coroutine) { + return __Pyx_NewRef(op->func_is_coroutine); + } + is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; +#if PY_VERSION_HEX >= 0x03050000 + if (is_coroutine) { + PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; + fromlist = PyList_New(1); + if (unlikely(!fromlist)) return NULL; + Py_INCREF(marker); +#if CYTHON_ASSUME_SAFE_MACROS + PyList_SET_ITEM(fromlist, 0, marker); +#else + if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { + Py_DECREF(marker); + Py_DECREF(fromlist); + return NULL; + } +#endif + module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); + Py_DECREF(fromlist); + if (unlikely(!module)) goto ignore; + op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); + Py_DECREF(module); + if (likely(op->func_is_coroutine)) { + return __Pyx_NewRef(op->func_is_coroutine); + } +ignore: + PyErr_Clear(); + } +#endif + op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); + return __Pyx_NewRef(op->func_is_coroutine); +} +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject * +__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_GetAttrString(op->func, "__module__"); +} +static int +__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_SetAttrString(op->func, "__module__", value); +} +#endif +static PyGetSetDef __pyx_CyFunction_getsets[] = { + {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, + {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) "__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, + {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, + {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, +#if CYTHON_COMPILING_IN_LIMITED_API + {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, +#endif + {0, 0, 0, 0, 0} +}; +static PyMemberDef 
__pyx_CyFunction_members[] = { +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, +#endif +#if CYTHON_USE_TYPE_SPECS + {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, +#if CYTHON_METH_FASTCALL +#if CYTHON_BACKPORT_VECTORCALL + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, +#else +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, +#endif +#endif +#endif +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, +#else + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, +#endif +#endif + {0, 0, 0, 0, 0} +}; +static PyObject * +__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) +{ + CYTHON_UNUSED_VAR(args); +#if PY_MAJOR_VERSION >= 3 + Py_INCREF(m->func_qualname); + return m->func_qualname; +#else + return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); +#endif +} +static PyMethodDef __pyx_CyFunction_methods[] = { + {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, + {0, 0, 0, 0} +}; +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) +#else +#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) +#endif +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { +#if !CYTHON_COMPILING_IN_LIMITED_API + PyCFunctionObject *cf = (PyCFunctionObject*) op; +#endif + if (unlikely(op == NULL)) + return NULL; +#if CYTHON_COMPILING_IN_LIMITED_API + op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); + if (unlikely(!op->func)) return NULL; +#endif + op->flags = flags; + __Pyx_CyFunction_weakreflist(op) = NULL; +#if !CYTHON_COMPILING_IN_LIMITED_API + cf->m_ml = ml; + cf->m_self = (PyObject *) op; +#endif + Py_XINCREF(closure); + op->func_closure = closure; +#if !CYTHON_COMPILING_IN_LIMITED_API + Py_XINCREF(module); + cf->m_module = module; +#endif + op->func_dict = NULL; + op->func_name = NULL; + Py_INCREF(qualname); + op->func_qualname = qualname; + op->func_doc = NULL; +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + op->func_classobj = NULL; +#else + ((PyCMethodObject*)op)->mm_class = NULL; +#endif + op->func_globals = globals; + Py_INCREF(op->func_globals); + Py_XINCREF(code); + op->func_code = code; + op->defaults_pyobjects = 0; + op->defaults_size = 0; + op->defaults = NULL; + op->defaults_tuple = NULL; + op->defaults_kwdict = NULL; + op->defaults_getter = NULL; + op->func_annotations = NULL; + op->func_is_coroutine = NULL; +#if CYTHON_METH_FASTCALL + switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { + case METH_NOARGS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; + break; + case METH_O: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; + break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; + break; + case METH_FASTCALL 
| METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS; + break; + case METH_VARARGS | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = NULL; + break; + default: + PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); + Py_DECREF(op); + return NULL; + } +#endif + return (PyObject *) op; +} +static int +__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m) +{ + Py_CLEAR(m->func_closure); +#if CYTHON_COMPILING_IN_LIMITED_API + Py_CLEAR(m->func); +#else + Py_CLEAR(((PyCFunctionObject*)m)->m_module); +#endif + Py_CLEAR(m->func_dict); + Py_CLEAR(m->func_name); + Py_CLEAR(m->func_qualname); + Py_CLEAR(m->func_doc); + Py_CLEAR(m->func_globals); + Py_CLEAR(m->func_code); +#if !CYTHON_COMPILING_IN_LIMITED_API +#if PY_VERSION_HEX < 0x030900B1 + Py_CLEAR(__Pyx_CyFunction_GetClassObj(m)); +#else + { + PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class; + ((PyCMethodObject *) (m))->mm_class = NULL; + Py_XDECREF(cls); + } +#endif +#endif + Py_CLEAR(m->defaults_tuple); + Py_CLEAR(m->defaults_kwdict); + Py_CLEAR(m->func_annotations); + Py_CLEAR(m->func_is_coroutine); + if (m->defaults) { + PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); + int i; + for (i = 0; i < m->defaults_pyobjects; i++) + Py_XDECREF(pydefaults[i]); + PyObject_Free(m->defaults); + m->defaults = NULL; + } + return 0; +} +static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m) +{ + if (__Pyx_CyFunction_weakreflist(m) != NULL) + PyObject_ClearWeakRefs((PyObject *) m); + __Pyx_CyFunction_clear(m); + __Pyx_PyHeapTypeObject_GC_Del(m); +} +static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m) +{ + PyObject_GC_UnTrack(m); + __Pyx__CyFunction_dealloc(m); +} +static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg) +{ + Py_VISIT(m->func_closure); +#if CYTHON_COMPILING_IN_LIMITED_API + Py_VISIT(m->func); +#else + Py_VISIT(((PyCFunctionObject*)m)->m_module); +#endif + Py_VISIT(m->func_dict); + Py_VISIT(m->func_name); + Py_VISIT(m->func_qualname); + Py_VISIT(m->func_doc); + Py_VISIT(m->func_globals); + Py_VISIT(m->func_code); +#if !CYTHON_COMPILING_IN_LIMITED_API + Py_VISIT(__Pyx_CyFunction_GetClassObj(m)); +#endif + Py_VISIT(m->defaults_tuple); + Py_VISIT(m->defaults_kwdict); + Py_VISIT(m->func_is_coroutine); + if (m->defaults) { + PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); + int i; + for (i = 0; i < m->defaults_pyobjects; i++) + Py_VISIT(pydefaults[i]); + } + return 0; +} +static PyObject* +__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op) +{ +#if PY_MAJOR_VERSION >= 3 + return PyUnicode_FromFormat("<cyfunction %U at %p>", + op->func_qualname, (void *)op); +#else + return PyString_FromFormat("<cyfunction %s at %p>", + PyString_AsString(op->func_qualname), (void *)op); +#endif +} +static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) { +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *f = ((__pyx_CyFunctionObject*)func)->func; + PyObject *py_name = NULL; + PyCFunction meth; + int flags; + meth = PyCFunction_GetFunction(f); + if (unlikely(!meth)) return NULL; + flags = PyCFunction_GetFlags(f); + if (unlikely(flags < 0)) return NULL; +#else + PyCFunctionObject* f = (PyCFunctionObject*)func; + PyCFunction meth = f->m_ml->ml_meth; + int flags = f->m_ml->ml_flags; +#endif + Py_ssize_t size; + switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { + case METH_VARARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) + return (*meth)(self, arg); + break; 
+ case METH_VARARGS | METH_KEYWORDS: + return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); + case METH_NOARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 0)) + return (*meth)(self, NULL); +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + case METH_O: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 1)) { + PyObject *result, *arg0; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + arg0 = PyTuple_GET_ITEM(arg, 0); + #else + arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; + #endif + result = (*meth)(self, arg0); + #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_DECREF(arg0); + #endif + return result; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + default: + PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); + return NULL; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", + py_name); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", + f->m_ml->ml_name); +#endif + return NULL; +} +static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *self, *result; +#if CYTHON_COMPILING_IN_LIMITED_API + self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); + if (unlikely(!self) && PyErr_Occurred()) return NULL; +#else + self = ((PyCFunctionObject*)func)->m_self; +#endif + result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); + return result; +} +static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { + PyObject *result; + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; +#if CYTHON_METH_FASTCALL + __pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); + if (vc) { +#if CYTHON_ASSUME_SAFE_MACROS + return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); +#else + (void) &__Pyx_PyVectorcall_FastCallDict; + return PyVectorcall_Call(func, args, kw); +#endif + } +#endif + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + Py_ssize_t argc; + PyObject *new_args; + PyObject *self; +#if CYTHON_ASSUME_SAFE_MACROS + 
argc = PyTuple_GET_SIZE(args); +#else + argc = PyTuple_Size(args); + if (unlikely(!argc) < 0) return NULL; +#endif + new_args = PyTuple_GetSlice(args, 1, argc); + if (unlikely(!new_args)) + return NULL; + self = PyTuple_GetItem(args, 0); + if (unlikely(!self)) { + Py_DECREF(new_args); +#if PY_MAJOR_VERSION > 2 + PyErr_Format(PyExc_TypeError, + "unbound method %.200S() needs an argument", + cyfunc->func_qualname); +#else + PyErr_SetString(PyExc_TypeError, + "unbound method needs an argument"); +#endif + return NULL; + } + result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); + Py_DECREF(new_args); + } else { + result = __Pyx_CyFunction_Call(func, args, kw); + } + return result; +} +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames) +{ + int ret = 0; + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + if (unlikely(nargs < 1)) { + PyErr_Format(PyExc_TypeError, "%.200s() needs an argument", + ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); + return -1; + } + ret = 1; + } + if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); + return -1; + } + return ret; +} +static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + if (unlikely(nargs != 0)) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + def->ml_name, nargs); + return NULL; + } + return def->ml_meth(self, NULL); +} +static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + if (unlikely(nargs != 1)) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + def->ml_name, nargs); + return NULL; + } + return def->ml_meth(self, args[0]); +} +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch 
(__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); +} +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; + PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); +} +#endif +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_CyFunctionType_slots[] = { + {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, + {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, + {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, + {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, + {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, + {Py_tp_methods, (void *)__pyx_CyFunction_methods}, + {Py_tp_members, (void *)__pyx_CyFunction_members}, + {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, + {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, + {0, 0}, +}; +static PyType_Spec __pyx_CyFunctionType_spec = { + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + __pyx_CyFunctionType_slots +}; +#else +static PyTypeObject __pyx_CyFunctionType_type = { + PyVarObject_HEAD_INIT(0, 0) + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, + (destructor) __Pyx_CyFunction_dealloc, +#if !CYTHON_METH_FASTCALL + 0, +#elif CYTHON_BACKPORT_VECTORCALL + (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), +#else + offsetof(PyCFunctionObject, vectorcall), +#endif + 0, + 0, +#if PY_MAJOR_VERSION < 3 + 0, +#else + 0, +#endif + (reprfunc) __Pyx_CyFunction_repr, + 0, + 0, + 0, + 0, + __Pyx_CyFunction_CallAsMethod, + 0, + 0, + 0, + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + 0, + (traverseproc) __Pyx_CyFunction_traverse, + (inquiry) __Pyx_CyFunction_clear, + 0, +#if PY_VERSION_HEX < 0x030500A0 + offsetof(__pyx_CyFunctionObject, func_weakreflist), +#else + offsetof(PyCFunctionObject, m_weakreflist), +#endif + 0, + 0, + __pyx_CyFunction_methods, + __pyx_CyFunction_members, + __pyx_CyFunction_getsets, + 0, + 0, + __Pyx_PyMethod_New, + 0, + offsetof(__pyx_CyFunctionObject, func_dict), + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, +#if PY_VERSION_HEX >= 0x030400a1 + 0, +#endif +#if PY_VERSION_HEX >= 0x030800b1 && 
(!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, +#endif +#if __PYX_NEED_TP_PRINT_SLOT + 0, +#endif +#if PY_VERSION_HEX >= 0x030C0000 + 0, +#endif +#if PY_VERSION_HEX >= 0x030d00A4 + 0, +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, +#endif +}; +#endif +static int __pyx_CyFunction_init(PyObject *module) { +#if CYTHON_USE_TYPE_SPECS + __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); +#else + CYTHON_UNUSED_VAR(module); + __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); +#endif + if (unlikely(__pyx_CyFunctionType == NULL)) { + return -1; + } + return 0; +} +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults = PyObject_Malloc(size); + if (unlikely(!m->defaults)) + return PyErr_NoMemory(); + memset(m->defaults, 0, size); + m->defaults_pyobjects = pyobjects; + m->defaults_size = size; + return m->defaults; +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_tuple = tuple; + Py_INCREF(tuple); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_kwdict = dict; + Py_INCREF(dict); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->func_annotations = dict; + Py_INCREF(dict); +} + +/* CythonFunction */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { + PyObject *op = __Pyx_CyFunction_Init( + PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), + ml, flags, qualname, closure, module, globals, code + ); + if (likely(op)) { + PyObject_GC_Track(op); + } + return op; +} + +/* ClassMethod */ +static PyObject* __Pyx_Method_ClassMethod(PyObject *method) { +#if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM <= 0x05080000 + if (PyObject_TypeCheck(method, &PyWrapperDescr_Type)) { + return PyClassMethod_New(method); + } +#else +#if CYTHON_COMPILING_IN_PYPY + if (PyMethodDescr_Check(method)) +#else + #if PY_MAJOR_VERSION == 2 + static PyTypeObject *methoddescr_type = NULL; + if (unlikely(methoddescr_type == NULL)) { + PyObject *meth = PyObject_GetAttrString((PyObject*)&PyList_Type, "append"); + if (unlikely(!meth)) return NULL; + methoddescr_type = Py_TYPE(meth); + Py_DECREF(meth); + } + #else + PyTypeObject *methoddescr_type = &PyMethodDescr_Type; + #endif + if (__Pyx_TypeCheck(method, methoddescr_type)) +#endif + { + PyMethodDescrObject *descr = (PyMethodDescrObject *)method; + #if PY_VERSION_HEX < 0x03020000 + PyTypeObject *d_type = descr->d_type; + #else + PyTypeObject *d_type = descr->d_common.d_type; + #endif + return PyDescr_NewClassMethod(d_type, descr->d_method); + } +#endif + else if (PyMethod_Check(method)) { + return PyClassMethod_New(PyMethod_GET_FUNCTION(method)); + } + else { + return PyClassMethod_New(method); + } +} + +/* GetNameInClass */ +static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name) { + PyObject *result; + PyObject *dict; + assert(PyType_Check(nmspace)); +#if CYTHON_USE_TYPE_SLOTS + dict = 
((PyTypeObject*)nmspace)->tp_dict; + Py_XINCREF(dict); +#else + dict = PyObject_GetAttr(nmspace, __pyx_n_s_dict); +#endif + if (likely(dict)) { + result = PyObject_GetItem(dict, name); + Py_DECREF(dict); + if (result) { + return result; + } + } + PyErr_Clear(); + __Pyx_GetModuleGlobalNameUncached(result, name); + return result; +} + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + CYTHON_MAYBE_UNUSED_VAR(tstate); + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && 
unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} +#endif + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, + PyObject *firstlineno, PyObject *name) { + PyObject *replace = NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; + replace = PyObject_GetAttrString(code, "replace"); + if (likely(replace)) { + PyObject *result; + result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); + Py_DECREF(replace); + return result; + } + PyErr_Clear(); + #if __PYX_LIMITED_VERSION_HEX < 0x030780000 + { + PyObject *compiled = NULL, *result = NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; + compiled = Py_CompileString( + "out = type(code)(\n" + " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" + " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" + " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" + " code.co_lnotab)\n", "", Py_file_input); + if (!compiled) return NULL; + result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); + Py_DECREF(compiled); + if (!result) PyErr_Print(); + Py_DECREF(result); + result = PyDict_GetItemString(scratch_dict, "out"); + if (result) Py_INCREF(result); + return result; + } + #else + return NULL; + #endif +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; + PyObject *replace = NULL, *getframe = NULL, *frame = NULL; + PyObject *exc_type, *exc_value, *exc_traceback; + int success = 0; + if (c_line) { + (void) __pyx_cfilenm; + (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); + } + PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); + code_object = Py_CompileString("_getframe()", filename, Py_eval_input); + if (unlikely(!code_object)) goto bad; + py_py_line = PyLong_FromLong(py_line); + if (unlikely(!py_py_line)) goto bad; + py_funcname = PyUnicode_FromString(funcname); + if (unlikely(!py_funcname)) goto bad; + dict = PyDict_New(); + if (unlikely(!dict)) goto bad; + { + PyObject *old_code_object = code_object; + code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); + 
Py_DECREF(old_code_object); + } + if (unlikely(!code_object)) goto bad; + getframe = PySys_GetObject("_getframe"); + if (unlikely(!getframe)) goto bad; + if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad; + frame = PyEval_EvalCode(code_object, dict, dict); + if (unlikely(!frame) || frame == Py_None) goto bad; + success = 1; + bad: + PyErr_Restore(exc_type, exc_value, exc_traceback); + Py_XDECREF(code_object); + Py_XDECREF(py_py_line); + Py_XDECREF(py_funcname); + Py_XDECREF(dict); + Py_XDECREF(replace); + if (success) { + PyTraceBack_Here( + (struct _frame*)frame); + } + Py_XDECREF(frame); +} +#else +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; + #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; + py_srcfile = PyString_FromString(filename); + if (!py_srcfile) goto bad; + #endif + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + funcname = PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + if (!py_funcname) goto bad; + #endif + } + #if PY_MAJOR_VERSION < 3 + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject *ptype, *pvalue, *ptraceback; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) { + /* If the code object creation fails, then we should clear the + fetched exception references and propagate the new exception */ + Py_XDECREF(ptype); + Py_XDECREF(pvalue); + Py_XDECREF(ptraceback); + goto bad; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} +#endif + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(long) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned 
long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(long) <= sizeof(unsigned long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | 
(long)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } + } +#endif + if ((sizeof(long) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + long val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (long) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (long) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (long) -1; + } else { + stepval = v; + } + v = NULL; + val = (long) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((long) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 
0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((long) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (long) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_PY_LONG_LONG(unsigned PY_LONG_LONG value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG) -1, const_zero = (unsigned PY_LONG_LONG) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(unsigned PY_LONG_LONG) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(unsigned PY_LONG_LONG) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + unsigned char *bytes = (unsigned char *)&value; +#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 + if (is_unsigned) { + return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); + } else { + return PyLong_FromNativeBytes(bytes, sizeof(value), -1); + } +#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 + int one = 1; int little = (int)*(unsigned char *)&one; + return _PyLong_FromByteArray(bytes, sizeof(unsigned PY_LONG_LONG), + little, !is_unsigned); +#else + int one = 1; int little = (int)*(unsigned char *)&one; + PyObject *from_bytes, *result = NULL; + PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; + from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); + if (!from_bytes) return NULL; + py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(unsigned PY_LONG_LONG)); + if (!py_bytes) goto limited_bad; + order_str = PyUnicode_FromString(little ? 
"little" : "big"); + if (!order_str) goto limited_bad; + arg_tuple = PyTuple_Pack(2, py_bytes, order_str); + if (!arg_tuple) goto limited_bad; + if (!is_unsigned) { + kwds = PyDict_New(); + if (!kwds) goto limited_bad; + if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; + } + result = PyObject_Call(from_bytes, arg_tuple, kwds); + limited_bad: + Py_XDECREF(kwds); + Py_XDECREF(arg_tuple); + Py_XDECREF(order_str); + Py_XDECREF(py_bytes); + Py_XDECREF(from_bytes); + return result; +#endif + } +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + unsigned char *bytes = (unsigned char *)&value; +#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 + if (is_unsigned) { + return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); + } else { + return PyLong_FromNativeBytes(bytes, sizeof(value), -1); + } +#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 + int one = 1; int little = (int)*(unsigned char *)&one; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); +#else + int one = 1; int little = (int)*(unsigned char *)&one; + PyObject *from_bytes, *result = NULL; + PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; + from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); + if (!from_bytes) return NULL; + py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); + if (!py_bytes) goto limited_bad; + order_str = PyUnicode_FromString(little ? 
"little" : "big"); + if (!order_str) goto limited_bad; + arg_tuple = PyTuple_Pack(2, py_bytes, order_str); + if (!arg_tuple) goto limited_bad; + if (!is_unsigned) { + kwds = PyDict_New(); + if (!kwds) goto limited_bad; + if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; + } + result = PyObject_Call(from_bytes, arg_tuple, kwds); + limited_bad: + Py_XDECREF(kwds); + Py_XDECREF(arg_tuple); + Py_XDECREF(order_str); + Py_XDECREF(py_bytes); + Py_XDECREF(from_bytes); + return result; +#endif + } +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(int) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(int) <= sizeof(unsigned long))) { 
+ __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } + } +#endif + if ((sizeof(int) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + 
} else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + int val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (int) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (int) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (int) -1; + } else { + stepval = v; + } + v = NULL; + val = (int) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((int) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 
0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((int) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (int) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (cls == a || cls == b) return 1; + mro = cls->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + PyObject *base = PyTuple_GET_ITEM(mro, i); + if (base == (PyObject *)a || base == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + if (exc_type1) { + return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); + } else { + return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030B00A4 + return Py_Version & ~0xFFUL; +#else + const char* rt_version = Py_GetVersion(); + unsigned long version = 0; + unsigned long factor = 0x01000000UL; + unsigned int digit = 0; + int i = 0; + while (factor) { + while ('0' <= rt_version[i] && rt_version[i] <= '9') { + digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); + ++i; + } + version += factor * digit; + if (rt_version[i] != '.') + break; + digit = 0; + factor >>= 8; + ++i; + } + return version; +#endif +} +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { + const unsigned long MAJOR_MINOR = 0xFFFF0000UL; + if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) + return 0; + if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) + return 1; + { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compile time Python version %d.%d " + "of module '%.100s' " + "%s " + "runtime version %d.%d", + (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), + __Pyx_MODULE_NAME, + (allow_newer) ? 
"was newer than" : "does not match", + (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) + ); + return PyErr_WarnEx(NULL, message, 1); + } +} + +/* FunctionExport */ +static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + union { + void (*fp)(void); + void *p; + } tmp; + d = PyObject_GetAttrString(__pyx_m, (char *)"__pyx_capi__"); + if (!d) { + PyErr_Clear(); + d = PyDict_New(); + if (!d) + goto bad; + Py_INCREF(d); + if (PyModule_AddObject(__pyx_m, (char *)"__pyx_capi__", d) < 0) + goto bad; + } + tmp.fp = f; + cobj = PyCapsule_New(tmp.p, sig, 0); + if (!cobj) + goto bad; + if (PyDict_SetItemString(d, name, cobj) < 0) + goto bad; + Py_DECREF(cobj); + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(cobj); + Py_XDECREF(d); + return -1; +} + +/* InitStrings */ +#if PY_MAJOR_VERSION >= 3 +static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { + if (t.is_unicode | t.is_str) { + if (t.intern) { + *str = PyUnicode_InternFromString(t.s); + } else if (t.encoding) { + *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); + } else { + *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); + } + } else { + *str = PyBytes_FromStringAndSize(t.s, t.n - 1); + } + if (!*str) + return -1; + if (PyObject_Hash(*str) == -1) + return -1; + return 0; +} +#endif +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION >= 3 + __Pyx_InitString(*t, t->p); + #else + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + #endif + ++t; + } + return 0; +} + +#include +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { + size_t len = strlen(s); + if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { + PyErr_SetString(PyExc_OverflowError, "byte string is too long"); + return -1; + } + return (Py_ssize_t) len; +} +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return __Pyx_PyUnicode_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return PyByteArray_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if 
__PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { + __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). 
" + "The ability to return an instance of a strict subclass of int is deprecated, " + "and may be removed in a future version of Python.", + result_type_name)) { + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; + } + __Pyx_DECREF_TypeName(result_type_name); + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", + type_name, type_name, result_type_name); + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(__Pyx_PyLong_IsCompact(b))) { + return __Pyx_PyLong_CompactValue(b); + } else { + const digit* digits = __Pyx_PyLong_Digits(b); + const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + 
return ival; +} +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +/* #### Code section: utility_code_pragmas_end ### */ +#ifdef _MSC_VER +#pragma warning( pop ) +#endif + + + +/* #### Code section: end ### */ +#endif /* Py_PYTHON_H */ diff --git a/lib/sqlalchemy/util/_immutabledict_cy.c b/lib/sqlalchemy/util/_immutabledict_cy.c new file mode 100644 index 00000000000..1290b9ec844 --- /dev/null +++ b/lib/sqlalchemy/util/_immutabledict_cy.c @@ -0,0 +1,15840 @@ +/* Generated by Cython 3.0.11 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "name": "sqlalchemy.util._immutabledict_cy", + "sources": [ + "lib/sqlalchemy/util/_immutabledict_cy.py" + ] + }, + "module_name": "sqlalchemy.util._immutabledict_cy" +} +END: Cython Metadata */ + +#ifndef PY_SSIZE_T_CLEAN +#define PY_SSIZE_T_CLEAN +#endif /* PY_SSIZE_T_CLEAN */ +#if defined(CYTHON_LIMITED_API) && 0 + #ifndef Py_LIMITED_API + #if CYTHON_LIMITED_API+0 > 0x03030000 + #define Py_LIMITED_API CYTHON_LIMITED_API + #else + #define Py_LIMITED_API 0x03030000 + #endif + #endif +#endif + +#include "Python.h" + + #if PY_MAJOR_VERSION <= 2 + #define PyDict_GetItemWithError _PyDict_GetItemWithError + #endif + +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.7+ or Python 3.3+. +#else +#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API +#define __PYX_EXTRA_ABI_MODULE_NAME "limited" +#else +#define __PYX_EXTRA_ABI_MODULE_NAME "" +#endif +#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME +#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI +#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." +#define CYTHON_HEX_VERSION 0x03000BF0 +#define CYTHON_FUTURE_DIVISION 1 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #define HAVE_LONG_LONG +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX +#if defined(GRAALVM_PYTHON) + /* For very preliminary testing purposes. Most variables are set the same as PyPy. 
+ The existence of this section does not imply that anything works or is even tested */ + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 1 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(PYPY_VERSION) + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) + 
#endif + #if PY_VERSION_HEX < 0x03090000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(CYTHON_LIMITED_API) + #ifdef Py_LIMITED_API + #undef __PYX_LIMITED_VERSION_HEX + #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API + #endif + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 1 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #undef CYTHON_CLINE_IN_TRACEBACK + #define CYTHON_CLINE_IN_TRACEBACK 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 1 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #endif + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #undef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 1 + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 + #endif + #undef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 +#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #ifndef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef 
CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL 0 + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL 1 + #endif + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 0 + #endif +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #define CYTHON_COMPILING_IN_LIMITED_API 0 + #define CYTHON_COMPILING_IN_GRAAL 0 + #define CYTHON_COMPILING_IN_NOGIL 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #ifndef CYTHON_USE_TYPE_SPECS + #define CYTHON_USE_TYPE_SPECS 0 + #endif + #ifndef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #ifndef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_GIL + #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) + #endif + #ifndef CYTHON_METH_FASTCALL + #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP487_INIT_SUBCLASS + #define CYTHON_PEP487_INIT_SUBCLASS 1 + #endif + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) + #define CYTHON_PEP489_MULTI_PHASE_INIT 1 + #endif + #ifndef CYTHON_USE_MODULE_STATE + #define CYTHON_USE_MODULE_STATE 0 + #endif + #if PY_VERSION_HEX < 0x030400a1 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #elif 
!defined(CYTHON_USE_TP_FINALIZE) + #define CYTHON_USE_TP_FINALIZE 1 + #endif + #if PY_VERSION_HEX < 0x030600B1 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #elif !defined(CYTHON_USE_DICT_VERSIONS) + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) + #endif + #if PY_VERSION_HEX < 0x030700A3 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #elif !defined(CYTHON_USE_EXC_INFO_STACK) + #define CYTHON_USE_EXC_INFO_STACK 1 + #endif + #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC + #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 + #endif + #ifndef CYTHON_USE_FREELISTS + #define CYTHON_USE_FREELISTS 1 + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if !defined(CYTHON_VECTORCALL) +#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) +#endif +#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(maybe_unused) + #define CYTHON_UNUSED [[maybe_unused]] + #endif + #endif + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR + #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_USE_CPP_STD_MOVE + #if defined(__cplusplus) && (\ + __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) + #define CYTHON_USE_CPP_STD_MOVE 1 + #else + #define CYTHON_USE_CPP_STD_MOVE 0 + #endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned short uint16_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 
uint32_t; + #endif + #endif + #if _MSC_VER < 1300 + #ifdef _WIN64 + typedef unsigned long long __pyx_uintptr_t; + #else + typedef unsigned int __pyx_uintptr_t; + #endif + #else + #ifdef _WIN64 + typedef unsigned __int64 __pyx_uintptr_t; + #else + typedef unsigned __int32 __pyx_uintptr_t; + #endif + #endif +#else + #include + typedef uintptr_t __pyx_uintptr_t; +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) + /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 + * but leads to warnings with -pedantic, since it is a C++17 feature */ + #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif +#ifdef __cplusplus + template + struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; + #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) +#else + #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) +#endif +#if CYTHON_COMPILING_IN_PYPY == 1 + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) +#else + #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) +#endif +#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_DefaultClassType PyClass_Type + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_DefaultClassType PyType_Type +#if CYTHON_COMPILING_IN_LIMITED_API + static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *exception_table = NULL; + PyObject *types_module=NULL, *code_type=NULL, *result=NULL; + #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 + PyObject *version_info; + PyObject *py_minor_version = NULL; + #endif + long minor_version = 0; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 + minor_version = 11; + #else + if (!(version_info = PySys_GetObject("version_info"))) goto end; + if 
(!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; + minor_version = PyLong_AsLong(py_minor_version); + Py_DECREF(py_minor_version); + if (minor_version == -1 && PyErr_Occurred()) goto end; + #endif + if (!(types_module = PyImport_ImportModule("types"))) goto end; + if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; + if (minor_version <= 7) { + (void)p; + result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else if (minor_version <= 10) { + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, fline, lnos, fv, cell); + } else { + if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; + result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, + c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); + } + end: + Py_XDECREF(code_type); + Py_XDECREF(exception_table); + Py_XDECREF(types_module); + if (type) { + PyErr_Restore(type, value, traceback); + } + return result; + } + #ifndef CO_OPTIMIZED + #define CO_OPTIMIZED 0x0001 + #endif + #ifndef CO_NEWLOCALS + #define CO_NEWLOCALS 0x0002 + #endif + #ifndef CO_VARARGS + #define CO_VARARGS 0x0004 + #endif + #ifndef CO_VARKEYWORDS + #define CO_VARKEYWORDS 0x0008 + #endif + #ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x0200 + #endif + #ifndef CO_GENERATOR + #define CO_GENERATOR 0x0020 + #endif + #ifndef CO_COROUTINE + #define CO_COROUTINE 0x0080 + #endif +#elif PY_VERSION_HEX >= 0x030B0000 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyCodeObject *result; + PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); + if (!empty_bytes) return NULL; + result = + #if PY_VERSION_HEX >= 0x030C0000 + PyUnstable_Code_NewWithPosOnlyArgs + #else + PyCode_NewWithPosOnlyArgs + #endif + (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); + Py_DECREF(empty_bytes); + return result; + } +#elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif +#endif +#if PY_VERSION_HEX >= 0x030900A4 || defined(Py_IS_TYPE) + #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) +#else + #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) + #define __Pyx_Py_Is(x, y) Py_Is(x, y) +#else + #define __Pyx_Py_Is(x, y) ((x) == (y)) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) + #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) +#else + #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) + #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) +#else + #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) +#endif +#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) + #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) +#else + #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) +#endif +#define 
__Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) +#if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) +#else + #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) +#endif +#ifndef CO_COROUTINE + #define CO_COROUTINE 0x80 +#endif +#ifndef CO_ASYNC_GENERATOR + #define CO_ASYNC_GENERATOR 0x200 +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef Py_TPFLAGS_SEQUENCE + #define Py_TPFLAGS_SEQUENCE 0 +#endif +#ifndef Py_TPFLAGS_MAPPING + #define Py_TPFLAGS_MAPPING 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #if PY_VERSION_HEX >= 0x030d00A4 + # define __Pyx_PyCFunctionFast PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords + #else + # define __Pyx_PyCFunctionFast _PyCFunctionFast + # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords + #endif +#endif +#if CYTHON_METH_FASTCALL + #define __Pyx_METH_FASTCALL METH_FASTCALL + #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast + #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords +#else + #define __Pyx_METH_FASTCALL METH_VARARGS + #define __Pyx_PyCFunction_FastCall PyCFunction + #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords +#endif +#if CYTHON_VECTORCALL + #define __pyx_vectorcallfunc vectorcallfunc + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET + #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) +#elif CYTHON_BACKPORT_VECTORCALL + typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, + size_t nargsf, PyObject *kwnames); + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) +#else + #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 + #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) +#endif +#if PY_MAJOR_VERSION >= 0x030900B1 +#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) +#else +#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) +#endif +#define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) +#elif !CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) +#endif +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) +static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { + return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? 
NULL : ((PyCFunctionObject*)func)->m_self; +} +#endif +static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) { +#if CYTHON_COMPILING_IN_LIMITED_API + return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; +#else + return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +#endif +} +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) +#if __PYX_LIMITED_VERSION_HEX < 0x030900B1 + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) + typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); +#else + #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) + #define __Pyx_PyCMethod PyCMethod +#endif +#ifndef METH_METHOD + #define METH_METHOD 0x200 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyThreadState_Current PyThreadState_Get() +#elif !CYTHON_FAST_THREAD_STATE + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) +{ + void *result; + result = PyModule_GetState(op); + if (!result) + Py_FatalError("Couldn't find the module state"); + return result; +} +#endif +#define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) +#else + #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} 
+#endif +#if PY_MAJOR_VERSION < 3 + #if CYTHON_COMPILING_IN_PYPY + #if PYPY_VERSION_NUM < 0x07030600 + #if defined(__cplusplus) && __cplusplus >= 201402L + [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] + #elif defined(__GNUC__) || defined(__clang__) + __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) + #elif defined(_MSC_VER) + __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) + #endif + static CYTHON_INLINE int PyGILState_Check(void) { + return 0; + } + #else // PYPY_VERSION_NUM < 0x07030600 + #endif // PYPY_VERSION_NUM < 0x07030600 + #else + static CYTHON_INLINE int PyGILState_Check(void) { + PyThreadState * tstate = _PyThreadState_Current; + return tstate && (tstate == PyGILState_GetThisThreadState()); + } + #endif +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { + PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); + if (res == NULL) PyErr_Clear(); + return res; +} +#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) +#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#else +static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { +#if CYTHON_COMPILING_IN_PYPY + return PyDict_GetItem(dict, name); +#else + PyDictEntry *ep; + PyDictObject *mp = (PyDictObject*) dict; + long hash = ((PyStringObject *) name)->ob_shash; + assert(hash != -1); + ep = (mp->ma_lookup)(mp, name, hash); + if (ep == NULL) { + return NULL; + } + return ep->me_value; +#endif +} +#define __Pyx_PyDict_GetItemStr PyDict_GetItem +#endif +#if CYTHON_USE_TYPE_SLOTS + #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) + #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) + #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) +#else + #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) + #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) + #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) +#else + #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) +#endif +#if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 +#define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ + PyTypeObject *type = Py_TYPE((PyObject*)obj);\ + assert(__Pyx_PyType_HasFeature(type, 
Py_TPFLAGS_HEAPTYPE));\ + PyObject_GC_Del(obj);\ + Py_DECREF(type);\ +} +#else +#define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) +#endif +#if CYTHON_COMPILING_IN_LIMITED_API + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) + #define __Pyx_PyUnicode_DATA(u) ((void*)u) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) +#elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_READY(op) (0) + #else + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #endif + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) + #if PY_VERSION_HEX >= 0x030C0000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) + #else + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) + #endif + #endif +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535U : 1114111U) + #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #if !defined(PyUnicode_DecodeUnicodeEscape) + #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) + #endif + #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) + #undef PyUnicode_Contains + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) + #endif + #if !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) + #endif + #if !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) + #endif +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#ifndef PyObject_Unicode + #define PyObject_Unicode PyObject_Str +#endif +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#if CYTHON_COMPILING_IN_CPYTHON + #define __Pyx_PySequence_ListKeepNew(obj)\ + (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? 
__Pyx_NewRef(obj) : PySequence_List(obj)) +#else + #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) +#endif +#if PY_VERSION_HEX >= 0x030900A4 + #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) +#else + #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) +#else + #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) + #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) + #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) + #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) + #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) + #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) + #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) + #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) +#endif +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) +#else + static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { + PyObject *module = PyImport_AddModule(name); + Py_XINCREF(module); + return module; + } +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define __Pyx_Py3Int_Check(op) PyLong_Check(op) + #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#else + #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) + #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + 
#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) + #if !defined(_USE_MATH_DEFINES) + #define _USE_MATH_DEFINES + #endif +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + +#define __PYX_MARK_ERR_POS(f_index, lineno) \ + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } +#define __PYX_ERR(f_index, lineno, Ln_error) \ + { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } + +#ifdef CYTHON_EXTERN_C + #undef __PYX_EXTERN_C + #define __PYX_EXTERN_C CYTHON_EXTERN_C +#elif defined(__PYX_EXTERN_C) + #ifdef _MSC_VER + #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") + #else + #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead. + #endif +#else + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__sqlalchemy__util___immutabledict_cy +#define __PYX_HAVE_API__sqlalchemy__util___immutabledict_cy +/* Early includes */ +#include +#include +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined 
(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s); +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char*); +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +#define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_VERSION_HEX >= 0x030C00A7 + #ifndef _PyLong_SIGN_MASK + #define _PyLong_SIGN_MASK 3 + #endif + #ifndef _PyLong_NON_SIZE_BITS + #define _PyLong_NON_SIZE_BITS 3 + #endif + #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) + #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) + #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) + #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) + #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_SignedDigitCount(x)\ + ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) + #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) + #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) + #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) + #else + #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) + #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) + #endif + typedef Py_ssize_t __Pyx_compact_pylong; + typedef size_t __Pyx_compact_upylong; + #else + #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) + #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) + #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) + #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) + #define __Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) + #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) + #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) + #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) + #define __Pyx_PyLong_CompactValue(x)\ + ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? 
-(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0])) + typedef sdigit __Pyx_compact_pylong; + typedef digit __Pyx_compact_upylong; + #endif + #if PY_VERSION_HEX >= 0x030C00A5 + #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit) + #else + #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit) + #endif +#endif +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +#include +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = (char) c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#include +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +#if !CYTHON_USE_MODULE_STATE 
+static PyObject *__pyx_m = NULL; +#endif +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm = __FILE__; +static const char *__pyx_filename; + +/* #### Code section: filename_table ### */ + +static const char *__pyx_f[] = { + "lib/sqlalchemy/util/_immutabledict_cy.py", + "", + "type.pxd", +}; +/* #### Code section: utility_code_proto_before_types ### */ +/* ForceInitThreads.proto */ +#ifndef __PYX_FORCE_INIT_THREADS + #define __PYX_FORCE_INIT_THREADS 0 +#endif + +/* #### Code section: numeric_typedefs ### */ +/* #### Code section: complex_type_declarations ### */ +/* #### Code section: type_declarations ### */ + +/*--- Type declarations ---*/ +struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase; +struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict; + +/* "sqlalchemy/util/_immutabledict_cy.py":71 + * + * @cython.cclass + * class ImmutableDictBase(Dict[_KT, _VT]): # <<<<<<<<<<<<<< + * # NOTE: this method is required in 3.9 and speeds up the use case + * # ImmutableDictBase[str,int](a_dict) significantly + */ +struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase { + PyDictObject __pyx_base; +}; + + +/* "sqlalchemy/util/_immutabledict_cy.py":109 + * # a type checking section and other workaround for the crash + * @cython.cclass + * class immutabledict(Dict[_KT, _VT]): # <<<<<<<<<<<<<< + * """An immutable version of a dict.""" + * + */ +struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict { + PyDictObject __pyx_base; +}; + +/* #### Code section: utility_code_proto ### */ + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, Py_ssize_t); + void (*DECREF)(void*, PyObject*, Py_ssize_t); + void (*GOTREF)(void*, PyObject*, Py_ssize_t); + void (*GIVEREF)(void*, PyObject*, Py_ssize_t); + void* (*SetupContext)(const char*, Py_ssize_t, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ + } + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)) + #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext() +#endif + #define __Pyx_RefNannyFinishContextNogil() {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __Pyx_RefNannyFinishContext();\ + PyGILState_Release(__pyx_gilstate_save);\ + } + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, 
(PyObject *)(r), (__LINE__)) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) + #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContextNogil() + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_Py_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; Py_XDECREF(tmp);\ + } while (0) +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#if PY_VERSION_HEX >= 0x030C00A6 +#define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) +#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? 
(PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL) +#else +#define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL) +#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type) +#endif +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL) +#define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6 +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* PyObjectGetAttrStrNoError.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* TupleAndListFromArray.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n); +static CYTHON_INLINE PyObject* __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n); +#endif + +/* IncludeStringH.proto */ +#include + +/* BytesEquals.proto */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); + +/* UnicodeEquals.proto */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); + +/* fastcall.proto */ +#if CYTHON_AVOID_BORROWED_REFS + #define __Pyx_Arg_VARARGS(args, i) PySequence_GetItem(args, i) +#elif CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i) +#else + #define __Pyx_Arg_VARARGS(args, i) PyTuple_GetItem(args, i) +#endif +#if CYTHON_AVOID_BORROWED_REFS + #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) + #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) +#else + #define __Pyx_Arg_NewRef_VARARGS(arg) arg + #define 
__Pyx_Arg_XDECREF_VARARGS(arg) +#endif +#define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) +#define __Pyx_KwValues_VARARGS(args, nargs) NULL +#define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s) +#define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw) +#if CYTHON_METH_FASTCALL + #define __Pyx_Arg_FASTCALL(args, i) args[i] + #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) + #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) + static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 + CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); + #else + #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) + #endif + #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs + to have the same reference counting */ + #define __Pyx_Arg_XDECREF_FASTCALL(arg) +#else + #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS + #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS + #define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS + #define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS + #define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS + #define __Pyx_Arg_NewRef_FASTCALL(arg) __Pyx_Arg_NewRef_VARARGS(arg) + #define __Pyx_Arg_XDECREF_FASTCALL(arg) __Pyx_Arg_XDECREF_VARARGS(arg) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS +#define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start) +#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start) +#else +#define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop) +#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop) +#endif + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, + const char* function_name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* PyObjectFormatSimple.proto */ +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ + PyObject_Format(s, f)) +#elif PY_MAJOR_VERSION < 3 + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ + likely(PyString_CheckExact(s)) ? PyUnicode_FromEncodedObject(s, NULL, "strict") :\ + PyObject_Format(s, f)) +#elif CYTHON_USE_TYPE_SLOTS + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ + likely(PyLong_CheckExact(s)) ? PyLong_Type.tp_repr(s) :\ + likely(PyFloat_CheckExact(s)) ? PyFloat_Type.tp_repr(s) :\ + PyObject_Format(s, f)) +#else + #define __Pyx_PyObject_FormatSimple(s, f) (\ + likely(PyUnicode_CheckExact(s)) ? 
(Py_INCREF(s), s) :\ + PyObject_Format(s, f)) +#endif + +/* UnicodeConcatInPlace.proto */ +# if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 + #if CYTHON_REFNANNY + #define __Pyx_PyUnicode_ConcatInPlace(left, right) __Pyx_PyUnicode_ConcatInPlaceImpl(&left, right, __pyx_refnanny) + #else + #define __Pyx_PyUnicode_ConcatInPlace(left, right) __Pyx_PyUnicode_ConcatInPlaceImpl(&left, right) + #endif + static CYTHON_INLINE PyObject *__Pyx_PyUnicode_ConcatInPlaceImpl(PyObject **p_left, PyObject *right + #if CYTHON_REFNANNY + , void* __pyx_refnanny + #endif + ); +#else +#define __Pyx_PyUnicode_ConcatInPlace __Pyx_PyUnicode_Concat +#endif +#define __Pyx_PyUnicode_ConcatInPlaceSafe(left, right) ((unlikely((left) == Py_None) || unlikely((right) == Py_None)) ?\ + PyNumber_InPlaceAdd(left, right) : __Pyx_PyUnicode_ConcatInPlace(left, right)) + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#if !CYTHON_VECTORCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif +#if !CYTHON_VECTORCALL +#if PY_VERSION_HEX >= 0x03080000 + #include "frameobject.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif + #define __Pxy_PyFrame_Initialize_Offsets() + #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) +#else + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif +#endif +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectFastCall.proto */ +#define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* PyDictVersioning.proto */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = 
__PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) do {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +#define __Pyx_GetModuleGlobalNameUncached(var, name) do {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} while(0) +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* KeywordStringCheck.proto */ +static int __Pyx_CheckKeywordStrings(PyObject *kw, const char* function_name, int kw_allowed); + +/* GetAttr3.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); + +/* RaiseUnexpectedTypeError.proto */ +static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); + +/* JoinPyUnicode.proto */ +static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, + Py_UCS4 max_char); + +/* PyObjectCallNoArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); + +/* PySequenceContains.proto */ +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* GetAttr.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); + +/* HasAttr.proto */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* IncludeStructmemberH.proto */ +#include + +/* FixUpExtensionType.proto */ +#if CYTHON_USE_TYPE_SPECS +static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type); +#endif + +/* FormatTypeName.proto */ +#if CYTHON_COMPILING_IN_LIMITED_API +typedef PyObject *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%U" +static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp); +#define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) +#else +typedef const char *__Pyx_TypeName; +#define __Pyx_FMT_TYPENAME "%.200s" +#define __Pyx_PyType_GetName(tp) ((tp)->tp_name) +#define __Pyx_DECREF_TypeName(obj) +#endif + +/* ValidateExternBase.proto */ +static int __Pyx_validate_extern_base(PyTypeObject *base); + +/* PyObjectGetMethod.proto */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); + +/* PyObjectCallMethod0.proto */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); + +/* ValidateBasesTuple.proto */ +#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS +static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases); +#endif + +/* PyType_Ready.proto */ +CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t); + +/* PyObject_GenericGetAttrNoDict.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr +#endif + +/* PyObject_GenericGetAttr.proto */ +#if CYTHON_USE_TYPE_SLOTS && 
CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr +#endif + +/* SetupReduce.proto */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_setup_reduce(PyObject* type_obj); +#endif + +/* TypeImport.proto */ +#ifndef __PYX_HAVE_RT_ImportType_proto_3_0_11 +#define __PYX_HAVE_RT_ImportType_proto_3_0_11 +#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L +#include +#endif +#if (defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L) || __cplusplus >= 201103L +#define __PYX_GET_STRUCT_ALIGNMENT_3_0_11(s) alignof(s) +#else +#define __PYX_GET_STRUCT_ALIGNMENT_3_0_11(s) sizeof(void*) +#endif +enum __Pyx_ImportType_CheckSize_3_0_11 { + __Pyx_ImportType_CheckSize_Error_3_0_11 = 0, + __Pyx_ImportType_CheckSize_Warn_3_0_11 = 1, + __Pyx_ImportType_CheckSize_Ignore_3_0_11 = 2 +}; +static PyTypeObject *__Pyx_ImportType_3_0_11(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_11 check_size); +#endif + +/* FetchSharedCythonModule.proto */ +static PyObject *__Pyx_FetchSharedCythonABIModule(void); + +/* FetchCommonType.proto */ +#if !CYTHON_USE_TYPE_SPECS +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); +#else +static PyTypeObject* __Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases); +#endif + +/* PyMethodNew.proto */ +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { + PyObject *typesModule=NULL, *methodType=NULL, *result=NULL; + CYTHON_UNUSED_VAR(typ); + if (!self) + return __Pyx_NewRef(func); + typesModule = PyImport_ImportModule("types"); + if (!typesModule) return NULL; + methodType = PyObject_GetAttrString(typesModule, "MethodType"); + Py_DECREF(typesModule); + if (!methodType) return NULL; + result = PyObject_CallFunctionObjArgs(methodType, func, self, NULL); + Py_DECREF(methodType); + return result; +} +#elif PY_MAJOR_VERSION >= 3 +static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { + CYTHON_UNUSED_VAR(typ); + if (!self) + return __Pyx_NewRef(func); + return PyMethod_New(func, self); +} +#else + #define __Pyx_PyMethod_New PyMethod_New +#endif + +/* PyVectorcallFastCallDict.proto */ +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw); +#endif + +/* CythonFunctionShared.proto */ +#define __Pyx_CyFunction_USED +#define __Pyx_CYFUNCTION_STATICMETHOD 0x01 +#define __Pyx_CYFUNCTION_CLASSMETHOD 0x02 +#define __Pyx_CYFUNCTION_CCLASS 0x04 +#define __Pyx_CYFUNCTION_COROUTINE 0x08 +#define __Pyx_CyFunction_GetClosure(f)\ + (((__pyx_CyFunctionObject *) (f))->func_closure) +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + #define __Pyx_CyFunction_GetClassObj(f)\ + (((__pyx_CyFunctionObject *) (f))->func_classobj) +#else + #define __Pyx_CyFunction_GetClassObj(f)\ + ((PyObject*) ((PyCMethodObject *) (f))->mm_class) +#endif +#define __Pyx_CyFunction_SetClassObj(f, classobj)\ + __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj)) +#define __Pyx_CyFunction_Defaults(type, f)\ + ((type *)(((__pyx_CyFunctionObject *) (f))->defaults)) +#define __Pyx_CyFunction_SetDefaultsGetter(f, g)\ + ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g) 
+typedef struct { +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject_HEAD + PyObject *func; +#elif PY_VERSION_HEX < 0x030900B1 + PyCFunctionObject func; +#else + PyCMethodObject func; +#endif +#if CYTHON_BACKPORT_VECTORCALL + __pyx_vectorcallfunc func_vectorcall; +#endif +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API + PyObject *func_weakreflist; +#endif + PyObject *func_dict; + PyObject *func_name; + PyObject *func_qualname; + PyObject *func_doc; + PyObject *func_globals; + PyObject *func_code; + PyObject *func_closure; +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + PyObject *func_classobj; +#endif + void *defaults; + int defaults_pyobjects; + size_t defaults_size; + int flags; + PyObject *defaults_tuple; + PyObject *defaults_kwdict; + PyObject *(*defaults_getter)(PyObject *); + PyObject *func_annotations; + PyObject *func_is_coroutine; +} __pyx_CyFunctionObject; +#undef __Pyx_CyOrPyCFunction_Check +#define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType) +#define __Pyx_CyOrPyCFunction_Check(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type) +#define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType) +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc); +#undef __Pyx_IsSameCFunction +#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCyOrCFunction(func, cfunc) +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj); +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, + size_t size, + int pyobjects); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, + PyObject *tuple); +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, + PyObject *dict); +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, + PyObject *dict); +static int __pyx_CyFunction_init(PyObject *module); +#if CYTHON_METH_FASTCALL +static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +#if CYTHON_BACKPORT_VECTORCALL +#define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall) +#else +#define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall) +#endif +#endif + +/* CythonFunction.proto */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); + +/* SetNameInClass.proto */ +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 +#define __Pyx_SetNameInClass(ns, name, value)\ + (likely(PyDict_CheckExact(ns)) ? 
_PyDict_SetItem_KnownHash(ns, name, value, ((PyASCIIObject *) name)->hash) : PyObject_SetItem(ns, name, value)) +#elif CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_SetNameInClass(ns, name, value)\ + (likely(PyDict_CheckExact(ns)) ? PyDict_SetItem(ns, name, value) : PyObject_SetItem(ns, name, value)) +#else +#define __Pyx_SetNameInClass(ns, name, value) PyObject_SetItem(ns, name, value) +#endif + +/* CalculateMetaclass.proto */ +static PyObject *__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases); + +/* PyObjectCall2Args.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); + +/* PyObjectLookupSpecial.proto */ +#if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS +#define __Pyx_PyObject_LookupSpecialNoError(obj, attr_name) __Pyx__PyObject_LookupSpecial(obj, attr_name, 0) +#define __Pyx_PyObject_LookupSpecial(obj, attr_name) __Pyx__PyObject_LookupSpecial(obj, attr_name, 1) +static CYTHON_INLINE PyObject* __Pyx__PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name, int with_error); +#else +#define __Pyx_PyObject_LookupSpecialNoError(o,n) __Pyx_PyObject_GetAttrStrNoError(o,n) +#define __Pyx_PyObject_LookupSpecial(o,n) __Pyx_PyObject_GetAttrStr(o,n) +#endif + +/* Py3ClassCreate.proto */ +static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, PyObject *qualname, + PyObject *mkw, PyObject *modname, PyObject *doc); +static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, PyObject *dict, + PyObject *mkw, int calculate_metaclass, int allow_py2_metaclass); + +/* ClassMethod.proto */ +#include "descrobject.h" +CYTHON_UNUSED static PyObject* __Pyx_Method_ClassMethod(PyObject *method); + +/* GetNameInClass.proto */ +#define __Pyx_GetNameInClass(var, nmspace, name) (var) = __Pyx__GetNameInClass(nmspace, name) +static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name); + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +#if !CYTHON_COMPILING_IN_LIMITED_API +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); +#endif + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* GCCDiagnostics.proto */ +#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#define __Pyx_HAS_GCC_DIAGNOSTIC +#endif + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static unsigned long __Pyx_get_runtime_version(void); +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +/* #### Code section: module_declarations ### */ + +/* Module declarations from "cython" */ + +/* Module declarations from "libc.string" */ + +/* Module declarations from "libc.stdio" */ + +/* Module declarations from "__builtin__" */ + +/* Module declarations from "cpython.type" */ + +/* Module declarations from "cpython" */ + +/* Module declarations from "cpython.object" */ + +/* Module declarations from "cpython.pyport" */ + +/* Module declarations from "cpython.dict" */ + +/* Module declarations from "sqlalchemy.util._immutabledict_cy" */ +static PyObject 
*__pyx_f_10sqlalchemy_4util_17_immutabledict_cy___pyx_unpickle_ImmutableDictBase__set_state(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *, PyObject *); /*proto*/ +/* #### Code section: typeinfo ### */ +/* #### Code section: before_global_var ### */ +#define __Pyx_MODULE_NAME "sqlalchemy.util._immutabledict_cy" +extern int __pyx_module_is_main_sqlalchemy__util___immutabledict_cy; +int __pyx_module_is_main_sqlalchemy__util___immutabledict_cy = 0; + +/* Implementation of "sqlalchemy.util._immutabledict_cy" */ +/* #### Code section: global_var ### */ +static PyObject *__pyx_builtin_TypeError; +/* #### Code section: string_decls ### */ +static const char __pyx_k_[] = ")"; +static const char __pyx_k_d[] = "d"; +static const char __pyx_k_KT[] = "_KT"; +static const char __pyx_k_VT[] = "_VT"; +static const char __pyx_k__3[] = "."; +static const char __pyx_k__4[] = "?"; +static const char __pyx_k_gc[] = "gc"; +static const char __pyx_k_kw[] = "kw"; +static const char __pyx_k_or[] = "__or__"; +static const char __pyx_k_Any[] = "Any"; +static const char __pyx_k_arg[] = "arg"; +static const char __pyx_k_cls[] = "cls"; +static const char __pyx_k_doc[] = "__doc__"; +static const char __pyx_k_key[] = "key"; +static const char __pyx_k_new[] = "__new__"; +static const char __pyx_k_obj[] = "obj"; +static const char __pyx_k_pop[] = "pop"; +static const char __pyx_k_ror[] = "__ror__"; +static const char __pyx_k_Dict[] = "Dict"; +static const char __pyx_k_Self[] = "Self"; +static const char __pyx_k_bool[] = "bool"; +static const char __pyx_k_copy[] = "copy"; +static const char __pyx_k_dict[] = "__dict__"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_repr[] = "__repr__"; +static const char __pyx_k_self[] = "self"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_bound[] = "bound"; +static const char __pyx_k_class[] = "__class__"; +static const char __pyx_k_clear[] = "clear"; +static const char __pyx_k_dicts[] = "dicts"; +static const char __pyx_k_other[] = "other"; +static const char __pyx_k_slots[] = "__slots__"; +static const char __pyx_k_state[] = "state"; +static const char __pyx_k_super[] = "super"; +static const char __pyx_k_union[] = "union"; +static const char __pyx_k_value[] = "value"; +static const char __pyx_k_dict_2[] = "_dict"; +static const char __pyx_k_enable[] = "enable"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_module[] = "__module__"; +static const char __pyx_k_object[] = "object"; +static const char __pyx_k_pickle[] = "pickle"; +static const char __pyx_k_reduce[] = "__reduce__"; +static const char __pyx_k_result[] = "result"; +static const char __pyx_k_return[] = "return"; +static const char __pyx_k_typing[] = "typing"; +static const char __pyx_k_update[] = "update"; +static const char __pyx_k_Mapping[] = "Mapping"; +static const char __pyx_k_TypeVar[] = "TypeVar"; +static const char __pyx_k_default[] = "default"; +static const char __pyx_k_delitem[] = "__delitem__"; +static const char __pyx_k_disable[] = "disable"; +static const char __pyx_k_popitem[] = "popitem"; +static const char __pyx_k_prepare[] = "__prepare__"; +static const char __pyx_k_setattr[] = "__setattr__"; +static const char __pyx_k_setitem[] = "__setitem__"; +static const char __pyx_k_Hashable[] = "Hashable"; +static const char __pyx_k_NoReturn[] = "NoReturn"; +static const char __pyx_k_Optional[] = "Optional"; +static const char __pyx_k_getstate[] = 
"__getstate__"; +static const char __pyx_k_pyx_type[] = "__pyx_type"; +static const char __pyx_k_qualname[] = "__qualname__"; +static const char __pyx_k_readonly[] = "_readonly"; +static const char __pyx_k_set_name[] = "__set_name__"; +static const char __pyx_k_setstate[] = "__setstate__"; +static const char __pyx_k_TypeError[] = "TypeError"; +static const char __pyx_k_isenabled[] = "isenabled"; +static const char __pyx_k_metaclass[] = "__metaclass__"; +static const char __pyx_k_pyx_state[] = "__pyx_state"; +static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; +static const char __pyx_k_type_Self[] = "type[Self]"; +static const char __pyx_k_merge_with[] = "merge_with"; +static const char __pyx_k_pyx_result[] = "__pyx_result"; +static const char __pyx_k_setdefault[] = "setdefault"; +static const char __pyx_k_PickleError[] = "PickleError"; +static const char __pyx_k_is_compiled[] = "_is_compiled"; +static const char __pyx_k_Optional_Any[] = "Optional[Any]"; +static const char __pyx_k_immutable_fn[] = "_immutable_fn"; +static const char __pyx_k_is_coroutine[] = "_is_coroutine"; +static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; +static const char __pyx_k_stringsource[] = ""; +static const char __pyx_k_use_setstate[] = "use_setstate"; +static const char __pyx_k_class_getitem[] = "__class_getitem__"; +static const char __pyx_k_immutabledict[] = "immutabledict("; +static const char __pyx_k_init_subclass[] = "__init_subclass__"; +static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; +static const char __pyx_k_immutabledict_2[] = "immutabledict"; +static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; +static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char __pyx_k_ImmutableDictBase[] = "ImmutableDictBase"; +static const char __pyx_k_ReadOnlyContainer[] = "ReadOnlyContainer"; +static const char __pyx_k_immutabledict_pop[] = "immutabledict.pop"; +static const char __pyx_k_asyncio_coroutines[] = "asyncio.coroutines"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_immutabledict_copy[] = "immutabledict.copy"; +static const char __pyx_k_immutabledict_clear[] = "immutabledict.clear"; +static const char __pyx_k_immutabledict_union[] = "immutabledict.union"; +static const char __pyx_k_object_is_immutable[] = " object is immutable"; +static const char __pyx_k_immutabledict_update[] = "immutabledict.update"; +static const char __pyx_k_ImmutableDictBase_pop[] = "ImmutableDictBase.pop"; +static const char __pyx_k_immutabledict__KT__VT[] = "immutabledict[_KT, _VT]"; +static const char __pyx_k_immutabledict_popitem[] = "immutabledict.popitem"; +static const char __pyx_k_immutabledict___reduce[] = "immutabledict.__reduce__"; +static const char __pyx_k_ImmutableDictBase_clear[] = "ImmutableDictBase.clear"; +static const char __pyx_k_ImmutableDictBase_update[] = "ImmutableDictBase.update"; +static const char __pyx_k_Optional_Mapping__KT__VT[] = "Optional[Mapping[_KT, _VT]]"; +static const char __pyx_k_immutabledict_merge_with[] = "immutabledict.merge_with"; +static const char __pyx_k_immutabledict_setdefault[] = "immutabledict.setdefault"; +static const char __pyx_k_ImmutableDictBase_popitem[] = "ImmutableDictBase.popitem"; +static const char __pyx_k_ReadOnlyContainer___delitem[] = "ReadOnlyContainer.__delitem__"; +static const char __pyx_k_ReadOnlyContainer___setattr[] = "ReadOnlyContainer.__setattr__"; +static const char __pyx_k_ReadOnlyContainer___setitem[] = "ReadOnlyContainer.__setitem__"; 
+static const char __pyx_k_ReadOnlyContainer__readonly[] = "ReadOnlyContainer._readonly"; +static const char __pyx_k_ImmutableDictBase_setdefault[] = "ImmutableDictBase.setdefault"; +static const char __pyx_k_immutabledict___class_getitem[] = "immutabledict.__class_getitem__"; +static const char __pyx_k_pyx_unpickle_ImmutableDictBase[] = "__pyx_unpickle_ImmutableDictBase"; +static const char __pyx_k_object_is_immutable_and_or_read[] = " object is immutable and/or readonly"; +static const char __pyx_k_ImmutableDictBase___class_getite[] = "ImmutableDictBase.__class_getitem__"; +static const char __pyx_k_ImmutableDictBase___reduce_cytho[] = "ImmutableDictBase.__reduce_cython__"; +static const char __pyx_k_ImmutableDictBase___setstate_cyt[] = "ImmutableDictBase.__setstate_cython__"; +static const char __pyx_k_Incompatible_checksums_0x_x_vs_0[] = "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())"; +static const char __pyx_k_lib_sqlalchemy_util__immutabledi[] = "lib/sqlalchemy/util/_immutabledict_cy.py"; +static const char __pyx_k_sqlalchemy_util__immutabledict_c[] = "sqlalchemy.util._immutabledict_cy"; +/* #### Code section: decls ### */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_2_immutable_fn(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_obj); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer__readonly(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_2__delitem__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_4__setitem__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_6__setattr__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase___class_getitem__(PyTypeObject *__pyx_v_cls, CYTHON_UNUSED PyObject *__pyx_v_key); /* proto */ +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_2__delitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key); /* proto */ +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_4__setitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_6__setattr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_8clear(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self); /* proto */ +static PyObject 
*__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_10pop(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_12popitem(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_14setdefault(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_16update(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_arg, CYTHON_UNUSED PyObject *__pyx_v_kw); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_18__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_20__setstate_cython__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict___class_getitem__(PyTypeObject *__pyx_v_cls, CYTHON_UNUSED PyObject *__pyx_v_key); /* proto */ +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_2__delitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key); /* proto */ +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_4__setitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_6__setattr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_8clear(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_10pop(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_12popitem(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_14setdefault(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_16update(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_arg, CYTHON_UNUSED PyObject *__pyx_v_kw); /* proto */ 
+static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_18__repr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_20union(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_22merge_with(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v_dicts); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_24copy(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_26__reduce__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_28__ior__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v__immutabledict__value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_30__or__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v__immutabledict__value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_32__ror__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v__immutabledict__value); /* proto */ +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_4__pyx_unpickle_ImmutableDictBase(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +/* #### Code section: late_includes ### */ +/* #### Code section: module_state ### */ +typedef struct { + PyObject *__pyx_d; + PyObject *__pyx_b; + PyObject *__pyx_cython_runtime; + PyObject *__pyx_empty_tuple; + PyObject *__pyx_empty_bytes; + PyObject *__pyx_empty_unicode; + #ifdef __Pyx_CyFunction_USED + PyTypeObject *__pyx_CyFunctionType; + #endif + #ifdef __Pyx_FusedFunction_USED + PyTypeObject *__pyx_FusedFunctionType; + #endif + #ifdef __Pyx_Generator_USED + PyTypeObject *__pyx_GeneratorType; + #endif + #ifdef __Pyx_IterableCoroutine_USED + PyTypeObject *__pyx_IterableCoroutineType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineAwaitType; + #endif + #ifdef __Pyx_Coroutine_USED + PyTypeObject *__pyx_CoroutineType; + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + #endif + PyTypeObject *__pyx_ptype_7cpython_4type_type; + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + #endif + #if CYTHON_USE_MODULE_STATE + PyObject *__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase; + PyObject *__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict; + #endif + PyTypeObject *__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase; + PyTypeObject *__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict; + PyObject *__pyx_kp_u_; + PyObject *__pyx_n_s_Any; + PyObject *__pyx_n_s_Dict; 
+ PyObject *__pyx_n_s_Hashable; + PyObject *__pyx_n_s_ImmutableDictBase; + PyObject *__pyx_n_s_ImmutableDictBase___class_getite; + PyObject *__pyx_n_s_ImmutableDictBase___reduce_cytho; + PyObject *__pyx_n_s_ImmutableDictBase___setstate_cyt; + PyObject *__pyx_n_s_ImmutableDictBase_clear; + PyObject *__pyx_n_s_ImmutableDictBase_pop; + PyObject *__pyx_n_s_ImmutableDictBase_popitem; + PyObject *__pyx_n_s_ImmutableDictBase_setdefault; + PyObject *__pyx_n_s_ImmutableDictBase_update; + PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0; + PyObject *__pyx_n_s_KT; + PyObject *__pyx_n_u_KT; + PyObject *__pyx_n_s_Mapping; + PyObject *__pyx_n_s_NoReturn; + PyObject *__pyx_n_s_Optional; + PyObject *__pyx_kp_s_Optional_Any; + PyObject *__pyx_kp_s_Optional_Mapping__KT__VT; + PyObject *__pyx_n_s_PickleError; + PyObject *__pyx_n_s_ReadOnlyContainer; + PyObject *__pyx_n_s_ReadOnlyContainer___delitem; + PyObject *__pyx_n_s_ReadOnlyContainer___setattr; + PyObject *__pyx_n_s_ReadOnlyContainer___setitem; + PyObject *__pyx_n_s_ReadOnlyContainer__readonly; + PyObject *__pyx_n_s_Self; + PyObject *__pyx_n_s_TypeError; + PyObject *__pyx_n_s_TypeVar; + PyObject *__pyx_n_s_VT; + PyObject *__pyx_n_u_VT; + PyObject *__pyx_kp_u__3; + PyObject *__pyx_n_s__4; + PyObject *__pyx_n_s_arg; + PyObject *__pyx_n_s_asyncio_coroutines; + PyObject *__pyx_n_s_bool; + PyObject *__pyx_n_s_bound; + PyObject *__pyx_n_s_class; + PyObject *__pyx_n_s_class_getitem; + PyObject *__pyx_n_s_clear; + PyObject *__pyx_n_s_cline_in_traceback; + PyObject *__pyx_n_s_cls; + PyObject *__pyx_n_s_copy; + PyObject *__pyx_n_s_d; + PyObject *__pyx_n_s_default; + PyObject *__pyx_n_s_delitem; + PyObject *__pyx_n_s_dict; + PyObject *__pyx_n_s_dict_2; + PyObject *__pyx_n_s_dicts; + PyObject *__pyx_kp_u_disable; + PyObject *__pyx_n_s_doc; + PyObject *__pyx_kp_u_enable; + PyObject *__pyx_kp_u_gc; + PyObject *__pyx_n_s_getstate; + PyObject *__pyx_n_s_immutable_fn; + PyObject *__pyx_kp_u_immutabledict; + PyObject *__pyx_n_s_immutabledict_2; + PyObject *__pyx_kp_s_immutabledict__KT__VT; + PyObject *__pyx_n_s_immutabledict___class_getitem; + PyObject *__pyx_n_s_immutabledict___reduce; + PyObject *__pyx_n_s_immutabledict_clear; + PyObject *__pyx_n_s_immutabledict_copy; + PyObject *__pyx_n_s_immutabledict_merge_with; + PyObject *__pyx_n_s_immutabledict_pop; + PyObject *__pyx_n_s_immutabledict_popitem; + PyObject *__pyx_n_s_immutabledict_setdefault; + PyObject *__pyx_n_s_immutabledict_union; + PyObject *__pyx_n_s_immutabledict_update; + PyObject *__pyx_n_s_import; + PyObject *__pyx_n_s_init_subclass; + PyObject *__pyx_n_s_is_compiled; + PyObject *__pyx_n_s_is_coroutine; + PyObject *__pyx_kp_u_isenabled; + PyObject *__pyx_n_s_key; + PyObject *__pyx_n_s_kw; + PyObject *__pyx_kp_s_lib_sqlalchemy_util__immutabledi; + PyObject *__pyx_n_s_main; + PyObject *__pyx_n_s_merge_with; + PyObject *__pyx_n_s_metaclass; + PyObject *__pyx_n_s_module; + PyObject *__pyx_n_s_name; + PyObject *__pyx_n_s_new; + PyObject *__pyx_n_s_obj; + PyObject *__pyx_n_s_object; + PyObject *__pyx_kp_u_object_is_immutable; + PyObject *__pyx_kp_u_object_is_immutable_and_or_read; + PyObject *__pyx_n_s_or; + PyObject *__pyx_n_s_other; + PyObject *__pyx_n_s_pickle; + PyObject *__pyx_n_s_pop; + PyObject *__pyx_n_s_popitem; + PyObject *__pyx_n_s_prepare; + PyObject *__pyx_n_s_pyx_PickleError; + PyObject *__pyx_n_s_pyx_checksum; + PyObject *__pyx_n_s_pyx_result; + PyObject *__pyx_n_s_pyx_state; + PyObject *__pyx_n_s_pyx_type; + PyObject *__pyx_n_s_pyx_unpickle_ImmutableDictBase; + PyObject *__pyx_n_s_qualname; 
+ PyObject *__pyx_n_s_readonly; + PyObject *__pyx_n_s_reduce; + PyObject *__pyx_n_s_reduce_cython; + PyObject *__pyx_n_s_reduce_ex; + PyObject *__pyx_n_s_repr; + PyObject *__pyx_n_s_result; + PyObject *__pyx_n_s_return; + PyObject *__pyx_n_s_ror; + PyObject *__pyx_n_s_self; + PyObject *__pyx_n_s_set_name; + PyObject *__pyx_n_s_setattr; + PyObject *__pyx_n_s_setdefault; + PyObject *__pyx_n_s_setitem; + PyObject *__pyx_n_s_setstate; + PyObject *__pyx_n_s_setstate_cython; + PyObject *__pyx_n_s_slots; + PyObject *__pyx_n_s_sqlalchemy_util__immutabledict_c; + PyObject *__pyx_n_s_state; + PyObject *__pyx_kp_s_stringsource; + PyObject *__pyx_n_s_super; + PyObject *__pyx_n_s_test; + PyObject *__pyx_kp_s_type_Self; + PyObject *__pyx_n_s_typing; + PyObject *__pyx_n_s_union; + PyObject *__pyx_n_s_update; + PyObject *__pyx_n_s_use_setstate; + PyObject *__pyx_n_s_value; + PyObject *__pyx_int_222419149; + PyObject *__pyx_int_228825662; + PyObject *__pyx_int_238750788; + PyObject *__pyx_tuple__2; + PyObject *__pyx_tuple__6; + PyObject *__pyx_tuple__8; + PyObject *__pyx_tuple__10; + PyObject *__pyx_tuple__12; + PyObject *__pyx_tuple__15; + PyObject *__pyx_tuple__16; + PyObject *__pyx_tuple__17; + PyObject *__pyx_tuple__20; + PyObject *__pyx_tuple__22; + PyObject *__pyx_tuple__25; + PyObject *__pyx_tuple__27; + PyObject *__pyx_tuple__29; + PyObject *__pyx_tuple__37; + PyObject *__pyx_tuple__39; + PyObject *__pyx_tuple__43; + PyObject *__pyx_codeobj__5; + PyObject *__pyx_codeobj__7; + PyObject *__pyx_codeobj__9; + PyObject *__pyx_codeobj__11; + PyObject *__pyx_codeobj__13; + PyObject *__pyx_codeobj__14; + PyObject *__pyx_codeobj__18; + PyObject *__pyx_codeobj__19; + PyObject *__pyx_codeobj__21; + PyObject *__pyx_codeobj__23; + PyObject *__pyx_codeobj__24; + PyObject *__pyx_codeobj__26; + PyObject *__pyx_codeobj__28; + PyObject *__pyx_codeobj__30; + PyObject *__pyx_codeobj__31; + PyObject *__pyx_codeobj__32; + PyObject *__pyx_codeobj__33; + PyObject *__pyx_codeobj__34; + PyObject *__pyx_codeobj__35; + PyObject *__pyx_codeobj__36; + PyObject *__pyx_codeobj__38; + PyObject *__pyx_codeobj__40; + PyObject *__pyx_codeobj__41; + PyObject *__pyx_codeobj__42; + PyObject *__pyx_codeobj__44; +} __pyx_mstate; + +#if CYTHON_USE_MODULE_STATE +#ifdef __cplusplus +namespace { + extern struct PyModuleDef __pyx_moduledef; +} /* anonymous namespace */ +#else +static struct PyModuleDef __pyx_moduledef; +#endif + +#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) + +#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) + +#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) +#else +static __pyx_mstate __pyx_mstate_global_static = +#ifdef __cplusplus + {}; +#else + {0}; +#endif +static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; +#endif +/* #### Code section: module_state_clear ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_clear(PyObject *m) { + __pyx_mstate *clear_module_state = __pyx_mstate(m); + if (!clear_module_state) return 0; + Py_CLEAR(clear_module_state->__pyx_d); + Py_CLEAR(clear_module_state->__pyx_b); + Py_CLEAR(clear_module_state->__pyx_cython_runtime); + Py_CLEAR(clear_module_state->__pyx_empty_tuple); + Py_CLEAR(clear_module_state->__pyx_empty_bytes); + Py_CLEAR(clear_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_CLEAR(clear_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); + #endif + 
Py_CLEAR(clear_module_state->__pyx_ptype_7cpython_4type_type); + Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + Py_CLEAR(clear_module_state->__pyx_kp_u_); + Py_CLEAR(clear_module_state->__pyx_n_s_Any); + Py_CLEAR(clear_module_state->__pyx_n_s_Dict); + Py_CLEAR(clear_module_state->__pyx_n_s_Hashable); + Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase); + Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase___class_getite); + Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase___reduce_cytho); + Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase___setstate_cyt); + Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase_clear); + Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase_pop); + Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase_popitem); + Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase_setdefault); + Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase_update); + Py_CLEAR(clear_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); + Py_CLEAR(clear_module_state->__pyx_n_s_KT); + Py_CLEAR(clear_module_state->__pyx_n_u_KT); + Py_CLEAR(clear_module_state->__pyx_n_s_Mapping); + Py_CLEAR(clear_module_state->__pyx_n_s_NoReturn); + Py_CLEAR(clear_module_state->__pyx_n_s_Optional); + Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_Any); + Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_Mapping__KT__VT); + Py_CLEAR(clear_module_state->__pyx_n_s_PickleError); + Py_CLEAR(clear_module_state->__pyx_n_s_ReadOnlyContainer); + Py_CLEAR(clear_module_state->__pyx_n_s_ReadOnlyContainer___delitem); + Py_CLEAR(clear_module_state->__pyx_n_s_ReadOnlyContainer___setattr); + Py_CLEAR(clear_module_state->__pyx_n_s_ReadOnlyContainer___setitem); + Py_CLEAR(clear_module_state->__pyx_n_s_ReadOnlyContainer__readonly); + Py_CLEAR(clear_module_state->__pyx_n_s_Self); + Py_CLEAR(clear_module_state->__pyx_n_s_TypeError); + Py_CLEAR(clear_module_state->__pyx_n_s_TypeVar); + Py_CLEAR(clear_module_state->__pyx_n_s_VT); + Py_CLEAR(clear_module_state->__pyx_n_u_VT); + Py_CLEAR(clear_module_state->__pyx_kp_u__3); + Py_CLEAR(clear_module_state->__pyx_n_s__4); + Py_CLEAR(clear_module_state->__pyx_n_s_arg); + Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); + Py_CLEAR(clear_module_state->__pyx_n_s_bool); + Py_CLEAR(clear_module_state->__pyx_n_s_bound); + Py_CLEAR(clear_module_state->__pyx_n_s_class); + Py_CLEAR(clear_module_state->__pyx_n_s_class_getitem); + Py_CLEAR(clear_module_state->__pyx_n_s_clear); + Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); + Py_CLEAR(clear_module_state->__pyx_n_s_cls); + Py_CLEAR(clear_module_state->__pyx_n_s_copy); + Py_CLEAR(clear_module_state->__pyx_n_s_d); + Py_CLEAR(clear_module_state->__pyx_n_s_default); + Py_CLEAR(clear_module_state->__pyx_n_s_delitem); + Py_CLEAR(clear_module_state->__pyx_n_s_dict); + Py_CLEAR(clear_module_state->__pyx_n_s_dict_2); + Py_CLEAR(clear_module_state->__pyx_n_s_dicts); + Py_CLEAR(clear_module_state->__pyx_kp_u_disable); + Py_CLEAR(clear_module_state->__pyx_n_s_doc); + Py_CLEAR(clear_module_state->__pyx_kp_u_enable); + Py_CLEAR(clear_module_state->__pyx_kp_u_gc); + Py_CLEAR(clear_module_state->__pyx_n_s_getstate); + 
Py_CLEAR(clear_module_state->__pyx_n_s_immutable_fn); + Py_CLEAR(clear_module_state->__pyx_kp_u_immutabledict); + Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_2); + Py_CLEAR(clear_module_state->__pyx_kp_s_immutabledict__KT__VT); + Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict___class_getitem); + Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict___reduce); + Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_clear); + Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_copy); + Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_merge_with); + Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_pop); + Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_popitem); + Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_setdefault); + Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_union); + Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_update); + Py_CLEAR(clear_module_state->__pyx_n_s_import); + Py_CLEAR(clear_module_state->__pyx_n_s_init_subclass); + Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); + Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); + Py_CLEAR(clear_module_state->__pyx_kp_u_isenabled); + Py_CLEAR(clear_module_state->__pyx_n_s_key); + Py_CLEAR(clear_module_state->__pyx_n_s_kw); + Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_util__immutabledi); + Py_CLEAR(clear_module_state->__pyx_n_s_main); + Py_CLEAR(clear_module_state->__pyx_n_s_merge_with); + Py_CLEAR(clear_module_state->__pyx_n_s_metaclass); + Py_CLEAR(clear_module_state->__pyx_n_s_module); + Py_CLEAR(clear_module_state->__pyx_n_s_name); + Py_CLEAR(clear_module_state->__pyx_n_s_new); + Py_CLEAR(clear_module_state->__pyx_n_s_obj); + Py_CLEAR(clear_module_state->__pyx_n_s_object); + Py_CLEAR(clear_module_state->__pyx_kp_u_object_is_immutable); + Py_CLEAR(clear_module_state->__pyx_kp_u_object_is_immutable_and_or_read); + Py_CLEAR(clear_module_state->__pyx_n_s_or); + Py_CLEAR(clear_module_state->__pyx_n_s_other); + Py_CLEAR(clear_module_state->__pyx_n_s_pickle); + Py_CLEAR(clear_module_state->__pyx_n_s_pop); + Py_CLEAR(clear_module_state->__pyx_n_s_popitem); + Py_CLEAR(clear_module_state->__pyx_n_s_prepare); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_PickleError); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_checksum); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_result); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_state); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_type); + Py_CLEAR(clear_module_state->__pyx_n_s_pyx_unpickle_ImmutableDictBase); + Py_CLEAR(clear_module_state->__pyx_n_s_qualname); + Py_CLEAR(clear_module_state->__pyx_n_s_readonly); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_reduce_ex); + Py_CLEAR(clear_module_state->__pyx_n_s_repr); + Py_CLEAR(clear_module_state->__pyx_n_s_result); + Py_CLEAR(clear_module_state->__pyx_n_s_return); + Py_CLEAR(clear_module_state->__pyx_n_s_ror); + Py_CLEAR(clear_module_state->__pyx_n_s_self); + Py_CLEAR(clear_module_state->__pyx_n_s_set_name); + Py_CLEAR(clear_module_state->__pyx_n_s_setattr); + Py_CLEAR(clear_module_state->__pyx_n_s_setdefault); + Py_CLEAR(clear_module_state->__pyx_n_s_setitem); + Py_CLEAR(clear_module_state->__pyx_n_s_setstate); + Py_CLEAR(clear_module_state->__pyx_n_s_setstate_cython); + Py_CLEAR(clear_module_state->__pyx_n_s_slots); + Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_util__immutabledict_c); + Py_CLEAR(clear_module_state->__pyx_n_s_state); + 
Py_CLEAR(clear_module_state->__pyx_kp_s_stringsource); + Py_CLEAR(clear_module_state->__pyx_n_s_super); + Py_CLEAR(clear_module_state->__pyx_n_s_test); + Py_CLEAR(clear_module_state->__pyx_kp_s_type_Self); + Py_CLEAR(clear_module_state->__pyx_n_s_typing); + Py_CLEAR(clear_module_state->__pyx_n_s_union); + Py_CLEAR(clear_module_state->__pyx_n_s_update); + Py_CLEAR(clear_module_state->__pyx_n_s_use_setstate); + Py_CLEAR(clear_module_state->__pyx_n_s_value); + Py_CLEAR(clear_module_state->__pyx_int_222419149); + Py_CLEAR(clear_module_state->__pyx_int_228825662); + Py_CLEAR(clear_module_state->__pyx_int_238750788); + Py_CLEAR(clear_module_state->__pyx_tuple__2); + Py_CLEAR(clear_module_state->__pyx_tuple__6); + Py_CLEAR(clear_module_state->__pyx_tuple__8); + Py_CLEAR(clear_module_state->__pyx_tuple__10); + Py_CLEAR(clear_module_state->__pyx_tuple__12); + Py_CLEAR(clear_module_state->__pyx_tuple__15); + Py_CLEAR(clear_module_state->__pyx_tuple__16); + Py_CLEAR(clear_module_state->__pyx_tuple__17); + Py_CLEAR(clear_module_state->__pyx_tuple__20); + Py_CLEAR(clear_module_state->__pyx_tuple__22); + Py_CLEAR(clear_module_state->__pyx_tuple__25); + Py_CLEAR(clear_module_state->__pyx_tuple__27); + Py_CLEAR(clear_module_state->__pyx_tuple__29); + Py_CLEAR(clear_module_state->__pyx_tuple__37); + Py_CLEAR(clear_module_state->__pyx_tuple__39); + Py_CLEAR(clear_module_state->__pyx_tuple__43); + Py_CLEAR(clear_module_state->__pyx_codeobj__5); + Py_CLEAR(clear_module_state->__pyx_codeobj__7); + Py_CLEAR(clear_module_state->__pyx_codeobj__9); + Py_CLEAR(clear_module_state->__pyx_codeobj__11); + Py_CLEAR(clear_module_state->__pyx_codeobj__13); + Py_CLEAR(clear_module_state->__pyx_codeobj__14); + Py_CLEAR(clear_module_state->__pyx_codeobj__18); + Py_CLEAR(clear_module_state->__pyx_codeobj__19); + Py_CLEAR(clear_module_state->__pyx_codeobj__21); + Py_CLEAR(clear_module_state->__pyx_codeobj__23); + Py_CLEAR(clear_module_state->__pyx_codeobj__24); + Py_CLEAR(clear_module_state->__pyx_codeobj__26); + Py_CLEAR(clear_module_state->__pyx_codeobj__28); + Py_CLEAR(clear_module_state->__pyx_codeobj__30); + Py_CLEAR(clear_module_state->__pyx_codeobj__31); + Py_CLEAR(clear_module_state->__pyx_codeobj__32); + Py_CLEAR(clear_module_state->__pyx_codeobj__33); + Py_CLEAR(clear_module_state->__pyx_codeobj__34); + Py_CLEAR(clear_module_state->__pyx_codeobj__35); + Py_CLEAR(clear_module_state->__pyx_codeobj__36); + Py_CLEAR(clear_module_state->__pyx_codeobj__38); + Py_CLEAR(clear_module_state->__pyx_codeobj__40); + Py_CLEAR(clear_module_state->__pyx_codeobj__41); + Py_CLEAR(clear_module_state->__pyx_codeobj__42); + Py_CLEAR(clear_module_state->__pyx_codeobj__44); + return 0; +} +#endif +/* #### Code section: module_state_traverse ### */ +#if CYTHON_USE_MODULE_STATE +static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { + __pyx_mstate *traverse_module_state = __pyx_mstate(m); + if (!traverse_module_state) return 0; + Py_VISIT(traverse_module_state->__pyx_d); + Py_VISIT(traverse_module_state->__pyx_b); + Py_VISIT(traverse_module_state->__pyx_cython_runtime); + Py_VISIT(traverse_module_state->__pyx_empty_tuple); + Py_VISIT(traverse_module_state->__pyx_empty_bytes); + Py_VISIT(traverse_module_state->__pyx_empty_unicode); + #ifdef __Pyx_CyFunction_USED + Py_VISIT(traverse_module_state->__pyx_CyFunctionType); + #endif + #ifdef __Pyx_FusedFunction_USED + Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); + #endif + Py_VISIT(traverse_module_state->__pyx_ptype_7cpython_4type_type); + 
Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + Py_VISIT(traverse_module_state->__pyx_kp_u_); + Py_VISIT(traverse_module_state->__pyx_n_s_Any); + Py_VISIT(traverse_module_state->__pyx_n_s_Dict); + Py_VISIT(traverse_module_state->__pyx_n_s_Hashable); + Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase); + Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase___class_getite); + Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase___reduce_cytho); + Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase___setstate_cyt); + Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase_clear); + Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase_pop); + Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase_popitem); + Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase_setdefault); + Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase_update); + Py_VISIT(traverse_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); + Py_VISIT(traverse_module_state->__pyx_n_s_KT); + Py_VISIT(traverse_module_state->__pyx_n_u_KT); + Py_VISIT(traverse_module_state->__pyx_n_s_Mapping); + Py_VISIT(traverse_module_state->__pyx_n_s_NoReturn); + Py_VISIT(traverse_module_state->__pyx_n_s_Optional); + Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_Any); + Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_Mapping__KT__VT); + Py_VISIT(traverse_module_state->__pyx_n_s_PickleError); + Py_VISIT(traverse_module_state->__pyx_n_s_ReadOnlyContainer); + Py_VISIT(traverse_module_state->__pyx_n_s_ReadOnlyContainer___delitem); + Py_VISIT(traverse_module_state->__pyx_n_s_ReadOnlyContainer___setattr); + Py_VISIT(traverse_module_state->__pyx_n_s_ReadOnlyContainer___setitem); + Py_VISIT(traverse_module_state->__pyx_n_s_ReadOnlyContainer__readonly); + Py_VISIT(traverse_module_state->__pyx_n_s_Self); + Py_VISIT(traverse_module_state->__pyx_n_s_TypeError); + Py_VISIT(traverse_module_state->__pyx_n_s_TypeVar); + Py_VISIT(traverse_module_state->__pyx_n_s_VT); + Py_VISIT(traverse_module_state->__pyx_n_u_VT); + Py_VISIT(traverse_module_state->__pyx_kp_u__3); + Py_VISIT(traverse_module_state->__pyx_n_s__4); + Py_VISIT(traverse_module_state->__pyx_n_s_arg); + Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); + Py_VISIT(traverse_module_state->__pyx_n_s_bool); + Py_VISIT(traverse_module_state->__pyx_n_s_bound); + Py_VISIT(traverse_module_state->__pyx_n_s_class); + Py_VISIT(traverse_module_state->__pyx_n_s_class_getitem); + Py_VISIT(traverse_module_state->__pyx_n_s_clear); + Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); + Py_VISIT(traverse_module_state->__pyx_n_s_cls); + Py_VISIT(traverse_module_state->__pyx_n_s_copy); + Py_VISIT(traverse_module_state->__pyx_n_s_d); + Py_VISIT(traverse_module_state->__pyx_n_s_default); + Py_VISIT(traverse_module_state->__pyx_n_s_delitem); + Py_VISIT(traverse_module_state->__pyx_n_s_dict); + Py_VISIT(traverse_module_state->__pyx_n_s_dict_2); + Py_VISIT(traverse_module_state->__pyx_n_s_dicts); + Py_VISIT(traverse_module_state->__pyx_kp_u_disable); + Py_VISIT(traverse_module_state->__pyx_n_s_doc); + Py_VISIT(traverse_module_state->__pyx_kp_u_enable); + 
Py_VISIT(traverse_module_state->__pyx_kp_u_gc); + Py_VISIT(traverse_module_state->__pyx_n_s_getstate); + Py_VISIT(traverse_module_state->__pyx_n_s_immutable_fn); + Py_VISIT(traverse_module_state->__pyx_kp_u_immutabledict); + Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_2); + Py_VISIT(traverse_module_state->__pyx_kp_s_immutabledict__KT__VT); + Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict___class_getitem); + Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict___reduce); + Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_clear); + Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_copy); + Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_merge_with); + Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_pop); + Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_popitem); + Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_setdefault); + Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_union); + Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_update); + Py_VISIT(traverse_module_state->__pyx_n_s_import); + Py_VISIT(traverse_module_state->__pyx_n_s_init_subclass); + Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); + Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); + Py_VISIT(traverse_module_state->__pyx_kp_u_isenabled); + Py_VISIT(traverse_module_state->__pyx_n_s_key); + Py_VISIT(traverse_module_state->__pyx_n_s_kw); + Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_util__immutabledi); + Py_VISIT(traverse_module_state->__pyx_n_s_main); + Py_VISIT(traverse_module_state->__pyx_n_s_merge_with); + Py_VISIT(traverse_module_state->__pyx_n_s_metaclass); + Py_VISIT(traverse_module_state->__pyx_n_s_module); + Py_VISIT(traverse_module_state->__pyx_n_s_name); + Py_VISIT(traverse_module_state->__pyx_n_s_new); + Py_VISIT(traverse_module_state->__pyx_n_s_obj); + Py_VISIT(traverse_module_state->__pyx_n_s_object); + Py_VISIT(traverse_module_state->__pyx_kp_u_object_is_immutable); + Py_VISIT(traverse_module_state->__pyx_kp_u_object_is_immutable_and_or_read); + Py_VISIT(traverse_module_state->__pyx_n_s_or); + Py_VISIT(traverse_module_state->__pyx_n_s_other); + Py_VISIT(traverse_module_state->__pyx_n_s_pickle); + Py_VISIT(traverse_module_state->__pyx_n_s_pop); + Py_VISIT(traverse_module_state->__pyx_n_s_popitem); + Py_VISIT(traverse_module_state->__pyx_n_s_prepare); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_PickleError); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_checksum); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_result); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_state); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_type); + Py_VISIT(traverse_module_state->__pyx_n_s_pyx_unpickle_ImmutableDictBase); + Py_VISIT(traverse_module_state->__pyx_n_s_qualname); + Py_VISIT(traverse_module_state->__pyx_n_s_readonly); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_reduce_ex); + Py_VISIT(traverse_module_state->__pyx_n_s_repr); + Py_VISIT(traverse_module_state->__pyx_n_s_result); + Py_VISIT(traverse_module_state->__pyx_n_s_return); + Py_VISIT(traverse_module_state->__pyx_n_s_ror); + Py_VISIT(traverse_module_state->__pyx_n_s_self); + Py_VISIT(traverse_module_state->__pyx_n_s_set_name); + Py_VISIT(traverse_module_state->__pyx_n_s_setattr); + Py_VISIT(traverse_module_state->__pyx_n_s_setdefault); + Py_VISIT(traverse_module_state->__pyx_n_s_setitem); + 
Py_VISIT(traverse_module_state->__pyx_n_s_setstate); + Py_VISIT(traverse_module_state->__pyx_n_s_setstate_cython); + Py_VISIT(traverse_module_state->__pyx_n_s_slots); + Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_util__immutabledict_c); + Py_VISIT(traverse_module_state->__pyx_n_s_state); + Py_VISIT(traverse_module_state->__pyx_kp_s_stringsource); + Py_VISIT(traverse_module_state->__pyx_n_s_super); + Py_VISIT(traverse_module_state->__pyx_n_s_test); + Py_VISIT(traverse_module_state->__pyx_kp_s_type_Self); + Py_VISIT(traverse_module_state->__pyx_n_s_typing); + Py_VISIT(traverse_module_state->__pyx_n_s_union); + Py_VISIT(traverse_module_state->__pyx_n_s_update); + Py_VISIT(traverse_module_state->__pyx_n_s_use_setstate); + Py_VISIT(traverse_module_state->__pyx_n_s_value); + Py_VISIT(traverse_module_state->__pyx_int_222419149); + Py_VISIT(traverse_module_state->__pyx_int_228825662); + Py_VISIT(traverse_module_state->__pyx_int_238750788); + Py_VISIT(traverse_module_state->__pyx_tuple__2); + Py_VISIT(traverse_module_state->__pyx_tuple__6); + Py_VISIT(traverse_module_state->__pyx_tuple__8); + Py_VISIT(traverse_module_state->__pyx_tuple__10); + Py_VISIT(traverse_module_state->__pyx_tuple__12); + Py_VISIT(traverse_module_state->__pyx_tuple__15); + Py_VISIT(traverse_module_state->__pyx_tuple__16); + Py_VISIT(traverse_module_state->__pyx_tuple__17); + Py_VISIT(traverse_module_state->__pyx_tuple__20); + Py_VISIT(traverse_module_state->__pyx_tuple__22); + Py_VISIT(traverse_module_state->__pyx_tuple__25); + Py_VISIT(traverse_module_state->__pyx_tuple__27); + Py_VISIT(traverse_module_state->__pyx_tuple__29); + Py_VISIT(traverse_module_state->__pyx_tuple__37); + Py_VISIT(traverse_module_state->__pyx_tuple__39); + Py_VISIT(traverse_module_state->__pyx_tuple__43); + Py_VISIT(traverse_module_state->__pyx_codeobj__5); + Py_VISIT(traverse_module_state->__pyx_codeobj__7); + Py_VISIT(traverse_module_state->__pyx_codeobj__9); + Py_VISIT(traverse_module_state->__pyx_codeobj__11); + Py_VISIT(traverse_module_state->__pyx_codeobj__13); + Py_VISIT(traverse_module_state->__pyx_codeobj__14); + Py_VISIT(traverse_module_state->__pyx_codeobj__18); + Py_VISIT(traverse_module_state->__pyx_codeobj__19); + Py_VISIT(traverse_module_state->__pyx_codeobj__21); + Py_VISIT(traverse_module_state->__pyx_codeobj__23); + Py_VISIT(traverse_module_state->__pyx_codeobj__24); + Py_VISIT(traverse_module_state->__pyx_codeobj__26); + Py_VISIT(traverse_module_state->__pyx_codeobj__28); + Py_VISIT(traverse_module_state->__pyx_codeobj__30); + Py_VISIT(traverse_module_state->__pyx_codeobj__31); + Py_VISIT(traverse_module_state->__pyx_codeobj__32); + Py_VISIT(traverse_module_state->__pyx_codeobj__33); + Py_VISIT(traverse_module_state->__pyx_codeobj__34); + Py_VISIT(traverse_module_state->__pyx_codeobj__35); + Py_VISIT(traverse_module_state->__pyx_codeobj__36); + Py_VISIT(traverse_module_state->__pyx_codeobj__38); + Py_VISIT(traverse_module_state->__pyx_codeobj__40); + Py_VISIT(traverse_module_state->__pyx_codeobj__41); + Py_VISIT(traverse_module_state->__pyx_codeobj__42); + Py_VISIT(traverse_module_state->__pyx_codeobj__44); + return 0; +} +#endif +/* #### Code section: module_state_defines ### */ +#define __pyx_d __pyx_mstate_global->__pyx_d +#define __pyx_b __pyx_mstate_global->__pyx_b +#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime +#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple +#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes +#define __pyx_empty_unicode 
__pyx_mstate_global->__pyx_empty_unicode +#ifdef __Pyx_CyFunction_USED +#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType +#endif +#ifdef __Pyx_FusedFunction_USED +#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType +#endif +#ifdef __Pyx_Generator_USED +#define __pyx_GeneratorType __pyx_mstate_global->__pyx_GeneratorType +#endif +#ifdef __Pyx_IterableCoroutine_USED +#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType +#endif +#ifdef __Pyx_Coroutine_USED +#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#define __pyx_ptype_7cpython_4type_type __pyx_mstate_global->__pyx_ptype_7cpython_4type_type +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#endif +#if CYTHON_USE_MODULE_STATE +#define __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase __pyx_mstate_global->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase +#define __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict __pyx_mstate_global->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict +#endif +#define __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase __pyx_mstate_global->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase +#define __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict __pyx_mstate_global->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict +#define __pyx_kp_u_ __pyx_mstate_global->__pyx_kp_u_ +#define __pyx_n_s_Any __pyx_mstate_global->__pyx_n_s_Any +#define __pyx_n_s_Dict __pyx_mstate_global->__pyx_n_s_Dict +#define __pyx_n_s_Hashable __pyx_mstate_global->__pyx_n_s_Hashable +#define __pyx_n_s_ImmutableDictBase __pyx_mstate_global->__pyx_n_s_ImmutableDictBase +#define __pyx_n_s_ImmutableDictBase___class_getite __pyx_mstate_global->__pyx_n_s_ImmutableDictBase___class_getite +#define __pyx_n_s_ImmutableDictBase___reduce_cytho __pyx_mstate_global->__pyx_n_s_ImmutableDictBase___reduce_cytho +#define __pyx_n_s_ImmutableDictBase___setstate_cyt __pyx_mstate_global->__pyx_n_s_ImmutableDictBase___setstate_cyt +#define __pyx_n_s_ImmutableDictBase_clear __pyx_mstate_global->__pyx_n_s_ImmutableDictBase_clear +#define __pyx_n_s_ImmutableDictBase_pop __pyx_mstate_global->__pyx_n_s_ImmutableDictBase_pop +#define __pyx_n_s_ImmutableDictBase_popitem __pyx_mstate_global->__pyx_n_s_ImmutableDictBase_popitem +#define __pyx_n_s_ImmutableDictBase_setdefault __pyx_mstate_global->__pyx_n_s_ImmutableDictBase_setdefault +#define __pyx_n_s_ImmutableDictBase_update __pyx_mstate_global->__pyx_n_s_ImmutableDictBase_update +#define __pyx_kp_s_Incompatible_checksums_0x_x_vs_0 __pyx_mstate_global->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0 +#define __pyx_n_s_KT __pyx_mstate_global->__pyx_n_s_KT +#define __pyx_n_u_KT __pyx_mstate_global->__pyx_n_u_KT +#define __pyx_n_s_Mapping __pyx_mstate_global->__pyx_n_s_Mapping +#define __pyx_n_s_NoReturn __pyx_mstate_global->__pyx_n_s_NoReturn +#define __pyx_n_s_Optional __pyx_mstate_global->__pyx_n_s_Optional +#define __pyx_kp_s_Optional_Any __pyx_mstate_global->__pyx_kp_s_Optional_Any +#define 
__pyx_kp_s_Optional_Mapping__KT__VT __pyx_mstate_global->__pyx_kp_s_Optional_Mapping__KT__VT +#define __pyx_n_s_PickleError __pyx_mstate_global->__pyx_n_s_PickleError +#define __pyx_n_s_ReadOnlyContainer __pyx_mstate_global->__pyx_n_s_ReadOnlyContainer +#define __pyx_n_s_ReadOnlyContainer___delitem __pyx_mstate_global->__pyx_n_s_ReadOnlyContainer___delitem +#define __pyx_n_s_ReadOnlyContainer___setattr __pyx_mstate_global->__pyx_n_s_ReadOnlyContainer___setattr +#define __pyx_n_s_ReadOnlyContainer___setitem __pyx_mstate_global->__pyx_n_s_ReadOnlyContainer___setitem +#define __pyx_n_s_ReadOnlyContainer__readonly __pyx_mstate_global->__pyx_n_s_ReadOnlyContainer__readonly +#define __pyx_n_s_Self __pyx_mstate_global->__pyx_n_s_Self +#define __pyx_n_s_TypeError __pyx_mstate_global->__pyx_n_s_TypeError +#define __pyx_n_s_TypeVar __pyx_mstate_global->__pyx_n_s_TypeVar +#define __pyx_n_s_VT __pyx_mstate_global->__pyx_n_s_VT +#define __pyx_n_u_VT __pyx_mstate_global->__pyx_n_u_VT +#define __pyx_kp_u__3 __pyx_mstate_global->__pyx_kp_u__3 +#define __pyx_n_s__4 __pyx_mstate_global->__pyx_n_s__4 +#define __pyx_n_s_arg __pyx_mstate_global->__pyx_n_s_arg +#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines +#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool +#define __pyx_n_s_bound __pyx_mstate_global->__pyx_n_s_bound +#define __pyx_n_s_class __pyx_mstate_global->__pyx_n_s_class +#define __pyx_n_s_class_getitem __pyx_mstate_global->__pyx_n_s_class_getitem +#define __pyx_n_s_clear __pyx_mstate_global->__pyx_n_s_clear +#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback +#define __pyx_n_s_cls __pyx_mstate_global->__pyx_n_s_cls +#define __pyx_n_s_copy __pyx_mstate_global->__pyx_n_s_copy +#define __pyx_n_s_d __pyx_mstate_global->__pyx_n_s_d +#define __pyx_n_s_default __pyx_mstate_global->__pyx_n_s_default +#define __pyx_n_s_delitem __pyx_mstate_global->__pyx_n_s_delitem +#define __pyx_n_s_dict __pyx_mstate_global->__pyx_n_s_dict +#define __pyx_n_s_dict_2 __pyx_mstate_global->__pyx_n_s_dict_2 +#define __pyx_n_s_dicts __pyx_mstate_global->__pyx_n_s_dicts +#define __pyx_kp_u_disable __pyx_mstate_global->__pyx_kp_u_disable +#define __pyx_n_s_doc __pyx_mstate_global->__pyx_n_s_doc +#define __pyx_kp_u_enable __pyx_mstate_global->__pyx_kp_u_enable +#define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc +#define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate +#define __pyx_n_s_immutable_fn __pyx_mstate_global->__pyx_n_s_immutable_fn +#define __pyx_kp_u_immutabledict __pyx_mstate_global->__pyx_kp_u_immutabledict +#define __pyx_n_s_immutabledict_2 __pyx_mstate_global->__pyx_n_s_immutabledict_2 +#define __pyx_kp_s_immutabledict__KT__VT __pyx_mstate_global->__pyx_kp_s_immutabledict__KT__VT +#define __pyx_n_s_immutabledict___class_getitem __pyx_mstate_global->__pyx_n_s_immutabledict___class_getitem +#define __pyx_n_s_immutabledict___reduce __pyx_mstate_global->__pyx_n_s_immutabledict___reduce +#define __pyx_n_s_immutabledict_clear __pyx_mstate_global->__pyx_n_s_immutabledict_clear +#define __pyx_n_s_immutabledict_copy __pyx_mstate_global->__pyx_n_s_immutabledict_copy +#define __pyx_n_s_immutabledict_merge_with __pyx_mstate_global->__pyx_n_s_immutabledict_merge_with +#define __pyx_n_s_immutabledict_pop __pyx_mstate_global->__pyx_n_s_immutabledict_pop +#define __pyx_n_s_immutabledict_popitem __pyx_mstate_global->__pyx_n_s_immutabledict_popitem +#define __pyx_n_s_immutabledict_setdefault 
__pyx_mstate_global->__pyx_n_s_immutabledict_setdefault +#define __pyx_n_s_immutabledict_union __pyx_mstate_global->__pyx_n_s_immutabledict_union +#define __pyx_n_s_immutabledict_update __pyx_mstate_global->__pyx_n_s_immutabledict_update +#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import +#define __pyx_n_s_init_subclass __pyx_mstate_global->__pyx_n_s_init_subclass +#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled +#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine +#define __pyx_kp_u_isenabled __pyx_mstate_global->__pyx_kp_u_isenabled +#define __pyx_n_s_key __pyx_mstate_global->__pyx_n_s_key +#define __pyx_n_s_kw __pyx_mstate_global->__pyx_n_s_kw +#define __pyx_kp_s_lib_sqlalchemy_util__immutabledi __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_util__immutabledi +#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main +#define __pyx_n_s_merge_with __pyx_mstate_global->__pyx_n_s_merge_with +#define __pyx_n_s_metaclass __pyx_mstate_global->__pyx_n_s_metaclass +#define __pyx_n_s_module __pyx_mstate_global->__pyx_n_s_module +#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name +#define __pyx_n_s_new __pyx_mstate_global->__pyx_n_s_new +#define __pyx_n_s_obj __pyx_mstate_global->__pyx_n_s_obj +#define __pyx_n_s_object __pyx_mstate_global->__pyx_n_s_object +#define __pyx_kp_u_object_is_immutable __pyx_mstate_global->__pyx_kp_u_object_is_immutable +#define __pyx_kp_u_object_is_immutable_and_or_read __pyx_mstate_global->__pyx_kp_u_object_is_immutable_and_or_read +#define __pyx_n_s_or __pyx_mstate_global->__pyx_n_s_or +#define __pyx_n_s_other __pyx_mstate_global->__pyx_n_s_other +#define __pyx_n_s_pickle __pyx_mstate_global->__pyx_n_s_pickle +#define __pyx_n_s_pop __pyx_mstate_global->__pyx_n_s_pop +#define __pyx_n_s_popitem __pyx_mstate_global->__pyx_n_s_popitem +#define __pyx_n_s_prepare __pyx_mstate_global->__pyx_n_s_prepare +#define __pyx_n_s_pyx_PickleError __pyx_mstate_global->__pyx_n_s_pyx_PickleError +#define __pyx_n_s_pyx_checksum __pyx_mstate_global->__pyx_n_s_pyx_checksum +#define __pyx_n_s_pyx_result __pyx_mstate_global->__pyx_n_s_pyx_result +#define __pyx_n_s_pyx_state __pyx_mstate_global->__pyx_n_s_pyx_state +#define __pyx_n_s_pyx_type __pyx_mstate_global->__pyx_n_s_pyx_type +#define __pyx_n_s_pyx_unpickle_ImmutableDictBase __pyx_mstate_global->__pyx_n_s_pyx_unpickle_ImmutableDictBase +#define __pyx_n_s_qualname __pyx_mstate_global->__pyx_n_s_qualname +#define __pyx_n_s_readonly __pyx_mstate_global->__pyx_n_s_readonly +#define __pyx_n_s_reduce __pyx_mstate_global->__pyx_n_s_reduce +#define __pyx_n_s_reduce_cython __pyx_mstate_global->__pyx_n_s_reduce_cython +#define __pyx_n_s_reduce_ex __pyx_mstate_global->__pyx_n_s_reduce_ex +#define __pyx_n_s_repr __pyx_mstate_global->__pyx_n_s_repr +#define __pyx_n_s_result __pyx_mstate_global->__pyx_n_s_result +#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return +#define __pyx_n_s_ror __pyx_mstate_global->__pyx_n_s_ror +#define __pyx_n_s_self __pyx_mstate_global->__pyx_n_s_self +#define __pyx_n_s_set_name __pyx_mstate_global->__pyx_n_s_set_name +#define __pyx_n_s_setattr __pyx_mstate_global->__pyx_n_s_setattr +#define __pyx_n_s_setdefault __pyx_mstate_global->__pyx_n_s_setdefault +#define __pyx_n_s_setitem __pyx_mstate_global->__pyx_n_s_setitem +#define __pyx_n_s_setstate __pyx_mstate_global->__pyx_n_s_setstate +#define __pyx_n_s_setstate_cython __pyx_mstate_global->__pyx_n_s_setstate_cython +#define __pyx_n_s_slots __pyx_mstate_global->__pyx_n_s_slots +#define 
__pyx_n_s_sqlalchemy_util__immutabledict_c __pyx_mstate_global->__pyx_n_s_sqlalchemy_util__immutabledict_c +#define __pyx_n_s_state __pyx_mstate_global->__pyx_n_s_state +#define __pyx_kp_s_stringsource __pyx_mstate_global->__pyx_kp_s_stringsource +#define __pyx_n_s_super __pyx_mstate_global->__pyx_n_s_super +#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test +#define __pyx_kp_s_type_Self __pyx_mstate_global->__pyx_kp_s_type_Self +#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing +#define __pyx_n_s_union __pyx_mstate_global->__pyx_n_s_union +#define __pyx_n_s_update __pyx_mstate_global->__pyx_n_s_update +#define __pyx_n_s_use_setstate __pyx_mstate_global->__pyx_n_s_use_setstate +#define __pyx_n_s_value __pyx_mstate_global->__pyx_n_s_value +#define __pyx_int_222419149 __pyx_mstate_global->__pyx_int_222419149 +#define __pyx_int_228825662 __pyx_mstate_global->__pyx_int_228825662 +#define __pyx_int_238750788 __pyx_mstate_global->__pyx_int_238750788 +#define __pyx_tuple__2 __pyx_mstate_global->__pyx_tuple__2 +#define __pyx_tuple__6 __pyx_mstate_global->__pyx_tuple__6 +#define __pyx_tuple__8 __pyx_mstate_global->__pyx_tuple__8 +#define __pyx_tuple__10 __pyx_mstate_global->__pyx_tuple__10 +#define __pyx_tuple__12 __pyx_mstate_global->__pyx_tuple__12 +#define __pyx_tuple__15 __pyx_mstate_global->__pyx_tuple__15 +#define __pyx_tuple__16 __pyx_mstate_global->__pyx_tuple__16 +#define __pyx_tuple__17 __pyx_mstate_global->__pyx_tuple__17 +#define __pyx_tuple__20 __pyx_mstate_global->__pyx_tuple__20 +#define __pyx_tuple__22 __pyx_mstate_global->__pyx_tuple__22 +#define __pyx_tuple__25 __pyx_mstate_global->__pyx_tuple__25 +#define __pyx_tuple__27 __pyx_mstate_global->__pyx_tuple__27 +#define __pyx_tuple__29 __pyx_mstate_global->__pyx_tuple__29 +#define __pyx_tuple__37 __pyx_mstate_global->__pyx_tuple__37 +#define __pyx_tuple__39 __pyx_mstate_global->__pyx_tuple__39 +#define __pyx_tuple__43 __pyx_mstate_global->__pyx_tuple__43 +#define __pyx_codeobj__5 __pyx_mstate_global->__pyx_codeobj__5 +#define __pyx_codeobj__7 __pyx_mstate_global->__pyx_codeobj__7 +#define __pyx_codeobj__9 __pyx_mstate_global->__pyx_codeobj__9 +#define __pyx_codeobj__11 __pyx_mstate_global->__pyx_codeobj__11 +#define __pyx_codeobj__13 __pyx_mstate_global->__pyx_codeobj__13 +#define __pyx_codeobj__14 __pyx_mstate_global->__pyx_codeobj__14 +#define __pyx_codeobj__18 __pyx_mstate_global->__pyx_codeobj__18 +#define __pyx_codeobj__19 __pyx_mstate_global->__pyx_codeobj__19 +#define __pyx_codeobj__21 __pyx_mstate_global->__pyx_codeobj__21 +#define __pyx_codeobj__23 __pyx_mstate_global->__pyx_codeobj__23 +#define __pyx_codeobj__24 __pyx_mstate_global->__pyx_codeobj__24 +#define __pyx_codeobj__26 __pyx_mstate_global->__pyx_codeobj__26 +#define __pyx_codeobj__28 __pyx_mstate_global->__pyx_codeobj__28 +#define __pyx_codeobj__30 __pyx_mstate_global->__pyx_codeobj__30 +#define __pyx_codeobj__31 __pyx_mstate_global->__pyx_codeobj__31 +#define __pyx_codeobj__32 __pyx_mstate_global->__pyx_codeobj__32 +#define __pyx_codeobj__33 __pyx_mstate_global->__pyx_codeobj__33 +#define __pyx_codeobj__34 __pyx_mstate_global->__pyx_codeobj__34 +#define __pyx_codeobj__35 __pyx_mstate_global->__pyx_codeobj__35 +#define __pyx_codeobj__36 __pyx_mstate_global->__pyx_codeobj__36 +#define __pyx_codeobj__38 __pyx_mstate_global->__pyx_codeobj__38 +#define __pyx_codeobj__40 __pyx_mstate_global->__pyx_codeobj__40 +#define __pyx_codeobj__41 __pyx_mstate_global->__pyx_codeobj__41 +#define __pyx_codeobj__42 __pyx_mstate_global->__pyx_codeobj__42 +#define 
__pyx_codeobj__44 __pyx_mstate_global->__pyx_codeobj__44 +/* #### Code section: module_code ### */ + +/* "sqlalchemy/util/_immutabledict_cy.py":31 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +PyDoc_STRVAR(__pyx_doc_10sqlalchemy_4util_17_immutabledict_cy__is_compiled, "Utility function to indicate if this module is compiled or not."); +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_4util_17_immutabledict_cy__is_compiled}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy__is_compiled(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_is_compiled", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":33 + * def _is_compiled() -> bool: + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":31 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":44 + * + * + * def _immutable_fn(obj: object) -> NoReturn: # <<<<<<<<<<<<<< + * raise TypeError(f"{obj.__class__.__name__} object is immutable") + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_3_immutable_fn(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_3_immutable_fn = {"_immutable_fn", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_3_immutable_fn, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_3_immutable_fn(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_obj = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const 
*__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_immutable_fn (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_obj,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_obj)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 44, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "_immutable_fn") < 0)) __PYX_ERR(0, 44, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_obj = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("_immutable_fn", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 44, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy._immutable_fn", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_2_immutable_fn(__pyx_self, __pyx_v_obj); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_2_immutable_fn(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_obj) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_immutable_fn", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":45 + * + * def _immutable_fn(obj: object) -> NoReturn: + * raise TypeError(f"{obj.__class__.__name__} object is immutable") # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_obj, __pyx_n_s_class); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = 
__Pyx_PyObject_FormatSimple(__pyx_t_2, __pyx_empty_unicode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyUnicode_ConcatInPlace(__pyx_t_1, __pyx_kp_u_object_is_immutable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_builtin_TypeError, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 45, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":44 + * + * + * def _immutable_fn(obj: object) -> NoReturn: # <<<<<<<<<<<<<< + * raise TypeError(f"{obj.__class__.__name__} object is immutable") + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy._immutable_fn", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":51 + * __slots__ = () + * + * def _readonly(self) -> NoReturn: # <<<<<<<<<<<<<< + * raise TypeError( + * f"{self.__class__.__name__} object is immutable and/or readonly" + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_1_readonly(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_1_readonly = {"_readonly", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_1_readonly, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_1_readonly(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_self = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_readonly (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_self,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_self)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + 
else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 51, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "_readonly") < 0)) __PYX_ERR(0, 51, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_self = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("_readonly", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 51, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer._readonly", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer__readonly(__pyx_self, __pyx_v_self); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer__readonly(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_readonly", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":53 + * def _readonly(self) -> NoReturn: + * raise TypeError( + * f"{self.__class__.__name__} object is immutable and/or readonly" # <<<<<<<<<<<<<< + * ) + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_self, __pyx_n_s_class); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_FormatSimple(__pyx_t_2, __pyx_empty_unicode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyUnicode_ConcatInPlace(__pyx_t_1, __pyx_kp_u_object_is_immutable_and_or_read); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":52 + * + * def _readonly(self) -> NoReturn: + * raise TypeError( # <<<<<<<<<<<<<< + * f"{self.__class__.__name__} object is immutable and/or readonly" + * ) + */ + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_builtin_TypeError, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 52, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 52, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":51 + * __slots__ = () + * + * def 
_readonly(self) -> NoReturn: # <<<<<<<<<<<<<< + * raise TypeError( + * f"{self.__class__.__name__} object is immutable and/or readonly" + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer._readonly", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":56 + * ) + * + * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_3__delitem__(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_3__delitem__ = {"__delitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_3__delitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_3__delitem__(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_self = 0; + CYTHON_UNUSED PyObject *__pyx_v_key = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[2] = {0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__delitem__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_self,&__pyx_n_s_key,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_self)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 56, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 56, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__delitem__", 1, 2, 2, 1); __PYX_ERR(0, 56, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, 
"__delitem__") < 0)) __PYX_ERR(0, 56, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 2)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + } + __pyx_v_self = values[0]; + __pyx_v_key = values[1]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__delitem__", 1, 2, 2, __pyx_nargs); __PYX_ERR(0, 56, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_2__delitem__(__pyx_self, __pyx_v_self, __pyx_v_key); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_2__delitem__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__delitem__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":57 + * + * def __delitem__(self, key: Any) -> NoReturn: + * self._readonly() # <<<<<<<<<<<<<< + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self, __pyx_n_s_readonly); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 57, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, NULL}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 0+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 57, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":56 + * ) + * + * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* 
"sqlalchemy/util/_immutabledict_cy.py":59 + * self._readonly() + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_5__setitem__(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_5__setitem__ = {"__setitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_5__setitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_5__setitem__(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_self = 0; + CYTHON_UNUSED PyObject *__pyx_v_key = 0; + CYTHON_UNUSED PyObject *__pyx_v_value = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[3] = {0,0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_self,&__pyx_n_s_key,&__pyx_n_s_value,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_self)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 59, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 59, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__setitem__", 1, 3, 3, 1); __PYX_ERR(0, 59, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 59, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__setitem__", 1, 3, 3, 2); __PYX_ERR(0, 59, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, 
__pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setitem__") < 0)) __PYX_ERR(0, 59, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 3)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + } + __pyx_v_self = values[0]; + __pyx_v_key = values[1]; + __pyx_v_value = values[2]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__setitem__", 1, 3, 3, __pyx_nargs); __PYX_ERR(0, 59, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_4__setitem__(__pyx_self, __pyx_v_self, __pyx_v_key, __pyx_v_value); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_4__setitem__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setitem__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":60 + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: + * self._readonly() # <<<<<<<<<<<<<< + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self, __pyx_n_s_readonly); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 60, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, NULL}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 0+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 60, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":59 + * self._readonly() + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + 
__Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":62 + * self._readonly() + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_7__setattr__(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_7__setattr__ = {"__setattr__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_7__setattr__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_7__setattr__(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_self = 0; + CYTHON_UNUSED PyObject *__pyx_v_key = 0; + CYTHON_UNUSED PyObject *__pyx_v_value = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[3] = {0,0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setattr__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_self,&__pyx_n_s_key,&__pyx_n_s_value,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_self)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 62, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 62, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__setattr__", 1, 3, 3, 1); __PYX_ERR(0, 62, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 62, 
__pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__setattr__", 1, 3, 3, 2); __PYX_ERR(0, 62, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setattr__") < 0)) __PYX_ERR(0, 62, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 3)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + } + __pyx_v_self = values[0]; + __pyx_v_key = values[1]; + __pyx_v_value = values[2]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__setattr__", 1, 3, 3, __pyx_nargs); __PYX_ERR(0, 62, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__setattr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_6__setattr__(__pyx_self, __pyx_v_self, __pyx_v_key, __pyx_v_value); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_6__setattr__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setattr__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":63 + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: + * self._readonly() # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self, __pyx_n_s_readonly); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, NULL}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 0+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":62 + * self._readonly() + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + + /* function exit 
code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__setattr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":74 + * # NOTE: this method is required in 3.9 and speeds up the use case + * # ImmutableDictBase[str,int](a_dict) significantly + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__( # type: ignore[override] + * cls, key: Any + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__(PyObject *__pyx_v_cls, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__ = {"__class_getitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__(PyObject *__pyx_v_cls, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + CYTHON_UNUSED PyObject *__pyx_v_key = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__class_getitem__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 74, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__class_getitem__") < 0)) __PYX_ERR(0, 74, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_key = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__class_getitem__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 74, __pyx_L3_error) + 
__pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__class_getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase___class_getitem__(((PyTypeObject*)__pyx_v_cls), __pyx_v_key); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase___class_getitem__(PyTypeObject *__pyx_v_cls, CYTHON_UNUSED PyObject *__pyx_v_key) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__class_getitem__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":78 + * cls, key: Any + * ) -> type[Self]: + * return cls # <<<<<<<<<<<<<< + * + * def __delitem__(self, key: Any) -> NoReturn: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_cls); + __pyx_r = ((PyObject *)__pyx_v_cls); + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":74 + * # NOTE: this method is required in 3.9 and speeds up the use case + * # ImmutableDictBase[str,int](a_dict) significantly + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__( # type: ignore[override] + * cls, key: Any + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":80 + * return cls + * + * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_3__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_3__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__delitem__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_2__delitem__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), ((PyObject *)__pyx_v_key)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_2__delitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__delitem__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":81 + * + * def __delitem__(self, key: Any) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * 
def __setitem__(self, key: Any, value: Any) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 81, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 81, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":80 + * return cls + * + * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":83 + * _immutable_fn(self) + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_5__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_5__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_4__setitem__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), ((PyObject *)__pyx_v_key), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_4__setitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setitem__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":84 + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 84, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 
= NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 84, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":83 + * _immutable_fn(self) + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":86 + * _immutable_fn(self) + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_7__setattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_7__setattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setattr__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_6__setattr__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), ((PyObject *)__pyx_v_key), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_6__setattr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setattr__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":87 + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def clear(self) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 87, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + 
__Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 87, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":86 + * _immutable_fn(self) + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__setattr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":89 + * _immutable_fn(self) + * + * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear = {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("clear (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("clear", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "clear", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_8clear(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_8clear(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const 
char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("clear", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":90 + * + * def clear(self) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":89 + * _immutable_fn(self) + * + * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":92 + * _immutable_fn(self) + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop = {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + CYTHON_UNUSED PyObject *__pyx_v_key = 0; + CYTHON_UNUSED PyObject *__pyx_v_default = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[2] = {0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("pop (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = 
__Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; + values[1] = __Pyx_Arg_NewRef_FASTCALL(((PyObject *)Py_None)); + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 92, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (kw_args > 0) { + PyObject* value = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_default); + if (value) { values[1] = __Pyx_Arg_NewRef_FASTCALL(value); kw_args--; } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 92, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "pop") < 0)) __PYX_ERR(0, 92, __pyx_L3_error) + } + } else { + switch (__pyx_nargs) { + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_key = values[0]; + __pyx_v_default = values[1]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("pop", 0, 1, 2, __pyx_nargs); __PYX_ERR(0, 92, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_10pop(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), __pyx_v_key, __pyx_v_default); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_10pop(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("pop", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":93 + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def popitem(self) -> NoReturn: + */ + 
__Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 93, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 93, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":92 + * _immutable_fn(self) + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":95 + * _immutable_fn(self) + * + * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem = {"popitem", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("popitem (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("popitem", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "popitem", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_12popitem(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self)); + + 
/* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_12popitem(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("popitem", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":96 + * + * def popitem(self) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 96, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 96, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":95 + * _immutable_fn(self) + * + * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.popitem", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":98 + * _immutable_fn(self) + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault = {"setdefault", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + CYTHON_UNUSED PyObject *__pyx_v_key = 0; + CYTHON_UNUSED PyObject *__pyx_v_default = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t 
__pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[2] = {0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("setdefault (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; + values[1] = __Pyx_Arg_NewRef_FASTCALL(((PyObject *)Py_None)); + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 98, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (kw_args > 0) { + PyObject* value = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_default); + if (value) { values[1] = __Pyx_Arg_NewRef_FASTCALL(value); kw_args--; } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 98, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "setdefault") < 0)) __PYX_ERR(0, 98, __pyx_L3_error) + } + } else { + switch (__pyx_nargs) { + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_key = values[0]; + __pyx_v_default = values[1]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("setdefault", 0, 1, 2, __pyx_nargs); __PYX_ERR(0, 98, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.setdefault", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_14setdefault(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), __pyx_v_key, __pyx_v_default); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_14setdefault(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject 
*__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("setdefault", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":99 + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":98 + * _immutable_fn(self) + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.setdefault", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":101 + * _immutable_fn(self) + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_17update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_17update = {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_17update, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_17update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + CYTHON_UNUSED PyObject *__pyx_v_arg = 0; + CYTHON_UNUSED PyObject *__pyx_v_kw = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("update (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_kwds) && 
unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "update", 1))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_arg = __pyx_args; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_16update(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), __pyx_v_arg, __pyx_v_kw); + + /* function exit code */ + __Pyx_DECREF(__pyx_v_arg); + __Pyx_XDECREF(__pyx_v_kw); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_16update(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_arg, CYTHON_UNUSED PyObject *__pyx_v_kw) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("update", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":102 + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 102, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 102, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":101 + * _immutable_fn(self) + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_19__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_19__reduce_cython__ = {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_19__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject 
*__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_19__reduce_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_18__reduce_cython__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_18__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self) { + PyObject *__pyx_v_state = 0; + PyObject *__pyx_v__dict = 0; + int __pyx_v_use_setstate; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 1); + + /* "(tree fragment)":5 + * cdef object _dict + * cdef bint use_setstate + * state = () # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __Pyx_INCREF(__pyx_empty_tuple); + __pyx_v_state = __pyx_empty_tuple; + + /* "(tree fragment)":6 + * cdef bint use_setstate + * state = () + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += (_dict,) + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * state = () + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + if (__pyx_t_2) { + + /* "(tree fragment)":8 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += (_dict,) # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict)) __PYX_ERR(1, 8, __pyx_L1_error); + __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3)); + __pyx_t_3 
= 0; + + /* "(tree fragment)":9 + * if _dict is not None: + * state += (_dict,) + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = False + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":7 + * state = () + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += (_dict,) + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":11 + * use_setstate = True + * else: + * use_setstate = False # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, None), state + */ + /*else*/ { + __pyx_v_use_setstate = 0; + } + __pyx_L3:; + + /* "(tree fragment)":12 + * else: + * use_setstate = False + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, None), state + * else: + */ + if (__pyx_v_use_setstate) { + + /* "(tree fragment)":13 + * use_setstate = False + * if use_setstate: + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_ImmutableDictBase); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(__pyx_int_238750788); + __Pyx_GIVEREF(__pyx_int_238750788); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_238750788)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None)) __PYX_ERR(1, 13, __pyx_L1_error); + __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_state)) __PYX_ERR(1, 13, __pyx_L1_error); + __pyx_t_3 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "(tree fragment)":12 + * else: + * use_setstate = False + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, None), state + * else: + */ + } + + /* "(tree fragment)":15 + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, None), state + * else: + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_ImmutableDictBase__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_ImmutableDictBase); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject 
*)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_INCREF(__pyx_int_238750788); + __Pyx_GIVEREF(__pyx_int_238750788); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_238750788)) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state)) __PYX_ERR(1, 15, __pyx_L1_error); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":16 + * else: + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_ImmutableDictBase__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_21__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_21__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_21__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_21__setstate_cython__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v___pyx_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; + if (__pyx_kwds) { + Py_ssize_t 
kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 16, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(1, 16, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v___pyx_state = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(1, 16, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_20__setstate_cython__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), __pyx_v___pyx_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_20__setstate_cython__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 1); + + /* "(tree fragment)":17 + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_ImmutableDictBase__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 17, __pyx_L1_error) + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_17_immutabledict_cy___pyx_unpickle_ImmutableDictBase__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_ImmutableDictBase__set_state(self, __pyx_state) + */ + + /* 
function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":113 + * + * # ImmutableDictBase start + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__( # type: ignore[override] + * cls, key: Any + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_1__class_getitem__(PyObject *__pyx_v_cls, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_1__class_getitem__ = {"__class_getitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_1__class_getitem__(PyObject *__pyx_v_cls, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + CYTHON_UNUSED PyObject *__pyx_v_key = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__class_getitem__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 113, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__class_getitem__") < 0)) __PYX_ERR(0, 113, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 1)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + } + __pyx_v_key = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__class_getitem__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 113, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < 
(Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__class_getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict___class_getitem__(((PyTypeObject*)__pyx_v_cls), __pyx_v_key); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict___class_getitem__(PyTypeObject *__pyx_v_cls, CYTHON_UNUSED PyObject *__pyx_v_key) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__class_getitem__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":117 + * cls, key: Any + * ) -> type[Self]: + * return cls # <<<<<<<<<<<<<< + * + * def __delitem__(self, key: Any) -> NoReturn: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_cls); + __pyx_r = ((PyObject *)__pyx_v_cls); + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":113 + * + * # ImmutableDictBase start + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__( # type: ignore[override] + * cls, key: Any + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":119 + * return cls + * + * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_3__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_3__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__delitem__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_2__delitem__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v_key)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_2__delitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__delitem__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":120 + * + * def __delitem__(self, key: Any) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if 
CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":119 + * return cls + * + * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":122 + * _immutable_fn(self) + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_5__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_5__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_4__setitem__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v_key), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_4__setitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setitem__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":123 + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + 
__Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":122 + * _immutable_fn(self) + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":125 + * _immutable_fn(self) + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_7__setattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_7__setattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setattr__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_6__setattr__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v_key), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_6__setattr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setattr__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":126 + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def clear(self) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 126, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 
1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 126, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":125 + * _immutable_fn(self) + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__setattr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":128 + * _immutable_fn(self) + * + * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_9clear(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_9clear = {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_9clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_9clear(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("clear (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("clear", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "clear", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_8clear(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_8clear(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("clear", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":129 + * + * def clear(self) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: + */ + 
__Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":128 + * _immutable_fn(self) + * + * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":131 + * _immutable_fn(self) + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_11pop(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_11pop = {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_11pop(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + CYTHON_UNUSED PyObject *__pyx_v_key = 0; + CYTHON_UNUSED PyObject *__pyx_v_default = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[2] = {0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("pop (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; + values[1] = __Pyx_Arg_NewRef_FASTCALL(((PyObject *)Py_None)); + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + 
CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 131, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (kw_args > 0) { + PyObject* value = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_default); + if (value) { values[1] = __Pyx_Arg_NewRef_FASTCALL(value); kw_args--; } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 131, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "pop") < 0)) __PYX_ERR(0, 131, __pyx_L3_error) + } + } else { + switch (__pyx_nargs) { + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_key = values[0]; + __pyx_v_default = values[1]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("pop", 0, 1, 2, __pyx_nargs); __PYX_ERR(0, 131, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_10pop(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), __pyx_v_key, __pyx_v_default); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_10pop(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("pop", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":132 + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def popitem(self) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + 
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":131 + * _immutable_fn(self) + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":134 + * _immutable_fn(self) + * + * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_13popitem(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_13popitem = {"popitem", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_13popitem, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_13popitem(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("popitem (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("popitem", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "popitem", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_12popitem(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_12popitem(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + 
PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("popitem", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":135 + * + * def popitem(self) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":134 + * _immutable_fn(self) + * + * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.popitem", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":137 + * _immutable_fn(self) + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_15setdefault(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_15setdefault = {"setdefault", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_15setdefault, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_15setdefault(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + CYTHON_UNUSED PyObject *__pyx_v_key = 0; + CYTHON_UNUSED PyObject *__pyx_v_default = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[2] = {0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("setdefault (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = 
PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; + values[1] = __Pyx_Arg_NewRef_FASTCALL(((PyObject *)Py_None)); + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 137, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (kw_args > 0) { + PyObject* value = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_default); + if (value) { values[1] = __Pyx_Arg_NewRef_FASTCALL(value); kw_args--; } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 137, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "setdefault") < 0)) __PYX_ERR(0, 137, __pyx_L3_error) + } + } else { + switch (__pyx_nargs) { + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_key = values[0]; + __pyx_v_default = values[1]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("setdefault", 0, 1, 2, __pyx_nargs); __PYX_ERR(0, 137, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.setdefault", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_14setdefault(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), __pyx_v_key, __pyx_v_default); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_14setdefault(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("setdefault", 1); + + /* 
"sqlalchemy/util/_immutabledict_cy.py":138 + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 138, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 138, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":137 + * _immutable_fn(self) + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.setdefault", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":140 + * _immutable_fn(self) + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_17update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_17update = {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_17update, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_17update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + CYTHON_UNUSED PyObject *__pyx_v_arg = 0; + CYTHON_UNUSED PyObject *__pyx_v_kw = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("update (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "update", 1))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_arg = __pyx_args; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_16update(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), __pyx_v_arg, __pyx_v_kw); + + /* function exit code */ + __Pyx_DECREF(__pyx_v_arg); + __Pyx_XDECREF(__pyx_v_kw); 
+ __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_16update(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_arg, CYTHON_UNUSED PyObject *__pyx_v_kw) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("update", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":141 + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * # ImmutableDictBase end + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":140 + * _immutable_fn(self) + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":145 + * # ImmutableDictBase end + * + * def __repr__(self) -> str: # <<<<<<<<<<<<<< + * return f"immutabledict({dict.__repr__(self)})" + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__(PyObject *__pyx_v_self) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_18__repr__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_18__repr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; 
+ Py_UCS4 __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + unsigned int __pyx_t_7; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__repr__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":146 + * + * def __repr__(self) -> str: + * return f"immutabledict({dict.__repr__(self)})" # <<<<<<<<<<<<<< + * + * @cython.annotation_typing(False) # avoid cython crash from generic return + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = 0; + __pyx_t_3 = 127; + __Pyx_INCREF(__pyx_kp_u_immutabledict); + __pyx_t_2 += 14; + __Pyx_GIVEREF(__pyx_kp_u_immutabledict); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_kp_u_immutabledict); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PyDict_Type)), __pyx_n_s_repr); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + __pyx_t_7 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + __pyx_t_7 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_6, ((PyObject *)__pyx_v_self)}; + __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_5, __pyx_callargs+1-__pyx_t_7, 1+__pyx_t_7); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __pyx_t_5 = __Pyx_PyObject_FormatSimple(__pyx_t_4, __pyx_empty_unicode); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_5) > __pyx_t_3) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_5) : __pyx_t_3; + __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_5); + __pyx_t_5 = 0; + __Pyx_INCREF(__pyx_kp_u_); + __pyx_t_2 += 1; + __Pyx_GIVEREF(__pyx_kp_u_); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_kp_u_); + __pyx_t_5 = __Pyx_PyUnicode_Join(__pyx_t_1, 3, __pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":145 + * # ImmutableDictBase end + * + * def __repr__(self) -> str: # <<<<<<<<<<<<<< + * return f"immutabledict({dict.__repr__(self)})" + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":148 + * return f"immutabledict({dict.__repr__(self)})" + * + * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< + * def union( + * self, other: Optional[Mapping[_KT, _VT]] = None, / + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_21union(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_21union = {"union", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_21union, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_21union(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v_other = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[1] = {0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("union (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = {0}; + + /* "sqlalchemy/util/_immutabledict_cy.py":150 + * @cython.annotation_typing(False) # avoid cython crash from generic return + * def union( + * self, other: Optional[Mapping[_KT, _VT]] = None, / # <<<<<<<<<<<<<< + * ) -> immutabledict[_KT, _VT]: + * if not other: + */ + values[0] = __Pyx_Arg_NewRef_FASTCALL(((PyObject *)Py_None)); + if (__pyx_kwds && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) > 0) { + if (likely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values, 0, "union") < 0)) __PYX_ERR(0, 148, 
__pyx_L3_error) + } else { + switch (__pyx_nargs) { + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_other = values[0]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("union", 0, 0, 1, __pyx_nargs); __PYX_ERR(0, 148, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.union", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_20union(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), __pyx_v_other); + + /* "sqlalchemy/util/_immutabledict_cy.py":148 + * return f"immutabledict({dict.__repr__(self)})" + * + * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< + * def union( + * self, other: Optional[Mapping[_KT, _VT]] = None, / + */ + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_20union(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v_other) { + struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_result = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + unsigned int __pyx_t_7; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("union", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":152 + * self, other: Optional[Mapping[_KT, _VT]] = None, / + * ) -> immutabledict[_KT, _VT]: + * if not other: # <<<<<<<<<<<<<< + * return self + * # new + update is faster than immutabledict(self) + */ + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_other); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(0, 152, __pyx_L1_error) + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_immutabledict_cy.py":153 + * ) -> immutabledict[_KT, _VT]: + * if not other: + * return self # <<<<<<<<<<<<<< + * # new + update is faster than immutabledict(self) + * result: immutabledict = immutabledict() # type: ignore[type-arg] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":152 + * self, other: Optional[Mapping[_KT, _VT]] = None, / + * ) -> immutabledict[_KT, _VT]: + * if not other: # <<<<<<<<<<<<<< + * return self + * # new + update is faster than immutabledict(self) + */ + } + + /* "sqlalchemy/util/_immutabledict_cy.py":155 + * return self + * # new + update is faster than immutabledict(self) + * result: immutabledict = immutabledict() # type: ignore[type-arg] # <<<<<<<<<<<<<< + * PyDict_Update(result, self) + * if isinstance(other, dict): 
+ */ + __pyx_t_3 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":156 + * # new + update is faster than immutabledict(self) + * result: immutabledict = immutabledict() # type: ignore[type-arg] + * PyDict_Update(result, self) # <<<<<<<<<<<<<< + * if isinstance(other, dict): + * # c version of PyDict_Update supports only dicts + */ + __pyx_t_4 = PyDict_Update(((PyObject *)__pyx_v_result), ((PyObject *)__pyx_v_self)); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 156, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":157 + * result: immutabledict = immutabledict() # type: ignore[type-arg] + * PyDict_Update(result, self) + * if isinstance(other, dict): # <<<<<<<<<<<<<< + * # c version of PyDict_Update supports only dicts + * PyDict_Update(result, other) + */ + __pyx_t_2 = PyDict_Check(__pyx_v_other); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_immutabledict_cy.py":159 + * if isinstance(other, dict): + * # c version of PyDict_Update supports only dicts + * PyDict_Update(result, other) # <<<<<<<<<<<<<< + * else: + * dict.update(result, other) + */ + __pyx_t_4 = PyDict_Update(((PyObject *)__pyx_v_result), __pyx_v_other); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 159, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":157 + * result: immutabledict = immutabledict() # type: ignore[type-arg] + * PyDict_Update(result, self) + * if isinstance(other, dict): # <<<<<<<<<<<<<< + * # c version of PyDict_Update supports only dicts + * PyDict_Update(result, other) + */ + goto __pyx_L4; + } + + /* "sqlalchemy/util/_immutabledict_cy.py":161 + * PyDict_Update(result, other) + * else: + * dict.update(result, other) # <<<<<<<<<<<<<< + * return result + * + */ + /*else*/ { + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PyDict_Type)), __pyx_n_s_update); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 161, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + __pyx_t_7 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + __pyx_t_7 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[3] = {__pyx_t_6, ((PyObject *)__pyx_v_result), __pyx_v_other}; + __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_5, __pyx_callargs+1-__pyx_t_7, 2+__pyx_t_7); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 161, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_L4:; + + /* "sqlalchemy/util/_immutabledict_cy.py":162 + * else: + * dict.update(result, other) + * return result # <<<<<<<<<<<<<< + * + * @cython.annotation_typing(False) # avoid cython crash from generic return + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_result); + __pyx_r = ((PyObject *)__pyx_v_result); + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":148 + * return f"immutabledict({dict.__repr__(self)})" + * + * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< + * def union( + * 
self, other: Optional[Mapping[_KT, _VT]] = None, / + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.union", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":164 + * return result + * + * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< + * def merge_with( + * self, *dicts: Optional[Mapping[_KT, _VT]] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_23merge_with(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_23merge_with = {"merge_with", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_23merge_with, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_23merge_with(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_dicts = 0; + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("merge_with (wrapper)", 0); + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "merge_with", 0))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_dicts = __pyx_args; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_22merge_with(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), __pyx_v_dicts); + + /* function exit code */ + __Pyx_DECREF(__pyx_v_dicts); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_22merge_with(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v_dicts) { + struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_result = NULL; + PyObject *__pyx_v_d = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + Py_ssize_t __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + Py_ssize_t __pyx_t_6; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + unsigned int __pyx_t_10; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("merge_with", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":168 + * self, *dicts: Optional[Mapping[_KT, _VT]] + * ) -> immutabledict[_KT, _VT]: + * result: Optional[immutabledict] = None # type: ignore[type-arg] # <<<<<<<<<<<<<< + * d: object + * if not dicts: + */ + __Pyx_INCREF(Py_None); + __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)Py_None); + + /* "sqlalchemy/util/_immutabledict_cy.py":170 + * result: 
Optional[immutabledict] = None # type: ignore[type-arg] + * d: object + * if not dicts: # <<<<<<<<<<<<<< + * return self + * for d in dicts: + */ + __pyx_t_1 = (PyTuple_GET_SIZE(__pyx_v_dicts) != 0); + __pyx_t_2 = (!__pyx_t_1); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_immutabledict_cy.py":171 + * d: object + * if not dicts: + * return self # <<<<<<<<<<<<<< + * for d in dicts: + * if d is not None and len(d) > 0: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":170 + * result: Optional[immutabledict] = None # type: ignore[type-arg] + * d: object + * if not dicts: # <<<<<<<<<<<<<< + * return self + * for d in dicts: + */ + } + + /* "sqlalchemy/util/_immutabledict_cy.py":172 + * if not dicts: + * return self + * for d in dicts: # <<<<<<<<<<<<<< + * if d is not None and len(d) > 0: + * if result is None: + */ + __pyx_t_3 = __pyx_v_dicts; __Pyx_INCREF(__pyx_t_3); + __pyx_t_4 = 0; + for (;;) { + { + Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_3); + #if !CYTHON_ASSUME_SAFE_MACROS + if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 172, __pyx_L1_error) + #endif + if (__pyx_t_4 >= __pyx_temp) break; + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_5); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 172, __pyx_L1_error) + #else + __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 172, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + #endif + __Pyx_XDECREF_SET(__pyx_v_d, __pyx_t_5); + __pyx_t_5 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":173 + * return self + * for d in dicts: + * if d is not None and len(d) > 0: # <<<<<<<<<<<<<< + * if result is None: + * # new + update is faster than immutabledict(self) + */ + __pyx_t_1 = (__pyx_v_d != Py_None); + if (__pyx_t_1) { + } else { + __pyx_t_2 = __pyx_t_1; + goto __pyx_L7_bool_binop_done; + } + __pyx_t_6 = PyObject_Length(__pyx_v_d); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 173, __pyx_L1_error) + __pyx_t_1 = (__pyx_t_6 > 0); + __pyx_t_2 = __pyx_t_1; + __pyx_L7_bool_binop_done:; + if (__pyx_t_2) { + + /* "sqlalchemy/util/_immutabledict_cy.py":174 + * for d in dicts: + * if d is not None and len(d) > 0: + * if result is None: # <<<<<<<<<<<<<< + * # new + update is faster than immutabledict(self) + * result = immutabledict() + */ + __pyx_t_2 = (((PyObject *)__pyx_v_result) == Py_None); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_immutabledict_cy.py":176 + * if result is None: + * # new + update is faster than immutabledict(self) + * result = immutabledict() # <<<<<<<<<<<<<< + * PyDict_Update(result, self) + * if isinstance(d, dict): + */ + __pyx_t_5 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 176, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF_SET(__pyx_v_result, ((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_t_5)); + __pyx_t_5 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":177 + * # new + update is faster than immutabledict(self) + * result = immutabledict() + * PyDict_Update(result, self) # <<<<<<<<<<<<<< + * if isinstance(d, dict): + * # c version of PyDict_Update supports only dicts + */ + __pyx_t_7 = PyDict_Update(((PyObject *)__pyx_v_result), ((PyObject *)__pyx_v_self)); if (unlikely(__pyx_t_7 == 
((int)-1))) __PYX_ERR(0, 177, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":174 + * for d in dicts: + * if d is not None and len(d) > 0: + * if result is None: # <<<<<<<<<<<<<< + * # new + update is faster than immutabledict(self) + * result = immutabledict() + */ + } + + /* "sqlalchemy/util/_immutabledict_cy.py":178 + * result = immutabledict() + * PyDict_Update(result, self) + * if isinstance(d, dict): # <<<<<<<<<<<<<< + * # c version of PyDict_Update supports only dicts + * PyDict_Update(result, d) + */ + __pyx_t_2 = PyDict_Check(__pyx_v_d); + if (__pyx_t_2) { + + /* "sqlalchemy/util/_immutabledict_cy.py":180 + * if isinstance(d, dict): + * # c version of PyDict_Update supports only dicts + * PyDict_Update(result, d) # <<<<<<<<<<<<<< + * else: + * dict.update(result, d) + */ + __pyx_t_7 = PyDict_Update(((PyObject *)__pyx_v_result), __pyx_v_d); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(0, 180, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":178 + * result = immutabledict() + * PyDict_Update(result, self) + * if isinstance(d, dict): # <<<<<<<<<<<<<< + * # c version of PyDict_Update supports only dicts + * PyDict_Update(result, d) + */ + goto __pyx_L10; + } + + /* "sqlalchemy/util/_immutabledict_cy.py":182 + * PyDict_Update(result, d) + * else: + * dict.update(result, d) # <<<<<<<<<<<<<< + * + * return self if result is None else result + */ + /*else*/ { + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PyDict_Type)), __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 182, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = NULL; + __pyx_t_10 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_10 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[3] = {__pyx_t_9, ((PyObject *)__pyx_v_result), __pyx_v_d}; + __pyx_t_5 = __Pyx_PyObject_FastCall(__pyx_t_8, __pyx_callargs+1-__pyx_t_10, 2+__pyx_t_10); + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 182, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __pyx_L10:; + + /* "sqlalchemy/util/_immutabledict_cy.py":173 + * return self + * for d in dicts: + * if d is not None and len(d) > 0: # <<<<<<<<<<<<<< + * if result is None: + * # new + update is faster than immutabledict(self) + */ + } + + /* "sqlalchemy/util/_immutabledict_cy.py":172 + * if not dicts: + * return self + * for d in dicts: # <<<<<<<<<<<<<< + * if d is not None and len(d) > 0: + * if result is None: + */ + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":184 + * dict.update(result, d) + * + * return self if result is None else result # <<<<<<<<<<<<<< + * + * def copy(self) -> Self: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = (((PyObject *)__pyx_v_result) == Py_None); + if (__pyx_t_2) { + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_t_3 = ((PyObject *)__pyx_v_self); + } else { + __Pyx_INCREF((PyObject *)__pyx_v_result); + __pyx_t_3 = ((PyObject *)__pyx_v_result); + } + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":164 + * return result + * + * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< + * def merge_with( + * 
self, *dicts: Optional[Mapping[_KT, _VT]] + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.merge_with", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_result); + __Pyx_XDECREF(__pyx_v_d); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":186 + * return self if result is None else result + * + * def copy(self) -> Self: # <<<<<<<<<<<<<< + * return self + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_25copy(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_25copy = {"copy", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_25copy, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_25copy(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("copy (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("copy", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "copy", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_24copy(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_24copy(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("copy", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":187 + * + * def copy(self) -> Self: + * return self # <<<<<<<<<<<<<< + * + * def __reduce__(self) -> Any: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF((PyObject *)__pyx_v_self); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":186 + * return self if result is None else result + * + * def copy(self) -> Self: # <<<<<<<<<<<<<< + * return self + * + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":189 + * return self + * + * def __reduce__(self) -> Any: # <<<<<<<<<<<<<< + * 
return immutabledict, (dict(self),) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_27__reduce__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_27__reduce__ = {"__reduce__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_27__reduce__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_27__reduce__(PyObject *__pyx_v_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce__ (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + if (unlikely(__pyx_nargs > 0)) { + __Pyx_RaiseArgtupleInvalid("__reduce__", 1, 0, 0, __pyx_nargs); return NULL;} + if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce__", 0))) return NULL; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_26__reduce__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_26__reduce__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":190 + * + * def __reduce__(self) -> Any: + * return immutabledict, (dict(self),) # <<<<<<<<<<<<<< + * + * # PEP 584 + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyDict_Type)), ((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 190, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 190, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_1); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_1)) __PYX_ERR(0, 190, __pyx_L1_error); + __pyx_t_1 = 0; + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 190, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + __Pyx_GIVEREF((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict))) __PYX_ERR(0, 190, __pyx_L1_error); + __Pyx_GIVEREF(__pyx_t_2); + if 
(__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_2)) __PYX_ERR(0, 190, __pyx_L1_error); + __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":189 + * return self + * + * def __reduce__(self) -> Any: # <<<<<<<<<<<<<< + * return immutabledict, (dict(self),) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__reduce__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":193 + * + * # PEP 584 + * def __ior__(self, __value: Any, /) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_29__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_29__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__ior__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_28__ior__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v__immutabledict__value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_28__ior__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v__immutabledict__value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__ior__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":194 + * # PEP 584 + * def __ior__(self, __value: Any, /) -> NoReturn: + * _immutable_fn(self) # <<<<<<<<<<<<<< + * + * def __or__( # type: ignore[override] + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 194, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 194, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":193 + * + * # PEP 584 + * def __ior__(self, __value: Any, /) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + + /* 
function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__ior__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":196 + * _immutable_fn(self) + * + * def __or__( # type: ignore[override] # <<<<<<<<<<<<<< + * self, __value: Mapping[_KT, _VT], / + * ) -> immutabledict[_KT, _VT]: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_31__or__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_31__or__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__or__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_30__or__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v__immutabledict__value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_30__or__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v__immutabledict__value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__or__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":199 + * self, __value: Mapping[_KT, _VT], / + * ) -> immutabledict[_KT, _VT]: + * return immutabledict( # <<<<<<<<<<<<<< + * dict.__or__(self, __value), # type: ignore[call-overload] + * ) + */ + __Pyx_XDECREF(__pyx_r); + + /* "sqlalchemy/util/_immutabledict_cy.py":200 + * ) -> immutabledict[_KT, _VT]: + * return immutabledict( + * dict.__or__(self, __value), # type: ignore[call-overload] # <<<<<<<<<<<<<< + * ) + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PyDict_Type)), __pyx_n_s_or); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 200, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[3] = {__pyx_t_3, ((PyObject *)__pyx_v_self), __pyx_v__immutabledict__value}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 2+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 200, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + + /* "sqlalchemy/util/_immutabledict_cy.py":199 + * self, __value: Mapping[_KT, _VT], / + * ) -> immutabledict[_KT, _VT]: + * return 
immutabledict( # <<<<<<<<<<<<<< + * dict.__or__(self, __value), # type: ignore[call-overload] + * ) + */ + __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict), __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 199, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":196 + * _immutable_fn(self) + * + * def __or__( # type: ignore[override] # <<<<<<<<<<<<<< + * self, __value: Mapping[_KT, _VT], / + * ) -> immutabledict[_KT, _VT]: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__or__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "sqlalchemy/util/_immutabledict_cy.py":203 + * ) + * + * def __ror__( # type: ignore[override] # <<<<<<<<<<<<<< + * self, __value: Mapping[_KT, _VT], / + * ) -> immutabledict[_KT, _VT]: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_33__ror__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value); /*proto*/ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_33__ror__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value) { + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__ror__ (wrapper)", 0); + __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_32__ror__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v__immutabledict__value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_32__ror__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v__immutabledict__value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + unsigned int __pyx_t_4; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__ror__", 1); + + /* "sqlalchemy/util/_immutabledict_cy.py":206 + * self, __value: Mapping[_KT, _VT], / + * ) -> immutabledict[_KT, _VT]: + * return immutabledict( # <<<<<<<<<<<<<< + * dict.__ror__(self, __value), # type: ignore[call-overload] + * ) + */ + __Pyx_XDECREF(__pyx_r); + + /* "sqlalchemy/util/_immutabledict_cy.py":207 + * ) -> immutabledict[_KT, _VT]: + * return immutabledict( + * dict.__ror__(self, __value), # type: ignore[call-overload] # <<<<<<<<<<<<<< + * ) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PyDict_Type)), __pyx_n_s_ror); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 207, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + 
__Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[3] = {__pyx_t_3, ((PyObject *)__pyx_v_self), __pyx_v__immutabledict__value}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 2+__pyx_t_4); + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 207, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + + /* "sqlalchemy/util/_immutabledict_cy.py":206 + * self, __value: Mapping[_KT, _VT], / + * ) -> immutabledict[_KT, _VT]: + * return immutabledict( # <<<<<<<<<<<<<< + * dict.__ror__(self, __value), # type: ignore[call-overload] + * ) + */ + __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict), __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 206, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "sqlalchemy/util/_immutabledict_cy.py":203 + * ) + * + * def __ror__( # type: ignore[override] # <<<<<<<<<<<<<< + * self, __value: Mapping[_KT, _VT], / + * ) -> immutabledict[_KT, _VT]: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__ror__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_ImmutableDictBase(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_5__pyx_unpickle_ImmutableDictBase(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +); /*proto*/ +static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_5__pyx_unpickle_ImmutableDictBase = {"__pyx_unpickle_ImmutableDictBase", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_5__pyx_unpickle_ImmutableDictBase, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_5__pyx_unpickle_ImmutableDictBase(PyObject *__pyx_self, +#if CYTHON_METH_FASTCALL +PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds +#else +PyObject *__pyx_args, PyObject *__pyx_kwds +#endif +) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + #if !CYTHON_METH_FASTCALL + CYTHON_UNUSED Py_ssize_t __pyx_nargs; + #endif + CYTHON_UNUSED PyObject *const *__pyx_kwvalues; + PyObject* values[3] = {0,0,0}; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_ImmutableDictBase (wrapper)", 0); + #if !CYTHON_METH_FASTCALL + #if CYTHON_ASSUME_SAFE_MACROS + __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); + #else + __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; + #endif + #endif + __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); + { + PyObject **__pyx_pyargnames[] = 
{&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + if (__pyx_kwds) { + Py_ssize_t kw_args; + switch (__pyx_nargs) { + case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); + switch (__pyx_nargs) { + case 0: + if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_type)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_checksum)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ImmutableDictBase", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { + (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); + kw_args--; + } + else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ImmutableDictBase", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + const Py_ssize_t kwd_pos_args = __pyx_nargs; + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__pyx_unpickle_ImmutableDictBase") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (unlikely(__pyx_nargs != 3)) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); + values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); + values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L6_skip; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ImmutableDictBase", 1, 3, 3, __pyx_nargs); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L6_skip:; + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.__pyx_unpickle_ImmutableDictBase", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_4__pyx_unpickle_ImmutableDictBase(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + { + Py_ssize_t __pyx_temp; + for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { + __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); + } + } + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_4__pyx_unpickle_ImmutableDictBase(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v___pyx_PickleError = 0; + PyObject *__pyx_v___pyx_result = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + unsigned int __pyx_t_5; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_ImmutableDictBase", 1); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + */ + __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__2, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "(tree fragment)":5 + * cdef object __pyx_result + * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): + * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + * __pyx_result = ImmutableDictBase.__new__(__pyx_type) + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + if (__Pyx_PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError)) __PYX_ERR(1, 5, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_1); + __pyx_v___pyx_PickleError = __pyx_t_1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum # <<<<<<<<<<<<<< + * __pyx_result = ImmutableDictBase.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_v___pyx_PickleError, __pyx_t_1, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 6, __pyx_L1_error) + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< + * from 
pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + */ + } + + /* "(tree fragment)":7 + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + * __pyx_result = ImmutableDictBase.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase), __pyx_n_s_new); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v___pyx_type}; + __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_v___pyx_result = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + * __pyx_result = ImmutableDictBase.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + __pyx_t_2 = (__pyx_v___pyx_state != Py_None); + if (__pyx_t_2) { + + /* "(tree fragment)":9 + * __pyx_result = ImmutableDictBase.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< + * return __pyx_result + * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_t_1 = __pyx_f_10sqlalchemy_4util_17_immutabledict_cy___pyx_unpickle_ImmutableDictBase__set_state(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":8 + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + * __pyx_result = ImmutableDictBase.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) + * return __pyx_result + */ + } + + /* "(tree fragment)":10 + * if __pyx_state is not None: + * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) + * return __pyx_result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): + * if len(__pyx_state) > 0 and 
hasattr(__pyx_result, '__dict__'): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v___pyx_result); + __pyx_r = __pyx_v___pyx_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_ImmutableDictBase(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.__pyx_unpickle_ImmutableDictBase", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v___pyx_PickleError); + __Pyx_XDECREF(__pyx_v___pyx_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":11 + * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[0]) + */ + +static PyObject *__pyx_f_10sqlalchemy_4util_17_immutabledict_cy___pyx_unpickle_ImmutableDictBase__set_state(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + Py_ssize_t __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + unsigned int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__pyx_unpickle_ImmutableDictBase__set_state", 1); + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): + * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[0]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(1, 12, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_2 > 0); + if (__pyx_t_3) { + } else { + __pyx_t_1 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_3 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(1, 12, __pyx_L1_error) + __pyx_t_1 = __pyx_t_3; + __pyx_L4_bool_binop_done:; + if (__pyx_t_1) { + + /* "(tree fragment)":13 + * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): + * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[0]) # <<<<<<<<<<<<<< + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' 
object is not subscriptable"); + __PYX_ERR(1, 13, __pyx_L1_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = NULL; + __pyx_t_8 = 0; + #if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_8 = 1; + } + } + #endif + { + PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_5}; + __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "(tree fragment)":12 + * return __pyx_result + * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): + * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< + * __pyx_result.__dict__.update(__pyx_state[0]) + */ + } + + /* "(tree fragment)":11 + * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) + * return __pyx_result + * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< + * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): + * __pyx_result.__dict__.update(__pyx_state[0]) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.__pyx_unpickle_ImmutableDictBase__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase(PyObject *o, visitproc v, void *a) { + int e; + if (!(&PyDict_Type)->tp_traverse); else { e = (&PyDict_Type)->tp_traverse(o,v,a); if (e) return e; } + return 0; +} + +static int __pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase(PyObject *o) { + if (!(&PyDict_Type)->tp_clear); else (&PyDict_Type)->tp_clear(o); + return 0; +} + +static int __pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase(PyObject *o, PyObject *i, PyObject *v) { + if (v) { + return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_5__setitem__(o, i, v); + } + else { + return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_3__delitem__(o, i); + } +} + +static int __pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase(PyObject *o, PyObject *n, PyObject *v) { + if (v) { + return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_7__setattr__(o, n, v); + } + else { + if ((&PyDict_Type)->tp_setattro) + return (&PyDict_Type)->tp_setattro(o, n, v); + return PyObject_GenericSetAttr(o, n, 0); + } +} + +static PyMethodDef __pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase[] = { + {"__class_getitem__", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"popitem", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"setdefault", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_17update, METH_VARARGS|METH_KEYWORDS, 0}, + {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_19__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_21__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {0, 0, 0, 0} +}; +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase_slots[] = { + {Py_mp_ass_subscript, (void *)__pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase}, + {Py_tp_setattro, (void *)__pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase}, + {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase}, + {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase}, + {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase}, + {0, 0}, +}; +static PyType_Spec __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase_spec = { + "sqlalchemy.util._immutabledict_cy.ImmutableDictBase", + sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase), + 0, + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, + __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase_slots, +}; +#else + +static PyMappingMethods __pyx_tp_as_mapping_ImmutableDictBase = { + 0, /*mp_length*/ + 0, /*mp_subscript*/ + __pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, /*mp_ass_subscript*/ +}; + +static PyTypeObject __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase = { + PyVarObject_HEAD_INIT(0, 0) + "sqlalchemy.util._immutabledict_cy.""ImmutableDictBase", /*tp_name*/ + sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + 0, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ 
+ #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + &__pyx_tp_as_mapping_ImmutableDictBase, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + __pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, /*tp_traverse*/ + __pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + #if !CYTHON_USE_TYPE_SPECS + 0, /*tp_dictoffset*/ + #endif + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + #if CYTHON_USE_TP_FINALIZE + 0, /*tp_finalize*/ + #else + NULL, /*tp_finalize*/ + #endif + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if __PYX_NEED_TP_PRINT_SLOT == 1 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030C0000 + 0, /*tp_watched*/ + #endif + #if PY_VERSION_HEX >= 0x030d00A4 + 0, /*tp_versions_used*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, /*tp_pypy_flags*/ + #endif +}; +#endif + +static int __pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_immutabledict(PyObject *o, visitproc v, void *a) { + int e; + if (!(&PyDict_Type)->tp_traverse); else { e = (&PyDict_Type)->tp_traverse(o,v,a); if (e) return e; } + return 0; +} + +static int __pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_immutabledict(PyObject *o) { + if (!(&PyDict_Type)->tp_clear); else (&PyDict_Type)->tp_clear(o); + return 0; +} + +static int __pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_immutabledict(PyObject *o, PyObject *i, PyObject *v) { + if (v) { + return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_5__setitem__(o, i, v); + } + else { + return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_3__delitem__(o, i); + } +} + +static int __pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_immutabledict(PyObject *o, PyObject *n, PyObject *v) { + if (v) { + return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_7__setattr__(o, n, v); + } + else { + if ((&PyDict_Type)->tp_setattro) + return (&PyDict_Type)->tp_setattro(o, n, v); + return PyObject_GenericSetAttr(o, n, 0); + } +} + +static CYTHON_INLINE PyObject *__pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { + binaryfunc slot; +#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + slot = type->tp_as_number ? type->tp_as_number->nb_or : NULL; +#else + slot = (binaryfunc) PyType_GetSlot(type, Py_nb_or); +#endif + return slot ? 
slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); +} +static PyObject *__pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict(PyObject *left, PyObject *right ) { + int maybe_self_is_left, maybe_self_is_right = 0; + maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) +#endif + || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + if (maybe_self_is_left) { + PyObject *res; + res = __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_31__or__(left, right); + if (res != Py_NotImplemented) return res; + Py_DECREF(res); + } + maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) +#if CYTHON_USE_TYPE_SLOTS + || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) +#endif + || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + if (maybe_self_is_right) { + return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_33__ror__(right, left); + } + return __Pyx_NewRef(Py_NotImplemented); +} + + + +static PyObject *__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__(PyObject *self, CYTHON_UNUSED PyObject *arg) { + return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__(self); +} + +static PyMethodDef __pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_immutabledict[] = { + {"__class_getitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_9clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"popitem", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_13popitem, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"setdefault", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_15setdefault, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_17update, METH_VARARGS|METH_KEYWORDS, 0}, + {"__repr__", (PyCFunction)__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__, METH_NOARGS|METH_COEXIST, 0}, + {"union", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_21union, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"merge_with", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_23merge_with, METH_VARARGS|METH_KEYWORDS, 0}, + {"copy", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_25copy, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__reduce__", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_27__reduce__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, + {"__ror__", (PyCFunction)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_33__ror__, METH_O|METH_COEXIST, 0}, + {0, 0, 0, 0} +}; +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_slots[] = { + {Py_tp_repr, (void *)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__}, + {Py_nb_or, (void *)__pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, + {Py_nb_inplace_or, (void *)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_29__ior__}, + {Py_mp_ass_subscript, (void *)__pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, + {Py_tp_setattro, (void *)__pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, + {Py_tp_doc, (void *)PyDoc_STR("An immutable version of a dict.")}, + {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, + {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, + {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, + {0, 0}, +}; +static PyType_Spec __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_spec = { + "sqlalchemy.util._immutabledict_cy.immutabledict", + sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict), + 0, + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, + __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_slots, +}; +#else + +static PyNumberMethods __pyx_tp_as_number_immutabledict = { + 0, /*nb_add*/ + 0, /*nb_subtract*/ + 0, /*nb_multiply*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_divide*/ + #endif + 0, /*nb_remainder*/ + 0, /*nb_divmod*/ + 0, /*nb_power*/ + 0, /*nb_negative*/ + 0, /*nb_positive*/ + 0, /*nb_absolute*/ + 0, /*nb_bool*/ + 0, /*nb_invert*/ + 0, /*nb_lshift*/ + 0, /*nb_rshift*/ + 0, /*nb_and*/ + 0, /*nb_xor*/ + __pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*nb_or*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_coerce*/ + #endif + 0, /*nb_int*/ + #if PY_MAJOR_VERSION < 3 + 0, /*nb_long*/ + #else + 0, /*reserved*/ + #endif + 0, /*nb_float*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_oct*/ + #endif + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_hex*/ + #endif + 0, /*nb_inplace_add*/ + 0, /*nb_inplace_subtract*/ + 0, /*nb_inplace_multiply*/ + #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) + 0, /*nb_inplace_divide*/ + #endif + 0, /*nb_inplace_remainder*/ + 0, /*nb_inplace_power*/ + 0, /*nb_inplace_lshift*/ + 0, /*nb_inplace_rshift*/ + 0, /*nb_inplace_and*/ + 0, /*nb_inplace_xor*/ + __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_29__ior__, /*nb_inplace_or*/ + 0, /*nb_floor_divide*/ + 0, /*nb_true_divide*/ + 0, /*nb_inplace_floor_divide*/ + 0, /*nb_inplace_true_divide*/ + 0, /*nb_index*/ + #if PY_VERSION_HEX >= 0x03050000 + 0, /*nb_matrix_multiply*/ + #endif + #if PY_VERSION_HEX >= 0x03050000 + 0, /*nb_inplace_matrix_multiply*/ + #endif +}; + +static 
PyMappingMethods __pyx_tp_as_mapping_immutabledict = { + 0, /*mp_length*/ + 0, /*mp_subscript*/ + __pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*mp_ass_subscript*/ +}; + +static PyTypeObject __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict = { + PyVarObject_HEAD_INIT(0, 0) + "sqlalchemy.util._immutabledict_cy.""immutabledict", /*tp_name*/ + sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + 0, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__, /*tp_repr*/ + &__pyx_tp_as_number_immutabledict, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + &__pyx_tp_as_mapping_immutabledict, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + __pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ + PyDoc_STR("An immutable version of a dict."), /*tp_doc*/ + __pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*tp_traverse*/ + __pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + #if !CYTHON_USE_TYPE_SPECS + 0, /*tp_dictoffset*/ + #endif + 0, /*tp_init*/ + 0, /*tp_alloc*/ + 0, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + #if CYTHON_USE_TP_FINALIZE + 0, /*tp_finalize*/ + #else + NULL, /*tp_finalize*/ + #endif + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if __PYX_NEED_TP_PRINT_SLOT == 1 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030C0000 + 0, /*tp_watched*/ + #endif + #if PY_VERSION_HEX >= 0x030d00A4 + 0, /*tp_versions_used*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, /*tp_pypy_flags*/ + #endif +}; +#endif + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif +/* #### Code section: pystring_table ### */ + +static int __Pyx_CreateStringTabAndInitStrings(void) { + __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, + {&__pyx_n_s_Any, __pyx_k_Any, sizeof(__pyx_k_Any), 0, 0, 1, 1}, + {&__pyx_n_s_Dict, __pyx_k_Dict, sizeof(__pyx_k_Dict), 0, 0, 1, 1}, + {&__pyx_n_s_Hashable, __pyx_k_Hashable, sizeof(__pyx_k_Hashable), 0, 0, 1, 
1}, + {&__pyx_n_s_ImmutableDictBase, __pyx_k_ImmutableDictBase, sizeof(__pyx_k_ImmutableDictBase), 0, 0, 1, 1}, + {&__pyx_n_s_ImmutableDictBase___class_getite, __pyx_k_ImmutableDictBase___class_getite, sizeof(__pyx_k_ImmutableDictBase___class_getite), 0, 0, 1, 1}, + {&__pyx_n_s_ImmutableDictBase___reduce_cytho, __pyx_k_ImmutableDictBase___reduce_cytho, sizeof(__pyx_k_ImmutableDictBase___reduce_cytho), 0, 0, 1, 1}, + {&__pyx_n_s_ImmutableDictBase___setstate_cyt, __pyx_k_ImmutableDictBase___setstate_cyt, sizeof(__pyx_k_ImmutableDictBase___setstate_cyt), 0, 0, 1, 1}, + {&__pyx_n_s_ImmutableDictBase_clear, __pyx_k_ImmutableDictBase_clear, sizeof(__pyx_k_ImmutableDictBase_clear), 0, 0, 1, 1}, + {&__pyx_n_s_ImmutableDictBase_pop, __pyx_k_ImmutableDictBase_pop, sizeof(__pyx_k_ImmutableDictBase_pop), 0, 0, 1, 1}, + {&__pyx_n_s_ImmutableDictBase_popitem, __pyx_k_ImmutableDictBase_popitem, sizeof(__pyx_k_ImmutableDictBase_popitem), 0, 0, 1, 1}, + {&__pyx_n_s_ImmutableDictBase_setdefault, __pyx_k_ImmutableDictBase_setdefault, sizeof(__pyx_k_ImmutableDictBase_setdefault), 0, 0, 1, 1}, + {&__pyx_n_s_ImmutableDictBase_update, __pyx_k_ImmutableDictBase_update, sizeof(__pyx_k_ImmutableDictBase_update), 0, 0, 1, 1}, + {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0}, + {&__pyx_n_s_KT, __pyx_k_KT, sizeof(__pyx_k_KT), 0, 0, 1, 1}, + {&__pyx_n_u_KT, __pyx_k_KT, sizeof(__pyx_k_KT), 0, 1, 0, 1}, + {&__pyx_n_s_Mapping, __pyx_k_Mapping, sizeof(__pyx_k_Mapping), 0, 0, 1, 1}, + {&__pyx_n_s_NoReturn, __pyx_k_NoReturn, sizeof(__pyx_k_NoReturn), 0, 0, 1, 1}, + {&__pyx_n_s_Optional, __pyx_k_Optional, sizeof(__pyx_k_Optional), 0, 0, 1, 1}, + {&__pyx_kp_s_Optional_Any, __pyx_k_Optional_Any, sizeof(__pyx_k_Optional_Any), 0, 0, 1, 0}, + {&__pyx_kp_s_Optional_Mapping__KT__VT, __pyx_k_Optional_Mapping__KT__VT, sizeof(__pyx_k_Optional_Mapping__KT__VT), 0, 0, 1, 0}, + {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_ReadOnlyContainer, __pyx_k_ReadOnlyContainer, sizeof(__pyx_k_ReadOnlyContainer), 0, 0, 1, 1}, + {&__pyx_n_s_ReadOnlyContainer___delitem, __pyx_k_ReadOnlyContainer___delitem, sizeof(__pyx_k_ReadOnlyContainer___delitem), 0, 0, 1, 1}, + {&__pyx_n_s_ReadOnlyContainer___setattr, __pyx_k_ReadOnlyContainer___setattr, sizeof(__pyx_k_ReadOnlyContainer___setattr), 0, 0, 1, 1}, + {&__pyx_n_s_ReadOnlyContainer___setitem, __pyx_k_ReadOnlyContainer___setitem, sizeof(__pyx_k_ReadOnlyContainer___setitem), 0, 0, 1, 1}, + {&__pyx_n_s_ReadOnlyContainer__readonly, __pyx_k_ReadOnlyContainer__readonly, sizeof(__pyx_k_ReadOnlyContainer__readonly), 0, 0, 1, 1}, + {&__pyx_n_s_Self, __pyx_k_Self, sizeof(__pyx_k_Self), 0, 0, 1, 1}, + {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, + {&__pyx_n_s_TypeVar, __pyx_k_TypeVar, sizeof(__pyx_k_TypeVar), 0, 0, 1, 1}, + {&__pyx_n_s_VT, __pyx_k_VT, sizeof(__pyx_k_VT), 0, 0, 1, 1}, + {&__pyx_n_u_VT, __pyx_k_VT, sizeof(__pyx_k_VT), 0, 1, 0, 1}, + {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0}, + {&__pyx_n_s__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 0, 1, 1}, + {&__pyx_n_s_arg, __pyx_k_arg, sizeof(__pyx_k_arg), 0, 0, 1, 1}, + {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, + {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, + {&__pyx_n_s_bound, __pyx_k_bound, sizeof(__pyx_k_bound), 0, 0, 1, 1}, + {&__pyx_n_s_class, __pyx_k_class, 
sizeof(__pyx_k_class), 0, 0, 1, 1}, + {&__pyx_n_s_class_getitem, __pyx_k_class_getitem, sizeof(__pyx_k_class_getitem), 0, 0, 1, 1}, + {&__pyx_n_s_clear, __pyx_k_clear, sizeof(__pyx_k_clear), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_cls, __pyx_k_cls, sizeof(__pyx_k_cls), 0, 0, 1, 1}, + {&__pyx_n_s_copy, __pyx_k_copy, sizeof(__pyx_k_copy), 0, 0, 1, 1}, + {&__pyx_n_s_d, __pyx_k_d, sizeof(__pyx_k_d), 0, 0, 1, 1}, + {&__pyx_n_s_default, __pyx_k_default, sizeof(__pyx_k_default), 0, 0, 1, 1}, + {&__pyx_n_s_delitem, __pyx_k_delitem, sizeof(__pyx_k_delitem), 0, 0, 1, 1}, + {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, + {&__pyx_n_s_dict_2, __pyx_k_dict_2, sizeof(__pyx_k_dict_2), 0, 0, 1, 1}, + {&__pyx_n_s_dicts, __pyx_k_dicts, sizeof(__pyx_k_dicts), 0, 0, 1, 1}, + {&__pyx_kp_u_disable, __pyx_k_disable, sizeof(__pyx_k_disable), 0, 1, 0, 0}, + {&__pyx_n_s_doc, __pyx_k_doc, sizeof(__pyx_k_doc), 0, 0, 1, 1}, + {&__pyx_kp_u_enable, __pyx_k_enable, sizeof(__pyx_k_enable), 0, 1, 0, 0}, + {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_immutable_fn, __pyx_k_immutable_fn, sizeof(__pyx_k_immutable_fn), 0, 0, 1, 1}, + {&__pyx_kp_u_immutabledict, __pyx_k_immutabledict, sizeof(__pyx_k_immutabledict), 0, 1, 0, 0}, + {&__pyx_n_s_immutabledict_2, __pyx_k_immutabledict_2, sizeof(__pyx_k_immutabledict_2), 0, 0, 1, 1}, + {&__pyx_kp_s_immutabledict__KT__VT, __pyx_k_immutabledict__KT__VT, sizeof(__pyx_k_immutabledict__KT__VT), 0, 0, 1, 0}, + {&__pyx_n_s_immutabledict___class_getitem, __pyx_k_immutabledict___class_getitem, sizeof(__pyx_k_immutabledict___class_getitem), 0, 0, 1, 1}, + {&__pyx_n_s_immutabledict___reduce, __pyx_k_immutabledict___reduce, sizeof(__pyx_k_immutabledict___reduce), 0, 0, 1, 1}, + {&__pyx_n_s_immutabledict_clear, __pyx_k_immutabledict_clear, sizeof(__pyx_k_immutabledict_clear), 0, 0, 1, 1}, + {&__pyx_n_s_immutabledict_copy, __pyx_k_immutabledict_copy, sizeof(__pyx_k_immutabledict_copy), 0, 0, 1, 1}, + {&__pyx_n_s_immutabledict_merge_with, __pyx_k_immutabledict_merge_with, sizeof(__pyx_k_immutabledict_merge_with), 0, 0, 1, 1}, + {&__pyx_n_s_immutabledict_pop, __pyx_k_immutabledict_pop, sizeof(__pyx_k_immutabledict_pop), 0, 0, 1, 1}, + {&__pyx_n_s_immutabledict_popitem, __pyx_k_immutabledict_popitem, sizeof(__pyx_k_immutabledict_popitem), 0, 0, 1, 1}, + {&__pyx_n_s_immutabledict_setdefault, __pyx_k_immutabledict_setdefault, sizeof(__pyx_k_immutabledict_setdefault), 0, 0, 1, 1}, + {&__pyx_n_s_immutabledict_union, __pyx_k_immutabledict_union, sizeof(__pyx_k_immutabledict_union), 0, 0, 1, 1}, + {&__pyx_n_s_immutabledict_update, __pyx_k_immutabledict_update, sizeof(__pyx_k_immutabledict_update), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_init_subclass, __pyx_k_init_subclass, sizeof(__pyx_k_init_subclass), 0, 0, 1, 1}, + {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, + {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, + {&__pyx_kp_u_isenabled, __pyx_k_isenabled, sizeof(__pyx_k_isenabled), 0, 1, 0, 0}, + {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, + {&__pyx_n_s_kw, __pyx_k_kw, sizeof(__pyx_k_kw), 0, 0, 1, 1}, + {&__pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_k_lib_sqlalchemy_util__immutabledi, 
sizeof(__pyx_k_lib_sqlalchemy_util__immutabledi), 0, 0, 1, 0}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_merge_with, __pyx_k_merge_with, sizeof(__pyx_k_merge_with), 0, 0, 1, 1}, + {&__pyx_n_s_metaclass, __pyx_k_metaclass, sizeof(__pyx_k_metaclass), 0, 0, 1, 1}, + {&__pyx_n_s_module, __pyx_k_module, sizeof(__pyx_k_module), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_n_s_obj, __pyx_k_obj, sizeof(__pyx_k_obj), 0, 0, 1, 1}, + {&__pyx_n_s_object, __pyx_k_object, sizeof(__pyx_k_object), 0, 0, 1, 1}, + {&__pyx_kp_u_object_is_immutable, __pyx_k_object_is_immutable, sizeof(__pyx_k_object_is_immutable), 0, 1, 0, 0}, + {&__pyx_kp_u_object_is_immutable_and_or_read, __pyx_k_object_is_immutable_and_or_read, sizeof(__pyx_k_object_is_immutable_and_or_read), 0, 1, 0, 0}, + {&__pyx_n_s_or, __pyx_k_or, sizeof(__pyx_k_or), 0, 0, 1, 1}, + {&__pyx_n_s_other, __pyx_k_other, sizeof(__pyx_k_other), 0, 0, 1, 1}, + {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, + {&__pyx_n_s_pop, __pyx_k_pop, sizeof(__pyx_k_pop), 0, 0, 1, 1}, + {&__pyx_n_s_popitem, __pyx_k_popitem, sizeof(__pyx_k_popitem), 0, 0, 1, 1}, + {&__pyx_n_s_prepare, __pyx_k_prepare, sizeof(__pyx_k_prepare), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_ImmutableDictBase, __pyx_k_pyx_unpickle_ImmutableDictBase, sizeof(__pyx_k_pyx_unpickle_ImmutableDictBase), 0, 0, 1, 1}, + {&__pyx_n_s_qualname, __pyx_k_qualname, sizeof(__pyx_k_qualname), 0, 0, 1, 1}, + {&__pyx_n_s_readonly, __pyx_k_readonly, sizeof(__pyx_k_readonly), 0, 0, 1, 1}, + {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, + {&__pyx_n_s_repr, __pyx_k_repr, sizeof(__pyx_k_repr), 0, 0, 1, 1}, + {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, + {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, + {&__pyx_n_s_ror, __pyx_k_ror, sizeof(__pyx_k_ror), 0, 0, 1, 1}, + {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, + {&__pyx_n_s_set_name, __pyx_k_set_name, sizeof(__pyx_k_set_name), 0, 0, 1, 1}, + {&__pyx_n_s_setattr, __pyx_k_setattr, sizeof(__pyx_k_setattr), 0, 0, 1, 1}, + {&__pyx_n_s_setdefault, __pyx_k_setdefault, sizeof(__pyx_k_setdefault), 0, 0, 1, 1}, + {&__pyx_n_s_setitem, __pyx_k_setitem, sizeof(__pyx_k_setitem), 0, 0, 1, 1}, + {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_slots, __pyx_k_slots, sizeof(__pyx_k_slots), 0, 0, 1, 1}, + {&__pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_k_sqlalchemy_util__immutabledict_c, sizeof(__pyx_k_sqlalchemy_util__immutabledict_c), 0, 0, 1, 1}, + {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, + {&__pyx_kp_s_stringsource, 
__pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, + {&__pyx_n_s_super, __pyx_k_super, sizeof(__pyx_k_super), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_kp_s_type_Self, __pyx_k_type_Self, sizeof(__pyx_k_type_Self), 0, 0, 1, 0}, + {&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, + {&__pyx_n_s_union, __pyx_k_union, sizeof(__pyx_k_union), 0, 0, 1, 1}, + {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, + {&__pyx_n_s_use_setstate, __pyx_k_use_setstate, sizeof(__pyx_k_use_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} + }; + return __Pyx_InitStrings(__pyx_string_tab); +} +/* #### Code section: cached_builtins ### */ +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 45, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: cached_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "(tree fragment)":4 + * cdef object __pyx_PickleError + * cdef object __pyx_result + * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< + * from pickle import PickleError as __pyx_PickleError + * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum + */ + __pyx_tuple__2 = PyTuple_Pack(3, __pyx_int_238750788, __pyx_int_228825662, __pyx_int_222419149); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + + /* "sqlalchemy/util/_immutabledict_cy.py":31 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled # type: ignore[no-any-return] + */ + __pyx_codeobj__5 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_is_compiled, 31, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__5)) __PYX_ERR(0, 31, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":44 + * + * + * def _immutable_fn(obj: object) -> NoReturn: # <<<<<<<<<<<<<< + * raise TypeError(f"{obj.__class__.__name__} object is immutable") + * + */ + __pyx_tuple__6 = PyTuple_Pack(1, __pyx_n_s_obj); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(0, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__6); + __Pyx_GIVEREF(__pyx_tuple__6); + __pyx_codeobj__7 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__6, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_immutable_fn, 44, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__7)) __PYX_ERR(0, 44, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":51 + * __slots__ = () + * + * def _readonly(self) -> NoReturn: # <<<<<<<<<<<<<< + * raise TypeError( + * f"{self.__class__.__name__} object is immutable and/or readonly" + */ + __pyx_tuple__8 = PyTuple_Pack(1, __pyx_n_s_self); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(0, 51, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + 
__Pyx_GIVEREF(__pyx_tuple__8); + __pyx_codeobj__9 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_readonly, 51, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__9)) __PYX_ERR(0, 51, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":56 + * ) + * + * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + __pyx_tuple__10 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_key); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(0, 56, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__10); + __Pyx_GIVEREF(__pyx_tuple__10); + __pyx_codeobj__11 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__10, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_delitem, 56, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__11)) __PYX_ERR(0, 56, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":59 + * self._readonly() + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + __pyx_tuple__12 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_key, __pyx_n_s_value); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__12); + __Pyx_GIVEREF(__pyx_tuple__12); + __pyx_codeobj__13 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__12, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_setitem, 59, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__13)) __PYX_ERR(0, 59, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":62 + * self._readonly() + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__12, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_setattr, 62, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(0, 62, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":66 + * + * + * _KT = TypeVar("_KT", bound=Hashable) # <<<<<<<<<<<<<< + * _VT = TypeVar("_VT", bound=Any) + * + */ + __pyx_tuple__15 = PyTuple_Pack(1, __pyx_n_u_KT); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__15); + __Pyx_GIVEREF(__pyx_tuple__15); + + /* "sqlalchemy/util/_immutabledict_cy.py":67 + * + * _KT = TypeVar("_KT", bound=Hashable) + * _VT = TypeVar("_VT", bound=Any) # <<<<<<<<<<<<<< + * + * + */ + __pyx_tuple__16 = PyTuple_Pack(1, __pyx_n_u_VT); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__16); + __Pyx_GIVEREF(__pyx_tuple__16); + + /* "sqlalchemy/util/_immutabledict_cy.py":74 + * # NOTE: this method is required in 3.9 and speeds up the use case + * # ImmutableDictBase[str,int](a_dict) significantly + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__( # type: ignore[override] + * cls, key: Any + */ + __pyx_tuple__17 = PyTuple_Pack(2, __pyx_n_s_cls, __pyx_n_s_key); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__17); + 
__Pyx_GIVEREF(__pyx_tuple__17); + __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_class_getitem, 74, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(0, 74, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":89 + * _immutable_fn(self) + * + * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_codeobj__19 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_clear, 89, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__19)) __PYX_ERR(0, 89, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":92 + * _immutable_fn(self) + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_tuple__20 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_key, __pyx_n_s_default); if (unlikely(!__pyx_tuple__20)) __PYX_ERR(0, 92, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__20); + __Pyx_GIVEREF(__pyx_tuple__20); + __pyx_codeobj__21 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_pop, 92, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__21)) __PYX_ERR(0, 92, __pyx_L1_error) + __pyx_tuple__22 = PyTuple_Pack(1, Py_None); if (unlikely(!__pyx_tuple__22)) __PYX_ERR(0, 92, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__22); + __Pyx_GIVEREF(__pyx_tuple__22); + + /* "sqlalchemy/util/_immutabledict_cy.py":95 + * _immutable_fn(self) + * + * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_codeobj__23 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_popitem, 95, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__23)) __PYX_ERR(0, 95, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":98 + * _immutable_fn(self) + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_codeobj__24 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_setdefault, 98, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__24)) __PYX_ERR(0, 98, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":101 + * _immutable_fn(self) + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_tuple__25 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_arg, __pyx_n_s_kw); if (unlikely(!__pyx_tuple__25)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__25); + __Pyx_GIVEREF(__pyx_tuple__25); + __pyx_codeobj__26 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS|CO_VARKEYWORDS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__25, 
__pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_update, 101, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__26)) __PYX_ERR(0, 101, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + __pyx_tuple__27 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_dict_2, __pyx_n_s_use_setstate); if (unlikely(!__pyx_tuple__27)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__27); + __Pyx_GIVEREF(__pyx_tuple__27); + __pyx_codeobj__28 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__27, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__28)) __PYX_ERR(1, 1, __pyx_L1_error) + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_ImmutableDictBase__set_state(self, __pyx_state) + */ + __pyx_tuple__29 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pyx_state); if (unlikely(!__pyx_tuple__29)) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__29); + __Pyx_GIVEREF(__pyx_tuple__29); + __pyx_codeobj__30 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__29, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__30)) __PYX_ERR(1, 16, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":113 + * + * # ImmutableDictBase start + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__( # type: ignore[override] + * cls, key: Any + */ + __pyx_codeobj__31 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_class_getitem, 113, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__31)) __PYX_ERR(0, 113, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":128 + * _immutable_fn(self) + * + * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_codeobj__32 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_clear, 128, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__32)) __PYX_ERR(0, 128, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":131 + * _immutable_fn(self) + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_codeobj__33 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_pop, 131, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__33)) __PYX_ERR(0, 131, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":134 + * _immutable_fn(self) + * + * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_codeobj__34 = 
(PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_popitem, 134, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__34)) __PYX_ERR(0, 134, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":137 + * _immutable_fn(self) + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_codeobj__35 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_setdefault, 137, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__35)) __PYX_ERR(0, 137, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":140 + * _immutable_fn(self) + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_codeobj__36 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS|CO_VARKEYWORDS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__25, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_update, 140, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__36)) __PYX_ERR(0, 140, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":148 + * return f"immutabledict({dict.__repr__(self)})" + * + * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< + * def union( + * self, other: Optional[Mapping[_KT, _VT]] = None, / + */ + __pyx_tuple__37 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_other, __pyx_n_s_result); if (unlikely(!__pyx_tuple__37)) __PYX_ERR(0, 148, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__37); + __Pyx_GIVEREF(__pyx_tuple__37); + __pyx_codeobj__38 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__37, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_union, 148, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__38)) __PYX_ERR(0, 148, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":164 + * return result + * + * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< + * def merge_with( + * self, *dicts: Optional[Mapping[_KT, _VT]] + */ + __pyx_tuple__39 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_dicts, __pyx_n_s_result, __pyx_n_s_d); if (unlikely(!__pyx_tuple__39)) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__39); + __Pyx_GIVEREF(__pyx_tuple__39); + __pyx_codeobj__40 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__39, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_merge_with, 164, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__40)) __PYX_ERR(0, 164, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":186 + * return self if result is None else result + * + * def copy(self) -> Self: # <<<<<<<<<<<<<< + * return self + * + */ + __pyx_codeobj__41 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, 
__pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_copy, 186, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__41)) __PYX_ERR(0, 186, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":189 + * return self + * + * def __reduce__(self) -> Any: # <<<<<<<<<<<<<< + * return immutabledict, (dict(self),) + * + */ + __pyx_codeobj__42 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_reduce, 189, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__42)) __PYX_ERR(0, 189, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __pyx_unpickle_ImmutableDictBase(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_tuple__43 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__43)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__43); + __Pyx_GIVEREF(__pyx_tuple__43); + __pyx_codeobj__44 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__43, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_ImmutableDictBase, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__44)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} +/* #### Code section: init_constants ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { + if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_222419149 = PyInt_FromLong(222419149L); if (unlikely(!__pyx_int_222419149)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_228825662 = PyInt_FromLong(228825662L); if (unlikely(!__pyx_int_228825662)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_238750788 = PyInt_FromLong(238750788L); if (unlikely(!__pyx_int_238750788)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} +/* #### Code section: init_globals ### */ + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + return 0; +} +/* #### Code section: init_module ### */ + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + #if CYTHON_USE_TYPE_SPECS + __pyx_t_1 = PyTuple_Pack(1, (PyObject *)(&PyDict_Type)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 71, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase_spec, __pyx_t_1); + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + if (unlikely(!__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase)) __PYX_ERR(0, 71, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase_spec, __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase) < 0) __PYX_ERR(0, 71, __pyx_L1_error) + #else + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase = &__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase; + #endif + if (sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase) != sizeof(PyDictObject)) { + if (__Pyx_validate_extern_base((&PyDict_Type)) < 0) __PYX_ERR(0, 71, __pyx_L1_error) + } + #if !CYTHON_COMPILING_IN_LIMITED_API + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_dealloc = (&PyDict_Type)->tp_dealloc; + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_base = (&PyDict_Type); + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_new = (&PyDict_Type)->tp_new; + #endif + #if !CYTHON_USE_TYPE_SPECS + if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase) < 0) __PYX_ERR(0, 71, __pyx_L1_error) + #endif + #if PY_MAJOR_VERSION < 3 + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_print = 0; + #endif + #if !CYTHON_COMPILING_IN_LIMITED_API + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_dictoffset && __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_getattro == PyObject_GenericGetAttr)) { + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + #endif + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_ImmutableDictBase, (PyObject *) __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase) < 0) __PYX_ERR(0, 71, __pyx_L1_error) + #if !CYTHON_COMPILING_IN_LIMITED_API + if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase) < 0) __PYX_ERR(0, 71, __pyx_L1_error) + #endif + #if CYTHON_USE_TYPE_SPECS + __pyx_t_1 = PyTuple_Pack(1, (PyObject *)(&PyDict_Type)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 109, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_spec, __pyx_t_1); + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + if 
(unlikely(!__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict)) __PYX_ERR(0, 109, __pyx_L1_error) + if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_spec, __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) < 0) __PYX_ERR(0, 109, __pyx_L1_error) + #else + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict = &__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict; + #endif + if (sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) != sizeof(PyDictObject)) { + if (__Pyx_validate_extern_base((&PyDict_Type)) < 0) __PYX_ERR(0, 109, __pyx_L1_error) + } + #if !CYTHON_COMPILING_IN_LIMITED_API + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_dealloc = (&PyDict_Type)->tp_dealloc; + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_base = (&PyDict_Type); + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_new = (&PyDict_Type)->tp_new; + #endif + #if !CYTHON_USE_TYPE_SPECS + if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) < 0) __PYX_ERR(0, 109, __pyx_L1_error) + #endif + #if PY_MAJOR_VERSION < 3 + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_print = 0; + #endif + #if !CYTHON_COMPILING_IN_LIMITED_API + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_dictoffset && __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_getattro == PyObject_GenericGetAttr)) { + __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + #endif + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_immutabledict_2, (PyObject *) __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) < 0) __PYX_ERR(0, 109, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType_3_0_11(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", + #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 + sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_11(PyTypeObject), + #elif CYTHON_COMPILING_IN_LIMITED_API + sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_11(PyTypeObject), + #else + sizeof(PyHeapTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_11(PyHeapTypeObject), + #endif + __Pyx_ImportType_CheckSize_Warn_3_0_11); if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int 
__Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec__immutabledict_cy(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec__immutabledict_cy}, + {0, NULL} +}; +#endif + +#ifdef __cplusplus +namespace { + struct PyModuleDef __pyx_moduledef = + #else + static struct PyModuleDef __pyx_moduledef = + #endif + { + PyModuleDef_HEAD_INIT, + "_immutabledict_cy", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #elif CYTHON_USE_MODULE_STATE + sizeof(__pyx_mstate), /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + #if CYTHON_USE_MODULE_STATE + __pyx_m_traverse, /* m_traverse */ + __pyx_m_clear, /* m_clear */ + NULL /* m_free */ + #else + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ + #endif + }; + #ifdef __cplusplus +} /* anonymous namespace */ +#endif +#endif + +#ifndef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#elif PY_MAJOR_VERSION < 3 +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" void +#else +#define __Pyx_PyMODINIT_FUNC void +#endif +#else +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyObject * +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC init_immutabledict_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC init_immutabledict_cy(void) +#else +__Pyx_PyMODINIT_FUNC PyInit__immutabledict_cy(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit__immutabledict_cy(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? 
-1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) +#else +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) +#endif +{ + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { +#if CYTHON_COMPILING_IN_LIMITED_API + result = PyModule_AddObject(module, to_name, value); +#else + result = PyDict_SetItemString(moddict, to_name, value); +#endif + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + CYTHON_UNUSED_VAR(def); + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; +#if CYTHON_COMPILING_IN_LIMITED_API + moddict = module; +#else + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; +#endif + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec__immutabledict_cy(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + int stringtab_initialized = 0; + #if CYTHON_USE_MODULE_STATE + int pystate_addmodule_run = 0; + #endif + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module '_immutabledict_cy' has already been imported. 
Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_immutabledict_cy", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #elif CYTHON_USE_MODULE_STATE + __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + { + int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); + __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_immutabledict_cy" pseudovariable */ + if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + pystate_addmodule_run = 1; + } + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #endif + CYTHON_UNUSED_VAR(__pyx_t_1); + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__immutabledict_cy(void)", 0); + if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + PyEval_InitThreads(); + #endif + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + stringtab_initialized = 1; + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_sqlalchemy__util___immutabledict_cy) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "sqlalchemy.util._immutabledict_cy")) { + if (unlikely((PyDict_SetItemString(modules, "sqlalchemy.util._immutabledict_cy", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + if (unlikely((__Pyx_modinit_type_init_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + if (unlikely((__Pyx_modinit_type_import_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "sqlalchemy/util/_immutabledict_cy.py":10 + * from __future__ import annotations + * + * from typing import Any # <<<<<<<<<<<<<< + * from typing import Dict + * from typing import Hashable + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Any); + __Pyx_GIVEREF(__pyx_n_s_Any); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Any)) __PYX_ERR(0, 10, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Any); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Any, __pyx_t_2) < 0) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":11 + * + * from typing import Any + * from typing import Dict # <<<<<<<<<<<<<< + * from typing import Hashable + * from typing import Mapping + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Dict); + __Pyx_GIVEREF(__pyx_n_s_Dict); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Dict)) __PYX_ERR(0, 11, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Dict); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Dict, 
__pyx_t_3) < 0) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":12 + * from typing import Any + * from typing import Dict + * from typing import Hashable # <<<<<<<<<<<<<< + * from typing import Mapping + * from typing import NoReturn + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Hashable); + __Pyx_GIVEREF(__pyx_n_s_Hashable); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Hashable)) __PYX_ERR(0, 12, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Hashable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Hashable, __pyx_t_2) < 0) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":13 + * from typing import Dict + * from typing import Hashable + * from typing import Mapping # <<<<<<<<<<<<<< + * from typing import NoReturn + * from typing import Optional + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Mapping); + __Pyx_GIVEREF(__pyx_n_s_Mapping); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Mapping)) __PYX_ERR(0, 13, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Mapping, __pyx_t_3) < 0) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":14 + * from typing import Hashable + * from typing import Mapping + * from typing import NoReturn # <<<<<<<<<<<<<< + * from typing import Optional + * from typing import TypeVar + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_NoReturn); + __Pyx_GIVEREF(__pyx_n_s_NoReturn); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_NoReturn)) __PYX_ERR(0, 14, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_NoReturn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_NoReturn, __pyx_t_2) < 0) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":15 + * from typing import Mapping + * from typing import NoReturn + * from typing import Optional # <<<<<<<<<<<<<< + * from typing import TypeVar + * + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + 
__Pyx_INCREF(__pyx_n_s_Optional); + __Pyx_GIVEREF(__pyx_n_s_Optional); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Optional)) __PYX_ERR(0, 15, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Optional); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Optional, __pyx_t_3) < 0) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":16 + * from typing import NoReturn + * from typing import Optional + * from typing import TypeVar # <<<<<<<<<<<<<< + * + * from .typing import Self + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_TypeVar); + __Pyx_GIVEREF(__pyx_n_s_TypeVar); + if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_TypeVar)) __PYX_ERR(0, 16, __pyx_L1_error); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_TypeVar); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_TypeVar, __pyx_t_2) < 0) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":18 + * from typing import TypeVar + * + * from .typing import Self # <<<<<<<<<<<<<< + * + * # START GENERATED CYTHON IMPORT + */ + __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_n_s_Self); + __Pyx_GIVEREF(__pyx_n_s_Self); + if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Self)) __PYX_ERR(0, 18, __pyx_L1_error); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Self, __pyx_t_3) < 0) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":22 + * # START GENERATED CYTHON IMPORT + * # This section is automatically generated by the script tools/cython_imports.py + * try: # <<<<<<<<<<<<<< + * # NOTE: the cython compiler needs this "import cython" in the file, it + * # can't be only "from sqlalchemy.util import cython" with the fallback + */ + { + (void)__pyx_t_1; (void)__pyx_t_4; (void)__pyx_t_5; /* mark used */ + /*try:*/ { + + /* "sqlalchemy/util/_immutabledict_cy.py":26 + * # can't be only "from sqlalchemy.util import cython" with the fallback + * # in that module + * import cython # <<<<<<<<<<<<<< + * except ModuleNotFoundError: + * from sqlalchemy.util import cython + */ + } + } + + /* "sqlalchemy/util/_immutabledict_cy.py":31 + * + * + * def _is_compiled() -> bool: # <<<<<<<<<<<<<< + * """Utility function to indicate if this module is compiled or not.""" + * return cython.compiled 
# type: ignore[no-any-return] + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 31, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_bool) < 0) __PYX_ERR(0, 31, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_1_is_compiled, 0, __pyx_n_s_is_compiled, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__5)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 31, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_compiled, __pyx_t_3) < 0) __PYX_ERR(0, 31, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":44 + * + * + * def _immutable_fn(obj: object) -> NoReturn: # <<<<<<<<<<<<<< + * raise TypeError(f"{obj.__class__.__name__} object is immutable") + * + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_obj, __pyx_n_s_object) < 0) __PYX_ERR(0, 44, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 44, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_3_immutable_fn, 0, __pyx_n_s_immutable_fn, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__7)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_immutable_fn, __pyx_t_2) < 0) __PYX_ERR(0, 44, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":48 + * + * + * class ReadOnlyContainer: # <<<<<<<<<<<<<< + * __slots__ = () + * + */ + __pyx_t_2 = __Pyx_Py3MetaclassPrepare((PyObject *) NULL, __pyx_empty_tuple, __pyx_n_s_ReadOnlyContainer, __pyx_n_s_ReadOnlyContainer, (PyObject *) NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, (PyObject *) NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + + /* "sqlalchemy/util/_immutabledict_cy.py":49 + * + * class ReadOnlyContainer: + * __slots__ = () # <<<<<<<<<<<<<< + * + * def _readonly(self) -> NoReturn: + */ + if (__Pyx_SetNameInClass(__pyx_t_2, __pyx_n_s_slots, __pyx_empty_tuple) < 0) __PYX_ERR(0, 49, __pyx_L1_error) + + /* "sqlalchemy/util/_immutabledict_cy.py":51 + * __slots__ = () + * + * def _readonly(self) -> NoReturn: # <<<<<<<<<<<<<< + * raise TypeError( + * f"{self.__class__.__name__} object is immutable and/or readonly" + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 51, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 51, __pyx_L1_error) + __pyx_t_6 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_1_readonly, 0, __pyx_n_s_ReadOnlyContainer__readonly, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__9)); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 51, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_6, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if 
(__Pyx_SetNameInClass(__pyx_t_2, __pyx_n_s_readonly, __pyx_t_6) < 0) __PYX_ERR(0, 51, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":56 + * ) + * + * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + __pyx_t_6 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 56, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 56, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 56, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_3__delitem__, 0, __pyx_n_s_ReadOnlyContainer___delitem, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__11)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 56, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_6); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__Pyx_SetNameInClass(__pyx_t_2, __pyx_n_s_delitem, __pyx_t_3) < 0) __PYX_ERR(0, 56, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":59 + * self._readonly() + * + * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 59, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 59, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 59, __pyx_L1_error) + __pyx_t_6 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_5__setitem__, 0, __pyx_n_s_ReadOnlyContainer___setitem, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__13)); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_6, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetNameInClass(__pyx_t_2, __pyx_n_s_setitem, __pyx_t_6) < 0) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":62 + * self._readonly() + * + * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< + * self._readonly() + * + */ + __pyx_t_6 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 62, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 62, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 62, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 62, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_7__setattr__, 0, __pyx_n_s_ReadOnlyContainer___setattr, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__14)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 62, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_6); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__Pyx_SetNameInClass(__pyx_t_2, 
__pyx_n_s_setattr, __pyx_t_3) < 0) __PYX_ERR(0, 62, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":48 + * + * + * class ReadOnlyContainer: # <<<<<<<<<<<<<< + * __slots__ = () + * + */ + __pyx_t_3 = __Pyx_Py3ClassCreate(((PyObject*)&PyType_Type), __pyx_n_s_ReadOnlyContainer, __pyx_empty_tuple, __pyx_t_2, NULL, 0, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_ReadOnlyContainer, __pyx_t_3) < 0) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":66 + * + * + * _KT = TypeVar("_KT", bound=Hashable) # <<<<<<<<<<<<<< + * _VT = TypeVar("_VT", bound=Any) + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_TypeVar); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_Hashable); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_bound, __pyx_t_6) < 0) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_tuple__15, __pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_KT, __pyx_t_6) < 0) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":67 + * + * _KT = TypeVar("_KT", bound=Hashable) + * _VT = TypeVar("_VT", bound=Any) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_TypeVar); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_Any); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_bound, __pyx_t_2) < 0) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_tuple__16, __pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_VT, __pyx_t_2) < 0) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":74 + * # NOTE: this method is required in 3.9 and speeds up the use case + * # ImmutableDictBase[str,int](a_dict) significantly + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__( # type: ignore[override] + * cls, key: Any + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 74, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_type_Self) < 0) __PYX_ERR(0, 74, __pyx_L1_error) + __pyx_t_3 = 
__Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__, __Pyx_CYFUNCTION_CLASSMETHOD | __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase___class_getite, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__18)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_class_getitem, __pyx_t_3) < 0) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + __Pyx_GetNameInClass(__pyx_t_3, (PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_class_getitem); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = __Pyx_Method_ClassMethod(__pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_class_getitem, __pyx_t_2) < 0) __PYX_ERR(0, 74, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + + /* "sqlalchemy/util/_immutabledict_cy.py":89 + * _immutable_fn(self) + * + * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 89, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 89, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase_clear, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__19)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 89, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_clear, __pyx_t_3) < 0) __PYX_ERR(0, 89, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + + /* "sqlalchemy/util/_immutabledict_cy.py":92 + * _immutable_fn(self) + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 92, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 92, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_default, __pyx_kp_s_Optional_Any) < 0) __PYX_ERR(0, 92, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 92, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase_pop, NULL, 
__pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__21)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 92, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetDefaultsTuple(__pyx_t_2, __pyx_tuple__22); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_pop, __pyx_t_2) < 0) __PYX_ERR(0, 92, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + + /* "sqlalchemy/util/_immutabledict_cy.py":95 + * _immutable_fn(self) + * + * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 95, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 95, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase_popitem, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__23)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 95, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_popitem, __pyx_t_3) < 0) __PYX_ERR(0, 95, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + + /* "sqlalchemy/util/_immutabledict_cy.py":98 + * _immutable_fn(self) + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 98, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 98, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_default, __pyx_kp_s_Optional_Any) < 0) __PYX_ERR(0, 98, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 98, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase_setdefault, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__24)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 98, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetDefaultsTuple(__pyx_t_2, __pyx_tuple__22); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_setdefault, __pyx_t_2) < 0) __PYX_ERR(0, 98, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + + /* "sqlalchemy/util/_immutabledict_cy.py":101 + * _immutable_fn(self) + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(3); if 
(unlikely(!__pyx_t_2)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_arg, __pyx_n_s_Any) < 0) __PYX_ERR(0, 101, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_kw, __pyx_n_s_Any) < 0) __PYX_ERR(0, 101, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 101, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_17update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase_update, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__26)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_update, __pyx_t_3) < 0) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef tuple state + * cdef object _dict + */ + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_19__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase___reduce_cytho, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__28)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_reduce_cython, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + + /* "(tree fragment)":16 + * else: + * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_ImmutableDictBase__set_state(self, __pyx_state) + */ + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_21__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase___setstate_cyt, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__30)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_setstate_cython, __pyx_t_3) < 0) __PYX_ERR(1, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); + + /* "sqlalchemy/util/_immutabledict_cy.py":113 + * + * # ImmutableDictBase start + * @classmethod # <<<<<<<<<<<<<< + * def __class_getitem__( # type: ignore[override] + * cls, key: Any + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 113, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_type_Self) < 0) __PYX_ERR(0, 113, __pyx_L1_error) + __pyx_t_2 = 
__Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_1__class_getitem__, __Pyx_CYFUNCTION_CLASSMETHOD | __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_immutabledict___class_getitem, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__31)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_class_getitem, __pyx_t_2) < 0) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + __Pyx_GetNameInClass(__pyx_t_2, (PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_class_getitem); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_Method_ClassMethod(__pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_class_getitem, __pyx_t_3) < 0) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + + /* "sqlalchemy/util/_immutabledict_cy.py":128 + * _immutable_fn(self) + * + * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 128, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 128, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_9clear, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_immutabledict_clear, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__32)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 128, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_clear, __pyx_t_2) < 0) __PYX_ERR(0, 128, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + + /* "sqlalchemy/util/_immutabledict_cy.py":131 + * _immutable_fn(self) + * + * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 131, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_default, __pyx_kp_s_Optional_Any) < 0) __PYX_ERR(0, 131, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 131, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_11pop, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_immutabledict_pop, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject 
*)__pyx_codeobj__33)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetDefaultsTuple(__pyx_t_3, __pyx_tuple__22); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_pop, __pyx_t_3) < 0) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + + /* "sqlalchemy/util/_immutabledict_cy.py":134 + * _immutable_fn(self) + * + * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 134, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 134, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_13popitem, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_immutabledict_popitem, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__34)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 134, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_popitem, __pyx_t_2) < 0) __PYX_ERR(0, 134, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + + /* "sqlalchemy/util/_immutabledict_cy.py":137 + * _immutable_fn(self) + * + * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 137, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 137, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_default, __pyx_kp_s_Optional_Any) < 0) __PYX_ERR(0, 137, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 137, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_15setdefault, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_immutabledict_setdefault, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__35)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 137, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetDefaultsTuple(__pyx_t_3, __pyx_tuple__22); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_setdefault, __pyx_t_3) < 0) __PYX_ERR(0, 137, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + + /* "sqlalchemy/util/_immutabledict_cy.py":140 + * _immutable_fn(self) + * + * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< + * _immutable_fn(self) + * + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 140, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if 
(PyDict_SetItem(__pyx_t_3, __pyx_n_s_arg, __pyx_n_s_Any) < 0) __PYX_ERR(0, 140, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_kw, __pyx_n_s_Any) < 0) __PYX_ERR(0, 140, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 140, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_17update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_immutabledict_update, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__36)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 140, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_update, __pyx_t_2) < 0) __PYX_ERR(0, 140, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + + /* "sqlalchemy/util/_immutabledict_cy.py":148 + * return f"immutabledict({dict.__repr__(self)})" + * + * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< + * def union( + * self, other: Optional[Mapping[_KT, _VT]] = None, / + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 148, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_other, __pyx_kp_s_Optional_Mapping__KT__VT) < 0) __PYX_ERR(0, 148, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_immutabledict__KT__VT) < 0) __PYX_ERR(0, 148, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_21union, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_immutabledict_union, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__38)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 148, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetDefaultsTuple(__pyx_t_3, __pyx_tuple__22); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_union, __pyx_t_3) < 0) __PYX_ERR(0, 148, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + + /* "sqlalchemy/util/_immutabledict_cy.py":164 + * return result + * + * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< + * def merge_with( + * self, *dicts: Optional[Mapping[_KT, _VT]] + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_dicts, __pyx_kp_s_Optional_Mapping__KT__VT) < 0) __PYX_ERR(0, 164, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_immutabledict__KT__VT) < 0) __PYX_ERR(0, 164, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_23merge_with, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_immutabledict_merge_with, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__40)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, 
__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_merge_with, __pyx_t_2) < 0) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + + /* "sqlalchemy/util/_immutabledict_cy.py":186 + * return self if result is None else result + * + * def copy(self) -> Self: # <<<<<<<<<<<<<< + * return self + * + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 186, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_Self) < 0) __PYX_ERR(0, 186, __pyx_L1_error) + __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_25copy, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_immutabledict_copy, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__41)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 186, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_copy, __pyx_t_3) < 0) __PYX_ERR(0, 186, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + + /* "sqlalchemy/util/_immutabledict_cy.py":189 + * return self + * + * def __reduce__(self) -> Any: # <<<<<<<<<<<<<< + * return immutabledict, (dict(self),) + * + */ + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 189, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_Any) < 0) __PYX_ERR(0, 189, __pyx_L1_error) + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_27__reduce__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_immutabledict___reduce, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__42)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 189, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, __pyx_n_s_reduce, __pyx_t_2) < 0) __PYX_ERR(0, 189, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); + + /* "(tree fragment)":1 + * def __pyx_unpickle_ImmutableDictBase(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * cdef object __pyx_PickleError + * cdef object __pyx_result + */ + __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_5__pyx_unpickle_ImmutableDictBase, 0, __pyx_n_s_pyx_unpickle_ImmutableDictBase, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__44)); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_ImmutableDictBase, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "sqlalchemy/util/_immutabledict_cy.py":1 + * # util/_immutabledict_cy.py # <<<<<<<<<<<<<< + * # Copyright (C) 2010-2025 the SQLAlchemy authors and contributors + 
* # + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_6); + if (__pyx_m) { + if (__pyx_d && stringtab_initialized) { + __Pyx_AddTraceback("init sqlalchemy.util._immutabledict_cy", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + #if !CYTHON_USE_MODULE_STATE + Py_CLEAR(__pyx_m); + #else + Py_DECREF(__pyx_m); + if (pystate_addmodule_run) { + PyObject *tp, *value, *tb; + PyErr_Fetch(&tp, &value, &tb); + PyState_RemoveModule(&__pyx_moduledef); + PyErr_Restore(tp, value, tb); + } + #endif + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init sqlalchemy.util._immutabledict_cy"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} +/* #### Code section: cleanup_globals ### */ +/* #### Code section: cleanup_module ### */ +/* #### Code section: main_method ### */ +/* #### Code section: utility_code_pragmas ### */ +#ifdef _MSC_VER +#pragma warning( push ) +/* Warning 4127: conditional expression is constant + * Cython uses constant conditional expressions to allow in inline functions to be optimized at + * compile-time, so this warning is not useful + */ +#pragma warning( disable : 4127 ) +#endif + + + +/* #### Code section: utility_code_def ### */ + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030C00A6 + PyObject *current_exception = tstate->current_exception; + if (unlikely(!current_exception)) return 0; + exc_type = (PyObject*) Py_TYPE(current_exception); + if (exc_type == err) return 1; +#else + exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; +#endif + #if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(exc_type); + #endif + if (unlikely(PyTuple_Check(err))) { + result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + } else { + result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err); + } + #if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(exc_type); + #endif + return result; +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { +#if PY_VERSION_HEX >= 0x030C00A6 + PyObject *tmp_value; + assert(type == NULL || (value != NULL && type == (PyObject*) Py_TYPE(value))); + if (value) { + #if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(((PyBaseExceptionObject*) value)->traceback != tb)) + #endif + PyException_SetTraceback(value, tb); + } + tmp_value = 
tstate->current_exception; + tstate->current_exception = value; + Py_XDECREF(tmp_value); + Py_XDECREF(type); + Py_XDECREF(tb); +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#endif +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { +#if PY_VERSION_HEX >= 0x030C00A6 + PyObject* exc_value; + exc_value = tstate->current_exception; + tstate->current_exception = 0; + *value = exc_value; + *type = NULL; + *tb = NULL; + if (exc_value) { + *type = (PyObject*) Py_TYPE(exc_value); + Py_INCREF(*type); + #if CYTHON_COMPILING_IN_CPYTHON + *tb = ((PyBaseExceptionObject*) exc_value)->traceback; + Py_XINCREF(*tb); + #else + *tb = PyException_GetTraceback(exc_value); + #endif + } +#else + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#endif +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* PyObjectGetAttrStrNoError */ +#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + (void) PyObject_GetOptionalAttr(obj, attr_name, &result); + return result; +#else +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { + return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); + } +#endif + result = __Pyx_PyObject_GetAttrStr(obj, attr_name); + if (unlikely(!result)) { + __Pyx_PyObject_GetAttrStr_ClearAttributeError(); + } + return result; +#endif +} + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStrNoError(__pyx_b, name); + if (unlikely(!result) && !PyErr_Occurred()) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* TupleAndListFromArray */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE void __Pyx_copy_object_array(PyObject *const *CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { + PyObject *v; + Py_ssize_t i; + for (i = 0; i < length; i++) { + v = dest[i] = src[i]; + Py_INCREF(v); + } +} +static CYTHON_INLINE PyObject * +__Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) +{ + PyObject *res; + if (n <= 0) { + Py_INCREF(__pyx_empty_tuple); + return 
__pyx_empty_tuple; + } + res = PyTuple_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyTupleObject*)res)->ob_item, n); + return res; +} +static CYTHON_INLINE PyObject * +__Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n) +{ + PyObject *res; + if (n <= 0) { + return PyList_New(0); + } + res = PyList_New(n); + if (unlikely(res == NULL)) return NULL; + __Pyx_copy_object_array(src, ((PyListObject*)res)->ob_item, n); + return res; +} +#endif + +/* BytesEquals */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + ps1 = PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + return (equals == Py_EQ); + } else { + int result; +#if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) + Py_hash_t hash1, hash2; + hash1 = ((PyBytesObject*)s1)->ob_shash; + hash2 = ((PyBytesObject*)s2)->ob_shash; + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + return (equals == Py_NE); + } +#endif + result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +/* UnicodeEquals */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } +#if CYTHON_USE_UNICODE_INTERNALS + { + Py_hash_t hash1, hash2; + #if CYTHON_PEP393_ENABLED + hash1 = ((PyASCIIObject*)s1)->hash; + hash2 = ((PyASCIIObject*)s2)->hash; + #else + hash1 = ((PyUnicodeObject*)s1)->hash; + hash2 = ((PyUnicodeObject*)s2)->hash; + #endif + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { 
+ goto return_ne; + } + } +#endif + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + +/* fastcall */ +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s) +{ + Py_ssize_t i, n = PyTuple_GET_SIZE(kwnames); + for (i = 0; i < n; i++) + { + if (s == PyTuple_GET_ITEM(kwnames, i)) return kwvalues[i]; + } + for (i = 0; i < n; i++) + { + int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); + if (unlikely(eq != 0)) { + if (unlikely(eq < 0)) return NULL; + return kwvalues[i]; + } + } + return NULL; +} +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 +CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { + Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); + PyObject *dict; + dict = PyDict_New(); + if (unlikely(!dict)) + return NULL; + for (i=0; i= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject *const *kwvalues, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + int kwds_is_tuple = CYTHON_METH_FASTCALL && likely(PyTuple_Check(kwds)); + while (1) { + Py_XDECREF(key); key = NULL; + Py_XDECREF(value); value = NULL; + if (kwds_is_tuple) { + Py_ssize_t size; +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(kwds); +#else + size = PyTuple_Size(kwds); + if (size < 0) goto bad; +#endif + if (pos >= size) break; +#if CYTHON_AVOID_BORROWED_REFS + key = __Pyx_PySequence_ITEM(kwds, pos); + if (!key) goto bad; +#elif CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kwds, pos); +#else + key = PyTuple_GetItem(kwds, pos); + if (!key) goto bad; +#endif + value = kwvalues[pos]; + pos++; + } + else + { + if (!PyDict_Next(kwds, &pos, &key, &value)) break; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + } + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + Py_INCREF(value); + Py_DECREF(key); +#endif + key = NULL; + value = NULL; 
+ continue; + } +#if !CYTHON_AVOID_BORROWED_REFS + Py_INCREF(key); +#endif + Py_INCREF(value); + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = ( + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key) + ); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; +#if CYTHON_AVOID_BORROWED_REFS + value = NULL; +#endif + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + Py_XDECREF(key); + Py_XDECREF(value); + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + Py_XDECREF(key); + Py_XDECREF(value); + return -1; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* UnicodeConcatInPlace */ +# if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 +static int +__Pyx_unicode_modifiable(PyObject *unicode) +{ + if (Py_REFCNT(unicode) != 1) + return 0; + if (!PyUnicode_CheckExact(unicode)) + return 0; + if (PyUnicode_CHECK_INTERNED(unicode)) + return 0; + return 1; +} +static CYTHON_INLINE PyObject *__Pyx_PyUnicode_ConcatInPlaceImpl(PyObject **p_left, PyObject *right + #if CYTHON_REFNANNY + , void* __pyx_refnanny + #endif + ) { + PyObject *left = *p_left; + Py_ssize_t left_len, right_len, new_len; + if (unlikely(__Pyx_PyUnicode_READY(left) == -1)) + return NULL; + if (unlikely(__Pyx_PyUnicode_READY(right) == -1)) + return NULL; + left_len = PyUnicode_GET_LENGTH(left); + if (left_len == 0) { + Py_INCREF(right); + return right; + } + right_len = PyUnicode_GET_LENGTH(right); + if (right_len == 0) { + Py_INCREF(left); + return left; + } + if (unlikely(left_len > PY_SSIZE_T_MAX - right_len)) { + PyErr_SetString(PyExc_OverflowError, + "strings are too large to concat"); + return NULL; + } + new_len = left_len + right_len; + if (__Pyx_unicode_modifiable(left) + && PyUnicode_CheckExact(right) + && PyUnicode_KIND(right) <= PyUnicode_KIND(left) + && !(PyUnicode_IS_ASCII(left) && !PyUnicode_IS_ASCII(right))) { + int ret; + __Pyx_GIVEREF(*p_left); + ret = PyUnicode_Resize(p_left, new_len); + __Pyx_GOTREF(*p_left); + if (unlikely(ret != 0)) + return NULL; + #if PY_VERSION_HEX >= 0x030d0000 + if (unlikely(PyUnicode_CopyCharacters(*p_left, left_len, right, 0, right_len) < 0)) return NULL; + #else + _PyUnicode_FastCopyCharacters(*p_left, left_len, right, 0, right_len); + #endif + __Pyx_INCREF(*p_left); + __Pyx_GIVEREF(*p_left); + return *p_left; + } else { + return __Pyx_PyUnicode_Concat(left, right); + } + } +#endif + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL && !CYTHON_VECTORCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) { + return NULL; + } + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) { + return NULL; + } + #endif + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = Py_TYPE(func)->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = __Pyx_CyOrPyCFunction_GET_FUNCTION(func); + self = __Pyx_CyOrPyCFunction_GET_SELF(func); + #if PY_MAJOR_VERSION < 3 + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + #else + if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) + return NULL; + #endif + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectFastCall */ +#if PY_VERSION_HEX < 0x03090000 || CYTHON_COMPILING_IN_LIMITED_API +static PyObject* 
__Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs) { + PyObject *argstuple; + PyObject *result = 0; + size_t i; + argstuple = PyTuple_New((Py_ssize_t)nargs); + if (unlikely(!argstuple)) return NULL; + for (i = 0; i < nargs; i++) { + Py_INCREF(args[i]); + if (__Pyx_PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]) < 0) goto bad; + } + result = __Pyx_PyObject_Call(func, argstuple, kwargs); + bad: + Py_DECREF(argstuple); + return result; +} +#endif +static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t _nargs, PyObject *kwargs) { + Py_ssize_t nargs = __Pyx_PyVectorcall_NARGS(_nargs); +#if CYTHON_COMPILING_IN_CPYTHON + if (nargs == 0 && kwargs == NULL) { + if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_NOARGS)) + return __Pyx_PyObject_CallMethO(func, NULL); + } + else if (nargs == 1 && kwargs == NULL) { + if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_O)) + return __Pyx_PyObject_CallMethO(func, args[0]); + } +#endif + #if PY_VERSION_HEX < 0x030800B1 + #if CYTHON_FAST_PYCCALL + if (PyCFunction_Check(func)) { + if (kwargs) { + return _PyCFunction_FastCallDict(func, args, nargs, kwargs); + } else { + return _PyCFunction_FastCallKeywords(func, args, nargs, NULL); + } + } + #if PY_VERSION_HEX >= 0x030700A1 + if (!kwargs && __Pyx_IS_TYPE(func, &PyMethodDescr_Type)) { + return _PyMethodDescr_FastCallKeywords(func, args, nargs, NULL); + } + #endif + #endif + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs); + } + #endif + #endif + if (kwargs == NULL) { + #if CYTHON_VECTORCALL + #if PY_VERSION_HEX < 0x03090000 + vectorcallfunc f = _PyVectorcall_Function(func); + #else + vectorcallfunc f = PyVectorcall_Function(func); + #endif + if (f) { + return f(func, args, (size_t)nargs, NULL); + } + #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL + if (__Pyx_CyFunction_CheckExact(func)) { + __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func); + if (f) return f(func, args, (size_t)nargs, NULL); + } + #endif + } + if (nargs == 0) { + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, kwargs); + } + #if PY_VERSION_HEX >= 0x03090000 && !CYTHON_COMPILING_IN_LIMITED_API + return PyObject_VectorcallDict(func, args, (size_t)nargs, kwargs); + #else + return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs); + #endif +} + +/* PyObjectCallOneArg */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *args[2] = {NULL, arg}; + return __Pyx_PyObject_FastCall(func, args+1, 1 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + __Pyx_PyThreadState_declare + CYTHON_UNUSED_VAR(cause); + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not 
have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { + #if PY_VERSION_HEX >= 0x030C00A6 + PyException_SetTraceback(value, tb); + #elif CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#else + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* PyDictVersioning */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + return likely(dict) ? 
__PYX_GET_DICT_VERSION(dict) : 0; +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; +} +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#elif CYTHON_COMPILING_IN_LIMITED_API + if (unlikely(!__pyx_m)) { + return NULL; + } + result = PyObject_GetAttr(__pyx_m, name); + if (likely(result)) { + return result; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* KeywordStringCheck */ +static int __Pyx_CheckKeywordStrings( + PyObject *kw, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kw, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + if (CYTHON_METH_FASTCALL && likely(PyTuple_Check(kw))) { + Py_ssize_t kwsize; +#if CYTHON_ASSUME_SAFE_MACROS + kwsize = PyTuple_GET_SIZE(kw); +#else + kwsize = PyTuple_Size(kw); + if (kwsize < 0) return 0; +#endif + if (unlikely(kwsize == 0)) + return 1; + if (!kw_allowed) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, 0); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + goto invalid_keyword; + } +#if PY_VERSION_HEX < 0x03090000 + for (pos = 0; pos < kwsize; pos++) { +#if CYTHON_ASSUME_SAFE_MACROS + key = PyTuple_GET_ITEM(kw, pos); +#else + key = PyTuple_GetItem(kw, pos); + if (!key) return 0; +#endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } +#endif + return 1; + } + while (PyDict_Next(kw, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if (!kw_allowed && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", 
function_name); + return 0; +#endif +invalid_keyword: + #if PY_MAJOR_VERSION < 3 + PyErr_Format(PyExc_TypeError, + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + PyErr_Format(PyExc_TypeError, + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + +/* GetAttr3 */ +#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 +static PyObject *__Pyx_GetAttr3Default(PyObject *d) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + __Pyx_PyErr_Clear(); + Py_INCREF(d); + return d; +} +#endif +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r; +#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 + int res = PyObject_GetOptionalAttr(o, n, &r); + return (res != 0) ? r : __Pyx_NewRef(d); +#else + #if CYTHON_USE_TYPE_SLOTS + if (likely(PyString_Check(n))) { + r = __Pyx_PyObject_GetAttrStrNoError(o, n); + if (unlikely(!r) && likely(!PyErr_Occurred())) { + r = __Pyx_NewRef(d); + } + return r; + } + #endif + r = PyObject_GetAttr(o, n); + return (likely(r)) ? r : __Pyx_GetAttr3Default(d); +#endif +} + +/* RaiseUnexpectedTypeError */ +static int +__Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) +{ + __Pyx_TypeName obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); + PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, + expected, obj_type_name); + __Pyx_DECREF_TypeName(obj_type_name); + return 0; +} + +/* JoinPyUnicode */ +static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, + Py_UCS4 max_char) { +#if CYTHON_USE_UNICODE_INTERNALS && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + PyObject *result_uval; + int result_ukind, kind_shift; + Py_ssize_t i, char_pos; + void *result_udata; + CYTHON_MAYBE_UNUSED_VAR(max_char); +#if CYTHON_PEP393_ENABLED + result_uval = PyUnicode_New(result_ulength, max_char); + if (unlikely(!result_uval)) return NULL; + result_ukind = (max_char <= 255) ? PyUnicode_1BYTE_KIND : (max_char <= 65535) ? PyUnicode_2BYTE_KIND : PyUnicode_4BYTE_KIND; + kind_shift = (result_ukind == PyUnicode_4BYTE_KIND) ? 2 : result_ukind - 1; + result_udata = PyUnicode_DATA(result_uval); +#else + result_uval = PyUnicode_FromUnicode(NULL, result_ulength); + if (unlikely(!result_uval)) return NULL; + result_ukind = sizeof(Py_UNICODE); + kind_shift = (result_ukind == 4) ? 
2 : result_ukind - 1; + result_udata = PyUnicode_AS_UNICODE(result_uval); +#endif + assert(kind_shift == 2 || kind_shift == 1 || kind_shift == 0); + char_pos = 0; + for (i=0; i < value_count; i++) { + int ukind; + Py_ssize_t ulength; + void *udata; + PyObject *uval = PyTuple_GET_ITEM(value_tuple, i); + if (unlikely(__Pyx_PyUnicode_READY(uval))) + goto bad; + ulength = __Pyx_PyUnicode_GET_LENGTH(uval); + if (unlikely(!ulength)) + continue; + if (unlikely((PY_SSIZE_T_MAX >> kind_shift) - ulength < char_pos)) + goto overflow; + ukind = __Pyx_PyUnicode_KIND(uval); + udata = __Pyx_PyUnicode_DATA(uval); + if (!CYTHON_PEP393_ENABLED || ukind == result_ukind) { + memcpy((char *)result_udata + (char_pos << kind_shift), udata, (size_t) (ulength << kind_shift)); + } else { + #if PY_VERSION_HEX >= 0x030d0000 + if (unlikely(PyUnicode_CopyCharacters(result_uval, char_pos, uval, 0, ulength) < 0)) goto bad; + #elif CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 || defined(_PyUnicode_FastCopyCharacters) + _PyUnicode_FastCopyCharacters(result_uval, char_pos, uval, 0, ulength); + #else + Py_ssize_t j; + for (j=0; j < ulength; j++) { + Py_UCS4 uchar = __Pyx_PyUnicode_READ(ukind, udata, j); + __Pyx_PyUnicode_WRITE(result_ukind, result_udata, char_pos+j, uchar); + } + #endif + } + char_pos += ulength; + } + return result_uval; +overflow: + PyErr_SetString(PyExc_OverflowError, "join() result is too long for a Python string"); +bad: + Py_DECREF(result_uval); + return NULL; +#else + CYTHON_UNUSED_VAR(max_char); + CYTHON_UNUSED_VAR(result_ulength); + CYTHON_UNUSED_VAR(value_count); + return PyUnicode_Join(__pyx_empty_unicode, value_tuple); +#endif +} + +/* PyObjectCallNoArg */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { + PyObject *arg[2] = {NULL, NULL}; + return __Pyx_PyObject_FastCall(func, arg + 1, 0 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *module = 0; + PyObject *empty_dict = 0; + PyObject *empty_list = 0; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (unlikely(!py_import)) + goto bad; + if (!from_list) { + empty_list = PyList_New(0); + if (unlikely(!empty_list)) + goto bad; + from_list = empty_list; + } + #endif + empty_dict = PyDict_New(); + if (unlikely(!empty_dict)) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.') != NULL) { + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, 1); + if (unlikely(!module)) { + if (unlikely(!PyErr_ExceptionMatches(PyExc_ImportError))) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (unlikely(!py_level)) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, __pyx_d, empty_dict, from_list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, __pyx_d, empty_dict, from_list, level); + #endif + } + } +bad: + Py_XDECREF(empty_dict); + Py_XDECREF(empty_list); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + const char* 
module_name_str = 0; + PyObject* module_name = 0; + PyObject* module_dot = 0; + PyObject* full_name = 0; + PyErr_Clear(); + module_name_str = PyModule_GetName(module); + if (unlikely(!module_name_str)) { goto modbad; } + module_name = PyUnicode_FromString(module_name_str); + if (unlikely(!module_name)) { goto modbad; } + module_dot = PyUnicode_Concat(module_name, __pyx_kp_u__3); + if (unlikely(!module_dot)) { goto modbad; } + full_name = PyUnicode_Concat(module_dot, name); + if (unlikely(!full_name)) { goto modbad; } + #if PY_VERSION_HEX < 0x030700A1 || (CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM < 0x07030400) + { + PyObject *modules = PyImport_GetModuleDict(); + if (unlikely(!modules)) + goto modbad; + value = PyObject_GetItem(modules, full_name); + } + #else + value = PyImport_GetModule(full_name); + #endif + modbad: + Py_XDECREF(full_name); + Py_XDECREF(module_dot); + Py_XDECREF(module_name); + } + if (unlikely(!value)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* GetAttr */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_USE_TYPE_SLOTS +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/* HasAttr */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { + PyObject *r; + if (unlikely(!__Pyx_PyBaseString_Check(n))) { + PyErr_SetString(PyExc_TypeError, + "hasattr(): attribute name must be string"); + return -1; + } + r = __Pyx_GetAttr(o, n); + if (!r) { + PyErr_Clear(); + return 0; + } else { + Py_DECREF(r); + return 1; + } +} + +/* GetItemInt */ +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (unlikely(!j)) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) 
| likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; + PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; + if (mm && mm->mp_subscript) { + PyObject *r, *key = PyInt_FromSsize_t(i); + if (unlikely(!key)) return NULL; + r = mm->mp_subscript(o, key); + Py_DECREF(key); + return r; + } + if (likely(sm && sm->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { + Py_ssize_t l = sm->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return sm->sq_item(o, i); + } + } +#else + if (is_list || !PyMapping_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* FixUpExtensionType */ +#if CYTHON_USE_TYPE_SPECS +static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { +#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + CYTHON_UNUSED_VAR(spec); + CYTHON_UNUSED_VAR(type); +#else + const PyType_Slot *slot = spec->slots; + while (slot && slot->slot && slot->slot != Py_tp_members) + slot++; + if (slot && slot->slot == Py_tp_members) { + int changed = 0; +#if !(PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON) + const +#endif + PyMemberDef *memb = (PyMemberDef*) slot->pfunc; + while (memb && memb->name) { + if (memb->name[0] == '_' && memb->name[1] == '_') { +#if PY_VERSION_HEX < 0x030900b1 + if (strcmp(memb->name, "__weaklistoffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + type->tp_weaklistoffset = memb->offset; + changed = 1; + } + else if (strcmp(memb->name, "__dictoffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + type->tp_dictoffset = memb->offset; + changed = 1; + } +#if CYTHON_METH_FASTCALL + else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); +#if PY_VERSION_HEX >= 0x030800b4 + type->tp_vectorcall_offset = memb->offset; +#else + type->tp_print = (printfunc) memb->offset; +#endif + changed = 1; + } +#endif +#else + if ((0)); +#endif +#if PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON + else if (strcmp(memb->name, "__module__") == 0) { + PyObject *descr; + assert(memb->type == T_OBJECT); + assert(memb->flags == 0 || memb->flags == READONLY); + descr = PyDescr_NewMember(type, memb); + if (unlikely(!descr)) + return -1; + if (unlikely(PyDict_SetItem(type->tp_dict, PyDescr_NAME(descr), descr) < 0)) { + Py_DECREF(descr); + return -1; + } + Py_DECREF(descr); + changed = 1; + } +#endif + } + memb++; + } + if (changed) + PyType_Modified(type); + } +#endif + return 0; +} +#endif + +/* FormatTypeName */ +#if CYTHON_COMPILING_IN_LIMITED_API +static __Pyx_TypeName +__Pyx_PyType_GetName(PyTypeObject* tp) +{ + PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, + __pyx_n_s_name); + if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { + PyErr_Clear(); + Py_XDECREF(name); + name = 
__Pyx_NewRef(__pyx_n_s__4); + } + return name; +} +#endif + +/* ValidateExternBase */ +static int __Pyx_validate_extern_base(PyTypeObject *base) { + Py_ssize_t itemsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_itemsize; +#endif +#if !CYTHON_COMPILING_IN_LIMITED_API + itemsize = ((PyTypeObject *)base)->tp_itemsize; +#else + py_itemsize = PyObject_GetAttrString((PyObject*)base, "__itemsize__"); + if (!py_itemsize) + return -1; + itemsize = PyLong_AsSsize_t(py_itemsize); + Py_DECREF(py_itemsize); + py_itemsize = 0; + if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) + return -1; +#endif + if (itemsize) { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(base); + PyErr_Format(PyExc_TypeError, + "inheritance from PyVarObject types like '" __Pyx_FMT_TYPENAME "' not currently supported", b_name); + __Pyx_DECREF_TypeName(b_name); + return -1; + } + return 0; +} + +/* PyObjectGetMethod */ +static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { + PyObject *attr; +#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP + __Pyx_TypeName type_name; + PyTypeObject *tp = Py_TYPE(obj); + PyObject *descr; + descrgetfunc f = NULL; + PyObject **dictptr, *dict; + int meth_found = 0; + assert (*method == NULL); + if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; + } + if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { + return 0; + } + descr = _PyType_Lookup(tp, name); + if (likely(descr != NULL)) { + Py_INCREF(descr); +#if defined(Py_TPFLAGS_METHOD_DESCRIPTOR) && Py_TPFLAGS_METHOD_DESCRIPTOR + if (__Pyx_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)) +#elif PY_MAJOR_VERSION >= 3 + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type))) + #endif +#else + #ifdef __Pyx_CyFunction_USED + if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) + #else + if (likely(PyFunction_Check(descr))) + #endif +#endif + { + meth_found = 1; + } else { + f = Py_TYPE(descr)->tp_descr_get; + if (f != NULL && PyDescr_IsData(descr)) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + } + } + dictptr = _PyObject_GetDictPtr(obj); + if (dictptr != NULL && (dict = *dictptr) != NULL) { + Py_INCREF(dict); + attr = __Pyx_PyDict_GetItemStr(dict, name); + if (attr != NULL) { + Py_INCREF(attr); + Py_DECREF(dict); + Py_XDECREF(descr); + goto try_unpack; + } + Py_DECREF(dict); + } + if (meth_found) { + *method = descr; + return 1; + } + if (f != NULL) { + attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto try_unpack; + } + if (likely(descr != NULL)) { + *method = descr; + return 0; + } + type_name = __Pyx_PyType_GetName(tp); + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", + type_name, name); +#else + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", + type_name, PyString_AS_STRING(name)); +#endif + __Pyx_DECREF_TypeName(type_name); + return 0; +#else + attr = __Pyx_PyObject_GetAttrStr(obj, name); + goto try_unpack; +#endif +try_unpack: +#if CYTHON_UNPACK_METHODS + if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { + PyObject *function = PyMethod_GET_FUNCTION(attr); + 
Py_INCREF(function); + Py_DECREF(attr); + *method = function; + return 1; + } +#endif + *method = attr; + return 0; +} + +/* PyObjectCallMethod0 */ +static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { + PyObject *method = NULL, *result = NULL; + int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); + if (likely(is_method)) { + result = __Pyx_PyObject_CallOneArg(method, obj); + Py_DECREF(method); + return result; + } + if (unlikely(!method)) goto bad; + result = __Pyx_PyObject_CallNoArg(method); + Py_DECREF(method); +bad: + return result; +} + +/* ValidateBasesTuple */ +#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS +static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases) { + Py_ssize_t i, n; +#if CYTHON_ASSUME_SAFE_MACROS + n = PyTuple_GET_SIZE(bases); +#else + n = PyTuple_Size(bases); + if (n < 0) return -1; +#endif + for (i = 1; i < n; i++) + { +#if CYTHON_AVOID_BORROWED_REFS + PyObject *b0 = PySequence_GetItem(bases, i); + if (!b0) return -1; +#elif CYTHON_ASSUME_SAFE_MACROS + PyObject *b0 = PyTuple_GET_ITEM(bases, i); +#else + PyObject *b0 = PyTuple_GetItem(bases, i); + if (!b0) return -1; +#endif + PyTypeObject *b; +#if PY_MAJOR_VERSION < 3 + if (PyClass_Check(b0)) + { + PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class", + PyString_AS_STRING(((PyClassObject*)b0)->cl_name)); +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } +#endif + b = (PyTypeObject*) b0; + if (!__Pyx_PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE)) + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "base class '" __Pyx_FMT_TYPENAME "' is not a heap type", b_name); + __Pyx_DECREF_TypeName(b_name); +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } + if (dictoffset == 0) + { + Py_ssize_t b_dictoffset = 0; +#if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY + b_dictoffset = b->tp_dictoffset; +#else + PyObject *py_b_dictoffset = PyObject_GetAttrString((PyObject*)b, "__dictoffset__"); + if (!py_b_dictoffset) goto dictoffset_return; + b_dictoffset = PyLong_AsSsize_t(py_b_dictoffset); + Py_DECREF(py_b_dictoffset); + if (b_dictoffset == -1 && PyErr_Occurred()) goto dictoffset_return; +#endif + if (b_dictoffset) { + { + __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); + PyErr_Format(PyExc_TypeError, + "extension type '%.200s' has no __dict__ slot, " + "but base type '" __Pyx_FMT_TYPENAME "' has: " + "either add 'cdef dict __dict__' to the extension type " + "or add '__slots__ = [...]' to the base type", + type_name, b_name); + __Pyx_DECREF_TypeName(b_name); + } +#if !(CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY) + dictoffset_return: +#endif +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + return -1; + } + } +#if CYTHON_AVOID_BORROWED_REFS + Py_DECREF(b0); +#endif + } + return 0; +} +#endif + +/* PyType_Ready */ +static int __Pyx_PyType_Ready(PyTypeObject *t) { +#if CYTHON_USE_TYPE_SPECS || !(CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API) || defined(PYSTON_MAJOR_VERSION) + (void)__Pyx_PyObject_CallMethod0; +#if CYTHON_USE_TYPE_SPECS + (void)__Pyx_validate_bases_tuple; +#endif + return PyType_Ready(t); +#else + int r; + PyObject *bases = __Pyx_PyType_GetSlot(t, tp_bases, PyObject*); + if (bases && unlikely(__Pyx_validate_bases_tuple(t->tp_name, t->tp_dictoffset, bases) == -1)) + return -1; +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) 
+ { + int gc_was_enabled; + #if PY_VERSION_HEX >= 0x030A00b1 + gc_was_enabled = PyGC_Disable(); + (void)__Pyx_PyObject_CallMethod0; + #else + PyObject *ret, *py_status; + PyObject *gc = NULL; + #if PY_VERSION_HEX >= 0x030700a1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM+0 >= 0x07030400) + gc = PyImport_GetModule(__pyx_kp_u_gc); + #endif + if (unlikely(!gc)) gc = PyImport_Import(__pyx_kp_u_gc); + if (unlikely(!gc)) return -1; + py_status = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_isenabled); + if (unlikely(!py_status)) { + Py_DECREF(gc); + return -1; + } + gc_was_enabled = __Pyx_PyObject_IsTrue(py_status); + Py_DECREF(py_status); + if (gc_was_enabled > 0) { + ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_disable); + if (unlikely(!ret)) { + Py_DECREF(gc); + return -1; + } + Py_DECREF(ret); + } else if (unlikely(gc_was_enabled == -1)) { + Py_DECREF(gc); + return -1; + } + #endif + t->tp_flags |= Py_TPFLAGS_HEAPTYPE; +#if PY_VERSION_HEX >= 0x030A0000 + t->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; +#endif +#else + (void)__Pyx_PyObject_CallMethod0; +#endif + r = PyType_Ready(t); +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) + t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; + #if PY_VERSION_HEX >= 0x030A00b1 + if (gc_was_enabled) + PyGC_Enable(); + #else + if (gc_was_enabled) { + PyObject *tp, *v, *tb; + PyErr_Fetch(&tp, &v, &tb); + ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_enable); + if (likely(ret || r == -1)) { + Py_XDECREF(ret); + PyErr_Restore(tp, v, tb); + } else { + Py_XDECREF(tp); + Py_XDECREF(v); + Py_XDECREF(tb); + r = -1; + } + } + Py_DECREF(gc); + #endif + } +#endif + return r; +#endif +} + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + __Pyx_TypeName type_name = __Pyx_PyType_GetName(tp); + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", + type_name, attr_name); +#else + "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", + type_name, PyString_AS_STRING(attr_name)); +#endif + __Pyx_DECREF_TypeName(type_name); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* SetupReduce */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = 
__Pyx_PyObject_GetAttrStrNoError(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_getstate = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; + PyObject *getstate = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + getstate = _PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate); +#else + getstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_getstate); + if (!getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (getstate) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_getstate = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_getstate); +#else + object_getstate = __Pyx_PyObject_GetAttrStrNoError((PyObject*)&PyBaseObject_Type, __pyx_n_s_getstate); + if (!object_getstate && PyErr_Occurred()) { + goto __PYX_BAD; + } +#endif + if (object_getstate != getstate) { + goto __PYX_GOOD; + } + } +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; + if (reduce_ex == object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); + if (likely(reduce_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (reduce == object_reduce || PyErr_Occurred()) { + goto __PYX_BAD; + } + setstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); + if (likely(setstate_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (!setstate || PyErr_Occurred()) { + goto __PYX_BAD; + } + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto __PYX_GOOD; +__PYX_BAD: + if (!PyErr_Occurred()) { + __Pyx_TypeName type_obj_name = + __Pyx_PyType_GetName((PyTypeObject*)type_obj); + PyErr_Format(PyExc_RuntimeError, + "Unable to 
initialize pickling for " __Pyx_FMT_TYPENAME, type_obj_name); + __Pyx_DECREF_TypeName(type_obj_name); + } + ret = -1; +__PYX_GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); + Py_XDECREF(object_getstate); + Py_XDECREF(getstate); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} +#endif + +/* TypeImport */ +#ifndef __PYX_HAVE_RT_ImportType_3_0_11 +#define __PYX_HAVE_RT_ImportType_3_0_11 +static PyTypeObject *__Pyx_ImportType_3_0_11(PyObject *module, const char *module_name, const char *class_name, + size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_11 check_size) +{ + PyObject *result = 0; + char warning[200]; + Py_ssize_t basicsize; + Py_ssize_t itemsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_basicsize; + PyObject *py_itemsize; +#endif + result = PyObject_GetAttrString(module, class_name); + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#if !CYTHON_COMPILING_IN_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; + itemsize = ((PyTypeObject *)result)->tp_itemsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; + py_itemsize = PyObject_GetAttrString(result, "__itemsize__"); + if (!py_itemsize) + goto bad; + itemsize = PyLong_AsSsize_t(py_itemsize); + Py_DECREF(py_itemsize); + py_itemsize = 0; + if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if (itemsize) { + if (size % alignment) { + alignment = size % alignment; + } + if (itemsize < (Py_ssize_t)alignment) + itemsize = (Py_ssize_t)alignment; + } + if ((size_t)(basicsize + itemsize) < size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize+itemsize); + goto bad; + } + if (check_size == __Pyx_ImportType_CheckSize_Error_3_0_11 && + ((size_t)basicsize > size || (size_t)(basicsize + itemsize) < size)) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd-%zd from PyObject", + module_name, class_name, size, basicsize, basicsize+itemsize); + goto bad; + } + else if (check_size == __Pyx_ImportType_CheckSize_Warn_3_0_11 && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. 
" + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(result); + return NULL; +} +#endif + +/* FetchSharedCythonModule */ +static PyObject *__Pyx_FetchSharedCythonABIModule(void) { + return __Pyx_PyImport_AddModuleRef((char*) __PYX_ABI_MODULE_NAME); +} + +/* FetchCommonType */ +static int __Pyx_VerifyCachedType(PyObject *cached_type, + const char *name, + Py_ssize_t basicsize, + Py_ssize_t expected_basicsize) { + if (!PyType_Check(cached_type)) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s is not a type object", name); + return -1; + } + if (basicsize != expected_basicsize) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s has the wrong size, try recompiling", + name); + return -1; + } + return 0; +} +#if !CYTHON_USE_TYPE_SPECS +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { + PyObject* abi_module; + const char* object_name; + PyTypeObject *cached_type = NULL; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + object_name = strrchr(type->tp_name, '.'); + object_name = object_name ? object_name+1 : type->tp_name; + cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + if (__Pyx_VerifyCachedType( + (PyObject *)cached_type, + object_name, + cached_type->tp_basicsize, + type->tp_basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + if (PyType_Ready(type) < 0) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) + goto bad; + Py_INCREF(type); + cached_type = type; +done: + Py_DECREF(abi_module); + return cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#else +static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { + PyObject *abi_module, *cached_type = NULL; + const char* object_name = strrchr(spec->name, '.'); + object_name = object_name ? object_name+1 : spec->name; + abi_module = __Pyx_FetchSharedCythonABIModule(); + if (!abi_module) return NULL; + cached_type = PyObject_GetAttrString(abi_module, object_name); + if (cached_type) { + Py_ssize_t basicsize; +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *py_basicsize; + py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); + if (unlikely(!py_basicsize)) goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; +#else + basicsize = likely(PyType_Check(cached_type)) ? 
((PyTypeObject*) cached_type)->tp_basicsize : -1; +#endif + if (__Pyx_VerifyCachedType( + cached_type, + object_name, + basicsize, + spec->basicsize) < 0) { + goto bad; + } + goto done; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + CYTHON_UNUSED_VAR(module); + cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); + if (unlikely(!cached_type)) goto bad; + if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; + if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; +done: + Py_DECREF(abi_module); + assert(cached_type == NULL || PyType_Check(cached_type)); + return (PyTypeObject *) cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} +#endif + +/* PyVectorcallFastCallDict */ +#if CYTHON_METH_FASTCALL +static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + PyObject *res = NULL; + PyObject *kwnames; + PyObject **newargs; + PyObject **kwvalues; + Py_ssize_t i, pos; + size_t j; + PyObject *key, *value; + unsigned long keys_are_strings; + Py_ssize_t nkw = PyDict_GET_SIZE(kw); + newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); + if (unlikely(newargs == NULL)) { + PyErr_NoMemory(); + return NULL; + } + for (j = 0; j < nargs; j++) newargs[j] = args[j]; + kwnames = PyTuple_New(nkw); + if (unlikely(kwnames == NULL)) { + PyMem_Free(newargs); + return NULL; + } + kwvalues = newargs + nargs; + pos = i = 0; + keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; + while (PyDict_Next(kw, &pos, &key, &value)) { + keys_are_strings &= Py_TYPE(key)->tp_flags; + Py_INCREF(key); + Py_INCREF(value); + PyTuple_SET_ITEM(kwnames, i, key); + kwvalues[i] = value; + i++; + } + if (unlikely(!keys_are_strings)) { + PyErr_SetString(PyExc_TypeError, "keywords must be strings"); + goto cleanup; + } + res = vc(func, newargs, nargs, kwnames); +cleanup: + Py_DECREF(kwnames); + for (i = 0; i < nkw; i++) + Py_DECREF(kwvalues[i]); + PyMem_Free(newargs); + return res; +} +static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) +{ + if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { + return vc(func, args, nargs, NULL); + } + return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); +} +#endif + +/* CythonFunctionShared */ +#if CYTHON_COMPILING_IN_LIMITED_API +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + if (__Pyx_CyFunction_Check(func)) { + return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc; + } else if (PyCFunction_Check(func)) { + return PyCFunction_GetFunction(func) == (PyCFunction) cfunc; + } + return 0; +} +#else +static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { + return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; +} +#endif +static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + __Pyx_Py_XDECREF_SET( + __Pyx_CyFunction_GetClassObj(f), + ((classobj) ? __Pyx_NewRef(classobj) : NULL)); +#else + __Pyx_Py_XDECREF_SET( + ((PyCMethodObject *) (f))->mm_class, + (PyTypeObject*)((classobj) ? 
__Pyx_NewRef(classobj) : NULL)); +#endif +} +static PyObject * +__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) +{ + CYTHON_UNUSED_VAR(closure); + if (unlikely(op->func_doc == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); + if (unlikely(!op->func_doc)) return NULL; +#else + if (((PyCFunctionObject*)op)->m_ml->ml_doc) { +#if PY_MAJOR_VERSION >= 3 + op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#else + op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); +#endif + if (unlikely(op->func_doc == NULL)) + return NULL; + } else { + Py_INCREF(Py_None); + return Py_None; + } +#endif + } + Py_INCREF(op->func_doc); + return op->func_doc; +} +static int +__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (value == NULL) { + value = Py_None; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_doc, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_name == NULL)) { +#if CYTHON_COMPILING_IN_LIMITED_API + op->func_name = PyObject_GetAttrString(op->func, "__name__"); +#elif PY_MAJOR_VERSION >= 3 + op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#else + op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); +#endif + if (unlikely(op->func_name == NULL)) + return NULL; + } + Py_INCREF(op->func_name); + return op->func_name; +} +static int +__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__name__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_name, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_qualname); + return op->func_qualname; +} +static int +__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) +#else + if (unlikely(value == NULL || !PyString_Check(value))) +#endif + { + PyErr_SetString(PyExc_TypeError, + "__qualname__ must be set to a string object"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_qualname, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(op->func_dict == NULL)) { + op->func_dict = PyDict_New(); + if (unlikely(op->func_dict == NULL)) + return NULL; + } + Py_INCREF(op->func_dict); + return op->func_dict; +} +static int +__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) +{ + CYTHON_UNUSED_VAR(context); + if (unlikely(value == NULL)) { + PyErr_SetString(PyExc_TypeError, + "function's dictionary may not be deleted"); + return -1; + } + if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "setting function's dictionary to a non-dict"); + return -1; + } + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->func_dict, value); + 
return 0; +} +static PyObject * +__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(context); + Py_INCREF(op->func_globals); + return op->func_globals; +} +static PyObject * +__Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) +{ + CYTHON_UNUSED_VAR(op); + CYTHON_UNUSED_VAR(context); + Py_INCREF(Py_None); + return Py_None; +} +static PyObject * +__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) +{ + PyObject* result = (op->func_code) ? op->func_code : Py_None; + CYTHON_UNUSED_VAR(context); + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { + int result = 0; + PyObject *res = op->defaults_getter((PyObject *) op); + if (unlikely(!res)) + return -1; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + op->defaults_tuple = PyTuple_GET_ITEM(res, 0); + Py_INCREF(op->defaults_tuple); + op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); + Py_INCREF(op->defaults_kwdict); + #else + op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); + if (unlikely(!op->defaults_tuple)) result = -1; + else { + op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); + if (unlikely(!op->defaults_kwdict)) result = -1; + } + #endif + Py_DECREF(res); + return result; +} +static int +__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__defaults__ must be set to a tuple object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_tuple; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_tuple; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value) { + value = Py_None; + } else if (unlikely(value != Py_None && !PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__kwdefaults__ must be set to a dict object"); + return -1; + } + PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " + "currently affect the values used in function calls", 1); + Py_INCREF(value); + __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->defaults_kwdict; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + if (op->defaults_getter) { + if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; + result = op->defaults_kwdict; + } else { + result = Py_None; + } + } + Py_INCREF(result); + return result; +} +static int +__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + if (!value || value == Py_None) { + value = NULL; + } else if (unlikely(!PyDict_Check(value))) { + PyErr_SetString(PyExc_TypeError, + "__annotations__ 
must be set to a dict object"); + return -1; + } + Py_XINCREF(value); + __Pyx_Py_XDECREF_SET(op->func_annotations, value); + return 0; +} +static PyObject * +__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { + PyObject* result = op->func_annotations; + CYTHON_UNUSED_VAR(context); + if (unlikely(!result)) { + result = PyDict_New(); + if (unlikely(!result)) return NULL; + op->func_annotations = result; + } + Py_INCREF(result); + return result; +} +static PyObject * +__Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { + int is_coroutine; + CYTHON_UNUSED_VAR(context); + if (op->func_is_coroutine) { + return __Pyx_NewRef(op->func_is_coroutine); + } + is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; +#if PY_VERSION_HEX >= 0x03050000 + if (is_coroutine) { + PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; + fromlist = PyList_New(1); + if (unlikely(!fromlist)) return NULL; + Py_INCREF(marker); +#if CYTHON_ASSUME_SAFE_MACROS + PyList_SET_ITEM(fromlist, 0, marker); +#else + if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { + Py_DECREF(marker); + Py_DECREF(fromlist); + return NULL; + } +#endif + module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); + Py_DECREF(fromlist); + if (unlikely(!module)) goto ignore; + op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); + Py_DECREF(module); + if (likely(op->func_is_coroutine)) { + return __Pyx_NewRef(op->func_is_coroutine); + } +ignore: + PyErr_Clear(); + } +#endif + op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); + return __Pyx_NewRef(op->func_is_coroutine); +} +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject * +__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_GetAttrString(op->func, "__module__"); +} +static int +__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { + CYTHON_UNUSED_VAR(context); + return PyObject_SetAttrString(op->func, "__module__", value); +} +#endif +static PyGetSetDef __pyx_CyFunction_getsets[] = { + {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, + {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, + {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, + {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, + {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, + {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, + {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, + {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, + {(char *) 
"__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, + {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, + {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, +#if CYTHON_COMPILING_IN_LIMITED_API + {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, +#endif + {0, 0, 0, 0, 0} +}; +static PyMemberDef __pyx_CyFunction_members[] = { +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, +#endif +#if CYTHON_USE_TYPE_SPECS + {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, +#if CYTHON_METH_FASTCALL +#if CYTHON_BACKPORT_VECTORCALL + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, +#else +#if !CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, +#endif +#endif +#endif +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, +#else + {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, +#endif +#endif + {0, 0, 0, 0, 0} +}; +static PyObject * +__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) +{ + CYTHON_UNUSED_VAR(args); +#if PY_MAJOR_VERSION >= 3 + Py_INCREF(m->func_qualname); + return m->func_qualname; +#else + return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); +#endif +} +static PyMethodDef __pyx_CyFunction_methods[] = { + {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, + {0, 0, 0, 0} +}; +#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API +#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) +#else +#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) +#endif +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { +#if !CYTHON_COMPILING_IN_LIMITED_API + PyCFunctionObject *cf = (PyCFunctionObject*) op; +#endif + if (unlikely(op == NULL)) + return NULL; +#if CYTHON_COMPILING_IN_LIMITED_API + op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); + if (unlikely(!op->func)) return NULL; +#endif + op->flags = flags; + __Pyx_CyFunction_weakreflist(op) = NULL; +#if !CYTHON_COMPILING_IN_LIMITED_API + cf->m_ml = ml; + cf->m_self = (PyObject *) op; +#endif + Py_XINCREF(closure); + op->func_closure = closure; +#if !CYTHON_COMPILING_IN_LIMITED_API + Py_XINCREF(module); + cf->m_module = module; +#endif + op->func_dict = NULL; + op->func_name = NULL; + Py_INCREF(qualname); + op->func_qualname = qualname; + op->func_doc = NULL; +#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API + op->func_classobj = NULL; +#else + ((PyCMethodObject*)op)->mm_class = NULL; +#endif + op->func_globals = globals; + Py_INCREF(op->func_globals); + Py_XINCREF(code); + op->func_code = code; + op->defaults_pyobjects = 0; + op->defaults_size = 0; + op->defaults = NULL; + op->defaults_tuple = NULL; + op->defaults_kwdict = NULL; + op->defaults_getter = NULL; + op->func_annotations = NULL; + op->func_is_coroutine = NULL; +#if 
CYTHON_METH_FASTCALL + switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { + case METH_NOARGS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; + break; + case METH_O: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; + break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; + break; + case METH_FASTCALL | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS; + break; + case METH_VARARGS | METH_KEYWORDS: + __Pyx_CyFunction_func_vectorcall(op) = NULL; + break; + default: + PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); + Py_DECREF(op); + return NULL; + } +#endif + return (PyObject *) op; +} +static int +__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m) +{ + Py_CLEAR(m->func_closure); +#if CYTHON_COMPILING_IN_LIMITED_API + Py_CLEAR(m->func); +#else + Py_CLEAR(((PyCFunctionObject*)m)->m_module); +#endif + Py_CLEAR(m->func_dict); + Py_CLEAR(m->func_name); + Py_CLEAR(m->func_qualname); + Py_CLEAR(m->func_doc); + Py_CLEAR(m->func_globals); + Py_CLEAR(m->func_code); +#if !CYTHON_COMPILING_IN_LIMITED_API +#if PY_VERSION_HEX < 0x030900B1 + Py_CLEAR(__Pyx_CyFunction_GetClassObj(m)); +#else + { + PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class; + ((PyCMethodObject *) (m))->mm_class = NULL; + Py_XDECREF(cls); + } +#endif +#endif + Py_CLEAR(m->defaults_tuple); + Py_CLEAR(m->defaults_kwdict); + Py_CLEAR(m->func_annotations); + Py_CLEAR(m->func_is_coroutine); + if (m->defaults) { + PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); + int i; + for (i = 0; i < m->defaults_pyobjects; i++) + Py_XDECREF(pydefaults[i]); + PyObject_Free(m->defaults); + m->defaults = NULL; + } + return 0; +} +static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m) +{ + if (__Pyx_CyFunction_weakreflist(m) != NULL) + PyObject_ClearWeakRefs((PyObject *) m); + __Pyx_CyFunction_clear(m); + __Pyx_PyHeapTypeObject_GC_Del(m); +} +static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m) +{ + PyObject_GC_UnTrack(m); + __Pyx__CyFunction_dealloc(m); +} +static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg) +{ + Py_VISIT(m->func_closure); +#if CYTHON_COMPILING_IN_LIMITED_API + Py_VISIT(m->func); +#else + Py_VISIT(((PyCFunctionObject*)m)->m_module); +#endif + Py_VISIT(m->func_dict); + Py_VISIT(m->func_name); + Py_VISIT(m->func_qualname); + Py_VISIT(m->func_doc); + Py_VISIT(m->func_globals); + Py_VISIT(m->func_code); +#if !CYTHON_COMPILING_IN_LIMITED_API + Py_VISIT(__Pyx_CyFunction_GetClassObj(m)); +#endif + Py_VISIT(m->defaults_tuple); + Py_VISIT(m->defaults_kwdict); + Py_VISIT(m->func_is_coroutine); + if (m->defaults) { + PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); + int i; + for (i = 0; i < m->defaults_pyobjects; i++) + Py_VISIT(pydefaults[i]); + } + return 0; +} +static PyObject* +__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op) +{ +#if PY_MAJOR_VERSION >= 3 + return PyUnicode_FromFormat("", + op->func_qualname, (void *)op); +#else + return PyString_FromFormat("", + PyString_AsString(op->func_qualname), (void *)op); +#endif +} +static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) { +#if CYTHON_COMPILING_IN_LIMITED_API + PyObject *f = ((__pyx_CyFunctionObject*)func)->func; + PyObject *py_name = NULL; + 
PyCFunction meth; + int flags; + meth = PyCFunction_GetFunction(f); + if (unlikely(!meth)) return NULL; + flags = PyCFunction_GetFlags(f); + if (unlikely(flags < 0)) return NULL; +#else + PyCFunctionObject* f = (PyCFunctionObject*)func; + PyCFunction meth = f->m_ml->ml_meth; + int flags = f->m_ml->ml_flags; +#endif + Py_ssize_t size; + switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { + case METH_VARARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) + return (*meth)(self, arg); + break; + case METH_VARARGS | METH_KEYWORDS: + return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); + case METH_NOARGS: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 0)) + return (*meth)(self, NULL); +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + case METH_O: + if (likely(kw == NULL || PyDict_Size(kw) == 0)) { +#if CYTHON_ASSUME_SAFE_MACROS + size = PyTuple_GET_SIZE(arg); +#else + size = PyTuple_Size(arg); + if (unlikely(size < 0)) return NULL; +#endif + if (likely(size == 1)) { + PyObject *result, *arg0; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + arg0 = PyTuple_GET_ITEM(arg, 0); + #else + arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; + #endif + result = (*meth)(self, arg0); + #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) + Py_DECREF(arg0); + #endif + return result; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, + "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + py_name, size); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + f->m_ml->ml_name, size); +#endif + return NULL; + } + break; + default: + PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); + return NULL; + } +#if CYTHON_COMPILING_IN_LIMITED_API + py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); + if (!py_name) return NULL; + PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", + py_name); + Py_DECREF(py_name); +#else + PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", + f->m_ml->ml_name); +#endif + return NULL; +} +static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *self, *result; +#if CYTHON_COMPILING_IN_LIMITED_API + self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); + if (unlikely(!self) && PyErr_Occurred()) return NULL; +#else + self = ((PyCFunctionObject*)func)->m_self; +#endif + result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); + return result; +} +static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { + PyObject *result; + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; +#if CYTHON_METH_FASTCALL + 
__pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); + if (vc) { +#if CYTHON_ASSUME_SAFE_MACROS + return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); +#else + (void) &__Pyx_PyVectorcall_FastCallDict; + return PyVectorcall_Call(func, args, kw); +#endif + } +#endif + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + Py_ssize_t argc; + PyObject *new_args; + PyObject *self; +#if CYTHON_ASSUME_SAFE_MACROS + argc = PyTuple_GET_SIZE(args); +#else + argc = PyTuple_Size(args); + if (unlikely(!argc) < 0) return NULL; +#endif + new_args = PyTuple_GetSlice(args, 1, argc); + if (unlikely(!new_args)) + return NULL; + self = PyTuple_GetItem(args, 0); + if (unlikely(!self)) { + Py_DECREF(new_args); +#if PY_MAJOR_VERSION > 2 + PyErr_Format(PyExc_TypeError, + "unbound method %.200S() needs an argument", + cyfunc->func_qualname); +#else + PyErr_SetString(PyExc_TypeError, + "unbound method needs an argument"); +#endif + return NULL; + } + result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); + Py_DECREF(new_args); + } else { + result = __Pyx_CyFunction_Call(func, args, kw); + } + return result; +} +#if CYTHON_METH_FASTCALL +static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames) +{ + int ret = 0; + if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { + if (unlikely(nargs < 1)) { + PyErr_Format(PyExc_TypeError, "%.200s() needs an argument", + ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); + return -1; + } + ret = 1; + } + if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); + return -1; + } + return ret; +} +static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + if (unlikely(nargs != 0)) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", + def->ml_name, nargs); + return NULL; + } + return def->ml_meth(self, NULL); +} +static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + if (unlikely(nargs != 1)) { + PyErr_Format(PyExc_TypeError, + "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", + def->ml_name, 
nargs); + return NULL; + } + return def->ml_meth(self, args[0]); +} +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); +} +static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; + PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; + PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); +#if CYTHON_BACKPORT_VECTORCALL + Py_ssize_t nargs = (Py_ssize_t)nargsf; +#else + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); +#endif + PyObject *self; + switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { + case 1: + self = args[0]; + args += 1; + nargs -= 1; + break; + case 0: + self = ((PyCFunctionObject*)cyfunc)->m_self; + break; + default: + return NULL; + } + return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); +} +#endif +#if CYTHON_USE_TYPE_SPECS +static PyType_Slot __pyx_CyFunctionType_slots[] = { + {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, + {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, + {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, + {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, + {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, + {Py_tp_methods, (void *)__pyx_CyFunction_methods}, + {Py_tp_members, (void *)__pyx_CyFunction_members}, + {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, + {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, + {0, 0}, +}; +static PyType_Spec __pyx_CyFunctionType_spec = { + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + __pyx_CyFunctionType_slots +}; +#else +static PyTypeObject __pyx_CyFunctionType_type = { + PyVarObject_HEAD_INIT(0, 0) + __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", + sizeof(__pyx_CyFunctionObject), + 0, + (destructor) __Pyx_CyFunction_dealloc, +#if !CYTHON_METH_FASTCALL + 0, +#elif CYTHON_BACKPORT_VECTORCALL + (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), +#else + offsetof(PyCFunctionObject, vectorcall), +#endif + 0, + 0, +#if PY_MAJOR_VERSION < 3 + 0, +#else + 0, +#endif + (reprfunc) __Pyx_CyFunction_repr, + 0, + 0, + 0, + 0, + __Pyx_CyFunction_CallAsMethod, + 0, + 0, + 0, + 0, +#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR + Py_TPFLAGS_METHOD_DESCRIPTOR | +#endif +#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL + _Py_TPFLAGS_HAVE_VECTORCALL | +#endif + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, + 0, + (traverseproc) __Pyx_CyFunction_traverse, + (inquiry) 
__Pyx_CyFunction_clear, + 0, +#if PY_VERSION_HEX < 0x030500A0 + offsetof(__pyx_CyFunctionObject, func_weakreflist), +#else + offsetof(PyCFunctionObject, m_weakreflist), +#endif + 0, + 0, + __pyx_CyFunction_methods, + __pyx_CyFunction_members, + __pyx_CyFunction_getsets, + 0, + 0, + __Pyx_PyMethod_New, + 0, + offsetof(__pyx_CyFunctionObject, func_dict), + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, +#if PY_VERSION_HEX >= 0x030400a1 + 0, +#endif +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, +#endif +#if __PYX_NEED_TP_PRINT_SLOT + 0, +#endif +#if PY_VERSION_HEX >= 0x030C0000 + 0, +#endif +#if PY_VERSION_HEX >= 0x030d00A4 + 0, +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 + 0, +#endif +}; +#endif +static int __pyx_CyFunction_init(PyObject *module) { +#if CYTHON_USE_TYPE_SPECS + __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); +#else + CYTHON_UNUSED_VAR(module); + __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); +#endif + if (unlikely(__pyx_CyFunctionType == NULL)) { + return -1; + } + return 0; +} +static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults = PyObject_Malloc(size); + if (unlikely(!m->defaults)) + return PyErr_NoMemory(); + memset(m->defaults, 0, size); + m->defaults_pyobjects = pyobjects; + m->defaults_size = size; + return m->defaults; +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_tuple = tuple; + Py_INCREF(tuple); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->defaults_kwdict = dict; + Py_INCREF(dict); +} +static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { + __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; + m->func_annotations = dict; + Py_INCREF(dict); +} + +/* CythonFunction */ +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { + PyObject *op = __Pyx_CyFunction_Init( + PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), + ml, flags, qualname, closure, module, globals, code + ); + if (likely(op)) { + PyObject_GC_Track(op); + } + return op; +} + +/* CalculateMetaclass */ +static PyObject *__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases) { + Py_ssize_t i, nbases; +#if CYTHON_ASSUME_SAFE_MACROS + nbases = PyTuple_GET_SIZE(bases); +#else + nbases = PyTuple_Size(bases); + if (nbases < 0) return NULL; +#endif + for (i=0; i < nbases; i++) { + PyTypeObject *tmptype; +#if CYTHON_ASSUME_SAFE_MACROS + PyObject *tmp = PyTuple_GET_ITEM(bases, i); +#else + PyObject *tmp = PyTuple_GetItem(bases, i); + if (!tmp) return NULL; +#endif + tmptype = Py_TYPE(tmp); +#if PY_MAJOR_VERSION < 3 + if (tmptype == &PyClass_Type) + continue; +#endif + if (!metaclass) { + metaclass = tmptype; + continue; + } + if (PyType_IsSubtype(metaclass, tmptype)) + continue; + if (PyType_IsSubtype(tmptype, metaclass)) { + metaclass = tmptype; + continue; + } + PyErr_SetString(PyExc_TypeError, + "metaclass conflict: " + "the metaclass of a derived class " + "must 
be a (non-strict) subclass " + "of the metaclasses of all its bases"); + return NULL; + } + if (!metaclass) { +#if PY_MAJOR_VERSION < 3 + metaclass = &PyClass_Type; +#else + metaclass = &PyType_Type; +#endif + } + Py_INCREF((PyObject*) metaclass); + return (PyObject*) metaclass; +} + +/* PyObjectCall2Args */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { + PyObject *args[3] = {NULL, arg1, arg2}; + return __Pyx_PyObject_FastCall(function, args+1, 2 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); +} + +/* PyObjectLookupSpecial */ +#if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx__PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name, int with_error) { + PyObject *res; + PyTypeObject *tp = Py_TYPE(obj); +#if PY_MAJOR_VERSION < 3 + if (unlikely(PyInstance_Check(obj))) + return with_error ? __Pyx_PyObject_GetAttrStr(obj, attr_name) : __Pyx_PyObject_GetAttrStrNoError(obj, attr_name); +#endif + res = _PyType_Lookup(tp, attr_name); + if (likely(res)) { + descrgetfunc f = Py_TYPE(res)->tp_descr_get; + if (!f) { + Py_INCREF(res); + } else { + res = f(res, obj, (PyObject *)tp); + } + } else if (with_error) { + PyErr_SetObject(PyExc_AttributeError, attr_name); + } + return res; +} +#endif + +/* Py3ClassCreate */ +static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, + PyObject *qualname, PyObject *mkw, PyObject *modname, PyObject *doc) { + PyObject *ns; + if (metaclass) { + PyObject *prep = __Pyx_PyObject_GetAttrStrNoError(metaclass, __pyx_n_s_prepare); + if (prep) { + PyObject *pargs[3] = {NULL, name, bases}; + ns = __Pyx_PyObject_FastCallDict(prep, pargs+1, 2 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET, mkw); + Py_DECREF(prep); + } else { + if (unlikely(PyErr_Occurred())) + return NULL; + ns = PyDict_New(); + } + } else { + ns = PyDict_New(); + } + if (unlikely(!ns)) + return NULL; + if (unlikely(PyObject_SetItem(ns, __pyx_n_s_module, modname) < 0)) goto bad; +#if PY_VERSION_HEX >= 0x03030000 + if (unlikely(PyObject_SetItem(ns, __pyx_n_s_qualname, qualname) < 0)) goto bad; +#else + CYTHON_MAYBE_UNUSED_VAR(qualname); +#endif + if (unlikely(doc && PyObject_SetItem(ns, __pyx_n_s_doc, doc) < 0)) goto bad; + return ns; +bad: + Py_DECREF(ns); + return NULL; +} +#if PY_VERSION_HEX < 0x030600A4 && CYTHON_PEP487_INIT_SUBCLASS +static int __Pyx_SetNamesPEP487(PyObject *type_obj) { + PyTypeObject *type = (PyTypeObject*) type_obj; + PyObject *names_to_set, *key, *value, *set_name, *tmp; + Py_ssize_t i = 0; +#if CYTHON_USE_TYPE_SLOTS + names_to_set = PyDict_Copy(type->tp_dict); +#else + { + PyObject *d = PyObject_GetAttr(type_obj, __pyx_n_s_dict); + names_to_set = NULL; + if (likely(d)) { + PyObject *names_to_set = PyDict_New(); + int ret = likely(names_to_set) ? 
PyDict_Update(names_to_set, d) : -1; + Py_DECREF(d); + if (unlikely(ret < 0)) + Py_CLEAR(names_to_set); + } + } +#endif + if (unlikely(names_to_set == NULL)) + goto bad; + while (PyDict_Next(names_to_set, &i, &key, &value)) { + set_name = __Pyx_PyObject_LookupSpecialNoError(value, __pyx_n_s_set_name); + if (unlikely(set_name != NULL)) { + tmp = __Pyx_PyObject_Call2Args(set_name, type_obj, key); + Py_DECREF(set_name); + if (unlikely(tmp == NULL)) { + __Pyx_TypeName value_type_name = + __Pyx_PyType_GetName(Py_TYPE(value)); + __Pyx_TypeName type_name = __Pyx_PyType_GetName(type); + PyErr_Format(PyExc_RuntimeError, +#if PY_MAJOR_VERSION >= 3 + "Error calling __set_name__ on '" __Pyx_FMT_TYPENAME "' instance %R " "in '" __Pyx_FMT_TYPENAME "'", + value_type_name, key, type_name); +#else + "Error calling __set_name__ on '" __Pyx_FMT_TYPENAME "' instance %.100s in '" __Pyx_FMT_TYPENAME "'", + value_type_name, + PyString_Check(key) ? PyString_AS_STRING(key) : "?", + type_name); +#endif + goto bad; + } else { + Py_DECREF(tmp); + } + } + else if (unlikely(PyErr_Occurred())) { + goto bad; + } + } + Py_DECREF(names_to_set); + return 0; +bad: + Py_XDECREF(names_to_set); + return -1; +} +static PyObject *__Pyx_InitSubclassPEP487(PyObject *type_obj, PyObject *mkw) { +#if CYTHON_USE_TYPE_SLOTS && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + PyTypeObject *type = (PyTypeObject*) type_obj; + PyObject *mro = type->tp_mro; + Py_ssize_t i, nbases; + if (unlikely(!mro)) goto done; + (void) &__Pyx_GetBuiltinName; + Py_INCREF(mro); + nbases = PyTuple_GET_SIZE(mro); + assert(PyTuple_GET_ITEM(mro, 0) == type_obj); + for (i = 1; i < nbases-1; i++) { + PyObject *base, *dict, *meth; + base = PyTuple_GET_ITEM(mro, i); + dict = ((PyTypeObject *)base)->tp_dict; + meth = __Pyx_PyDict_GetItemStrWithError(dict, __pyx_n_s_init_subclass); + if (unlikely(meth)) { + descrgetfunc f = Py_TYPE(meth)->tp_descr_get; + PyObject *res; + Py_INCREF(meth); + if (likely(f)) { + res = f(meth, NULL, type_obj); + Py_DECREF(meth); + if (unlikely(!res)) goto bad; + meth = res; + } + res = __Pyx_PyObject_FastCallDict(meth, NULL, 0, mkw); + Py_DECREF(meth); + if (unlikely(!res)) goto bad; + Py_DECREF(res); + goto done; + } else if (unlikely(PyErr_Occurred())) { + goto bad; + } + } +done: + Py_XDECREF(mro); + return type_obj; +bad: + Py_XDECREF(mro); + Py_DECREF(type_obj); + return NULL; +#else + PyObject *super_type, *super, *func, *res; +#if CYTHON_COMPILING_IN_PYPY && !defined(PySuper_Type) + super_type = __Pyx_GetBuiltinName(__pyx_n_s_super); +#else + super_type = (PyObject*) &PySuper_Type; + (void) &__Pyx_GetBuiltinName; +#endif + super = likely(super_type) ? 
__Pyx_PyObject_Call2Args(super_type, type_obj, type_obj) : NULL; +#if CYTHON_COMPILING_IN_PYPY && !defined(PySuper_Type) + Py_XDECREF(super_type); +#endif + if (unlikely(!super)) { + Py_CLEAR(type_obj); + goto done; + } + func = __Pyx_PyObject_GetAttrStrNoError(super, __pyx_n_s_init_subclass); + Py_DECREF(super); + if (likely(!func)) { + if (unlikely(PyErr_Occurred())) + Py_CLEAR(type_obj); + goto done; + } + res = __Pyx_PyObject_FastCallDict(func, NULL, 0, mkw); + Py_DECREF(func); + if (unlikely(!res)) + Py_CLEAR(type_obj); + Py_XDECREF(res); +done: + return type_obj; +#endif +} +#endif +static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, + PyObject *dict, PyObject *mkw, + int calculate_metaclass, int allow_py2_metaclass) { + PyObject *result; + PyObject *owned_metaclass = NULL; + PyObject *margs[4] = {NULL, name, bases, dict}; + if (allow_py2_metaclass) { + owned_metaclass = PyObject_GetItem(dict, __pyx_n_s_metaclass); + if (owned_metaclass) { + metaclass = owned_metaclass; + } else if (likely(PyErr_ExceptionMatches(PyExc_KeyError))) { + PyErr_Clear(); + } else { + return NULL; + } + } + if (calculate_metaclass && (!metaclass || PyType_Check(metaclass))) { + metaclass = __Pyx_CalculateMetaclass((PyTypeObject*) metaclass, bases); + Py_XDECREF(owned_metaclass); + if (unlikely(!metaclass)) + return NULL; + owned_metaclass = metaclass; + } + result = __Pyx_PyObject_FastCallDict(metaclass, margs+1, 3 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET, +#if PY_VERSION_HEX < 0x030600A4 + (metaclass == (PyObject*)&PyType_Type) ? NULL : mkw +#else + mkw +#endif + ); + Py_XDECREF(owned_metaclass); +#if PY_VERSION_HEX < 0x030600A4 && CYTHON_PEP487_INIT_SUBCLASS + if (likely(result) && likely(PyType_Check(result))) { + if (unlikely(__Pyx_SetNamesPEP487(result) < 0)) { + Py_CLEAR(result); + } else { + result = __Pyx_InitSubclassPEP487(result, mkw); + } + } +#else + (void) &__Pyx_GetBuiltinName; +#endif + return result; +} + +/* ClassMethod */ +static PyObject* __Pyx_Method_ClassMethod(PyObject *method) { +#if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM <= 0x05080000 + if (PyObject_TypeCheck(method, &PyWrapperDescr_Type)) { + return PyClassMethod_New(method); + } +#else +#if CYTHON_COMPILING_IN_PYPY + if (PyMethodDescr_Check(method)) +#else + #if PY_MAJOR_VERSION == 2 + static PyTypeObject *methoddescr_type = NULL; + if (unlikely(methoddescr_type == NULL)) { + PyObject *meth = PyObject_GetAttrString((PyObject*)&PyList_Type, "append"); + if (unlikely(!meth)) return NULL; + methoddescr_type = Py_TYPE(meth); + Py_DECREF(meth); + } + #else + PyTypeObject *methoddescr_type = &PyMethodDescr_Type; + #endif + if (__Pyx_TypeCheck(method, methoddescr_type)) +#endif + { + PyMethodDescrObject *descr = (PyMethodDescrObject *)method; + #if PY_VERSION_HEX < 0x03020000 + PyTypeObject *d_type = descr->d_type; + #else + PyTypeObject *d_type = descr->d_common.d_type; + #endif + return PyDescr_NewClassMethod(d_type, descr->d_method); + } +#endif + else if (PyMethod_Check(method)) { + return PyClassMethod_New(PyMethod_GET_FUNCTION(method)); + } + else { + return PyClassMethod_New(method); + } +} + +/* GetNameInClass */ +static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name) { + PyObject *result; + PyObject *dict; + assert(PyType_Check(nmspace)); +#if CYTHON_USE_TYPE_SLOTS + dict = ((PyTypeObject*)nmspace)->tp_dict; + Py_XINCREF(dict); +#else + dict = PyObject_GetAttr(nmspace, __pyx_n_s_dict); +#endif + if (likely(dict)) { + result = PyObject_GetItem(dict, name); + 
Py_DECREF(dict); + if (result) { + return result; + } + } + PyErr_Clear(); + __Pyx_GetModuleGlobalNameUncached(result, name); + return result; +} + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + CYTHON_MAYBE_UNUSED_VAR(tstate); + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +#if !CYTHON_COMPILING_IN_LIMITED_API +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if 
(__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} +#endif + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API + #ifndef Py_BUILD_CORE + #define Py_BUILD_CORE 1 + #endif + #include "internal/pycore_frame.h" +#endif +#if CYTHON_COMPILING_IN_LIMITED_API +static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, + PyObject *firstlineno, PyObject *name) { + PyObject *replace = NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; + replace = PyObject_GetAttrString(code, "replace"); + if (likely(replace)) { + PyObject *result; + result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); + Py_DECREF(replace); + return result; + } + PyErr_Clear(); + #if __PYX_LIMITED_VERSION_HEX < 0x030780000 + { + PyObject *compiled = NULL, *result = NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; + if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; + compiled = Py_CompileString( + "out = type(code)(\n" + " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" + " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" + " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" + " code.co_lnotab)\n", "", Py_file_input); + if (!compiled) return NULL; + result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); + Py_DECREF(compiled); + if (!result) PyErr_Print(); + Py_DECREF(result); + result = PyDict_GetItemString(scratch_dict, "out"); + if (result) Py_INCREF(result); + return result; + } + #else + return NULL; + #endif +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; + PyObject *replace = NULL, *getframe = NULL, *frame = NULL; + PyObject *exc_type, *exc_value, *exc_traceback; + int success = 0; + if (c_line) { + (void) __pyx_cfilenm; + (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); + } + PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); + code_object = Py_CompileString("_getframe()", filename, Py_eval_input); + if (unlikely(!code_object)) goto bad; + py_py_line = PyLong_FromLong(py_line); + if (unlikely(!py_py_line)) goto bad; + py_funcname = PyUnicode_FromString(funcname); + if (unlikely(!py_funcname)) goto bad; + dict = PyDict_New(); + if (unlikely(!dict)) goto bad; + { + PyObject *old_code_object = code_object; + code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); + Py_DECREF(old_code_object); + } + if (unlikely(!code_object)) goto bad; + getframe = PySys_GetObject("_getframe"); + if (unlikely(!getframe)) goto bad; + if (unlikely(PyDict_SetItemString(dict, 
"_getframe", getframe))) goto bad; + frame = PyEval_EvalCode(code_object, dict, dict); + if (unlikely(!frame) || frame == Py_None) goto bad; + success = 1; + bad: + PyErr_Restore(exc_type, exc_value, exc_traceback); + Py_XDECREF(code_object); + Py_XDECREF(py_py_line); + Py_XDECREF(py_funcname); + Py_XDECREF(dict); + Py_XDECREF(replace); + if (success) { + PyTraceBack_Here( + (struct _frame*)frame); + } + Py_XDECREF(frame); +} +#else +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; + #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; + py_srcfile = PyString_FromString(filename); + if (!py_srcfile) goto bad; + #endif + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + funcname = PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + if (!py_funcname) goto bad; + #endif + } + #if PY_MAJOR_VERSION < 3 + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject *ptype, *pvalue, *ptraceback; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) { + /* If the code object creation fails, then we should clear the + fetched exception references and propagate the new exception */ + Py_XDECREF(ptype); + Py_XDECREF(pvalue); + Py_XDECREF(ptraceback); + goto bad; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} +#endif + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(long) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned 
long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(long) <= sizeof(unsigned long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | 
(long)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } + } +#endif + if ((sizeof(long) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + long val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (long) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (long) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (long) -1; + } else { + stepval = v; + } + v = NULL; + val = (long) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((long) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 
0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((long) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (long) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + unsigned char *bytes = (unsigned char *)&value; +#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 + if (is_unsigned) { + return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); + } else { + return PyLong_FromNativeBytes(bytes, sizeof(value), -1); + } +#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 + int one = 1; int little = (int)*(unsigned char *)&one; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); +#else + int one = 1; int little = (int)*(unsigned char *)&one; + PyObject *from_bytes, *result = NULL; + PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; + from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); + if (!from_bytes) return NULL; + py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); + if (!py_bytes) goto limited_bad; + order_str = PyUnicode_FromString(little ? 
"little" : "big"); + if (!order_str) goto limited_bad; + arg_tuple = PyTuple_Pack(2, py_bytes, order_str); + if (!arg_tuple) goto limited_bad; + if (!is_unsigned) { + kwds = PyDict_New(); + if (!kwds) goto limited_bad; + if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; + } + result = PyObject_Call(from_bytes, arg_tuple, kwds); + limited_bad: + Py_XDECREF(kwds); + Py_XDECREF(arg_tuple); + Py_XDECREF(order_str); + Py_XDECREF(py_bytes); + Py_XDECREF(from_bytes); + return result; +#endif + } +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if ((sizeof(int) < sizeof(long))) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } +#endif + if (unlikely(!PyLong_Check(x))) { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + if (unlikely(__Pyx_PyLong_IsNeg(x))) { + goto raise_neg_overflow; + } else if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_DigitCount(x)) { + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if ((sizeof(int) <= sizeof(unsigned long))) { 
+ __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + if (__Pyx_PyLong_IsCompact(x)) { + __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) + } else { + const digit* digits = __Pyx_PyLong_Digits(x); + assert(__Pyx_PyLong_DigitCount(x) > 1); + switch (__Pyx_PyLong_SignedDigitCount(x)) { + case -2: + if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { + if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } + } +#endif + if ((sizeof(int) <= sizeof(long))) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + 
} else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { + int val; + int ret = -1; +#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API + Py_ssize_t bytes_copied = PyLong_AsNativeBytes( + x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); + if (unlikely(bytes_copied == -1)) { + } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { + goto raise_overflow; + } else { + ret = 0; + } +#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + ret = _PyLong_AsByteArray((PyLongObject *)x, + bytes, sizeof(val), + is_little, !is_unsigned); +#else + PyObject *v; + PyObject *stepval = NULL, *mask = NULL, *shift = NULL; + int bits, remaining_bits, is_negative = 0; + int chunk_size = (sizeof(long) < 8) ? 30 : 62; + if (likely(PyLong_CheckExact(x))) { + v = __Pyx_NewRef(x); + } else { + v = PyNumber_Long(x); + if (unlikely(!v)) return (int) -1; + assert(PyLong_CheckExact(v)); + } + { + int result = PyObject_RichCompareBool(v, Py_False, Py_LT); + if (unlikely(result < 0)) { + Py_DECREF(v); + return (int) -1; + } + is_negative = result == 1; + } + if (is_unsigned && unlikely(is_negative)) { + Py_DECREF(v); + goto raise_neg_overflow; + } else if (is_negative) { + stepval = PyNumber_Invert(v); + Py_DECREF(v); + if (unlikely(!stepval)) + return (int) -1; + } else { + stepval = v; + } + v = NULL; + val = (int) 0; + mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; + shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; + for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { + PyObject *tmp, *digit; + long idigit; + digit = PyNumber_And(stepval, mask); + if (unlikely(!digit)) goto done; + idigit = PyLong_AsLong(digit); + Py_DECREF(digit); + if (unlikely(idigit < 0)) goto done; + val |= ((int) idigit) << bits; + tmp = PyNumber_Rshift(stepval, shift); + if (unlikely(!tmp)) goto done; + Py_DECREF(stepval); stepval = tmp; + } + Py_DECREF(shift); shift = NULL; + Py_DECREF(mask); mask = NULL; + { + long idigit = PyLong_AsLong(stepval); + if (unlikely(idigit < 0)) goto done; + remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 
0 : 1); + if (unlikely(idigit >= (1L << remaining_bits))) + goto raise_overflow; + val |= ((int) idigit) << bits; + } + if (!is_unsigned) { + if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) + goto raise_overflow; + if (is_negative) + val = ~val; + } + ret = 0; + done: + Py_XDECREF(shift); + Py_XDECREF(mask); + Py_XDECREF(stepval); +#endif + if (unlikely(ret)) + return (int) -1; + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (cls == a || cls == b) return 1; + mro = cls->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + PyObject *base = PyTuple_GET_ITEM(mro, i); + if (base == (PyObject *)a || base == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + if (exc_type1) { + return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); + } else { + return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030B00A4 + return Py_Version & ~0xFFUL; +#else + const char* rt_version = Py_GetVersion(); + unsigned long version = 0; + unsigned long factor = 0x01000000UL; + unsigned int digit = 0; + int i = 0; + while (factor) { + while ('0' <= rt_version[i] && rt_version[i] <= '9') { + digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); + ++i; + } + version += factor * digit; + if (rt_version[i] != '.') + break; + digit = 0; + factor >>= 8; + ++i; + } + return version; +#endif +} +static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { + const unsigned long MAJOR_MINOR = 0xFFFF0000UL; + if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) + return 0; + if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) + return 1; + { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compile time Python version %d.%d " + "of module '%.100s' " + "%s " + "runtime version %d.%d", + (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), + __Pyx_MODULE_NAME, + (allow_newer) ? 
"was newer than" : "does not match", + (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) + ); + return PyErr_WarnEx(NULL, message, 1); + } +} + +/* InitStrings */ +#if PY_MAJOR_VERSION >= 3 +static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { + if (t.is_unicode | t.is_str) { + if (t.intern) { + *str = PyUnicode_InternFromString(t.s); + } else if (t.encoding) { + *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); + } else { + *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); + } + } else { + *str = PyBytes_FromStringAndSize(t.s, t.n - 1); + } + if (!*str) + return -1; + if (PyObject_Hash(*str) == -1) + return -1; + return 0; +} +#endif +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION >= 3 + __Pyx_InitString(*t, t->p); + #else + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + #endif + ++t; + } + return 0; +} + +#include +static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { + size_t len = strlen(s); + if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { + PyErr_SetString(PyExc_OverflowError, "byte string is too long"); + return -1; + } + return (Py_ssize_t) len; +} +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return __Pyx_PyUnicode_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { + Py_ssize_t len = __Pyx_ssize_strlen(c_str); + if (unlikely(len < 0)) return NULL; + return PyByteArray_FromStringAndSize(c_str, len); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if 
(!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { + __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). " + "The ability to return an instance of a strict subclass of int is deprecated, " + "and may be removed in a future version of Python.", + result_type_name)) { + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; + } + __Pyx_DECREF_TypeName(result_type_name); + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", + type_name, type_name, result_type_name); + __Pyx_DECREF_TypeName(result_type_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(__Pyx_PyLong_IsCompact(b))) { + return __Pyx_PyLong_CompactValue(b); + } else { + const digit* digits = __Pyx_PyLong_Digits(b); + const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * 
PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +/* #### Code section: utility_code_pragmas_end ### */ +#ifdef _MSC_VER +#pragma warning( pop ) +#endif + + + +/* #### Code section: end ### */ +#endif /* Py_PYTHON_H */ diff --git a/test/ext/test_hybrid.py b/test/ext/test_hybrid.py index 8e3d7e9cd57..f6ad0de8d4d 100644 --- a/test/ext/test_hybrid.py +++ b/test/ext/test_hybrid.py @@ -22,6 +22,7 @@ from sqlalchemy.orm import relationship from sqlalchemy.orm import Session from sqlalchemy.orm import synonym +from sqlalchemy.orm.context import ORMSelectCompileState from sqlalchemy.sql import coercions from sqlalchemy.sql import operators from sqlalchemy.sql import roles @@ -531,7 +532,9 @@ def test_labeling_for_unnamed_matches_col( "SELECT a.id, a.foo FROM a", ) - compile_state = stmt._compile_state_factory(stmt, None) + compile_state = ORMSelectCompileState._create_orm_context( + stmt, toplevel=True, compiler=None + ) eq_( compile_state._column_naming_convention( LABEL_STYLE_DISAMBIGUATE_ONLY, legacy=False diff --git a/test/orm/test_froms.py b/test/orm/test_froms.py index 51c86a5f1da..e0d75db7e16 100644 --- a/test/orm/test_froms.py +++ b/test/orm/test_froms.py @@ -1893,7 +1893,9 @@ def test_no_uniquing_cols(self, with_entities): .order_by(User.id) ) - compile_state = ORMSelectCompileState.create_for_statement(stmt, None) + compile_state = ORMSelectCompileState._create_orm_context( + stmt, toplevel=True, compiler=None + ) is_(compile_state._primary_entity, None) def test_column_queries_one(self): From 20f516d9587bb4bbadb845611c57db66000c93b5 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 4 Mar 2025 20:42:08 +0000 Subject: [PATCH 456/544] Revert "ensure compiler is not optional in create_for_statement()" This reverts commit 
b7e0ebe1ebbe6c0f97247a0854fc9ccfd9f763b1. Reason for revert: this change included a bunch of generated c files Change-Id: Ief8191394a91ebcf9315b24ef71659ccd8088bc8 --- lib/sqlalchemy/engine/_processors_cy.c | 11358 ---------- lib/sqlalchemy/engine/_row_cy.c | 11171 ---------- lib/sqlalchemy/engine/_util_cy.c | 8853 -------- lib/sqlalchemy/orm/context.py | 55 +- lib/sqlalchemy/orm/query.py | 4 +- lib/sqlalchemy/sql/_util_cy.c | 11241 ---------- lib/sqlalchemy/sql/base.py | 5 +- lib/sqlalchemy/sql/elements.py | 11 +- lib/sqlalchemy/sql/selectable.py | 5 +- lib/sqlalchemy/util/_collections_cy.c | 24882 ---------------------- lib/sqlalchemy/util/_immutabledict_cy.c | 15840 -------------- test/ext/test_hybrid.py | 5 +- test/orm/test_froms.py | 4 +- 13 files changed, 28 insertions(+), 83406 deletions(-) delete mode 100644 lib/sqlalchemy/engine/_processors_cy.c delete mode 100644 lib/sqlalchemy/engine/_row_cy.c delete mode 100644 lib/sqlalchemy/engine/_util_cy.c delete mode 100644 lib/sqlalchemy/sql/_util_cy.c delete mode 100644 lib/sqlalchemy/util/_collections_cy.c delete mode 100644 lib/sqlalchemy/util/_immutabledict_cy.c diff --git a/lib/sqlalchemy/engine/_processors_cy.c b/lib/sqlalchemy/engine/_processors_cy.c deleted file mode 100644 index 46d69c1a4d0..00000000000 --- a/lib/sqlalchemy/engine/_processors_cy.c +++ /dev/null @@ -1,11358 +0,0 @@ -/* Generated by Cython 3.0.11 */ - -/* BEGIN: Cython Metadata -{ - "distutils": { - "name": "sqlalchemy.engine._processors_cy", - "sources": [ - "lib/sqlalchemy/engine/_processors_cy.py" - ] - }, - "module_name": "sqlalchemy.engine._processors_cy" -} -END: Cython Metadata */ - -#ifndef PY_SSIZE_T_CLEAN -#define PY_SSIZE_T_CLEAN -#endif /* PY_SSIZE_T_CLEAN */ -#if defined(CYTHON_LIMITED_API) && 0 - #ifndef Py_LIMITED_API - #if CYTHON_LIMITED_API+0 > 0x03030000 - #define Py_LIMITED_API CYTHON_LIMITED_API - #else - #define Py_LIMITED_API 0x03030000 - #endif - #endif -#endif - -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.7+ or Python 3.3+. -#else -#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API -#define __PYX_EXTRA_ABI_MODULE_NAME "limited" -#else -#define __PYX_EXTRA_ABI_MODULE_NAME "" -#endif -#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME -#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI -#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." -#define CYTHON_HEX_VERSION 0x03000BF0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #define HAVE_LONG_LONG -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX -#if defined(GRAALVM_PYTHON) - /* For very preliminary testing purposes. Most variables are set the same as PyPy. 
- The existence of this section does not imply that anything works or is even tested */ - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 1 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(PYPY_VERSION) - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - 
#endif - #if PY_VERSION_HEX < 0x03090000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(CYTHON_LIMITED_API) - #ifdef Py_LIMITED_API - #undef __PYX_LIMITED_VERSION_HEX - #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API - #endif - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 1 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_CLINE_IN_TRACEBACK - #define CYTHON_CLINE_IN_TRACEBACK 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 1 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #endif - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 1 - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #ifndef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef 
CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 1 - #endif - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 - #endif -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #ifndef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) - #endif - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #if PY_VERSION_HEX < 0x030400a1 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #elif 
!defined(CYTHON_USE_TP_FINALIZE) - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #if PY_VERSION_HEX < 0x030600B1 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #elif !defined(CYTHON_USE_DICT_VERSIONS) - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) - #endif - #if PY_VERSION_HEX < 0x030700A3 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #elif !defined(CYTHON_USE_EXC_INFO_STACK) - #define CYTHON_USE_EXC_INFO_STACK 1 - #endif - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 1 - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if !defined(CYTHON_VECTORCALL) -#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) -#endif -#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) -#if CYTHON_USE_PYLONG_INTERNALS - #if PY_MAJOR_VERSION < 3 - #include "longintrepr.h" - #endif - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(maybe_unused) - #define CYTHON_UNUSED [[maybe_unused]] - #endif - #endif - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef CYTHON_MAYBE_UNUSED_VAR - #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_USE_CPP_STD_MOVE - #if defined(__cplusplus) && (\ - __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) - #define CYTHON_USE_CPP_STD_MOVE 1 - #else - #define CYTHON_USE_CPP_STD_MOVE 0 - #endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned short uint16_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 
uint32_t; - #endif - #endif - #if _MSC_VER < 1300 - #ifdef _WIN64 - typedef unsigned long long __pyx_uintptr_t; - #else - typedef unsigned int __pyx_uintptr_t; - #endif - #else - #ifdef _WIN64 - typedef unsigned __int64 __pyx_uintptr_t; - #else - typedef unsigned __int32 __pyx_uintptr_t; - #endif - #endif -#else - #include - typedef uintptr_t __pyx_uintptr_t; -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif -#ifdef __cplusplus - template - struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; - #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) -#else - #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) -#endif -#if CYTHON_COMPILING_IN_PYPY == 1 - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) -#else - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) -#endif -#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_DefaultClassType PyClass_Type - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" - #define __Pyx_DefaultClassType PyType_Type -#if CYTHON_COMPILING_IN_LIMITED_API - static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, - PyObject *code, PyObject *c, PyObject* n, PyObject *v, - PyObject *fv, PyObject *cell, PyObject* fn, - PyObject *name, int fline, PyObject *lnos) { - PyObject *exception_table = NULL; - PyObject *types_module=NULL, *code_type=NULL, *result=NULL; - #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 - PyObject *version_info; - PyObject *py_minor_version = NULL; - #endif - long minor_version = 0; - PyObject *type, *value, *traceback; - PyErr_Fetch(&type, &value, &traceback); - #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 - minor_version = 11; - #else - if (!(version_info = PySys_GetObject("version_info"))) goto end; - if 
(!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; - minor_version = PyLong_AsLong(py_minor_version); - Py_DECREF(py_minor_version); - if (minor_version == -1 && PyErr_Occurred()) goto end; - #endif - if (!(types_module = PyImport_ImportModule("types"))) goto end; - if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; - if (minor_version <= 7) { - (void)p; - result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, - c, n, v, fn, name, fline, lnos, fv, cell); - } else if (minor_version <= 10) { - result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, - c, n, v, fn, name, fline, lnos, fv, cell); - } else { - if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; - result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, - c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); - } - end: - Py_XDECREF(code_type); - Py_XDECREF(exception_table); - Py_XDECREF(types_module); - if (type) { - PyErr_Restore(type, value, traceback); - } - return result; - } - #ifndef CO_OPTIMIZED - #define CO_OPTIMIZED 0x0001 - #endif - #ifndef CO_NEWLOCALS - #define CO_NEWLOCALS 0x0002 - #endif - #ifndef CO_VARARGS - #define CO_VARARGS 0x0004 - #endif - #ifndef CO_VARKEYWORDS - #define CO_VARKEYWORDS 0x0008 - #endif - #ifndef CO_ASYNC_GENERATOR - #define CO_ASYNC_GENERATOR 0x0200 - #endif - #ifndef CO_GENERATOR - #define CO_GENERATOR 0x0020 - #endif - #ifndef CO_COROUTINE - #define CO_COROUTINE 0x0080 - #endif -#elif PY_VERSION_HEX >= 0x030B0000 - static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, - PyObject *code, PyObject *c, PyObject* n, PyObject *v, - PyObject *fv, PyObject *cell, PyObject* fn, - PyObject *name, int fline, PyObject *lnos) { - PyCodeObject *result; - PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); - if (!empty_bytes) return NULL; - result = - #if PY_VERSION_HEX >= 0x030C0000 - PyUnstable_Code_NewWithPosOnlyArgs - #else - PyCode_NewWithPosOnlyArgs - #endif - (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); - Py_DECREF(empty_bytes); - return result; - } -#elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#endif -#endif -#if PY_VERSION_HEX >= 0x030900A4 || defined(Py_IS_TYPE) - #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) -#else - #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) - #define __Pyx_Py_Is(x, y) Py_Is(x, y) -#else - #define __Pyx_Py_Is(x, y) ((x) == (y)) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) - #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) -#else - #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) - #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) -#else - #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) - #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) -#else - #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) -#endif -#define 
__Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) -#if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) -#else - #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) -#endif -#ifndef CO_COROUTINE - #define CO_COROUTINE 0x80 -#endif -#ifndef CO_ASYNC_GENERATOR - #define CO_ASYNC_GENERATOR 0x200 -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef Py_TPFLAGS_SEQUENCE - #define Py_TPFLAGS_SEQUENCE 0 -#endif -#ifndef Py_TPFLAGS_MAPPING - #define Py_TPFLAGS_MAPPING 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #if PY_VERSION_HEX >= 0x030d00A4 - # define __Pyx_PyCFunctionFast PyCFunctionFast - # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords - #else - # define __Pyx_PyCFunctionFast _PyCFunctionFast - # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords - #endif -#endif -#if CYTHON_METH_FASTCALL - #define __Pyx_METH_FASTCALL METH_FASTCALL - #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast - #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords -#else - #define __Pyx_METH_FASTCALL METH_VARARGS - #define __Pyx_PyCFunction_FastCall PyCFunction - #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords -#endif -#if CYTHON_VECTORCALL - #define __pyx_vectorcallfunc vectorcallfunc - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET - #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) -#elif CYTHON_BACKPORT_VECTORCALL - typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, - size_t nargsf, PyObject *kwnames); - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) - #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) -#else - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 - #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) -#endif -#if PY_MAJOR_VERSION >= 0x030900B1 -#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) -#else -#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) -#endif -#define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) -#elif !CYTHON_COMPILING_IN_LIMITED_API -#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) -#endif -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) -static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { - return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? 
NULL : ((PyCFunctionObject*)func)->m_self; -} -#endif -static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) { -#if CYTHON_COMPILING_IN_LIMITED_API - return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; -#else - return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; -#endif -} -#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) -#if __PYX_LIMITED_VERSION_HEX < 0x030900B1 - #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) - typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); -#else - #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) - #define __Pyx_PyCMethod PyCMethod -#endif -#ifndef METH_METHOD - #define METH_METHOD 0x200 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyThreadState_Current PyThreadState_Get() -#elif !CYTHON_FAST_THREAD_STATE - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x030d00A1 - #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) -{ - void *result; - result = PyModule_GetState(op); - if (!result) - Py_FatalError("Couldn't find the module state"); - return result; -} -#endif -#define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) -#else - #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} 
-#endif -#if PY_MAJOR_VERSION < 3 - #if CYTHON_COMPILING_IN_PYPY - #if PYPY_VERSION_NUM < 0x07030600 - #if defined(__cplusplus) && __cplusplus >= 201402L - [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] - #elif defined(__GNUC__) || defined(__clang__) - __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) - #elif defined(_MSC_VER) - __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) - #endif - static CYTHON_INLINE int PyGILState_Check(void) { - return 0; - } - #else // PYPY_VERSION_NUM < 0x07030600 - #endif // PYPY_VERSION_NUM < 0x07030600 - #else - static CYTHON_INLINE int PyGILState_Check(void) { - PyThreadState * tstate = _PyThreadState_Current; - return tstate && (tstate == PyGILState_GetThisThreadState()); - } - #endif -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { - PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); - if (res == NULL) PyErr_Clear(); - return res; -} -#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) -#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError -#define __Pyx_PyDict_GetItemStr PyDict_GetItem -#else -static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { -#if CYTHON_COMPILING_IN_PYPY - return PyDict_GetItem(dict, name); -#else - PyDictEntry *ep; - PyDictObject *mp = (PyDictObject*) dict; - long hash = ((PyStringObject *) name)->ob_shash; - assert(hash != -1); - ep = (mp->ma_lookup)(mp, name, hash); - if (ep == NULL) { - return NULL; - } - return ep->me_value; -#endif -} -#define __Pyx_PyDict_GetItemStr PyDict_GetItem -#endif -#if CYTHON_USE_TYPE_SLOTS - #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) - #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) - #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) -#else - #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) - #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) - #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) -#else - #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) -#endif -#if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 -#define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ - PyTypeObject *type = Py_TYPE((PyObject*)obj);\ - assert(__Pyx_PyType_HasFeature(type, 
Py_TPFLAGS_HEAPTYPE));\ - PyObject_GC_Del(obj);\ - Py_DECREF(type);\ -} -#else -#define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) - #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) - #define __Pyx_PyUnicode_DATA(u) ((void*)u) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) -#elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #if PY_VERSION_HEX >= 0x030C0000 - #define __Pyx_PyUnicode_READY(op) (0) - #else - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #endif - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) - #if PY_VERSION_HEX >= 0x030C0000 - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #else - #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #endif - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535U : 1114111U) - #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #if !defined(PyUnicode_DecodeUnicodeEscape) - #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) - #endif - #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) - #undef PyUnicode_Contains - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) - #endif - #if !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) - #endif - #if !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) - #endif -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#if CYTHON_COMPILING_IN_CPYTHON - #define __Pyx_PySequence_ListKeepNew(obj)\ - (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? 
__Pyx_NewRef(obj) : PySequence_List(obj)) -#else - #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) - #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) - #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) - #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) - #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) - #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) - #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) - #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) -#else - #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) - #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) - #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) - #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) - #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) - #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) - #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) - #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) -#endif -#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 - #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) -#else - static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { - PyObject *module = PyImport_AddModule(name); - Py_XINCREF(module); - return module; - } -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define __Pyx_Py3Int_Check(op) PyLong_Check(op) - #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#else - #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) - #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - 
#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) - #if !defined(_USE_MATH_DEFINES) - #define _USE_MATH_DEFINES - #endif -#endif -#include <math.h> -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - -#define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } -#define __PYX_ERR(f_index, lineno, Ln_error) \ - { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } - -#ifdef CYTHON_EXTERN_C - #undef __PYX_EXTERN_C - #define __PYX_EXTERN_C CYTHON_EXTERN_C -#elif defined(__PYX_EXTERN_C) - #ifdef _MSC_VER - #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") - #else - #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead. - #endif -#else - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__sqlalchemy__engine___processors_cy -#define __PYX_HAVE_API__sqlalchemy__engine___processors_cy -/* Early includes */ -#ifdef _OPENMP -#include <omp.h> -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include <cstdlib> - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined (__STDC_VERSION__) &&
__STDC_VERSION__ >= 199901L - #define __Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define __Pyx_sst_abs(value) __builtin_llabs(value) -#else - #define __Pyx_sst_abs(value) ((value<0) ? -value : value) -#endif -static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s); -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char*); -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -#define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #if PY_VERSION_HEX >= 0x030C00A7 - #ifndef _PyLong_SIGN_MASK - #define _PyLong_SIGN_MASK 3 - #endif - #ifndef _PyLong_NON_SIZE_BITS - #define _PyLong_NON_SIZE_BITS 3 - #endif - #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) - #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) - #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) - #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) - #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) - #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) - #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) - #define __Pyx_PyLong_SignedDigitCount(x)\ - ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) - #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) - #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) - #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) - #else - #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) - #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) - #endif - typedef Py_ssize_t __Pyx_compact_pylong; - typedef size_t __Pyx_compact_upylong; - #else - #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) - #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) - #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) - #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) - #define __Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) - #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) - #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) - #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) - #define __Pyx_PyLong_CompactValue(x)\ - ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? 
-(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0])) - typedef sdigit __Pyx_compact_pylong; - typedef digit __Pyx_compact_upylong; - #endif - #if PY_VERSION_HEX >= 0x030C00A5 - #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit) - #else - #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit) - #endif -#endif -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -#include -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = (char) c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#include -static char* __PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ -static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -#if !CYTHON_USE_MODULE_STATE 
-static PyObject *__pyx_m = NULL; -#endif -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm = __FILE__; -static const char *__pyx_filename; - -/* #### Code section: filename_table ### */ - -static const char *__pyx_f[] = { - "lib/sqlalchemy/engine/_processors_cy.py", - "", -}; -/* #### Code section: utility_code_proto_before_types ### */ -/* ForceInitThreads.proto */ -#ifndef __PYX_FORCE_INIT_THREADS - #define __PYX_FORCE_INIT_THREADS 0 -#endif - -/* #### Code section: numeric_typedefs ### */ -/* #### Code section: complex_type_declarations ### */ -/* #### Code section: type_declarations ### */ - -/*--- Type declarations ---*/ -struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory; - -/* "sqlalchemy/engine/_processors_cy.py":78 - * - * @cython.cclass - * class to_decimal_processor_factory: # <<<<<<<<<<<<<< - * type_: type - * format_: str - */ -struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory { - PyObject_HEAD - PyTypeObject *type_; - PyObject *format_; -}; - -/* #### Code section: utility_code_proto ### */ - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, Py_ssize_t); - void (*DECREF)(void*, PyObject*, Py_ssize_t); - void (*GOTREF)(void*, PyObject*, Py_ssize_t); - void (*GIVEREF)(void*, PyObject*, Py_ssize_t); - void* (*SetupContext)(const char*, Py_ssize_t, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ - } - #define __Pyx_RefNannyFinishContextNogil() {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __Pyx_RefNannyFinishContext();\ - PyGILState_Release(__pyx_gilstate_save);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)) - #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext() -#endif - #define __Pyx_RefNannyFinishContextNogil() {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __Pyx_RefNannyFinishContext();\ - PyGILState_Release(__pyx_gilstate_save);\ - } - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) - #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) - #define 
__Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContextNogil() - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - #define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_Py_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; Py_XDECREF(tmp);\ - } while (0) -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while (0) -#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* TupleAndListFromArray.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n); -static CYTHON_INLINE PyObject* __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n); -#endif - -/* IncludeStringH.proto */ -#include - -/* BytesEquals.proto */ -static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); - -/* UnicodeEquals.proto */ -static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); - -/* fastcall.proto */ -#if CYTHON_AVOID_BORROWED_REFS - #define __Pyx_Arg_VARARGS(args, i) PySequence_GetItem(args, i) -#elif CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i) -#else - #define __Pyx_Arg_VARARGS(args, i) PyTuple_GetItem(args, i) -#endif -#if CYTHON_AVOID_BORROWED_REFS - #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) - #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) -#else - #define __Pyx_Arg_NewRef_VARARGS(arg) arg - #define __Pyx_Arg_XDECREF_VARARGS(arg) -#endif -#define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) -#define __Pyx_KwValues_VARARGS(args, nargs) NULL -#define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s) -#define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw) -#if CYTHON_METH_FASTCALL - #define __Pyx_Arg_FASTCALL(args, i) args[i] - #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) - #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) - static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 - CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); - #else - #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) - #endif - #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs - to have the same reference counting */ - #define __Pyx_Arg_XDECREF_FASTCALL(arg) -#else - #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS - #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS - #define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS - #define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS - #define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS - #define 
__Pyx_Arg_NewRef_FASTCALL(arg) __Pyx_Arg_NewRef_VARARGS(arg) - #define __Pyx_Arg_XDECREF_FASTCALL(arg) __Pyx_Arg_XDECREF_VARARGS(arg) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS -#define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start) -#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start) -#else -#define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop) -#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop) -#endif - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues, - PyObject **argnames[], - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, - const char* function_name); - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* PyObject_Unicode.proto */ -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyObject_Unicode(obj)\ - (likely(PyUnicode_CheckExact(obj)) ? __Pyx_NewRef(obj) : PyObject_Str(obj)) -#else -#define __Pyx_PyObject_Unicode(obj)\ - (likely(PyUnicode_CheckExact(obj)) ? __Pyx_NewRef(obj) : PyObject_Unicode(obj)) -#endif - -/* pybytes_as_double.proto */ -static double __Pyx_SlowPyString_AsDouble(PyObject *obj); -static double __Pyx__PyBytes_AsDouble(PyObject *obj, const char* start, Py_ssize_t length); -static CYTHON_INLINE double __Pyx_PyBytes_AsDouble(PyObject *obj) { - char* as_c_string; - Py_ssize_t size; -#if CYTHON_ASSUME_SAFE_MACROS - as_c_string = PyBytes_AS_STRING(obj); - size = PyBytes_GET_SIZE(obj); -#else - if (PyBytes_AsStringAndSize(obj, &as_c_string, &size) < 0) { - return (double)-1; - } -#endif - return __Pyx__PyBytes_AsDouble(obj, as_c_string, size); -} -static CYTHON_INLINE double __Pyx_PyByteArray_AsDouble(PyObject *obj) { - char* as_c_string; - Py_ssize_t size; -#if CYTHON_ASSUME_SAFE_MACROS - as_c_string = PyByteArray_AS_STRING(obj); - size = PyByteArray_GET_SIZE(obj); -#else - as_c_string = PyByteArray_AsString(obj); - if (as_c_string == NULL) { - return (double)-1; - } - size = PyByteArray_Size(obj); -#endif - return __Pyx__PyBytes_AsDouble(obj, as_c_string, size); -} - -/* pyunicode_as_double.proto */ -#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY && CYTHON_ASSUME_SAFE_MACROS -static const char* __Pyx__PyUnicode_AsDouble_Copy(const void* data, const int kind, char* buffer, Py_ssize_t start, Py_ssize_t end) { - int last_was_punctuation; - Py_ssize_t i; - last_was_punctuation = 1; - for (i=start; i <= end; i++) { - Py_UCS4 chr = PyUnicode_READ(kind, data, i); - int is_punctuation = (chr == '_') | (chr == '.'); - *buffer = (char)chr; - buffer += (chr != '_'); - if (unlikely(chr > 127)) goto parse_failure; - if (unlikely(last_was_punctuation & is_punctuation)) goto parse_failure; - last_was_punctuation = is_punctuation; - } - if (unlikely(last_was_punctuation)) goto parse_failure; - *buffer = '\0'; - return buffer; -parse_failure: - return NULL; -} -static double __Pyx__PyUnicode_AsDouble_inf_nan(const void* data, int kind, Py_ssize_t start, Py_ssize_t length) { - int matches = 1; - Py_UCS4 chr; - Py_UCS4 sign = PyUnicode_READ(kind, data, start); - int is_signed = (sign == '-') | (sign == 
'+'); - start += is_signed; - length -= is_signed; - switch (PyUnicode_READ(kind, data, start)) { - #ifdef Py_NAN - case 'n': - case 'N': - if (unlikely(length != 3)) goto parse_failure; - chr = PyUnicode_READ(kind, data, start+1); - matches &= (chr == 'a') | (chr == 'A'); - chr = PyUnicode_READ(kind, data, start+2); - matches &= (chr == 'n') | (chr == 'N'); - if (unlikely(!matches)) goto parse_failure; - return (sign == '-') ? -Py_NAN : Py_NAN; - #endif - case 'i': - case 'I': - if (unlikely(length < 3)) goto parse_failure; - chr = PyUnicode_READ(kind, data, start+1); - matches &= (chr == 'n') | (chr == 'N'); - chr = PyUnicode_READ(kind, data, start+2); - matches &= (chr == 'f') | (chr == 'F'); - if (likely(length == 3 && matches)) - return (sign == '-') ? -Py_HUGE_VAL : Py_HUGE_VAL; - if (unlikely(length != 8)) goto parse_failure; - chr = PyUnicode_READ(kind, data, start+3); - matches &= (chr == 'i') | (chr == 'I'); - chr = PyUnicode_READ(kind, data, start+4); - matches &= (chr == 'n') | (chr == 'N'); - chr = PyUnicode_READ(kind, data, start+5); - matches &= (chr == 'i') | (chr == 'I'); - chr = PyUnicode_READ(kind, data, start+6); - matches &= (chr == 't') | (chr == 'T'); - chr = PyUnicode_READ(kind, data, start+7); - matches &= (chr == 'y') | (chr == 'Y'); - if (unlikely(!matches)) goto parse_failure; - return (sign == '-') ? -Py_HUGE_VAL : Py_HUGE_VAL; - case '.': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': - break; - default: - goto parse_failure; - } - return 0.0; -parse_failure: - return -1.0; -} -static double __Pyx_PyUnicode_AsDouble_WithSpaces(PyObject *obj) { - double value; - const char *last; - char *end; - Py_ssize_t start, length = PyUnicode_GET_LENGTH(obj); - const int kind = PyUnicode_KIND(obj); - const void* data = PyUnicode_DATA(obj); - start = 0; - while (Py_UNICODE_ISSPACE(PyUnicode_READ(kind, data, start))) - start++; - while (start < length - 1 && Py_UNICODE_ISSPACE(PyUnicode_READ(kind, data, length - 1))) - length--; - length -= start; - if (unlikely(length <= 0)) goto fallback; - value = __Pyx__PyUnicode_AsDouble_inf_nan(data, kind, start, length); - if (unlikely(value == -1.0)) goto fallback; - if (value != 0.0) return value; - if (length < 40) { - char number[40]; - last = __Pyx__PyUnicode_AsDouble_Copy(data, kind, number, start, start + length); - if (unlikely(!last)) goto fallback; - value = PyOS_string_to_double(number, &end, NULL); - } else { - char *number = (char*) PyMem_Malloc((length + 1) * sizeof(char)); - if (unlikely(!number)) goto fallback; - last = __Pyx__PyUnicode_AsDouble_Copy(data, kind, number, start, start + length); - if (unlikely(!last)) { - PyMem_Free(number); - goto fallback; - } - value = PyOS_string_to_double(number, &end, NULL); - PyMem_Free(number); - } - if (likely(end == last) || (value == (double)-1 && PyErr_Occurred())) { - return value; - } -fallback: - return __Pyx_SlowPyString_AsDouble(obj); -} -#endif -static CYTHON_INLINE double __Pyx_PyUnicode_AsDouble(PyObject *obj) { -#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY && CYTHON_ASSUME_SAFE_MACROS - if (unlikely(__Pyx_PyUnicode_READY(obj) == -1)) - return (double)-1; - if (likely(PyUnicode_IS_ASCII(obj))) { - const char *s; - Py_ssize_t length; - s = PyUnicode_AsUTF8AndSize(obj, &length); - return __Pyx__PyBytes_AsDouble(obj, s, length); - } - return __Pyx_PyUnicode_AsDouble_WithSpaces(obj); -#else - return __Pyx_SlowPyString_AsDouble(obj); -#endif -} - -/* pynumber_float.proto */ -static CYTHON_INLINE PyObject* 
__Pyx__PyNumber_Float(PyObject* obj); -#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : __Pyx__PyNumber_Float(x)) - -/* PyErrExceptionMatches.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); -#else -#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) -#endif - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#if PY_VERSION_HEX >= 0x030C00A6 -#define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) -#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? (PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL) -#else -#define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL) -#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type) -#endif -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL) -#define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred() -#endif - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6 -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* PyObjectGetAttrStrNoError.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define 
__PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = __PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* GetModuleGlobalName.proto */ -#if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) do {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ - (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ - __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} while(0) -#define __Pyx_GetModuleGlobalNameUncached(var, name) do {\ - PY_UINT64_T __pyx_dict_version;\ - PyObject *__pyx_dict_cached_value;\ - (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} while(0) -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); -#else -#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) -#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); -#endif - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#if !CYTHON_VECTORCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); -#endif -#define __Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif -#if !CYTHON_VECTORCALL -#if PY_VERSION_HEX >= 0x03080000 - #include "frameobject.h" -#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API - #ifndef Py_BUILD_CORE - #define Py_BUILD_CORE 1 - #endif - #include "internal/pycore_frame.h" -#endif - #define __Pxy_PyFrame_Initialize_Offsets() - #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) -#else - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - 
(assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif -#endif -#endif - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectFastCall.proto */ -#define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) -static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); - -/* ArgTypeTest.proto */ -#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ - ((likely(__Pyx_IS_TYPE(obj, type) | (none_allowed && (obj == Py_None)))) ? 1 :\ - __Pyx__ArgTypeTest(obj, type, name, exact)) -static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); - -/* PyObjectFormatSimple.proto */ -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ - PyObject_Format(s, f)) -#elif PY_MAJOR_VERSION < 3 - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ - likely(PyString_CheckExact(s)) ? PyUnicode_FromEncodedObject(s, NULL, "strict") :\ - PyObject_Format(s, f)) -#elif CYTHON_USE_TYPE_SLOTS - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ - likely(PyLong_CheckExact(s)) ? PyLong_Type.tp_repr(s) :\ - likely(PyFloat_CheckExact(s)) ? PyFloat_Type.tp_repr(s) :\ - PyObject_Format(s, f)) -#else - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ - PyObject_Format(s, f)) -#endif - -/* JoinPyUnicode.proto */ -static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, - Py_UCS4 max_char); - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* KeywordStringCheck.proto */ -static int __Pyx_CheckKeywordStrings(PyObject *kw, const char* function_name, int kw_allowed); - -/* GetAttr3.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); - -/* RaiseUnexpectedTypeError.proto */ -static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); - -/* PySequenceContains.proto */ -static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { - int result = PySequence_Contains(seq, item); - return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); -} - -/* Import.proto */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); - -/* ImportFrom.proto */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); - -/* RaiseException.proto */ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); - -/* GetItemInt.proto */ -#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ - (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ - __Pyx_GetItemInt_Generic(o, to_py_func(i)))) -#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, - int is_list, int wraparound, int boundscheck); - -/* GetAttr.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); - -/* HasAttr.proto */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); - -/* IncludeStructmemberH.proto */ -#include - -/* FixUpExtensionType.proto */ -#if CYTHON_USE_TYPE_SPECS -static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type); -#endif - -/* PyObjectCallNoArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); - -/* PyObjectGetMethod.proto */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); - -/* PyObjectCallMethod0.proto */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); - -/* ValidateBasesTuple.proto */ -#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS -static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases); -#endif - -/* PyType_Ready.proto */ -CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t); - -/* PyObject_GenericGetAttrNoDict.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr -#endif - -/* PyObject_GenericGetAttr.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr -#endif - -/* SetupReduce.proto */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_setup_reduce(PyObject* type_obj); -#endif - -/* FetchSharedCythonModule.proto */ -static PyObject *__Pyx_FetchSharedCythonABIModule(void); - -/* FetchCommonType.proto */ -#if !CYTHON_USE_TYPE_SPECS -static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); -#else -static PyTypeObject* __Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases); -#endif - -/* PyMethodNew.proto */ -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { - PyObject *typesModule=NULL, *methodType=NULL, *result=NULL; - 
CYTHON_UNUSED_VAR(typ); - if (!self) - return __Pyx_NewRef(func); - typesModule = PyImport_ImportModule("types"); - if (!typesModule) return NULL; - methodType = PyObject_GetAttrString(typesModule, "MethodType"); - Py_DECREF(typesModule); - if (!methodType) return NULL; - result = PyObject_CallFunctionObjArgs(methodType, func, self, NULL); - Py_DECREF(methodType); - return result; -} -#elif PY_MAJOR_VERSION >= 3 -static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { - CYTHON_UNUSED_VAR(typ); - if (!self) - return __Pyx_NewRef(func); - return PyMethod_New(func, self); -} -#else - #define __Pyx_PyMethod_New PyMethod_New -#endif - -/* PyVectorcallFastCallDict.proto */ -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw); -#endif - -/* CythonFunctionShared.proto */ -#define __Pyx_CyFunction_USED -#define __Pyx_CYFUNCTION_STATICMETHOD 0x01 -#define __Pyx_CYFUNCTION_CLASSMETHOD 0x02 -#define __Pyx_CYFUNCTION_CCLASS 0x04 -#define __Pyx_CYFUNCTION_COROUTINE 0x08 -#define __Pyx_CyFunction_GetClosure(f)\ - (((__pyx_CyFunctionObject *) (f))->func_closure) -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_CyFunction_GetClassObj(f)\ - (((__pyx_CyFunctionObject *) (f))->func_classobj) -#else - #define __Pyx_CyFunction_GetClassObj(f)\ - ((PyObject*) ((PyCMethodObject *) (f))->mm_class) -#endif -#define __Pyx_CyFunction_SetClassObj(f, classobj)\ - __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj)) -#define __Pyx_CyFunction_Defaults(type, f)\ - ((type *)(((__pyx_CyFunctionObject *) (f))->defaults)) -#define __Pyx_CyFunction_SetDefaultsGetter(f, g)\ - ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g) -typedef struct { -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject_HEAD - PyObject *func; -#elif PY_VERSION_HEX < 0x030900B1 - PyCFunctionObject func; -#else - PyCMethodObject func; -#endif -#if CYTHON_BACKPORT_VECTORCALL - __pyx_vectorcallfunc func_vectorcall; -#endif -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API - PyObject *func_weakreflist; -#endif - PyObject *func_dict; - PyObject *func_name; - PyObject *func_qualname; - PyObject *func_doc; - PyObject *func_globals; - PyObject *func_code; - PyObject *func_closure; -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - PyObject *func_classobj; -#endif - void *defaults; - int defaults_pyobjects; - size_t defaults_size; - int flags; - PyObject *defaults_tuple; - PyObject *defaults_kwdict; - PyObject *(*defaults_getter)(PyObject *); - PyObject *func_annotations; - PyObject *func_is_coroutine; -} __pyx_CyFunctionObject; -#undef __Pyx_CyOrPyCFunction_Check -#define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType) -#define __Pyx_CyOrPyCFunction_Check(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type) -#define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType) -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc); -#undef __Pyx_IsSameCFunction -#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCyOrCFunction(func, cfunc) -static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, - int flags, PyObject* qualname, - PyObject *closure, - PyObject *module, PyObject *globals, - PyObject* code); -static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj); 
-static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, - size_t size, - int pyobjects); -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, - PyObject *tuple); -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, - PyObject *dict); -static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, - PyObject *dict); -static int __pyx_CyFunction_init(PyObject *module); -#if CYTHON_METH_FASTCALL -static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -#if CYTHON_BACKPORT_VECTORCALL -#define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall) -#else -#define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall) -#endif -#endif - -/* CythonFunction.proto */ -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, - int flags, PyObject* qualname, - PyObject *closure, - PyObject *module, PyObject *globals, - PyObject* code); - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -#if !CYTHON_COMPILING_IN_LIMITED_API -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); -#endif - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* GCCDiagnostics.proto */ -#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) -#define __Pyx_HAS_GCC_DIAGNOSTIC -#endif - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* FormatTypeName.proto */ -#if CYTHON_COMPILING_IN_LIMITED_API -typedef PyObject *__Pyx_TypeName; -#define __Pyx_FMT_TYPENAME "%U" -static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp); -#define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) -#else -typedef const char *__Pyx_TypeName; -#define __Pyx_FMT_TYPENAME "%.200s" -#define __Pyx_PyType_GetName(tp) ((tp)->tp_name) -#define __Pyx_DECREF_TypeName(obj) -#endif - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -#define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) -static 
CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* CheckBinaryVersion.proto */ -static unsigned long __Pyx_get_runtime_version(void); -static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - -/* #### Code section: module_declarations ### */ - -/* Module declarations from "cython" */ - -/* Module declarations from "sqlalchemy.engine._processors_cy" */ -static PyObject *__pyx_f_10sqlalchemy_6engine_14_processors_cy___pyx_unpickle_to_decimal_processor_factory__set_state(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *, PyObject *); /*proto*/ -/* #### Code section: typeinfo ### */ -/* #### Code section: before_global_var ### */ -#define __Pyx_MODULE_NAME "sqlalchemy.engine._processors_cy" -extern int __pyx_module_is_main_sqlalchemy__engine___processors_cy; -int __pyx_module_is_main_sqlalchemy__engine___processors_cy = 0; - -/* Implementation of "sqlalchemy.engine._processors_cy" */ -/* #### Code section: global_var ### */ -/* #### Code section: string_decls ### */ -static const char __pyx_k_[] = "%."; -static const char __pyx_k_f[] = "f"; -static const char __pyx_k__3[] = "."; -static const char __pyx_k_gc[] = "gc"; -static const char __pyx_k_Any[] = "Any"; -static const char __pyx_k__19[] = "?"; -static const char __pyx_k_new[] = "__new__"; -static const char __pyx_k_bool[] = "bool"; -static const char __pyx_k_date[] = "date"; -static const char __pyx_k_dict[] = "__dict__"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_self[] = "self"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_time[] = "time"; -static const char __pyx_k_type[] = "type_"; -static const char __pyx_k_scale[] = "scale"; -static const char __pyx_k_slots[] = "__slots__"; -static const char __pyx_k_state[] = "state"; -static const char __pyx_k_value[] = "value"; -static const char __pyx_k_dict_2[] = "_dict"; -static const char __pyx_k_enable[] = "enable"; -static const char __pyx_k_format[] = "format_"; -static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_pickle[] = "pickle"; -static const char __pyx_k_reduce[] = "__reduce__"; -static const char __pyx_k_return[] = "return"; -static const char __pyx_k_to_str[] = "to_str"; -static const char __pyx_k_typing[] = "typing"; -static const char __pyx_k_update[] = "update"; -static const 
char __pyx_k_disable[] = "disable"; -static const char __pyx_k_Optional[] = "Optional"; -static const char __pyx_k_date_cls[] = "date_cls"; -static const char __pyx_k_datetime[] = "datetime"; -static const char __pyx_k_getstate[] = "__getstate__"; -static const char __pyx_k_pyx_type[] = "__pyx_type"; -static const char __pyx_k_setstate[] = "__setstate__"; -static const char __pyx_k_time_cls[] = "time_cls"; -static const char __pyx_k_to_float[] = "to_float"; -static const char __pyx_k_isenabled[] = "isenabled"; -static const char __pyx_k_pyx_state[] = "__pyx_state"; -static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; -static const char __pyx_k_pyx_result[] = "__pyx_result"; -static const char __pyx_k_PickleError[] = "PickleError"; -static const char __pyx_k_is_compiled[] = "_is_compiled"; -static const char __pyx_k_str_to_date[] = "str_to_date"; -static const char __pyx_k_str_to_time[] = "str_to_time"; -static const char __pyx_k_Optional_str[] = "Optional[str]"; -static const char __pyx_k_datetime_cls[] = "datetime_cls"; -static const char __pyx_k_is_coroutine[] = "_is_coroutine"; -static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; -static const char __pyx_k_stringsource[] = ""; -static const char __pyx_k_use_setstate[] = "use_setstate"; -static const char __pyx_k_Optional_bool[] = "Optional[bool]"; -static const char __pyx_k_fromisoformat[] = "fromisoformat"; -static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; -static const char __pyx_k_Optional_float[] = "Optional[float]"; -static const char __pyx_k_int_to_boolean[] = "int_to_boolean"; -static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; -static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; -static const char __pyx_k_str_to_datetime[] = "str_to_datetime"; -static const char __pyx_k_Optional_date_cls[] = "Optional[date_cls]"; -static const char __pyx_k_Optional_time_cls[] = "Optional[time_cls]"; -static const char __pyx_k_asyncio_coroutines[] = "asyncio.coroutines"; -static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; -static const char __pyx_k_Optional_datetime_cls[] = "Optional[datetime_cls]"; -static const char __pyx_k_to_decimal_processor_factory[] = "to_decimal_processor_factory"; -static const char __pyx_k_pyx_unpickle_to_decimal_proces[] = "__pyx_unpickle_to_decimal_processor_factory"; -static const char __pyx_k_Incompatible_checksums_0x_x_vs_0[] = "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))"; -static const char __pyx_k_lib_sqlalchemy_engine__processor[] = "lib/sqlalchemy/engine/_processors_cy.py"; -static const char __pyx_k_sqlalchemy_engine__processors_cy[] = "sqlalchemy.engine._processors_cy"; -static const char __pyx_k_to_decimal_processor_factory___r[] = "to_decimal_processor_factory.__reduce_cython__"; -static const char __pyx_k_to_decimal_processor_factory___s[] = "to_decimal_processor_factory.__setstate_cython__"; -/* #### Code section: decls ### */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_2int_to_boolean(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_4to_str(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_6to_float(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); 
/* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_8str_to_datetime(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_10str_to_time(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_12str_to_date(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value); /* proto */ -static int __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory___init__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyTypeObject *__pyx_v_type_, PyObject *__pyx_v_scale); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_2__call__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_4__reduce_cython__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_6__setstate_cython__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_14__pyx_unpickle_to_decimal_processor_factory(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_tp_new_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ -/* #### Code section: late_includes ### */ -/* #### Code section: module_state ### */ -typedef struct { - PyObject *__pyx_d; - PyObject *__pyx_b; - PyObject *__pyx_cython_runtime; - PyObject *__pyx_empty_tuple; - PyObject *__pyx_empty_bytes; - PyObject *__pyx_empty_unicode; - #ifdef __Pyx_CyFunction_USED - PyTypeObject *__pyx_CyFunctionType; - #endif - #ifdef __Pyx_FusedFunction_USED - PyTypeObject *__pyx_FusedFunctionType; - #endif - #ifdef __Pyx_Generator_USED - PyTypeObject *__pyx_GeneratorType; - #endif - #ifdef __Pyx_IterableCoroutine_USED - PyTypeObject *__pyx_IterableCoroutineType; - #endif - #ifdef __Pyx_Coroutine_USED - PyTypeObject *__pyx_CoroutineAwaitType; - #endif - #ifdef __Pyx_Coroutine_USED - PyTypeObject *__pyx_CoroutineType; - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - PyObject *__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory; - #endif - PyTypeObject *__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory; - PyObject *__pyx_kp_u_; - PyObject *__pyx_n_s_Any; - PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0; - PyObject *__pyx_n_s_Optional; - PyObject *__pyx_kp_s_Optional_bool; - PyObject *__pyx_kp_s_Optional_date_cls; - PyObject *__pyx_kp_s_Optional_datetime_cls; - PyObject *__pyx_kp_s_Optional_float; - PyObject *__pyx_kp_s_Optional_str; - PyObject *__pyx_kp_s_Optional_time_cls; - PyObject *__pyx_n_s_PickleError; - PyObject *__pyx_n_s__19; - PyObject *__pyx_kp_u__3; - PyObject *__pyx_n_s_asyncio_coroutines; - PyObject *__pyx_n_s_bool; - PyObject *__pyx_n_s_cline_in_traceback; - PyObject *__pyx_n_s_date; - PyObject *__pyx_n_s_date_cls; - 
PyObject *__pyx_n_s_datetime; - PyObject *__pyx_n_s_datetime_cls; - PyObject *__pyx_n_s_dict; - PyObject *__pyx_n_s_dict_2; - PyObject *__pyx_kp_u_disable; - PyObject *__pyx_kp_u_enable; - PyObject *__pyx_n_u_f; - PyObject *__pyx_n_u_format; - PyObject *__pyx_n_s_fromisoformat; - PyObject *__pyx_kp_u_gc; - PyObject *__pyx_n_s_getstate; - PyObject *__pyx_n_s_import; - PyObject *__pyx_n_s_int_to_boolean; - PyObject *__pyx_n_s_is_compiled; - PyObject *__pyx_n_s_is_coroutine; - PyObject *__pyx_kp_u_isenabled; - PyObject *__pyx_kp_s_lib_sqlalchemy_engine__processor; - PyObject *__pyx_n_s_main; - PyObject *__pyx_n_s_name; - PyObject *__pyx_n_s_new; - PyObject *__pyx_n_s_pickle; - PyObject *__pyx_n_s_pyx_PickleError; - PyObject *__pyx_n_s_pyx_checksum; - PyObject *__pyx_n_s_pyx_result; - PyObject *__pyx_n_s_pyx_state; - PyObject *__pyx_n_s_pyx_type; - PyObject *__pyx_n_s_pyx_unpickle_to_decimal_proces; - PyObject *__pyx_n_s_reduce; - PyObject *__pyx_n_s_reduce_cython; - PyObject *__pyx_n_s_reduce_ex; - PyObject *__pyx_n_s_return; - PyObject *__pyx_n_s_scale; - PyObject *__pyx_n_s_self; - PyObject *__pyx_n_s_setstate; - PyObject *__pyx_n_s_setstate_cython; - PyObject *__pyx_n_s_slots; - PyObject *__pyx_n_s_sqlalchemy_engine__processors_cy; - PyObject *__pyx_n_s_state; - PyObject *__pyx_n_s_str_to_date; - PyObject *__pyx_n_s_str_to_datetime; - PyObject *__pyx_n_s_str_to_time; - PyObject *__pyx_kp_s_stringsource; - PyObject *__pyx_n_s_test; - PyObject *__pyx_n_s_time; - PyObject *__pyx_n_s_time_cls; - PyObject *__pyx_n_s_to_decimal_processor_factory; - PyObject *__pyx_n_s_to_decimal_processor_factory___r; - PyObject *__pyx_n_s_to_decimal_processor_factory___s; - PyObject *__pyx_n_s_to_float; - PyObject *__pyx_n_s_to_str; - PyObject *__pyx_n_s_type; - PyObject *__pyx_n_u_type; - PyObject *__pyx_n_s_typing; - PyObject *__pyx_n_s_update; - PyObject *__pyx_n_s_use_setstate; - PyObject *__pyx_n_s_value; - PyObject *__pyx_int_61481721; - PyObject *__pyx_int_190945570; - PyObject *__pyx_int_248337392; - PyObject *__pyx_tuple__2; - PyObject *__pyx_tuple__5; - PyObject *__pyx_tuple__12; - PyObject *__pyx_tuple__13; - PyObject *__pyx_tuple__15; - PyObject *__pyx_tuple__17; - PyObject *__pyx_codeobj__4; - PyObject *__pyx_codeobj__6; - PyObject *__pyx_codeobj__7; - PyObject *__pyx_codeobj__8; - PyObject *__pyx_codeobj__9; - PyObject *__pyx_codeobj__10; - PyObject *__pyx_codeobj__11; - PyObject *__pyx_codeobj__14; - PyObject *__pyx_codeobj__16; - PyObject *__pyx_codeobj__18; -} __pyx_mstate; - -#if CYTHON_USE_MODULE_STATE -#ifdef __cplusplus -namespace { - extern struct PyModuleDef __pyx_moduledef; -} /* anonymous namespace */ -#else -static struct PyModuleDef __pyx_moduledef; -#endif - -#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) - -#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) - -#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) -#else -static __pyx_mstate __pyx_mstate_global_static = -#ifdef __cplusplus - {}; -#else - {0}; -#endif -static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; -#endif -/* #### Code section: module_state_clear ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_clear(PyObject *m) { - __pyx_mstate *clear_module_state = __pyx_mstate(m); - if (!clear_module_state) return 0; - Py_CLEAR(clear_module_state->__pyx_d); - Py_CLEAR(clear_module_state->__pyx_b); - Py_CLEAR(clear_module_state->__pyx_cython_runtime); - Py_CLEAR(clear_module_state->__pyx_empty_tuple); - 
Py_CLEAR(clear_module_state->__pyx_empty_bytes); - Py_CLEAR(clear_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_CLEAR(clear_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); - #endif - Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); - Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); - Py_CLEAR(clear_module_state->__pyx_kp_u_); - Py_CLEAR(clear_module_state->__pyx_n_s_Any); - Py_CLEAR(clear_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); - Py_CLEAR(clear_module_state->__pyx_n_s_Optional); - Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_bool); - Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_date_cls); - Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_datetime_cls); - Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_float); - Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_str); - Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_time_cls); - Py_CLEAR(clear_module_state->__pyx_n_s_PickleError); - Py_CLEAR(clear_module_state->__pyx_n_s__19); - Py_CLEAR(clear_module_state->__pyx_kp_u__3); - Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); - Py_CLEAR(clear_module_state->__pyx_n_s_bool); - Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); - Py_CLEAR(clear_module_state->__pyx_n_s_date); - Py_CLEAR(clear_module_state->__pyx_n_s_date_cls); - Py_CLEAR(clear_module_state->__pyx_n_s_datetime); - Py_CLEAR(clear_module_state->__pyx_n_s_datetime_cls); - Py_CLEAR(clear_module_state->__pyx_n_s_dict); - Py_CLEAR(clear_module_state->__pyx_n_s_dict_2); - Py_CLEAR(clear_module_state->__pyx_kp_u_disable); - Py_CLEAR(clear_module_state->__pyx_kp_u_enable); - Py_CLEAR(clear_module_state->__pyx_n_u_f); - Py_CLEAR(clear_module_state->__pyx_n_u_format); - Py_CLEAR(clear_module_state->__pyx_n_s_fromisoformat); - Py_CLEAR(clear_module_state->__pyx_kp_u_gc); - Py_CLEAR(clear_module_state->__pyx_n_s_getstate); - Py_CLEAR(clear_module_state->__pyx_n_s_import); - Py_CLEAR(clear_module_state->__pyx_n_s_int_to_boolean); - Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); - Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); - Py_CLEAR(clear_module_state->__pyx_kp_u_isenabled); - Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_engine__processor); - Py_CLEAR(clear_module_state->__pyx_n_s_main); - Py_CLEAR(clear_module_state->__pyx_n_s_name); - Py_CLEAR(clear_module_state->__pyx_n_s_new); - Py_CLEAR(clear_module_state->__pyx_n_s_pickle); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_PickleError); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_checksum); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_result); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_state); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_type); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_unpickle_to_decimal_proces); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce_ex); - Py_CLEAR(clear_module_state->__pyx_n_s_return); - Py_CLEAR(clear_module_state->__pyx_n_s_scale); - Py_CLEAR(clear_module_state->__pyx_n_s_self); - Py_CLEAR(clear_module_state->__pyx_n_s_setstate); - Py_CLEAR(clear_module_state->__pyx_n_s_setstate_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_slots); - Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_engine__processors_cy); - 
Py_CLEAR(clear_module_state->__pyx_n_s_state); - Py_CLEAR(clear_module_state->__pyx_n_s_str_to_date); - Py_CLEAR(clear_module_state->__pyx_n_s_str_to_datetime); - Py_CLEAR(clear_module_state->__pyx_n_s_str_to_time); - Py_CLEAR(clear_module_state->__pyx_kp_s_stringsource); - Py_CLEAR(clear_module_state->__pyx_n_s_test); - Py_CLEAR(clear_module_state->__pyx_n_s_time); - Py_CLEAR(clear_module_state->__pyx_n_s_time_cls); - Py_CLEAR(clear_module_state->__pyx_n_s_to_decimal_processor_factory); - Py_CLEAR(clear_module_state->__pyx_n_s_to_decimal_processor_factory___r); - Py_CLEAR(clear_module_state->__pyx_n_s_to_decimal_processor_factory___s); - Py_CLEAR(clear_module_state->__pyx_n_s_to_float); - Py_CLEAR(clear_module_state->__pyx_n_s_to_str); - Py_CLEAR(clear_module_state->__pyx_n_s_type); - Py_CLEAR(clear_module_state->__pyx_n_u_type); - Py_CLEAR(clear_module_state->__pyx_n_s_typing); - Py_CLEAR(clear_module_state->__pyx_n_s_update); - Py_CLEAR(clear_module_state->__pyx_n_s_use_setstate); - Py_CLEAR(clear_module_state->__pyx_n_s_value); - Py_CLEAR(clear_module_state->__pyx_int_61481721); - Py_CLEAR(clear_module_state->__pyx_int_190945570); - Py_CLEAR(clear_module_state->__pyx_int_248337392); - Py_CLEAR(clear_module_state->__pyx_tuple__2); - Py_CLEAR(clear_module_state->__pyx_tuple__5); - Py_CLEAR(clear_module_state->__pyx_tuple__12); - Py_CLEAR(clear_module_state->__pyx_tuple__13); - Py_CLEAR(clear_module_state->__pyx_tuple__15); - Py_CLEAR(clear_module_state->__pyx_tuple__17); - Py_CLEAR(clear_module_state->__pyx_codeobj__4); - Py_CLEAR(clear_module_state->__pyx_codeobj__6); - Py_CLEAR(clear_module_state->__pyx_codeobj__7); - Py_CLEAR(clear_module_state->__pyx_codeobj__8); - Py_CLEAR(clear_module_state->__pyx_codeobj__9); - Py_CLEAR(clear_module_state->__pyx_codeobj__10); - Py_CLEAR(clear_module_state->__pyx_codeobj__11); - Py_CLEAR(clear_module_state->__pyx_codeobj__14); - Py_CLEAR(clear_module_state->__pyx_codeobj__16); - Py_CLEAR(clear_module_state->__pyx_codeobj__18); - return 0; -} -#endif -/* #### Code section: module_state_traverse ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { - __pyx_mstate *traverse_module_state = __pyx_mstate(m); - if (!traverse_module_state) return 0; - Py_VISIT(traverse_module_state->__pyx_d); - Py_VISIT(traverse_module_state->__pyx_b); - Py_VISIT(traverse_module_state->__pyx_cython_runtime); - Py_VISIT(traverse_module_state->__pyx_empty_tuple); - Py_VISIT(traverse_module_state->__pyx_empty_bytes); - Py_VISIT(traverse_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_VISIT(traverse_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); - #endif - Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); - Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); - Py_VISIT(traverse_module_state->__pyx_kp_u_); - Py_VISIT(traverse_module_state->__pyx_n_s_Any); - Py_VISIT(traverse_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); - Py_VISIT(traverse_module_state->__pyx_n_s_Optional); - Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_bool); - Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_date_cls); - Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_datetime_cls); - Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_float); - 
Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_str); - Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_time_cls); - Py_VISIT(traverse_module_state->__pyx_n_s_PickleError); - Py_VISIT(traverse_module_state->__pyx_n_s__19); - Py_VISIT(traverse_module_state->__pyx_kp_u__3); - Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); - Py_VISIT(traverse_module_state->__pyx_n_s_bool); - Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); - Py_VISIT(traverse_module_state->__pyx_n_s_date); - Py_VISIT(traverse_module_state->__pyx_n_s_date_cls); - Py_VISIT(traverse_module_state->__pyx_n_s_datetime); - Py_VISIT(traverse_module_state->__pyx_n_s_datetime_cls); - Py_VISIT(traverse_module_state->__pyx_n_s_dict); - Py_VISIT(traverse_module_state->__pyx_n_s_dict_2); - Py_VISIT(traverse_module_state->__pyx_kp_u_disable); - Py_VISIT(traverse_module_state->__pyx_kp_u_enable); - Py_VISIT(traverse_module_state->__pyx_n_u_f); - Py_VISIT(traverse_module_state->__pyx_n_u_format); - Py_VISIT(traverse_module_state->__pyx_n_s_fromisoformat); - Py_VISIT(traverse_module_state->__pyx_kp_u_gc); - Py_VISIT(traverse_module_state->__pyx_n_s_getstate); - Py_VISIT(traverse_module_state->__pyx_n_s_import); - Py_VISIT(traverse_module_state->__pyx_n_s_int_to_boolean); - Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); - Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); - Py_VISIT(traverse_module_state->__pyx_kp_u_isenabled); - Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_engine__processor); - Py_VISIT(traverse_module_state->__pyx_n_s_main); - Py_VISIT(traverse_module_state->__pyx_n_s_name); - Py_VISIT(traverse_module_state->__pyx_n_s_new); - Py_VISIT(traverse_module_state->__pyx_n_s_pickle); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_PickleError); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_checksum); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_result); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_state); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_type); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_unpickle_to_decimal_proces); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce_ex); - Py_VISIT(traverse_module_state->__pyx_n_s_return); - Py_VISIT(traverse_module_state->__pyx_n_s_scale); - Py_VISIT(traverse_module_state->__pyx_n_s_self); - Py_VISIT(traverse_module_state->__pyx_n_s_setstate); - Py_VISIT(traverse_module_state->__pyx_n_s_setstate_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_slots); - Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_engine__processors_cy); - Py_VISIT(traverse_module_state->__pyx_n_s_state); - Py_VISIT(traverse_module_state->__pyx_n_s_str_to_date); - Py_VISIT(traverse_module_state->__pyx_n_s_str_to_datetime); - Py_VISIT(traverse_module_state->__pyx_n_s_str_to_time); - Py_VISIT(traverse_module_state->__pyx_kp_s_stringsource); - Py_VISIT(traverse_module_state->__pyx_n_s_test); - Py_VISIT(traverse_module_state->__pyx_n_s_time); - Py_VISIT(traverse_module_state->__pyx_n_s_time_cls); - Py_VISIT(traverse_module_state->__pyx_n_s_to_decimal_processor_factory); - Py_VISIT(traverse_module_state->__pyx_n_s_to_decimal_processor_factory___r); - Py_VISIT(traverse_module_state->__pyx_n_s_to_decimal_processor_factory___s); - Py_VISIT(traverse_module_state->__pyx_n_s_to_float); - Py_VISIT(traverse_module_state->__pyx_n_s_to_str); - Py_VISIT(traverse_module_state->__pyx_n_s_type); - 
Py_VISIT(traverse_module_state->__pyx_n_u_type); - Py_VISIT(traverse_module_state->__pyx_n_s_typing); - Py_VISIT(traverse_module_state->__pyx_n_s_update); - Py_VISIT(traverse_module_state->__pyx_n_s_use_setstate); - Py_VISIT(traverse_module_state->__pyx_n_s_value); - Py_VISIT(traverse_module_state->__pyx_int_61481721); - Py_VISIT(traverse_module_state->__pyx_int_190945570); - Py_VISIT(traverse_module_state->__pyx_int_248337392); - Py_VISIT(traverse_module_state->__pyx_tuple__2); - Py_VISIT(traverse_module_state->__pyx_tuple__5); - Py_VISIT(traverse_module_state->__pyx_tuple__12); - Py_VISIT(traverse_module_state->__pyx_tuple__13); - Py_VISIT(traverse_module_state->__pyx_tuple__15); - Py_VISIT(traverse_module_state->__pyx_tuple__17); - Py_VISIT(traverse_module_state->__pyx_codeobj__4); - Py_VISIT(traverse_module_state->__pyx_codeobj__6); - Py_VISIT(traverse_module_state->__pyx_codeobj__7); - Py_VISIT(traverse_module_state->__pyx_codeobj__8); - Py_VISIT(traverse_module_state->__pyx_codeobj__9); - Py_VISIT(traverse_module_state->__pyx_codeobj__10); - Py_VISIT(traverse_module_state->__pyx_codeobj__11); - Py_VISIT(traverse_module_state->__pyx_codeobj__14); - Py_VISIT(traverse_module_state->__pyx_codeobj__16); - Py_VISIT(traverse_module_state->__pyx_codeobj__18); - return 0; -} -#endif -/* #### Code section: module_state_defines ### */ -#define __pyx_d __pyx_mstate_global->__pyx_d -#define __pyx_b __pyx_mstate_global->__pyx_b -#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime -#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple -#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes -#define __pyx_empty_unicode __pyx_mstate_global->__pyx_empty_unicode -#ifdef __Pyx_CyFunction_USED -#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType -#endif -#ifdef __Pyx_FusedFunction_USED -#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType -#endif -#ifdef __Pyx_Generator_USED -#define __pyx_GeneratorType __pyx_mstate_global->__pyx_GeneratorType -#endif -#ifdef __Pyx_IterableCoroutine_USED -#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#define __pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory __pyx_mstate_global->__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory -#endif -#define __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory __pyx_mstate_global->__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory -#define __pyx_kp_u_ __pyx_mstate_global->__pyx_kp_u_ -#define __pyx_n_s_Any __pyx_mstate_global->__pyx_n_s_Any -#define __pyx_kp_s_Incompatible_checksums_0x_x_vs_0 __pyx_mstate_global->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0 -#define __pyx_n_s_Optional __pyx_mstate_global->__pyx_n_s_Optional -#define __pyx_kp_s_Optional_bool __pyx_mstate_global->__pyx_kp_s_Optional_bool -#define __pyx_kp_s_Optional_date_cls __pyx_mstate_global->__pyx_kp_s_Optional_date_cls -#define __pyx_kp_s_Optional_datetime_cls __pyx_mstate_global->__pyx_kp_s_Optional_datetime_cls -#define __pyx_kp_s_Optional_float __pyx_mstate_global->__pyx_kp_s_Optional_float -#define __pyx_kp_s_Optional_str 
__pyx_mstate_global->__pyx_kp_s_Optional_str -#define __pyx_kp_s_Optional_time_cls __pyx_mstate_global->__pyx_kp_s_Optional_time_cls -#define __pyx_n_s_PickleError __pyx_mstate_global->__pyx_n_s_PickleError -#define __pyx_n_s__19 __pyx_mstate_global->__pyx_n_s__19 -#define __pyx_kp_u__3 __pyx_mstate_global->__pyx_kp_u__3 -#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines -#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool -#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback -#define __pyx_n_s_date __pyx_mstate_global->__pyx_n_s_date -#define __pyx_n_s_date_cls __pyx_mstate_global->__pyx_n_s_date_cls -#define __pyx_n_s_datetime __pyx_mstate_global->__pyx_n_s_datetime -#define __pyx_n_s_datetime_cls __pyx_mstate_global->__pyx_n_s_datetime_cls -#define __pyx_n_s_dict __pyx_mstate_global->__pyx_n_s_dict -#define __pyx_n_s_dict_2 __pyx_mstate_global->__pyx_n_s_dict_2 -#define __pyx_kp_u_disable __pyx_mstate_global->__pyx_kp_u_disable -#define __pyx_kp_u_enable __pyx_mstate_global->__pyx_kp_u_enable -#define __pyx_n_u_f __pyx_mstate_global->__pyx_n_u_f -#define __pyx_n_u_format __pyx_mstate_global->__pyx_n_u_format -#define __pyx_n_s_fromisoformat __pyx_mstate_global->__pyx_n_s_fromisoformat -#define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc -#define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate -#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import -#define __pyx_n_s_int_to_boolean __pyx_mstate_global->__pyx_n_s_int_to_boolean -#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled -#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine -#define __pyx_kp_u_isenabled __pyx_mstate_global->__pyx_kp_u_isenabled -#define __pyx_kp_s_lib_sqlalchemy_engine__processor __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_engine__processor -#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main -#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name -#define __pyx_n_s_new __pyx_mstate_global->__pyx_n_s_new -#define __pyx_n_s_pickle __pyx_mstate_global->__pyx_n_s_pickle -#define __pyx_n_s_pyx_PickleError __pyx_mstate_global->__pyx_n_s_pyx_PickleError -#define __pyx_n_s_pyx_checksum __pyx_mstate_global->__pyx_n_s_pyx_checksum -#define __pyx_n_s_pyx_result __pyx_mstate_global->__pyx_n_s_pyx_result -#define __pyx_n_s_pyx_state __pyx_mstate_global->__pyx_n_s_pyx_state -#define __pyx_n_s_pyx_type __pyx_mstate_global->__pyx_n_s_pyx_type -#define __pyx_n_s_pyx_unpickle_to_decimal_proces __pyx_mstate_global->__pyx_n_s_pyx_unpickle_to_decimal_proces -#define __pyx_n_s_reduce __pyx_mstate_global->__pyx_n_s_reduce -#define __pyx_n_s_reduce_cython __pyx_mstate_global->__pyx_n_s_reduce_cython -#define __pyx_n_s_reduce_ex __pyx_mstate_global->__pyx_n_s_reduce_ex -#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return -#define __pyx_n_s_scale __pyx_mstate_global->__pyx_n_s_scale -#define __pyx_n_s_self __pyx_mstate_global->__pyx_n_s_self -#define __pyx_n_s_setstate __pyx_mstate_global->__pyx_n_s_setstate -#define __pyx_n_s_setstate_cython __pyx_mstate_global->__pyx_n_s_setstate_cython -#define __pyx_n_s_slots __pyx_mstate_global->__pyx_n_s_slots -#define __pyx_n_s_sqlalchemy_engine__processors_cy __pyx_mstate_global->__pyx_n_s_sqlalchemy_engine__processors_cy -#define __pyx_n_s_state __pyx_mstate_global->__pyx_n_s_state -#define __pyx_n_s_str_to_date __pyx_mstate_global->__pyx_n_s_str_to_date -#define __pyx_n_s_str_to_datetime 
__pyx_mstate_global->__pyx_n_s_str_to_datetime -#define __pyx_n_s_str_to_time __pyx_mstate_global->__pyx_n_s_str_to_time -#define __pyx_kp_s_stringsource __pyx_mstate_global->__pyx_kp_s_stringsource -#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test -#define __pyx_n_s_time __pyx_mstate_global->__pyx_n_s_time -#define __pyx_n_s_time_cls __pyx_mstate_global->__pyx_n_s_time_cls -#define __pyx_n_s_to_decimal_processor_factory __pyx_mstate_global->__pyx_n_s_to_decimal_processor_factory -#define __pyx_n_s_to_decimal_processor_factory___r __pyx_mstate_global->__pyx_n_s_to_decimal_processor_factory___r -#define __pyx_n_s_to_decimal_processor_factory___s __pyx_mstate_global->__pyx_n_s_to_decimal_processor_factory___s -#define __pyx_n_s_to_float __pyx_mstate_global->__pyx_n_s_to_float -#define __pyx_n_s_to_str __pyx_mstate_global->__pyx_n_s_to_str -#define __pyx_n_s_type __pyx_mstate_global->__pyx_n_s_type -#define __pyx_n_u_type __pyx_mstate_global->__pyx_n_u_type -#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing -#define __pyx_n_s_update __pyx_mstate_global->__pyx_n_s_update -#define __pyx_n_s_use_setstate __pyx_mstate_global->__pyx_n_s_use_setstate -#define __pyx_n_s_value __pyx_mstate_global->__pyx_n_s_value -#define __pyx_int_61481721 __pyx_mstate_global->__pyx_int_61481721 -#define __pyx_int_190945570 __pyx_mstate_global->__pyx_int_190945570 -#define __pyx_int_248337392 __pyx_mstate_global->__pyx_int_248337392 -#define __pyx_tuple__2 __pyx_mstate_global->__pyx_tuple__2 -#define __pyx_tuple__5 __pyx_mstate_global->__pyx_tuple__5 -#define __pyx_tuple__12 __pyx_mstate_global->__pyx_tuple__12 -#define __pyx_tuple__13 __pyx_mstate_global->__pyx_tuple__13 -#define __pyx_tuple__15 __pyx_mstate_global->__pyx_tuple__15 -#define __pyx_tuple__17 __pyx_mstate_global->__pyx_tuple__17 -#define __pyx_codeobj__4 __pyx_mstate_global->__pyx_codeobj__4 -#define __pyx_codeobj__6 __pyx_mstate_global->__pyx_codeobj__6 -#define __pyx_codeobj__7 __pyx_mstate_global->__pyx_codeobj__7 -#define __pyx_codeobj__8 __pyx_mstate_global->__pyx_codeobj__8 -#define __pyx_codeobj__9 __pyx_mstate_global->__pyx_codeobj__9 -#define __pyx_codeobj__10 __pyx_mstate_global->__pyx_codeobj__10 -#define __pyx_codeobj__11 __pyx_mstate_global->__pyx_codeobj__11 -#define __pyx_codeobj__14 __pyx_mstate_global->__pyx_codeobj__14 -#define __pyx_codeobj__16 __pyx_mstate_global->__pyx_codeobj__16 -#define __pyx_codeobj__18 __pyx_mstate_global->__pyx_codeobj__18 -/* #### Code section: module_code ### */ - -/* "sqlalchemy/engine/_processors_cy.py":27 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -PyDoc_STRVAR(__pyx_doc_10sqlalchemy_6engine_14_processors_cy__is_compiled, "Utility function to indicate if this module is compiled or not."); -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_6engine_14_processors_cy__is_compiled}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - 
__Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy__is_compiled(__pyx_self); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled", 1); - - /* "sqlalchemy/engine/_processors_cy.py":29 - * def _is_compiled() -> bool: - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(Py_True); - __pyx_r = Py_True; - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":27 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_processors_cy.py":35 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def int_to_boolean(value: Any) -> Optional[bool]: - * if value is None: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_3int_to_boolean(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_3int_to_boolean = {"int_to_boolean", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_3int_to_boolean, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_3int_to_boolean(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_value = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("int_to_boolean (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 35, __pyx_L3_error) - else goto 
__pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "int_to_boolean") < 0)) __PYX_ERR(0, 35, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_value = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("int_to_boolean", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 35, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.int_to_boolean", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_2int_to_boolean(__pyx_self, __pyx_v_value); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_2int_to_boolean(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("int_to_boolean", 1); - - /* "sqlalchemy/engine/_processors_cy.py":37 - * @cython.annotation_typing(False) - * def int_to_boolean(value: Any) -> Optional[bool]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return True if value else False - */ - __pyx_t_1 = (__pyx_v_value == Py_None); - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_processors_cy.py":38 - * def int_to_boolean(value: Any) -> Optional[bool]: - * if value is None: - * return None # <<<<<<<<<<<<<< - * return True if value else False - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":37 - * @cython.annotation_typing(False) - * def int_to_boolean(value: Any) -> Optional[bool]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return True if value else False - */ - } - - /* "sqlalchemy/engine/_processors_cy.py":39 - * if value is None: - * return None - * return True if value else False # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(0, 39, __pyx_L1_error) - if (__pyx_t_1) { - __Pyx_INCREF(Py_True); - __pyx_t_2 = Py_True; - } else { - __Pyx_INCREF(Py_False); - __pyx_t_2 = Py_False; - } - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":35 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def int_to_boolean(value: Any) -> Optional[bool]: - * if value is None: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.int_to_boolean", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - 
__pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_processors_cy.py":42 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def to_str(value: Any) -> Optional[str]: - * if value is None: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_5to_str(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_5to_str = {"to_str", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_5to_str, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_5to_str(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_value = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("to_str (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 42, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "to_str") < 0)) __PYX_ERR(0, 42, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_value = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("to_str", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 42, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_str", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_4to_str(__pyx_self, __pyx_v_value); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - 
for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_4to_str(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("to_str", 1); - - /* "sqlalchemy/engine/_processors_cy.py":44 - * @cython.annotation_typing(False) - * def to_str(value: Any) -> Optional[str]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return str(value) - */ - __pyx_t_1 = (__pyx_v_value == Py_None); - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_processors_cy.py":45 - * def to_str(value: Any) -> Optional[str]: - * if value is None: - * return None # <<<<<<<<<<<<<< - * return str(value) - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":44 - * @cython.annotation_typing(False) - * def to_str(value: Any) -> Optional[str]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return str(value) - */ - } - - /* "sqlalchemy/engine/_processors_cy.py":46 - * if value is None: - * return None - * return str(value) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyObject_Unicode(__pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 46, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":42 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def to_str(value: Any) -> Optional[str]: - * if value is None: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_str", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_processors_cy.py":49 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def to_float(value: Any) -> Optional[float]: - * if value is None: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_7to_float(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_7to_float = {"to_float", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_7to_float, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_7to_float(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_value = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("to_float (wrapper)", 0); - #if 
!CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 49, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "to_float") < 0)) __PYX_ERR(0, 49, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_value = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("to_float", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 49, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_float", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_6to_float(__pyx_self, __pyx_v_value); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_6to_float(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("to_float", 1); - - /* "sqlalchemy/engine/_processors_cy.py":51 - * @cython.annotation_typing(False) - * def to_float(value: Any) -> Optional[float]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return float(value) - */ - __pyx_t_1 = (__pyx_v_value == Py_None); - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_processors_cy.py":52 - * def to_float(value: Any) -> Optional[float]: - * if value is None: - * return None # <<<<<<<<<<<<<< - * return float(value) - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":51 - * @cython.annotation_typing(False) - * def to_float(value: Any) -> Optional[float]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return float(value) - */ - } - - /* "sqlalchemy/engine/_processors_cy.py":53 - * if value is None: - * return None - * 
return float(value) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyNumber_Float(__pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 53, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":49 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def to_float(value: Any) -> Optional[float]: - * if value is None: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_float", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_processors_cy.py":56 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: - * if value is None: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_9str_to_datetime(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_9str_to_datetime = {"str_to_datetime", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_9str_to_datetime, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_9str_to_datetime(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_value = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("str_to_datetime (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 56, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "str_to_datetime") < 0)) __PYX_ERR(0, 56, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_value = values[0]; - } - 
goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("str_to_datetime", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 56, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_datetime", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_8str_to_datetime(__pyx_self, __pyx_v_value); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_8str_to_datetime(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("str_to_datetime", 1); - - /* "sqlalchemy/engine/_processors_cy.py":58 - * @cython.annotation_typing(False) - * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return datetime_cls.fromisoformat(value) - */ - __pyx_t_1 = (__pyx_v_value == Py_None); - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_processors_cy.py":59 - * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: - * if value is None: - * return None # <<<<<<<<<<<<<< - * return datetime_cls.fromisoformat(value) - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":58 - * @cython.annotation_typing(False) - * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return datetime_cls.fromisoformat(value) - */ - } - - /* "sqlalchemy/engine/_processors_cy.py":60 - * if value is None: - * return None - * return datetime_cls.fromisoformat(value) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_datetime_cls); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 60, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_fromisoformat); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 60, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_value}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 60, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - } - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":56 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: - * if value is None: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_datetime", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_processors_cy.py":63 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_time(value: Optional[str]) -> Optional[time_cls]: - * if value is None: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_11str_to_time(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_11str_to_time = {"str_to_time", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_11str_to_time, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_11str_to_time(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_value = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("str_to_time (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 63, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "str_to_time") < 0)) __PYX_ERR(0, 63, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_value = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - 
__Pyx_RaiseArgtupleInvalid("str_to_time", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 63, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_time", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_10str_to_time(__pyx_self, __pyx_v_value); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_10str_to_time(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("str_to_time", 1); - - /* "sqlalchemy/engine/_processors_cy.py":65 - * @cython.annotation_typing(False) - * def str_to_time(value: Optional[str]) -> Optional[time_cls]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return time_cls.fromisoformat(value) - */ - __pyx_t_1 = (__pyx_v_value == Py_None); - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_processors_cy.py":66 - * def str_to_time(value: Optional[str]) -> Optional[time_cls]: - * if value is None: - * return None # <<<<<<<<<<<<<< - * return time_cls.fromisoformat(value) - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":65 - * @cython.annotation_typing(False) - * def str_to_time(value: Optional[str]) -> Optional[time_cls]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return time_cls.fromisoformat(value) - */ - } - - /* "sqlalchemy/engine/_processors_cy.py":67 - * if value is None: - * return None - * return time_cls.fromisoformat(value) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_time_cls); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_fromisoformat); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_value}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - } - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto 
__pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":63 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_time(value: Optional[str]) -> Optional[time_cls]: - * if value is None: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_time", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_processors_cy.py":70 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_date(value: Optional[str]) -> Optional[date_cls]: - * if value is None: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_13str_to_date(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_13str_to_date = {"str_to_date", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_13str_to_date, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_13str_to_date(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_value = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("str_to_date (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 70, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "str_to_date") < 0)) __PYX_ERR(0, 70, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_value = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("str_to_date", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 70, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - 
__pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_date", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_12str_to_date(__pyx_self, __pyx_v_value); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_12str_to_date(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("str_to_date", 1); - - /* "sqlalchemy/engine/_processors_cy.py":72 - * @cython.annotation_typing(False) - * def str_to_date(value: Optional[str]) -> Optional[date_cls]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return date_cls.fromisoformat(value) - */ - __pyx_t_1 = (__pyx_v_value == Py_None); - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_processors_cy.py":73 - * def str_to_date(value: Optional[str]) -> Optional[date_cls]: - * if value is None: - * return None # <<<<<<<<<<<<<< - * return date_cls.fromisoformat(value) - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":72 - * @cython.annotation_typing(False) - * def str_to_date(value: Optional[str]) -> Optional[date_cls]: - * if value is None: # <<<<<<<<<<<<<< - * return None - * return date_cls.fromisoformat(value) - */ - } - - /* "sqlalchemy/engine/_processors_cy.py":74 - * if value is None: - * return None - * return date_cls.fromisoformat(value) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_date_cls); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_fromisoformat); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_value}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - } - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":70 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_date(value: Optional[str]) -> 
Optional[date_cls]: - * if value is None: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.str_to_date", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_processors_cy.py":84 - * __slots__ = ("type_", "format_") - * - * def __init__(self, type_: type, scale: int): # <<<<<<<<<<<<<< - * self.type_ = type_ - * self.format_ = f"%.{scale}f" - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyTypeObject *__pyx_v_type_ = 0; - PyObject *__pyx_v_scale = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[2] = {0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return -1; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_type,&__pyx_n_s_scale,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 2: values[1] = __Pyx_Arg_VARARGS(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_VARARGS(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_type)) != 0)) { - (void)__Pyx_Arg_NewRef_VARARGS(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 84, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_scale)) != 0)) { - (void)__Pyx_Arg_NewRef_VARARGS(values[1]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 84, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 2, 2, 1); __PYX_ERR(0, 84, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 84, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 2)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); - values[1] = __Pyx_Arg_VARARGS(__pyx_args, 1); - } - __pyx_v_type_ = ((PyTypeObject*)values[0]); - __pyx_v_scale = ((PyObject*)values[1]); - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 2, 2, __pyx_nargs); __PYX_ERR(0, 84, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < 
(Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_type_), (&PyType_Type), 0, "type_", 1))) __PYX_ERR(0, 84, __pyx_L1_error) - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_scale), (&PyInt_Type), 0, "scale", 1))) __PYX_ERR(0, 84, __pyx_L1_error) - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory___init__(((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)__pyx_v_self), __pyx_v_type_, __pyx_v_scale); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = -1; - __pyx_L0:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory___init__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyTypeObject *__pyx_v_type_, PyObject *__pyx_v_scale) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - Py_UCS4 __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 1); - - /* "sqlalchemy/engine/_processors_cy.py":85 - * - * def __init__(self, type_: type, scale: int): - * self.type_ = type_ # <<<<<<<<<<<<<< - * self.format_ = f"%.{scale}f" - * - */ - __Pyx_INCREF((PyObject *)__pyx_v_type_); - __Pyx_GIVEREF((PyObject *)__pyx_v_type_); - __Pyx_GOTREF((PyObject *)__pyx_v_self->type_); - __Pyx_DECREF((PyObject *)__pyx_v_self->type_); - __pyx_v_self->type_ = __pyx_v_type_; - - /* "sqlalchemy/engine/_processors_cy.py":86 - * def __init__(self, type_: type, scale: int): - * self.type_ = type_ - * self.format_ = f"%.{scale}f" # <<<<<<<<<<<<<< - * - * def __call__(self, value: Optional[Any]) -> object: - */ - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = 0; - __pyx_t_3 = 127; - __Pyx_INCREF(__pyx_kp_u_); - __pyx_t_2 += 2; - __Pyx_GIVEREF(__pyx_kp_u_); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_kp_u_); - __pyx_t_4 = __Pyx_PyObject_FormatSimple(__pyx_v_scale, __pyx_empty_unicode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; - __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_4); - __pyx_t_4 = 0; - __Pyx_INCREF(__pyx_n_u_f); - __pyx_t_2 += 1; - __Pyx_GIVEREF(__pyx_n_u_f); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_n_u_f); - __pyx_t_4 = __Pyx_PyUnicode_Join(__pyx_t_1, 3, __pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_GIVEREF(__pyx_t_4); - __Pyx_GOTREF(__pyx_v_self->format_); - __Pyx_DECREF(__pyx_v_self->format_); - __pyx_v_self->format_ = ((PyObject*)__pyx_t_4); - __pyx_t_4 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":84 - * __slots__ = ("type_", "format_") - * - * def __init__(self, type_: type, scale: int): # <<<<<<<<<<<<<< - * self.type_ = type_ - * self.format_ = f"%.{scale}f" - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_processors_cy.py":88 - * self.format_ = f"%.{scale}f" - * - * def __call__(self, value: Optional[Any]) -> object: # <<<<<<<<<<<<<< - * if value is None: - * return None - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_value = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__call__ (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_value,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_VARARGS(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { - (void)__Pyx_Arg_NewRef_VARARGS(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 88, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__call__") < 0)) __PYX_ERR(0, 88, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); - } - __pyx_v_value = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; 
- __Pyx_RaiseArgtupleInvalid("__call__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 88, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_2__call__(((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)__pyx_v_self), __pyx_v_value); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_2__call__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__call__", 1); - - /* "sqlalchemy/engine/_processors_cy.py":89 - * - * def __call__(self, value: Optional[Any]) -> object: - * if value is None: # <<<<<<<<<<<<<< - * return None - * else: - */ - __pyx_t_1 = (__pyx_v_value == Py_None); - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_processors_cy.py":90 - * def __call__(self, value: Optional[Any]) -> object: - * if value is None: - * return None # <<<<<<<<<<<<<< - * else: - * return self.type_(self.format_ % value) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - - /* "sqlalchemy/engine/_processors_cy.py":89 - * - * def __call__(self, value: Optional[Any]) -> object: - * if value is None: # <<<<<<<<<<<<<< - * return None - * else: - */ - } - - /* "sqlalchemy/engine/_processors_cy.py":92 - * return None - * else: - * return self.type_(self.format_ % value) # <<<<<<<<<<<<<< - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyUnicode_FormatSafe(__pyx_v_self->format_, __pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 92, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_v_self->type_), __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 92, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - } - - /* "sqlalchemy/engine/_processors_cy.py":88 - * self.format_ = f"%.{scale}f" - * - * def __call__(self, value: Optional[Any]) -> object: # <<<<<<<<<<<<<< - * if value is None: - * return None - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * 
cdef tuple state - * cdef object _dict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__ = {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_4__reduce_cython__(((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_4__reduce_cython__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self) { - PyObject *__pyx_v_state = 0; - PyObject *__pyx_v__dict = 0; - int __pyx_v_use_setstate; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 1); - - /* "(tree fragment)":5 - * cdef object _dict - * cdef bint use_setstate - * state = (self.format_, self.type_) # <<<<<<<<<<<<<< - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - */ - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v_self->format_); - __Pyx_GIVEREF(__pyx_v_self->format_); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->format_)) __PYX_ERR(1, 5, __pyx_L1_error); - __Pyx_INCREF((PyObject *)__pyx_v_self->type_); - __Pyx_GIVEREF((PyObject *)__pyx_v_self->type_); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_self->type_))) __PYX_ERR(1, 5, __pyx_L1_error); - __pyx_v_state = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "(tree 
fragment)":6 - * cdef bint use_setstate - * state = (self.format_, self.type_) - * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< - * if _dict is not None: - * state += (_dict,) - */ - __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v__dict = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":7 - * state = (self.format_, self.type_) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - __pyx_t_2 = (__pyx_v__dict != Py_None); - if (__pyx_t_2) { - - /* "(tree fragment)":8 - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - * state += (_dict,) # <<<<<<<<<<<<<< - * use_setstate = True - * else: - */ - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v__dict); - __Pyx_GIVEREF(__pyx_v__dict); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict)) __PYX_ERR(1, 8, __pyx_L1_error); - __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3)); - __pyx_t_3 = 0; - - /* "(tree fragment)":9 - * if _dict is not None: - * state += (_dict,) - * use_setstate = True # <<<<<<<<<<<<<< - * else: - * use_setstate = self.format_ is not None or self.type_ is not None - */ - __pyx_v_use_setstate = 1; - - /* "(tree fragment)":7 - * state = (self.format_, self.type_) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - goto __pyx_L3; - } - - /* "(tree fragment)":11 - * use_setstate = True - * else: - * use_setstate = self.format_ is not None or self.type_ is not None # <<<<<<<<<<<<<< - * if use_setstate: - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, None), state - */ - /*else*/ { - __pyx_t_4 = (__pyx_v_self->format_ != ((PyObject*)Py_None)); - if (!__pyx_t_4) { - } else { - __pyx_t_2 = __pyx_t_4; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_4 = (__pyx_v_self->type_ != ((PyTypeObject*)Py_None)); - __pyx_t_2 = __pyx_t_4; - __pyx_L4_bool_binop_done:; - __pyx_v_use_setstate = __pyx_t_2; - } - __pyx_L3:; - - /* "(tree fragment)":12 - * else: - * use_setstate = self.format_ is not None or self.type_ is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, None), state - * else: - */ - if (__pyx_v_use_setstate) { - - /* "(tree fragment)":13 - * use_setstate = self.format_ is not None or self.type_ is not None - * if use_setstate: - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, None), state # <<<<<<<<<<<<<< - * else: - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_to_decimal_proces); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, 
((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(__pyx_int_61481721); - __Pyx_GIVEREF(__pyx_int_61481721); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_61481721)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None)) __PYX_ERR(1, 13, __pyx_L1_error); - __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_GIVEREF(__pyx_t_3); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state)) __PYX_ERR(1, 13, __pyx_L1_error); - __pyx_t_3 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_5; - __pyx_t_5 = 0; - goto __pyx_L0; - - /* "(tree fragment)":12 - * else: - * use_setstate = self.format_ is not None or self.type_ is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, None), state - * else: - */ - } - - /* "(tree fragment)":15 - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, None), state - * else: - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_pyx_unpickle_to_decimal_proces); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_INCREF(__pyx_int_61481721); - __Pyx_GIVEREF(__pyx_int_61481721); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_61481721)) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state)) __PYX_ERR(1, 15, __pyx_L1_error); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_5); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_5)) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error); - __pyx_t_5 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - } - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_state); - __Pyx_XDECREF(__pyx_v__dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; 
-} - -/* "(tree fragment)":16 - * else: - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v___pyx_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 16, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(1, 16, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v___pyx_state = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(1, 16, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - 
__Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_6__setstate_cython__(((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)__pyx_v_self), __pyx_v___pyx_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_6__setstate_cython__(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 1); - - /* "(tree fragment)":17 - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) # <<<<<<<<<<<<<< - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 17, __pyx_L1_error) - __pyx_t_1 = __pyx_f_10sqlalchemy_6engine_14_processors_cy___pyx_unpickle_to_decimal_processor_factory__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.to_decimal_processor_factory.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __pyx_unpickle_to_decimal_processor_factory(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_14_processors_cy_15__pyx_unpickle_to_decimal_processor_factory(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_14_processors_cy_15__pyx_unpickle_to_decimal_processor_factory = {"__pyx_unpickle_to_decimal_processor_factory", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_15__pyx_unpickle_to_decimal_processor_factory, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject 
*__pyx_pw_10sqlalchemy_6engine_14_processors_cy_15__pyx_unpickle_to_decimal_processor_factory(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v___pyx_type = 0; - long __pyx_v___pyx_checksum; - PyObject *__pyx_v___pyx_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[3] = {0,0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__pyx_unpickle_to_decimal_processor_factory (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_type)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_checksum)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_to_decimal_processor_factory", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_to_decimal_processor_factory", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__pyx_unpickle_to_decimal_processor_factory") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 3)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - } - __pyx_v___pyx_type = values[0]; - __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_v___pyx_state = values[2]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - 
__Pyx_RaiseArgtupleInvalid("__pyx_unpickle_to_decimal_processor_factory", 1, 3, 3, __pyx_nargs); __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.__pyx_unpickle_to_decimal_processor_factory", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_14_processors_cy_14__pyx_unpickle_to_decimal_processor_factory(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_14_processors_cy_14__pyx_unpickle_to_decimal_processor_factory(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_v___pyx_PickleError = 0; - PyObject *__pyx_v___pyx_result = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_to_decimal_processor_factory", 1); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0x3aa22f9, 0xecd53f0, 0xb619922): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum - */ - __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__2, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_2) { - - /* "(tree fragment)":5 - * cdef object __pyx_result - * if __pyx_checksum not in (0x3aa22f9, 0xecd53f0, 0xb619922): - * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum - * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) - */ - __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_s_PickleError); - __Pyx_GIVEREF(__pyx_n_s_PickleError); - if (__Pyx_PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError)) __PYX_ERR(1, 5, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_t_1); - __pyx_v___pyx_PickleError = 
__pyx_t_1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":6 - * if __pyx_checksum not in (0x3aa22f9, 0xecd53f0, 0xb619922): - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum # <<<<<<<<<<<<<< - * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) - * if __pyx_state is not None: - */ - __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_v___pyx_PickleError, __pyx_t_1, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(1, 6, __pyx_L1_error) - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0x3aa22f9, 0xecd53f0, 0xb619922): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum - */ - } - - /* "(tree fragment)":7 - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum - * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) # <<<<<<<<<<<<<< - * if __pyx_state is not None: - * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory), __pyx_n_s_new); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v___pyx_type}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_v___pyx_result = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum - * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - __pyx_t_2 = (__pyx_v___pyx_state != Py_None); - if (__pyx_t_2) { - - /* "(tree fragment)":9 - * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) - * if __pyx_state is not None: - * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< - * return __pyx_result - * cdef 
__pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 9, __pyx_L1_error) - __pyx_t_1 = __pyx_f_10sqlalchemy_6engine_14_processors_cy___pyx_unpickle_to_decimal_processor_factory__set_state(((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum - * __pyx_result = to_decimal_processor_factory.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - } - - /* "(tree fragment)":10 - * if __pyx_state is not None: - * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) - * return __pyx_result # <<<<<<<<<<<<<< - * cdef __pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): - * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v___pyx_result); - __pyx_r = __pyx_v___pyx_result; - goto __pyx_L0; - - /* "(tree fragment)":1 - * def __pyx_unpickle_to_decimal_processor_factory(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.__pyx_unpickle_to_decimal_processor_factory", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v___pyx_PickleError); - __Pyx_XDECREF(__pyx_v___pyx_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":11 - * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - */ - -static PyObject *__pyx_f_10sqlalchemy_6engine_14_processors_cy___pyx_unpickle_to_decimal_processor_factory__set_state(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - Py_ssize_t __pyx_t_3; - int __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - unsigned int __pyx_t_8; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_to_decimal_processor_factory__set_state", 1); - - /* "(tree fragment)":12 - * return __pyx_result - * cdef 
__pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): - * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] # <<<<<<<<<<<<<< - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[2]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_t_1))) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->format_); - __Pyx_DECREF(__pyx_v___pyx_result->format_); - __pyx_v___pyx_result->format_ = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(PyType_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || __Pyx_RaiseUnexpectedTypeError("type", __pyx_t_1))) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF((PyObject *)__pyx_v___pyx_result->type_); - __Pyx_DECREF((PyObject *)__pyx_v___pyx_result->type_); - __pyx_v___pyx_result->type_ = ((PyTypeObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): - * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[2]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(1, 13, __pyx_L1_error) - } - __pyx_t_3 = __Pyx_PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_4 = (__pyx_t_3 > 2); - if (__pyx_t_4) { - } else { - __pyx_t_2 = __pyx_t_4; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_2 = __pyx_t_4; - __pyx_L4_bool_binop_done:; - if (__pyx_t_2) { - - /* "(tree fragment)":14 - * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[2]) # <<<<<<<<<<<<<< - */ - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not 
subscriptable"); - __PYX_ERR(1, 14, __pyx_L1_error) - } - __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_7 = NULL; - __pyx_t_8 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - __pyx_t_8 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_5}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): - * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[2]) - */ - } - - /* "(tree fragment)":11 - * __pyx_unpickle_to_decimal_processor_factory__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_to_decimal_processor_factory__set_state(to_decimal_processor_factory __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result.format_ = __pyx_state[0]; __pyx_result.type_ = __pyx_state[1] - * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("sqlalchemy.engine._processors_cy.__pyx_unpickle_to_decimal_processor_factory__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_tp_new_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *p; - PyObject *o; - #if CYTHON_COMPILING_IN_LIMITED_API - allocfunc alloc_func = (allocfunc)PyType_GetSlot(t, Py_tp_alloc); - o = alloc_func(t, 0); - #else - if (likely(!__Pyx_PyType_HasFeature(t, Py_TPFLAGS_IS_ABSTRACT))) { - o = (*t->tp_alloc)(t, 0); - } else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - #endif - p = ((struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)o); - p->type_ = ((PyTypeObject*)Py_None); Py_INCREF(Py_None); - p->format_ = ((PyObject*)Py_None); Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory(PyObject *o) { - struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *p = (struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely((PY_VERSION_HEX >= 0x03080000 
|| __Pyx_PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE)) && __Pyx_PyObject_GetSlot(o, tp_finalize, destructor)) && !__Pyx_PyObject_GC_IsFinalized(o)) { - if (__Pyx_PyObject_GetSlot(o, tp_dealloc, destructor) == __pyx_tp_dealloc_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - } - #endif - PyObject_GC_UnTrack(o); - Py_CLEAR(p->type_); - Py_CLEAR(p->format_); - #if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY - (*Py_TYPE(o)->tp_free)(o); - #else - { - freefunc tp_free = (freefunc)PyType_GetSlot(Py_TYPE(o), Py_tp_free); - if (tp_free) tp_free(o); - } - #endif -} - -static int __pyx_tp_traverse_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *p = (struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)o; - if (p->type_) { - e = (*v)(((PyObject *)p->type_), a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *p = (struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory *)o; - tmp = ((PyObject*)p->type_); - p->type_ = ((PyTypeObject*)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyMethodDef __pyx_methods_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory[] = { - {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {0, 0, 0, 0} -}; -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory_slots[] = { - {Py_tp_dealloc, (void *)__pyx_tp_dealloc_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory}, - {Py_tp_call, (void *)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_3__call__}, - {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory}, - {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory}, - {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory}, - {Py_tp_init, (void *)__pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_1__init__}, - {Py_tp_new, (void *)__pyx_tp_new_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory}, - {0, 0}, -}; -static PyType_Spec __pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory_spec = { - "sqlalchemy.engine._processors_cy.to_decimal_processor_factory", - sizeof(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory), - 0, - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, - __pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory_slots, -}; -#else - -static PyTypeObject 
__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory = { - PyVarObject_HEAD_INIT(0, 0) - "sqlalchemy.engine._processors_cy.""to_decimal_processor_factory", /*tp_name*/ - sizeof(struct __pyx_obj_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - __pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_3__call__, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, /*tp_traverse*/ - __pyx_tp_clear_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - #if !CYTHON_USE_TYPE_SPECS - 0, /*tp_dictoffset*/ - #endif - __pyx_pw_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_1__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - #if CYTHON_USE_TP_FINALIZE - 0, /*tp_finalize*/ - #else - NULL, /*tp_finalize*/ - #endif - #endif - #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, /*tp_vectorcall*/ - #endif - #if __PYX_NEED_TP_PRINT_SLOT == 1 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030C0000 - 0, /*tp_watched*/ - #endif - #if PY_VERSION_HEX >= 0x030d00A4 - 0, /*tp_versions_used*/ - #endif - #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, /*tp_pypy_flags*/ - #endif -}; -#endif - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif -/* #### Code section: pystring_table ### */ - -static int __Pyx_CreateStringTabAndInitStrings(void) { - __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, - {&__pyx_n_s_Any, __pyx_k_Any, sizeof(__pyx_k_Any), 0, 0, 1, 1}, - {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0}, - {&__pyx_n_s_Optional, __pyx_k_Optional, sizeof(__pyx_k_Optional), 0, 0, 1, 
1}, - {&__pyx_kp_s_Optional_bool, __pyx_k_Optional_bool, sizeof(__pyx_k_Optional_bool), 0, 0, 1, 0}, - {&__pyx_kp_s_Optional_date_cls, __pyx_k_Optional_date_cls, sizeof(__pyx_k_Optional_date_cls), 0, 0, 1, 0}, - {&__pyx_kp_s_Optional_datetime_cls, __pyx_k_Optional_datetime_cls, sizeof(__pyx_k_Optional_datetime_cls), 0, 0, 1, 0}, - {&__pyx_kp_s_Optional_float, __pyx_k_Optional_float, sizeof(__pyx_k_Optional_float), 0, 0, 1, 0}, - {&__pyx_kp_s_Optional_str, __pyx_k_Optional_str, sizeof(__pyx_k_Optional_str), 0, 0, 1, 0}, - {&__pyx_kp_s_Optional_time_cls, __pyx_k_Optional_time_cls, sizeof(__pyx_k_Optional_time_cls), 0, 0, 1, 0}, - {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s__19, __pyx_k__19, sizeof(__pyx_k__19), 0, 0, 1, 1}, - {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0}, - {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, - {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_date, __pyx_k_date, sizeof(__pyx_k_date), 0, 0, 1, 1}, - {&__pyx_n_s_date_cls, __pyx_k_date_cls, sizeof(__pyx_k_date_cls), 0, 0, 1, 1}, - {&__pyx_n_s_datetime, __pyx_k_datetime, sizeof(__pyx_k_datetime), 0, 0, 1, 1}, - {&__pyx_n_s_datetime_cls, __pyx_k_datetime_cls, sizeof(__pyx_k_datetime_cls), 0, 0, 1, 1}, - {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, - {&__pyx_n_s_dict_2, __pyx_k_dict_2, sizeof(__pyx_k_dict_2), 0, 0, 1, 1}, - {&__pyx_kp_u_disable, __pyx_k_disable, sizeof(__pyx_k_disable), 0, 1, 0, 0}, - {&__pyx_kp_u_enable, __pyx_k_enable, sizeof(__pyx_k_enable), 0, 1, 0, 0}, - {&__pyx_n_u_f, __pyx_k_f, sizeof(__pyx_k_f), 0, 1, 0, 1}, - {&__pyx_n_u_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 1, 0, 1}, - {&__pyx_n_s_fromisoformat, __pyx_k_fromisoformat, sizeof(__pyx_k_fromisoformat), 0, 0, 1, 1}, - {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, - {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_n_s_int_to_boolean, __pyx_k_int_to_boolean, sizeof(__pyx_k_int_to_boolean), 0, 0, 1, 1}, - {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, - {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, - {&__pyx_kp_u_isenabled, __pyx_k_isenabled, sizeof(__pyx_k_isenabled), 0, 1, 0, 0}, - {&__pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_k_lib_sqlalchemy_engine__processor, sizeof(__pyx_k_lib_sqlalchemy_engine__processor), 0, 0, 1, 0}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, - {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_unpickle_to_decimal_proces, __pyx_k_pyx_unpickle_to_decimal_proces, 
sizeof(__pyx_k_pyx_unpickle_to_decimal_proces), 0, 0, 1, 1}, - {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, - {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, - {&__pyx_n_s_scale, __pyx_k_scale, sizeof(__pyx_k_scale), 0, 0, 1, 1}, - {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, - {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, - {&__pyx_n_s_slots, __pyx_k_slots, sizeof(__pyx_k_slots), 0, 0, 1, 1}, - {&__pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_k_sqlalchemy_engine__processors_cy, sizeof(__pyx_k_sqlalchemy_engine__processors_cy), 0, 0, 1, 1}, - {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, - {&__pyx_n_s_str_to_date, __pyx_k_str_to_date, sizeof(__pyx_k_str_to_date), 0, 0, 1, 1}, - {&__pyx_n_s_str_to_datetime, __pyx_k_str_to_datetime, sizeof(__pyx_k_str_to_datetime), 0, 0, 1, 1}, - {&__pyx_n_s_str_to_time, __pyx_k_str_to_time, sizeof(__pyx_k_str_to_time), 0, 0, 1, 1}, - {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_n_s_time, __pyx_k_time, sizeof(__pyx_k_time), 0, 0, 1, 1}, - {&__pyx_n_s_time_cls, __pyx_k_time_cls, sizeof(__pyx_k_time_cls), 0, 0, 1, 1}, - {&__pyx_n_s_to_decimal_processor_factory, __pyx_k_to_decimal_processor_factory, sizeof(__pyx_k_to_decimal_processor_factory), 0, 0, 1, 1}, - {&__pyx_n_s_to_decimal_processor_factory___r, __pyx_k_to_decimal_processor_factory___r, sizeof(__pyx_k_to_decimal_processor_factory___r), 0, 0, 1, 1}, - {&__pyx_n_s_to_decimal_processor_factory___s, __pyx_k_to_decimal_processor_factory___s, sizeof(__pyx_k_to_decimal_processor_factory___s), 0, 0, 1, 1}, - {&__pyx_n_s_to_float, __pyx_k_to_float, sizeof(__pyx_k_to_float), 0, 0, 1, 1}, - {&__pyx_n_s_to_str, __pyx_k_to_str, sizeof(__pyx_k_to_str), 0, 0, 1, 1}, - {&__pyx_n_s_type, __pyx_k_type, sizeof(__pyx_k_type), 0, 0, 1, 1}, - {&__pyx_n_u_type, __pyx_k_type, sizeof(__pyx_k_type), 0, 1, 0, 1}, - {&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, - {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, - {&__pyx_n_s_use_setstate, __pyx_k_use_setstate, sizeof(__pyx_k_use_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} - }; - return __Pyx_InitStrings(__pyx_string_tab); -} -/* #### Code section: cached_builtins ### */ -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - return 0; -} -/* #### Code section: cached_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0x3aa22f9, 0xecd53f0, 0xb619922): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0x3aa22f9, 0xecd53f0, 0xb619922) = (format_, type_))" % __pyx_checksum - */ - __pyx_tuple__2 = PyTuple_Pack(3, __pyx_int_61481721, __pyx_int_248337392, __pyx_int_190945570); if 
(unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__2); - __Pyx_GIVEREF(__pyx_tuple__2); - - /* "sqlalchemy/engine/_processors_cy.py":27 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - __pyx_codeobj__4 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_is_compiled, 27, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__4)) __PYX_ERR(0, 27, __pyx_L1_error) - - /* "sqlalchemy/engine/_processors_cy.py":35 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def int_to_boolean(value: Any) -> Optional[bool]: - * if value is None: - */ - __pyx_tuple__5 = PyTuple_Pack(1, __pyx_n_s_value); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(0, 35, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__5); - __Pyx_GIVEREF(__pyx_tuple__5); - __pyx_codeobj__6 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_int_to_boolean, 35, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__6)) __PYX_ERR(0, 35, __pyx_L1_error) - - /* "sqlalchemy/engine/_processors_cy.py":42 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def to_str(value: Any) -> Optional[str]: - * if value is None: - */ - __pyx_codeobj__7 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_to_str, 42, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__7)) __PYX_ERR(0, 42, __pyx_L1_error) - - /* "sqlalchemy/engine/_processors_cy.py":49 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def to_float(value: Any) -> Optional[float]: - * if value is None: - */ - __pyx_codeobj__8 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_to_float, 49, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__8)) __PYX_ERR(0, 49, __pyx_L1_error) - - /* "sqlalchemy/engine/_processors_cy.py":56 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: - * if value is None: - */ - __pyx_codeobj__9 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_str_to_datetime, 56, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__9)) __PYX_ERR(0, 56, __pyx_L1_error) - - /* "sqlalchemy/engine/_processors_cy.py":63 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_time(value: Optional[str]) -> Optional[time_cls]: - * if value is None: - */ - __pyx_codeobj__10 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, 
__pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_str_to_time, 63, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__10)) __PYX_ERR(0, 63, __pyx_L1_error) - - /* "sqlalchemy/engine/_processors_cy.py":70 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_date(value: Optional[str]) -> Optional[date_cls]: - * if value is None: - */ - __pyx_codeobj__11 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__processor, __pyx_n_s_str_to_date, 70, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__11)) __PYX_ERR(0, 70, __pyx_L1_error) - - /* "sqlalchemy/engine/_processors_cy.py":82 - * format_: str - * - * __slots__ = ("type_", "format_") # <<<<<<<<<<<<<< - * - * def __init__(self, type_: type, scale: int): - */ - __pyx_tuple__12 = PyTuple_Pack(2, __pyx_n_u_type, __pyx_n_u_format); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 82, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__12); - __Pyx_GIVEREF(__pyx_tuple__12); - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - __pyx_tuple__13 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_dict_2, __pyx_n_s_use_setstate); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__13); - __Pyx_GIVEREF(__pyx_tuple__13); - __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__13, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(1, 1, __pyx_L1_error) - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) - */ - __pyx_tuple__15 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pyx_state); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__15); - __Pyx_GIVEREF(__pyx_tuple__15); - __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(1, 16, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __pyx_unpickle_to_decimal_processor_factory(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_tuple__17 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__17); - __Pyx_GIVEREF(__pyx_tuple__17); - __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_to_decimal_proces, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - 
__pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} -/* #### Code section: init_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { - if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); - __pyx_int_61481721 = PyInt_FromLong(61481721L); if (unlikely(!__pyx_int_61481721)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_190945570 = PyInt_FromLong(190945570L); if (unlikely(!__pyx_int_190945570)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_248337392 = PyInt_FromLong(248337392L); if (unlikely(!__pyx_int_248337392)) __PYX_ERR(0, 1, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: init_globals ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - return 0; -} -/* #### Code section: init_module ### */ - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - #if CYTHON_USE_TYPE_SPECS - __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory_spec, NULL); if (unlikely(!__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory)) __PYX_ERR(0, 78, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory_spec, __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory) < 0) __PYX_ERR(0, 78, __pyx_L1_error) - #else - __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory = &__pyx_type_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory; - #endif - #if !CYTHON_COMPILING_IN_LIMITED_API - #endif - #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory) < 0) __PYX_ERR(0, 78, __pyx_L1_error) - #endif - #if PY_MAJOR_VERSION < 3 - __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory->tp_print = 0; - #endif - #if 
!CYTHON_COMPILING_IN_LIMITED_API - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory->tp_dictoffset && __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory->tp_getattro == PyObject_GenericGetAttr)) { - __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory->tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_to_decimal_processor_factory, (PyObject *) __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory) < 0) __PYX_ERR(0, 78, __pyx_L1_error) - #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory) < 0) __PYX_ERR(0, 78, __pyx_L1_error) - #endif - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_type_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); - /*--- Type import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); - /*--- Variable import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); - /*--- Function import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ -static int __pyx_pymod_exec__processors_cy(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec__processors_cy}, - {0, NULL} -}; -#endif - -#ifdef __cplusplus -namespace { - struct PyModuleDef __pyx_moduledef = - #else - static struct PyModuleDef __pyx_moduledef = - #endif - { - PyModuleDef_HEAD_INIT, - "_processors_cy", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #elif CYTHON_USE_MODULE_STATE - sizeof(__pyx_mstate), /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - #if CYTHON_USE_MODULE_STATE - __pyx_m_traverse, /* m_traverse */ - __pyx_m_clear, /* m_clear */ - NULL /* m_free */ - #else - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ - #endif - }; - #ifdef __cplusplus -} /* anonymous namespace */ -#endif -#endif - -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else -#define __Pyx_PyMODINIT_FUNC void -#endif -#else -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else -#define __Pyx_PyMODINIT_FUNC PyObject * -#endif -#endif - - -#if PY_MAJOR_VERSION < 3 -__Pyx_PyMODINIT_FUNC init_processors_cy(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC init_processors_cy(void) -#else -__Pyx_PyMODINIT_FUNC PyInit__processors_cy(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC PyInit__processors_cy(void) -#if 
CYTHON_PEP489_MULTI_PHASE_INIT -{ - return PyModuleDef_Init(&__pyx_moduledef); -} -static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { - #if PY_VERSION_HEX >= 0x030700A1 - static PY_INT64_T main_interpreter_id = -1; - PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); - if (main_interpreter_id == -1) { - main_interpreter_id = current_id; - return (unlikely(current_id == -1)) ? -1 : 0; - } else if (unlikely(main_interpreter_id != current_id)) - #else - static PyInterpreterState *main_interpreter = NULL; - PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; - if (!main_interpreter) { - main_interpreter = current_interpreter; - } else if (unlikely(main_interpreter != current_interpreter)) - #endif - { - PyErr_SetString( - PyExc_ImportError, - "Interpreter change detected - this module can only be loaded into one interpreter per process."); - return -1; - } - return 0; -} -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) -#else -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) -#endif -{ - PyObject *value = PyObject_GetAttrString(spec, from_name); - int result = 0; - if (likely(value)) { - if (allow_none || value != Py_None) { -#if CYTHON_COMPILING_IN_LIMITED_API - result = PyModule_AddObject(module, to_name, value); -#else - result = PyDict_SetItemString(moddict, to_name, value); -#endif - } - Py_DECREF(value); - } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - } else { - result = -1; - } - return result; -} -static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { - PyObject *module = NULL, *moddict, *modname; - CYTHON_UNUSED_VAR(def); - if (__Pyx_check_single_interpreter()) - return NULL; - if (__pyx_m) - return __Pyx_NewRef(__pyx_m); - modname = PyObject_GetAttrString(spec, "name"); - if (unlikely(!modname)) goto bad; - module = PyModule_NewObject(modname); - Py_DECREF(modname); - if (unlikely(!module)) goto bad; -#if CYTHON_COMPILING_IN_LIMITED_API - moddict = module; -#else - moddict = PyModule_GetDict(module); - if (unlikely(!moddict)) goto bad; -#endif - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; - return module; -bad: - Py_XDECREF(module); - return NULL; -} - - -static CYTHON_SMALL_CODE int __pyx_pymod_exec__processors_cy(PyObject *__pyx_pyinit_module) -#endif -#endif -{ - int stringtab_initialized = 0; - #if CYTHON_USE_MODULE_STATE - int pystate_addmodule_run = 0; - #endif - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - #if CYTHON_PEP489_MULTI_PHASE_INIT - if (__pyx_m) { - if (__pyx_m == __pyx_pyinit_module) return 0; - PyErr_SetString(PyExc_RuntimeError, "Module '_processors_cy' has already been imported. 
Re-initialisation is not supported."); - return -1; - } - #elif PY_MAJOR_VERSION >= 3 - if (__pyx_m) return __Pyx_NewRef(__pyx_m); - #endif - /*--- Module creation code ---*/ - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_m = __pyx_pyinit_module; - Py_INCREF(__pyx_m); - #else - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4("_processors_cy", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #elif CYTHON_USE_MODULE_STATE - __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) - { - int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); - __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_processors_cy" pseudovariable */ - if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - pystate_addmodule_run = 1; - } - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #endif - CYTHON_UNUSED_VAR(__pyx_t_1); - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_d); - __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if CYTHON_REFNANNY -__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); -if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); -} -#endif - __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__processors_cy(void)", 0); - if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pxy_PyFrame_Initialize_Offsets - __Pxy_PyFrame_Initialize_Offsets(); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Generator_USED - if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - PyEval_InitThreads(); - #endif - /*--- Initialize various global constants etc. 
---*/ - if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - stringtab_initialized = 1; - if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) - if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - if (__pyx_module_is_main_sqlalchemy__engine___processors_cy) { - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - } - #if PY_MAJOR_VERSION >= 3 - { - PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) - if (!PyDict_GetItemString(modules, "sqlalchemy.engine._processors_cy")) { - if (unlikely((PyDict_SetItemString(modules, "sqlalchemy.engine._processors_cy", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - } - } - #endif - /*--- Builtin init code ---*/ - if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Constants init code ---*/ - if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Global type/function init code ---*/ - (void)__Pyx_modinit_global_init_code(); - (void)__Pyx_modinit_variable_export_code(); - (void)__Pyx_modinit_function_export_code(); - if (unlikely((__Pyx_modinit_type_init_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - (void)__Pyx_modinit_type_import_code(); - (void)__Pyx_modinit_variable_import_code(); - (void)__Pyx_modinit_function_import_code(); - /*--- Execution code ---*/ - #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - - /* "sqlalchemy/engine/_processors_cy.py":10 - * from __future__ import annotations - * - * from datetime import date as date_cls # <<<<<<<<<<<<<< - * from datetime import datetime as datetime_cls - * from datetime import time as time_cls - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_date); - __Pyx_GIVEREF(__pyx_n_s_date); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_date)) __PYX_ERR(0, 10, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_datetime, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_date); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_date_cls, __pyx_t_2) < 0) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":11 - * - * from datetime import date as date_cls - * from datetime import datetime as datetime_cls # <<<<<<<<<<<<<< - * from datetime import time as time_cls - * from typing import Any - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_datetime); - __Pyx_GIVEREF(__pyx_n_s_datetime); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_datetime)) __PYX_ERR(0, 11, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_datetime, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_datetime); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_datetime_cls, __pyx_t_3) < 0) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":12 - * from datetime import date as date_cls - * from datetime import datetime as datetime_cls - * from datetime import time as time_cls # <<<<<<<<<<<<<< - * from typing import Any - * from typing import Optional - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_time); - __Pyx_GIVEREF(__pyx_n_s_time); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_time)) __PYX_ERR(0, 12, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_datetime, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_time); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_time_cls, __pyx_t_2) < 0) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":13 - * from datetime import datetime as datetime_cls - * from datetime import time as time_cls - * from typing import Any # <<<<<<<<<<<<<< - * from typing import Optional - * - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Any); - __Pyx_GIVEREF(__pyx_n_s_Any); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Any)) __PYX_ERR(0, 13, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Any); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Any, __pyx_t_3) < 0) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":14 - * from datetime import time as time_cls - * from typing import Any - * from typing import Optional # <<<<<<<<<<<<<< - * - * # START GENERATED CYTHON IMPORT - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Optional); - __Pyx_GIVEREF(__pyx_n_s_Optional); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Optional)) __PYX_ERR(0, 14, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Optional); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Optional, __pyx_t_2) < 0) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":18 - * # START GENERATED CYTHON IMPORT - * # This section is automatically generated by the script tools/cython_imports.py - * try: # <<<<<<<<<<<<<< - * # NOTE: the cython compiler needs this "import cython" in the file, 
it - * # can't be only "from sqlalchemy.util import cython" with the fallback - */ - { - (void)__pyx_t_1; (void)__pyx_t_4; (void)__pyx_t_5; /* mark used */ - /*try:*/ { - - /* "sqlalchemy/engine/_processors_cy.py":22 - * # can't be only "from sqlalchemy.util import cython" with the fallback - * # in that module - * import cython # <<<<<<<<<<<<<< - * except ModuleNotFoundError: - * from sqlalchemy.util import cython - */ - } - } - - /* "sqlalchemy/engine/_processors_cy.py":27 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 27, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_bool) < 0) __PYX_ERR(0, 27, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_1_is_compiled, 0, __pyx_n_s_is_compiled, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__4)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 27, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_compiled, __pyx_t_2) < 0) __PYX_ERR(0, 27, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":35 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def int_to_boolean(value: Any) -> Optional[bool]: - * if value is None: - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 35, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 35, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_Optional_bool) < 0) __PYX_ERR(0, 35, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_3int_to_boolean, 0, __pyx_n_s_int_to_boolean, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__6)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 35, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_int_to_boolean, __pyx_t_3) < 0) __PYX_ERR(0, 35, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":42 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def to_str(value: Any) -> Optional[str]: - * if value is None: - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 42, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 42, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_Optional_str) < 0) __PYX_ERR(0, 42, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_5to_str, 0, __pyx_n_s_to_str, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__7)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 42, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_to_str, __pyx_t_2) < 0) __PYX_ERR(0, 42, 
__pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":49 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def to_float(value: Any) -> Optional[float]: - * if value is None: - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 49, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 49, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_Optional_float) < 0) __PYX_ERR(0, 49, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_7to_float, 0, __pyx_n_s_to_float, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__8)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 49, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_to_float, __pyx_t_3) < 0) __PYX_ERR(0, 49, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":56 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_datetime(value: Optional[str]) -> Optional[datetime_cls]: - * if value is None: - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 56, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_value, __pyx_kp_s_Optional_str) < 0) __PYX_ERR(0, 56, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_Optional_datetime_cls) < 0) __PYX_ERR(0, 56, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_9str_to_datetime, 0, __pyx_n_s_str_to_datetime, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__9)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 56, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_str_to_datetime, __pyx_t_2) < 0) __PYX_ERR(0, 56, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":63 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_time(value: Optional[str]) -> Optional[time_cls]: - * if value is None: - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 63, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_value, __pyx_kp_s_Optional_str) < 0) __PYX_ERR(0, 63, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_Optional_time_cls) < 0) __PYX_ERR(0, 63, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_11str_to_time, 0, __pyx_n_s_str_to_time, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__10)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 63, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_str_to_time, __pyx_t_3) < 0) __PYX_ERR(0, 63, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":70 - * - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def str_to_date(value: Optional[str]) -> Optional[date_cls]: 
- * if value is None: - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_value, __pyx_kp_s_Optional_str) < 0) __PYX_ERR(0, 70, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_Optional_date_cls) < 0) __PYX_ERR(0, 70, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_13str_to_date, 0, __pyx_n_s_str_to_date, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__11)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_str_to_date, __pyx_t_2) < 0) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":82 - * format_: str - * - * __slots__ = ("type_", "format_") # <<<<<<<<<<<<<< - * - * def __init__(self, type_: type, scale: int): - */ - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, __pyx_n_s_slots, __pyx_tuple__12) < 0) __PYX_ERR(0, 82, __pyx_L1_error) - PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_5__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_to_decimal_processor_factory___r, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__14)); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, __pyx_n_s_reduce_cython, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_to_decimal_processor_factory, (type(self), 0x3aa22f9, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_to_decimal_processor_factory__set_state(self, __pyx_state) - */ - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_28to_decimal_processor_factory_7__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_to_decimal_processor_factory___s, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__16)); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory, __pyx_n_s_setstate_cython, __pyx_t_2) < 0) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_14_processors_cy_to_decimal_processor_factory); - - /* "(tree fragment)":1 - * def __pyx_unpickle_to_decimal_processor_factory(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_t_2 = 
__Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_14_processors_cy_15__pyx_unpickle_to_decimal_processor_factory, 0, __pyx_n_s_pyx_unpickle_to_decimal_proces, NULL, __pyx_n_s_sqlalchemy_engine__processors_cy, __pyx_d, ((PyObject *)__pyx_codeobj__18)); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_to_decimal_proces, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_processors_cy.py":1 - * # engine/_processors_cy.py # <<<<<<<<<<<<<< - * # Copyright (C) 2010-2025 the SQLAlchemy authors and contributors - * # - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /*--- Wrapped vars code ---*/ - - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - if (__pyx_m) { - if (__pyx_d && stringtab_initialized) { - __Pyx_AddTraceback("init sqlalchemy.engine._processors_cy", __pyx_clineno, __pyx_lineno, __pyx_filename); - } - #if !CYTHON_USE_MODULE_STATE - Py_CLEAR(__pyx_m); - #else - Py_DECREF(__pyx_m); - if (pystate_addmodule_run) { - PyObject *tp, *value, *tb; - PyErr_Fetch(&tp, &value, &tb); - PyState_RemoveModule(&__pyx_moduledef); - PyErr_Restore(tp, value, tb); - } - #endif - } else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "init sqlalchemy.engine._processors_cy"); - } - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - #if CYTHON_PEP489_MULTI_PHASE_INIT - return (__pyx_m != NULL) ? 0 : -1; - #elif PY_MAJOR_VERSION >= 3 - return __pyx_m; - #else - return; - #endif -} -/* #### Code section: cleanup_globals ### */ -/* #### Code section: cleanup_module ### */ -/* #### Code section: main_method ### */ -/* #### Code section: utility_code_pragmas ### */ -#ifdef _MSC_VER -#pragma warning( push ) -/* Warning 4127: conditional expression is constant - * Cython uses constant conditional expressions to allow in inline functions to be optimized at - * compile-time, so this warning is not useful - */ -#pragma warning( disable : 4127 ) -#endif - - - -/* #### Code section: utility_code_def ### */ - -/* --- Runtime support code --- */ -/* Refnanny */ -#if CYTHON_REFNANNY -static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule(modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, "RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); -end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; -} -#endif - -/* TupleAndListFromArray */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE void __Pyx_copy_object_array(PyObject *const *CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { - PyObject *v; - Py_ssize_t i; - for (i = 0; i < length; i++) { - v = dest[i] = src[i]; - Py_INCREF(v); - } -} -static CYTHON_INLINE PyObject * -__Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) -{ - PyObject *res; - if (n <= 0) { - Py_INCREF(__pyx_empty_tuple); - return __pyx_empty_tuple; - } - res = PyTuple_New(n); - if (unlikely(res == NULL)) return NULL; - __Pyx_copy_object_array(src, ((PyTupleObject*)res)->ob_item, n); - return res; -} -static CYTHON_INLINE PyObject * -__Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n) -{ - 
PyObject *res; - if (n <= 0) { - return PyList_New(0); - } - res = PyList_New(n); - if (unlikely(res == NULL)) return NULL; - __Pyx_copy_object_array(src, ((PyListObject*)res)->ob_item, n); - return res; -} -#endif - -/* BytesEquals */ -static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { -#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API - return PyObject_RichCompareBool(s1, s2, equals); -#else - if (s1 == s2) { - return (equals == Py_EQ); - } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { - const char *ps1, *ps2; - Py_ssize_t length = PyBytes_GET_SIZE(s1); - if (length != PyBytes_GET_SIZE(s2)) - return (equals == Py_NE); - ps1 = PyBytes_AS_STRING(s1); - ps2 = PyBytes_AS_STRING(s2); - if (ps1[0] != ps2[0]) { - return (equals == Py_NE); - } else if (length == 1) { - return (equals == Py_EQ); - } else { - int result; -#if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) - Py_hash_t hash1, hash2; - hash1 = ((PyBytesObject*)s1)->ob_shash; - hash2 = ((PyBytesObject*)s2)->ob_shash; - if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { - return (equals == Py_NE); - } -#endif - result = memcmp(ps1, ps2, (size_t)length); - return (equals == Py_EQ) ? (result == 0) : (result != 0); - } - } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { - return (equals == Py_NE); - } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { - return (equals == Py_NE); - } else { - int result; - PyObject* py_result = PyObject_RichCompare(s1, s2, equals); - if (!py_result) - return -1; - result = __Pyx_PyObject_IsTrue(py_result); - Py_DECREF(py_result); - return result; - } -#endif -} - -/* UnicodeEquals */ -static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { -#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API - return PyObject_RichCompareBool(s1, s2, equals); -#else -#if PY_MAJOR_VERSION < 3 - PyObject* owned_ref = NULL; -#endif - int s1_is_unicode, s2_is_unicode; - if (s1 == s2) { - goto return_eq; - } - s1_is_unicode = PyUnicode_CheckExact(s1); - s2_is_unicode = PyUnicode_CheckExact(s2); -#if PY_MAJOR_VERSION < 3 - if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { - owned_ref = PyUnicode_FromObject(s2); - if (unlikely(!owned_ref)) - return -1; - s2 = owned_ref; - s2_is_unicode = 1; - } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { - owned_ref = PyUnicode_FromObject(s1); - if (unlikely(!owned_ref)) - return -1; - s1 = owned_ref; - s1_is_unicode = 1; - } else if (((!s2_is_unicode) & (!s1_is_unicode))) { - return __Pyx_PyBytes_Equals(s1, s2, equals); - } -#endif - if (s1_is_unicode & s2_is_unicode) { - Py_ssize_t length; - int kind; - void *data1, *data2; - if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) - return -1; - length = __Pyx_PyUnicode_GET_LENGTH(s1); - if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { - goto return_ne; - } -#if CYTHON_USE_UNICODE_INTERNALS - { - Py_hash_t hash1, hash2; - #if CYTHON_PEP393_ENABLED - hash1 = ((PyASCIIObject*)s1)->hash; - hash2 = ((PyASCIIObject*)s2)->hash; - #else - hash1 = ((PyUnicodeObject*)s1)->hash; - hash2 = ((PyUnicodeObject*)s2)->hash; - #endif - if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { - goto return_ne; - } - } -#endif - kind = __Pyx_PyUnicode_KIND(s1); - if (kind != __Pyx_PyUnicode_KIND(s2)) { - goto return_ne; - } - data1 = __Pyx_PyUnicode_DATA(s1); - data2 = __Pyx_PyUnicode_DATA(s2); - if (__Pyx_PyUnicode_READ(kind, data1, 0) != 
__Pyx_PyUnicode_READ(kind, data2, 0)) { - goto return_ne; - } else if (length == 1) { - goto return_eq; - } else { - int result = memcmp(data1, data2, (size_t)(length * kind)); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_EQ) ? (result == 0) : (result != 0); - } - } else if ((s1 == Py_None) & s2_is_unicode) { - goto return_ne; - } else if ((s2 == Py_None) & s1_is_unicode) { - goto return_ne; - } else { - int result; - PyObject* py_result = PyObject_RichCompare(s1, s2, equals); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - if (!py_result) - return -1; - result = __Pyx_PyObject_IsTrue(py_result); - Py_DECREF(py_result); - return result; - } -return_eq: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_EQ); -return_ne: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_NE); -#endif -} - -/* fastcall */ -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s) -{ - Py_ssize_t i, n = PyTuple_GET_SIZE(kwnames); - for (i = 0; i < n; i++) - { - if (s == PyTuple_GET_ITEM(kwnames, i)) return kwvalues[i]; - } - for (i = 0; i < n; i++) - { - int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); - if (unlikely(eq != 0)) { - if (unlikely(eq < 0)) return NULL; - return kwvalues[i]; - } - } - return NULL; -} -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 -CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { - Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); - PyObject *dict; - dict = PyDict_New(); - if (unlikely(!dict)) - return NULL; - for (i=0; i= 3 - "%s() got multiple values for keyword argument '%U'", func_name, kw_name); - #else - "%s() got multiple values for keyword argument '%s'", func_name, - PyString_AsString(kw_name)); - #endif -} - -/* ParseKeywords */ -static int __Pyx_ParseOptionalKeywords( - PyObject *kwds, - PyObject *const *kwvalues, - PyObject **argnames[], - PyObject *kwds2, - PyObject *values[], - Py_ssize_t num_pos_args, - const char* function_name) -{ - PyObject *key = 0, *value = 0; - Py_ssize_t pos = 0; - PyObject*** name; - PyObject*** first_kw_arg = argnames + num_pos_args; - int kwds_is_tuple = CYTHON_METH_FASTCALL && likely(PyTuple_Check(kwds)); - while (1) { - Py_XDECREF(key); key = NULL; - Py_XDECREF(value); value = NULL; - if (kwds_is_tuple) { - Py_ssize_t size; -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(kwds); -#else - size = PyTuple_Size(kwds); - if (size < 0) goto bad; -#endif - if (pos >= size) break; -#if CYTHON_AVOID_BORROWED_REFS - key = __Pyx_PySequence_ITEM(kwds, pos); - if (!key) goto bad; -#elif CYTHON_ASSUME_SAFE_MACROS - key = PyTuple_GET_ITEM(kwds, pos); -#else - key = PyTuple_GetItem(kwds, pos); - if (!key) goto bad; -#endif - value = kwvalues[pos]; - pos++; - } - else - { - if (!PyDict_Next(kwds, &pos, &key, &value)) break; -#if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(key); -#endif - } - name = first_kw_arg; - while (*name && (**name != key)) name++; - if (*name) { - values[name-argnames] = value; -#if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(value); - Py_DECREF(key); -#endif - key = NULL; - value = NULL; - continue; - } -#if !CYTHON_AVOID_BORROWED_REFS - Py_INCREF(key); -#endif - Py_INCREF(value); - name = first_kw_arg; - #if PY_MAJOR_VERSION < 3 - if (likely(PyString_Check(key))) { - while (*name) { - if ((CYTHON_COMPILING_IN_PYPY || 
PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) - && _PyString_Eq(**name, key)) { - values[name-argnames] = value; -#if CYTHON_AVOID_BORROWED_REFS - value = NULL; -#endif - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - if ((**argname == key) || ( - (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) - && _PyString_Eq(**argname, key))) { - goto arg_passed_twice; - } - argname++; - } - } - } else - #endif - if (likely(PyUnicode_Check(key))) { - while (*name) { - int cmp = ( - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**name, key) - ); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) { - values[name-argnames] = value; -#if CYTHON_AVOID_BORROWED_REFS - value = NULL; -#endif - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - int cmp = (**argname == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**argname, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) goto arg_passed_twice; - argname++; - } - } - } else - goto invalid_keyword_type; - if (kwds2) { - if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; - } else { - goto invalid_keyword; - } - } - Py_XDECREF(key); - Py_XDECREF(value); - return 0; -arg_passed_twice: - __Pyx_RaiseDoubleKeywordsError(function_name, key); - goto bad; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - goto bad; -invalid_keyword: - #if PY_MAJOR_VERSION < 3 - PyErr_Format(PyExc_TypeError, - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - PyErr_Format(PyExc_TypeError, - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif -bad: - Py_XDECREF(key); - Py_XDECREF(value); - return -1; -} - -/* RaiseArgTupleInvalid */ -static void __Pyx_RaiseArgtupleInvalid( - const char* func_name, - int exact, - Py_ssize_t num_min, - Py_ssize_t num_max, - Py_ssize_t num_found) -{ - Py_ssize_t num_expected; - const char *more_or_less; - if (num_found < num_min) { - num_expected = num_min; - more_or_less = "at least"; - } else { - num_expected = num_max; - more_or_less = "at most"; - } - if (exact) { - more_or_less = "exactly"; - } - PyErr_Format(PyExc_TypeError, - "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", - func_name, more_or_less, num_expected, - (num_expected == 1) ? 
"" : "s", num_found); -} - -/* pybytes_as_double */ -static double __Pyx_SlowPyString_AsDouble(PyObject *obj) { - PyObject *float_value; -#if PY_MAJOR_VERSION >= 3 - float_value = PyFloat_FromString(obj); -#else - float_value = PyFloat_FromString(obj, 0); -#endif - if (likely(float_value)) { -#if CYTHON_ASSUME_SAFE_MACROS - double value = PyFloat_AS_DOUBLE(float_value); -#else - double value = PyFloat_AsDouble(float_value); -#endif - Py_DECREF(float_value); - return value; - } - return (double)-1; -} -static const char* __Pyx__PyBytes_AsDouble_Copy(const char* start, char* buffer, Py_ssize_t length) { - int last_was_punctuation = 1; - Py_ssize_t i; - for (i=0; i < length; i++) { - char chr = start[i]; - int is_punctuation = (chr == '_') | (chr == '.') | (chr == 'e') | (chr == 'E'); - *buffer = chr; - buffer += (chr != '_'); - if (unlikely(last_was_punctuation & is_punctuation)) goto parse_failure; - last_was_punctuation = is_punctuation; - } - if (unlikely(last_was_punctuation)) goto parse_failure; - *buffer = '\0'; - return buffer; -parse_failure: - return NULL; -} -static double __Pyx__PyBytes_AsDouble_inf_nan(const char* start, Py_ssize_t length) { - int matches = 1; - char sign = start[0]; - int is_signed = (sign == '+') | (sign == '-'); - start += is_signed; - length -= is_signed; - switch (start[0]) { - #ifdef Py_NAN - case 'n': - case 'N': - if (unlikely(length != 3)) goto parse_failure; - matches &= (start[1] == 'a' || start[1] == 'A'); - matches &= (start[2] == 'n' || start[2] == 'N'); - if (unlikely(!matches)) goto parse_failure; - return (sign == '-') ? -Py_NAN : Py_NAN; - #endif - case 'i': - case 'I': - if (unlikely(length < 3)) goto parse_failure; - matches &= (start[1] == 'n' || start[1] == 'N'); - matches &= (start[2] == 'f' || start[2] == 'F'); - if (likely(length == 3 && matches)) - return (sign == '-') ? -Py_HUGE_VAL : Py_HUGE_VAL; - if (unlikely(length != 8)) goto parse_failure; - matches &= (start[3] == 'i' || start[3] == 'I'); - matches &= (start[4] == 'n' || start[4] == 'N'); - matches &= (start[5] == 'i' || start[5] == 'I'); - matches &= (start[6] == 't' || start[6] == 'T'); - matches &= (start[7] == 'y' || start[7] == 'Y'); - if (unlikely(!matches)) goto parse_failure; - return (sign == '-') ? 
-Py_HUGE_VAL : Py_HUGE_VAL; - case '.': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': - break; - default: - goto parse_failure; - } - return 0.0; -parse_failure: - return -1.0; -} -static CYTHON_INLINE int __Pyx__PyBytes_AsDouble_IsSpace(char ch) { - return (ch == 0x20) | !((ch < 0x9) | (ch > 0xd)); -} -CYTHON_UNUSED static double __Pyx__PyBytes_AsDouble(PyObject *obj, const char* start, Py_ssize_t length) { - double value; - Py_ssize_t i, digits; - const char *last = start + length; - char *end; - while (__Pyx__PyBytes_AsDouble_IsSpace(*start)) - start++; - while (start < last - 1 && __Pyx__PyBytes_AsDouble_IsSpace(last[-1])) - last--; - length = last - start; - if (unlikely(length <= 0)) goto fallback; - value = __Pyx__PyBytes_AsDouble_inf_nan(start, length); - if (unlikely(value == -1.0)) goto fallback; - if (value != 0.0) return value; - digits = 0; - for (i=0; i < length; digits += start[i++] != '_'); - if (likely(digits == length)) { - value = PyOS_string_to_double(start, &end, NULL); - } else if (digits < 40) { - char number[40]; - last = __Pyx__PyBytes_AsDouble_Copy(start, number, length); - if (unlikely(!last)) goto fallback; - value = PyOS_string_to_double(number, &end, NULL); - } else { - char *number = (char*) PyMem_Malloc((digits + 1) * sizeof(char)); - if (unlikely(!number)) goto fallback; - last = __Pyx__PyBytes_AsDouble_Copy(start, number, length); - if (unlikely(!last)) { - PyMem_Free(number); - goto fallback; - } - value = PyOS_string_to_double(number, &end, NULL); - PyMem_Free(number); - } - if (likely(end == last) || (value == (double)-1 && PyErr_Occurred())) { - return value; - } -fallback: - return __Pyx_SlowPyString_AsDouble(obj); -} - -/* pynumber_float */ -static CYTHON_INLINE PyObject* __Pyx__PyNumber_Float(PyObject* obj) { - double val; - if (PyLong_CheckExact(obj)) { -#if CYTHON_USE_PYLONG_INTERNALS - if (likely(__Pyx_PyLong_IsCompact(obj))) { - val = (double) __Pyx_PyLong_CompactValue(obj); - goto no_error; - } -#endif - val = PyLong_AsDouble(obj); - } else if (PyUnicode_CheckExact(obj)) { - val = __Pyx_PyUnicode_AsDouble(obj); - } else if (PyBytes_CheckExact(obj)) { - val = __Pyx_PyBytes_AsDouble(obj); - } else if (PyByteArray_CheckExact(obj)) { - val = __Pyx_PyByteArray_AsDouble(obj); - } else { - return PyNumber_Float(obj); - } - if (unlikely(val == -1 && PyErr_Occurred())) { - return NULL; - } -#if CYTHON_USE_PYLONG_INTERNALS -no_error: -#endif - return PyFloat_FromDouble(val); -} - -/* PyErrExceptionMatches */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i= 0x030C00A6 - PyObject *current_exception = tstate->current_exception; - if (unlikely(!current_exception)) return 0; - exc_type = (PyObject*) Py_TYPE(current_exception); - if (exc_type == err) return 1; -#else - exc_type = tstate->curexc_type; - if (exc_type == err) return 1; - if (unlikely(!exc_type)) return 0; -#endif - #if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(exc_type); - #endif - if (unlikely(PyTuple_Check(err))) { - result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); - } else { - result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err); - } - #if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(exc_type); - #endif - return result; -} -#endif - -/* PyErrFetchRestore */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, 
PyObject *value, PyObject *tb) { -#if PY_VERSION_HEX >= 0x030C00A6 - PyObject *tmp_value; - assert(type == NULL || (value != NULL && type == (PyObject*) Py_TYPE(value))); - if (value) { - #if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(((PyBaseExceptionObject*) value)->traceback != tb)) - #endif - PyException_SetTraceback(value, tb); - } - tmp_value = tstate->current_exception; - tstate->current_exception = value; - Py_XDECREF(tmp_value); - Py_XDECREF(type); - Py_XDECREF(tb); -#else - PyObject *tmp_type, *tmp_value, *tmp_tb; - tmp_type = tstate->curexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -#endif -} -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { -#if PY_VERSION_HEX >= 0x030C00A6 - PyObject* exc_value; - exc_value = tstate->current_exception; - tstate->current_exception = 0; - *value = exc_value; - *type = NULL; - *tb = NULL; - if (exc_value) { - *type = (PyObject*) Py_TYPE(exc_value); - Py_INCREF(*type); - #if CYTHON_COMPILING_IN_CPYTHON - *tb = ((PyBaseExceptionObject*) exc_value)->traceback; - Py_XINCREF(*tb); - #else - *tb = PyException_GetTraceback(exc_value); - #endif - } -#else - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -#endif -} -#endif - -/* PyObjectGetAttrStr */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro)) - return tp->tp_getattro(obj, attr_name); -#if PY_MAJOR_VERSION < 3 - if (likely(tp->tp_getattr)) - return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); -#endif - return PyObject_GetAttr(obj, attr_name); -} -#endif - -/* PyObjectGetAttrStrNoError */ -#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 -static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - __Pyx_PyErr_Clear(); -} -#endif -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { - PyObject *result; -#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 - (void) PyObject_GetOptionalAttr(obj, attr_name, &result); - return result; -#else -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { - return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); - } -#endif - result = __Pyx_PyObject_GetAttrStr(obj, attr_name); - if (unlikely(!result)) { - __Pyx_PyObject_GetAttrStr_ClearAttributeError(); - } - return result; -#endif -} - -/* GetBuiltinName */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name) { - PyObject* result = __Pyx_PyObject_GetAttrStrNoError(__pyx_b, name); - if (unlikely(!result) && !PyErr_Occurred()) { - PyErr_Format(PyExc_NameError, -#if PY_MAJOR_VERSION >= 3 - "name '%U' is not defined", name); -#else - "name '%.200s' is not defined", PyString_AS_STRING(name)); -#endif - } - return result; -} - -/* PyDictVersioning */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PY_UINT64_T 
__Pyx_get_tp_dict_version(PyObject *obj) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { - PyObject **dictptr = NULL; - Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; - if (offset) { -#if CYTHON_COMPILING_IN_CPYTHON - dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); -#else - dictptr = _PyObject_GetDictPtr(obj); -#endif - } - return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; -} -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) - return 0; - return obj_dict_version == __Pyx_get_object_dict_version(obj); -} -#endif - -/* GetModuleGlobalName */ -#if CYTHON_USE_DICT_VERSIONS -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) -#else -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) -#endif -{ - PyObject *result; -#if !CYTHON_AVOID_BORROWED_REFS -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 - result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } else if (unlikely(PyErr_Occurred())) { - return NULL; - } -#elif CYTHON_COMPILING_IN_LIMITED_API - if (unlikely(!__pyx_m)) { - return NULL; - } - result = PyObject_GetAttr(__pyx_m, name); - if (likely(result)) { - return result; - } -#else - result = PyDict_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } -#endif -#else - result = PyObject_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } - PyErr_Clear(); -#endif - return __Pyx_GetBuiltinName(name); -} - -/* PyFunctionFastCall */ -#if CYTHON_FAST_PYCALL && !CYTHON_VECTORCALL -static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, - PyObject *globals) { - PyFrameObject *f; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject **fastlocals; - Py_ssize_t i; - PyObject *result; - assert(globals != NULL); - /* XXX Perhaps we should create a specialized - PyFrame_New() that doesn't take locals, but does - take builtins without sanity checking them. 
- */ - assert(tstate != NULL); - f = PyFrame_New(tstate, co, globals, NULL); - if (f == NULL) { - return NULL; - } - fastlocals = __Pyx_PyFrame_GetLocalsplus(f); - for (i = 0; i < na; i++) { - Py_INCREF(*args); - fastlocals[i] = *args++; - } - result = PyEval_EvalFrameEx(f,0); - ++tstate->recursion_depth; - Py_DECREF(f); - --tstate->recursion_depth; - return result; -} -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { - PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); - PyObject *globals = PyFunction_GET_GLOBALS(func); - PyObject *argdefs = PyFunction_GET_DEFAULTS(func); - PyObject *closure; -#if PY_MAJOR_VERSION >= 3 - PyObject *kwdefs; -#endif - PyObject *kwtuple, **k; - PyObject **d; - Py_ssize_t nd; - Py_ssize_t nk; - PyObject *result; - assert(kwargs == NULL || PyDict_Check(kwargs)); - nk = kwargs ? PyDict_Size(kwargs) : 0; - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) { - return NULL; - } - #else - if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) { - return NULL; - } - #endif - if ( -#if PY_MAJOR_VERSION >= 3 - co->co_kwonlyargcount == 0 && -#endif - likely(kwargs == NULL || nk == 0) && - co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { - if (argdefs == NULL && co->co_argcount == nargs) { - result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); - goto done; - } - else if (nargs == 0 && argdefs != NULL - && co->co_argcount == Py_SIZE(argdefs)) { - /* function called with no arguments, but all parameters have - a default value: use default values as arguments .*/ - args = &PyTuple_GET_ITEM(argdefs, 0); - result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); - goto done; - } - } - if (kwargs != NULL) { - Py_ssize_t pos, i; - kwtuple = PyTuple_New(2 * nk); - if (kwtuple == NULL) { - result = NULL; - goto done; - } - k = &PyTuple_GET_ITEM(kwtuple, 0); - pos = i = 0; - while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { - Py_INCREF(k[i]); - Py_INCREF(k[i+1]); - i += 2; - } - nk = i / 2; - } - else { - kwtuple = NULL; - k = NULL; - } - closure = PyFunction_GET_CLOSURE(func); -#if PY_MAJOR_VERSION >= 3 - kwdefs = PyFunction_GET_KW_DEFAULTS(func); -#endif - if (argdefs != NULL) { - d = &PyTuple_GET_ITEM(argdefs, 0); - nd = Py_SIZE(argdefs); - } - else { - d = NULL; - nd = 0; - } -#if PY_MAJOR_VERSION >= 3 - result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, kwdefs, closure); -#else - result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, closure); -#endif - Py_XDECREF(kwtuple); -done: - Py_LeaveRecursiveCall(); - return result; -} -#endif - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = Py_TYPE(func)->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - #else - if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) - return NULL; - #endif - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} 
-#endif - -/* PyObjectCallMethO */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { - PyObject *self, *result; - PyCFunction cfunc; - cfunc = __Pyx_CyOrPyCFunction_GET_FUNCTION(func); - self = __Pyx_CyOrPyCFunction_GET_SELF(func); - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - #else - if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) - return NULL; - #endif - result = cfunc(self, arg); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectFastCall */ -#if PY_VERSION_HEX < 0x03090000 || CYTHON_COMPILING_IN_LIMITED_API -static PyObject* __Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs) { - PyObject *argstuple; - PyObject *result = 0; - size_t i; - argstuple = PyTuple_New((Py_ssize_t)nargs); - if (unlikely(!argstuple)) return NULL; - for (i = 0; i < nargs; i++) { - Py_INCREF(args[i]); - if (__Pyx_PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]) < 0) goto bad; - } - result = __Pyx_PyObject_Call(func, argstuple, kwargs); - bad: - Py_DECREF(argstuple); - return result; -} -#endif -static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t _nargs, PyObject *kwargs) { - Py_ssize_t nargs = __Pyx_PyVectorcall_NARGS(_nargs); -#if CYTHON_COMPILING_IN_CPYTHON - if (nargs == 0 && kwargs == NULL) { - if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_NOARGS)) - return __Pyx_PyObject_CallMethO(func, NULL); - } - else if (nargs == 1 && kwargs == NULL) { - if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_O)) - return __Pyx_PyObject_CallMethO(func, args[0]); - } -#endif - #if PY_VERSION_HEX < 0x030800B1 - #if CYTHON_FAST_PYCCALL - if (PyCFunction_Check(func)) { - if (kwargs) { - return _PyCFunction_FastCallDict(func, args, nargs, kwargs); - } else { - return _PyCFunction_FastCallKeywords(func, args, nargs, NULL); - } - } - #if PY_VERSION_HEX >= 0x030700A1 - if (!kwargs && __Pyx_IS_TYPE(func, &PyMethodDescr_Type)) { - return _PyMethodDescr_FastCallKeywords(func, args, nargs, NULL); - } - #endif - #endif - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs); - } - #endif - #endif - if (kwargs == NULL) { - #if CYTHON_VECTORCALL - #if PY_VERSION_HEX < 0x03090000 - vectorcallfunc f = _PyVectorcall_Function(func); - #else - vectorcallfunc f = PyVectorcall_Function(func); - #endif - if (f) { - return f(func, args, (size_t)nargs, NULL); - } - #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL - if (__Pyx_CyFunction_CheckExact(func)) { - __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func); - if (f) return f(func, args, (size_t)nargs, NULL); - } - #endif - } - if (nargs == 0) { - return __Pyx_PyObject_Call(func, __pyx_empty_tuple, kwargs); - } - #if PY_VERSION_HEX >= 0x03090000 && !CYTHON_COMPILING_IN_LIMITED_API - return PyObject_VectorcallDict(func, args, (size_t)nargs, kwargs); - #else - return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs); - #endif -} - -/* ArgTypeTest */ -static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) -{ - __Pyx_TypeName 
type_name; - __Pyx_TypeName obj_type_name; - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - else if (exact) { - #if PY_MAJOR_VERSION == 2 - if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; - #endif - } - else { - if (likely(__Pyx_TypeCheck(obj, type))) return 1; - } - type_name = __Pyx_PyType_GetName(type); - obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); - PyErr_Format(PyExc_TypeError, - "Argument '%.200s' has incorrect type (expected " __Pyx_FMT_TYPENAME - ", got " __Pyx_FMT_TYPENAME ")", name, type_name, obj_type_name); - __Pyx_DECREF_TypeName(type_name); - __Pyx_DECREF_TypeName(obj_type_name); - return 0; -} - -/* JoinPyUnicode */ -static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, - Py_UCS4 max_char) { -#if CYTHON_USE_UNICODE_INTERNALS && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - PyObject *result_uval; - int result_ukind, kind_shift; - Py_ssize_t i, char_pos; - void *result_udata; - CYTHON_MAYBE_UNUSED_VAR(max_char); -#if CYTHON_PEP393_ENABLED - result_uval = PyUnicode_New(result_ulength, max_char); - if (unlikely(!result_uval)) return NULL; - result_ukind = (max_char <= 255) ? PyUnicode_1BYTE_KIND : (max_char <= 65535) ? PyUnicode_2BYTE_KIND : PyUnicode_4BYTE_KIND; - kind_shift = (result_ukind == PyUnicode_4BYTE_KIND) ? 2 : result_ukind - 1; - result_udata = PyUnicode_DATA(result_uval); -#else - result_uval = PyUnicode_FromUnicode(NULL, result_ulength); - if (unlikely(!result_uval)) return NULL; - result_ukind = sizeof(Py_UNICODE); - kind_shift = (result_ukind == 4) ? 2 : result_ukind - 1; - result_udata = PyUnicode_AS_UNICODE(result_uval); -#endif - assert(kind_shift == 2 || kind_shift == 1 || kind_shift == 0); - char_pos = 0; - for (i=0; i < value_count; i++) { - int ukind; - Py_ssize_t ulength; - void *udata; - PyObject *uval = PyTuple_GET_ITEM(value_tuple, i); - if (unlikely(__Pyx_PyUnicode_READY(uval))) - goto bad; - ulength = __Pyx_PyUnicode_GET_LENGTH(uval); - if (unlikely(!ulength)) - continue; - if (unlikely((PY_SSIZE_T_MAX >> kind_shift) - ulength < char_pos)) - goto overflow; - ukind = __Pyx_PyUnicode_KIND(uval); - udata = __Pyx_PyUnicode_DATA(uval); - if (!CYTHON_PEP393_ENABLED || ukind == result_ukind) { - memcpy((char *)result_udata + (char_pos << kind_shift), udata, (size_t) (ulength << kind_shift)); - } else { - #if PY_VERSION_HEX >= 0x030d0000 - if (unlikely(PyUnicode_CopyCharacters(result_uval, char_pos, uval, 0, ulength) < 0)) goto bad; - #elif CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 || defined(_PyUnicode_FastCopyCharacters) - _PyUnicode_FastCopyCharacters(result_uval, char_pos, uval, 0, ulength); - #else - Py_ssize_t j; - for (j=0; j < ulength; j++) { - Py_UCS4 uchar = __Pyx_PyUnicode_READ(ukind, udata, j); - __Pyx_PyUnicode_WRITE(result_ukind, result_udata, char_pos+j, uchar); - } - #endif - } - char_pos += ulength; - } - return result_uval; -overflow: - PyErr_SetString(PyExc_OverflowError, "join() result is too long for a Python string"); -bad: - Py_DECREF(result_uval); - return NULL; -#else - CYTHON_UNUSED_VAR(max_char); - CYTHON_UNUSED_VAR(result_ulength); - CYTHON_UNUSED_VAR(value_count); - return PyUnicode_Join(__pyx_empty_unicode, value_tuple); -#endif -} - -/* PyObjectCallOneArg */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *args[2] = {NULL, arg}; - return __Pyx_PyObject_FastCall(func, args+1, 1 | 
__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); -} - -/* KeywordStringCheck */ -static int __Pyx_CheckKeywordStrings( - PyObject *kw, - const char* function_name, - int kw_allowed) -{ - PyObject* key = 0; - Py_ssize_t pos = 0; -#if CYTHON_COMPILING_IN_PYPY - if (!kw_allowed && PyDict_Next(kw, &pos, &key, 0)) - goto invalid_keyword; - return 1; -#else - if (CYTHON_METH_FASTCALL && likely(PyTuple_Check(kw))) { - Py_ssize_t kwsize; -#if CYTHON_ASSUME_SAFE_MACROS - kwsize = PyTuple_GET_SIZE(kw); -#else - kwsize = PyTuple_Size(kw); - if (kwsize < 0) return 0; -#endif - if (unlikely(kwsize == 0)) - return 1; - if (!kw_allowed) { -#if CYTHON_ASSUME_SAFE_MACROS - key = PyTuple_GET_ITEM(kw, 0); -#else - key = PyTuple_GetItem(kw, pos); - if (!key) return 0; -#endif - goto invalid_keyword; - } -#if PY_VERSION_HEX < 0x03090000 - for (pos = 0; pos < kwsize; pos++) { -#if CYTHON_ASSUME_SAFE_MACROS - key = PyTuple_GET_ITEM(kw, pos); -#else - key = PyTuple_GetItem(kw, pos); - if (!key) return 0; -#endif - if (unlikely(!PyUnicode_Check(key))) - goto invalid_keyword_type; - } -#endif - return 1; - } - while (PyDict_Next(kw, &pos, &key, 0)) { - #if PY_MAJOR_VERSION < 3 - if (unlikely(!PyString_Check(key))) - #endif - if (unlikely(!PyUnicode_Check(key))) - goto invalid_keyword_type; - } - if (!kw_allowed && unlikely(key)) - goto invalid_keyword; - return 1; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - return 0; -#endif -invalid_keyword: - #if PY_MAJOR_VERSION < 3 - PyErr_Format(PyExc_TypeError, - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - PyErr_Format(PyExc_TypeError, - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif - return 0; -} - -/* GetAttr3 */ -#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 -static PyObject *__Pyx_GetAttr3Default(PyObject *d) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - return NULL; - __Pyx_PyErr_Clear(); - Py_INCREF(d); - return d; -} -#endif -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { - PyObject *r; -#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 - int res = PyObject_GetOptionalAttr(o, n, &r); - return (res != 0) ? r : __Pyx_NewRef(d); -#else - #if CYTHON_USE_TYPE_SLOTS - if (likely(PyString_Check(n))) { - r = __Pyx_PyObject_GetAttrStrNoError(o, n); - if (unlikely(!r) && likely(!PyErr_Occurred())) { - r = __Pyx_NewRef(d); - } - return r; - } - #endif - r = PyObject_GetAttr(o, n); - return (likely(r)) ? 
r : __Pyx_GetAttr3Default(d); -#endif -} - -/* RaiseUnexpectedTypeError */ -static int -__Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) -{ - __Pyx_TypeName obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); - PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, - expected, obj_type_name); - __Pyx_DECREF_TypeName(obj_type_name); - return 0; -} - -/* Import */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { - PyObject *module = 0; - PyObject *empty_dict = 0; - PyObject *empty_list = 0; - #if PY_MAJOR_VERSION < 3 - PyObject *py_import; - py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); - if (unlikely(!py_import)) - goto bad; - if (!from_list) { - empty_list = PyList_New(0); - if (unlikely(!empty_list)) - goto bad; - from_list = empty_list; - } - #endif - empty_dict = PyDict_New(); - if (unlikely(!empty_dict)) - goto bad; - { - #if PY_MAJOR_VERSION >= 3 - if (level == -1) { - if (strchr(__Pyx_MODULE_NAME, '.') != NULL) { - module = PyImport_ImportModuleLevelObject( - name, __pyx_d, empty_dict, from_list, 1); - if (unlikely(!module)) { - if (unlikely(!PyErr_ExceptionMatches(PyExc_ImportError))) - goto bad; - PyErr_Clear(); - } - } - level = 0; - } - #endif - if (!module) { - #if PY_MAJOR_VERSION < 3 - PyObject *py_level = PyInt_FromLong(level); - if (unlikely(!py_level)) - goto bad; - module = PyObject_CallFunctionObjArgs(py_import, - name, __pyx_d, empty_dict, from_list, py_level, (PyObject *)NULL); - Py_DECREF(py_level); - #else - module = PyImport_ImportModuleLevelObject( - name, __pyx_d, empty_dict, from_list, level); - #endif - } - } -bad: - Py_XDECREF(empty_dict); - Py_XDECREF(empty_list); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_import); - #endif - return module; -} - -/* ImportFrom */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { - PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); - if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { - const char* module_name_str = 0; - PyObject* module_name = 0; - PyObject* module_dot = 0; - PyObject* full_name = 0; - PyErr_Clear(); - module_name_str = PyModule_GetName(module); - if (unlikely(!module_name_str)) { goto modbad; } - module_name = PyUnicode_FromString(module_name_str); - if (unlikely(!module_name)) { goto modbad; } - module_dot = PyUnicode_Concat(module_name, __pyx_kp_u__3); - if (unlikely(!module_dot)) { goto modbad; } - full_name = PyUnicode_Concat(module_dot, name); - if (unlikely(!full_name)) { goto modbad; } - #if PY_VERSION_HEX < 0x030700A1 || (CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM < 0x07030400) - { - PyObject *modules = PyImport_GetModuleDict(); - if (unlikely(!modules)) - goto modbad; - value = PyObject_GetItem(modules, full_name); - } - #else - value = PyImport_GetModule(full_name); - #endif - modbad: - Py_XDECREF(full_name); - Py_XDECREF(module_dot); - Py_XDECREF(module_name); - } - if (unlikely(!value)) { - PyErr_Format(PyExc_ImportError, - #if PY_MAJOR_VERSION < 3 - "cannot import name %.230s", PyString_AS_STRING(name)); - #else - "cannot import name %S", name); - #endif - } - return value; -} - -/* RaiseException */ -#if PY_MAJOR_VERSION < 3 -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - __Pyx_PyThreadState_declare - CYTHON_UNUSED_VAR(cause); - Py_XINCREF(type); - if (!value || value == Py_None) - value = NULL; - else - Py_INCREF(value); - if (!tb || tb == Py_None) - tb = NULL; - else { - Py_INCREF(tb); - if (!PyTraceBack_Check(tb)) 
{ - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto raise_error; - } - } - if (PyType_Check(type)) { -#if CYTHON_COMPILING_IN_PYPY - if (!value) { - Py_INCREF(Py_None); - value = Py_None; - } -#endif - PyErr_NormalizeException(&type, &value, &tb); - } else { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto raise_error; - } - value = type; - type = (PyObject*) Py_TYPE(type); - Py_INCREF(type); - if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto raise_error; - } - } - __Pyx_PyThreadState_assign - __Pyx_ErrRestore(type, value, tb); - return; -raise_error: - Py_XDECREF(value); - Py_XDECREF(type); - Py_XDECREF(tb); - return; -} -#else -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - PyObject* owned_instance = NULL; - if (tb == Py_None) { - tb = 0; - } else if (tb && !PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto bad; - } - if (value == Py_None) - value = 0; - if (PyExceptionInstance_Check(type)) { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto bad; - } - value = type; - type = (PyObject*) Py_TYPE(value); - } else if (PyExceptionClass_Check(type)) { - PyObject *instance_class = NULL; - if (value && PyExceptionInstance_Check(value)) { - instance_class = (PyObject*) Py_TYPE(value); - if (instance_class != type) { - int is_subclass = PyObject_IsSubclass(instance_class, type); - if (!is_subclass) { - instance_class = NULL; - } else if (unlikely(is_subclass == -1)) { - goto bad; - } else { - type = instance_class; - } - } - } - if (!instance_class) { - PyObject *args; - if (!value) - args = PyTuple_New(0); - else if (PyTuple_Check(value)) { - Py_INCREF(value); - args = value; - } else - args = PyTuple_Pack(1, value); - if (!args) - goto bad; - owned_instance = PyObject_Call(type, args, NULL); - Py_DECREF(args); - if (!owned_instance) - goto bad; - value = owned_instance; - if (!PyExceptionInstance_Check(value)) { - PyErr_Format(PyExc_TypeError, - "calling %R should have returned an instance of " - "BaseException, not %R", - type, Py_TYPE(value)); - goto bad; - } - } - } else { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto bad; - } - if (cause) { - PyObject *fixed_cause; - if (cause == Py_None) { - fixed_cause = NULL; - } else if (PyExceptionClass_Check(cause)) { - fixed_cause = PyObject_CallObject(cause, NULL); - if (fixed_cause == NULL) - goto bad; - } else if (PyExceptionInstance_Check(cause)) { - fixed_cause = cause; - Py_INCREF(fixed_cause); - } else { - PyErr_SetString(PyExc_TypeError, - "exception causes must derive from " - "BaseException"); - goto bad; - } - PyException_SetCause(value, fixed_cause); - } - PyErr_SetObject(type, value); - if (tb) { - #if PY_VERSION_HEX >= 0x030C00A6 - PyException_SetTraceback(value, tb); - #elif CYTHON_FAST_THREAD_STATE - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* tmp_tb = tstate->curexc_traceback; - if (tb != tmp_tb) { - Py_INCREF(tb); - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_tb); - } -#else - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); - Py_INCREF(tb); - PyErr_Restore(tmp_type, tmp_value, tb); - 
Py_XDECREF(tmp_tb); -#endif - } -bad: - Py_XDECREF(owned_instance); - return; -} -#endif - -/* GetItemInt */ -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { - PyObject *r; - if (unlikely(!j)) return NULL; - r = PyObject_GetItem(o, j); - Py_DECREF(j); - return r; -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyList_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { - PyObject *r = PyList_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyTuple_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS - if (is_list || PyList_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); - if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { - PyObject *r = PyList_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } - else if (PyTuple_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); - if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } else { - PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; - PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; - if (mm && mm->mp_subscript) { - PyObject *r, *key = PyInt_FromSsize_t(i); - if (unlikely(!key)) return NULL; - r = mm->mp_subscript(o, key); - Py_DECREF(key); - return r; - } - if (likely(sm && sm->sq_item)) { - if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { - Py_ssize_t l = sm->sq_length(o); - if (likely(l >= 0)) { - i += l; - } else { - if (!PyErr_ExceptionMatches(PyExc_OverflowError)) - return NULL; - PyErr_Clear(); - } - } - return sm->sq_item(o, i); - } - } -#else - if (is_list || !PyMapping_Check(o)) { - return PySequence_GetItem(o, i); - } -#endif - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -} - -/* GetAttr */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { -#if CYTHON_USE_TYPE_SLOTS -#if PY_MAJOR_VERSION >= 3 - if (likely(PyUnicode_Check(n))) -#else - if (likely(PyString_Check(n))) -#endif - return __Pyx_PyObject_GetAttrStr(o, n); -#endif - return PyObject_GetAttr(o, n); -} - -/* HasAttr */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { - PyObject *r; - if (unlikely(!__Pyx_PyBaseString_Check(n))) { - PyErr_SetString(PyExc_TypeError, - "hasattr(): attribute name must be string"); - return -1; - } - r = __Pyx_GetAttr(o, n); - if (!r) { - PyErr_Clear(); - return 0; - } else { - Py_DECREF(r); - return 1; - } -} - -/* FixUpExtensionType */ -#if CYTHON_USE_TYPE_SPECS -static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { -#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - CYTHON_UNUSED_VAR(spec); - CYTHON_UNUSED_VAR(type); -#else - const PyType_Slot *slot = spec->slots; - while (slot && slot->slot && slot->slot != Py_tp_members) - slot++; - if (slot && slot->slot == Py_tp_members) { - int changed = 0; -#if !(PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON) - const -#endif - PyMemberDef *memb = (PyMemberDef*) slot->pfunc; - while (memb && memb->name) { - if (memb->name[0] == '_' && memb->name[1] == '_') { -#if PY_VERSION_HEX < 0x030900b1 - if (strcmp(memb->name, "__weaklistoffset__") == 0) { - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); - type->tp_weaklistoffset = memb->offset; - changed = 1; - } - else if (strcmp(memb->name, "__dictoffset__") == 0) { - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); - type->tp_dictoffset = memb->offset; - changed = 1; - } -#if CYTHON_METH_FASTCALL - else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); -#if PY_VERSION_HEX >= 0x030800b4 - type->tp_vectorcall_offset = memb->offset; -#else - type->tp_print = (printfunc) memb->offset; -#endif - changed = 1; - } -#endif -#else - if ((0)); -#endif -#if PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON - else if (strcmp(memb->name, "__module__") == 0) { - PyObject *descr; - assert(memb->type == T_OBJECT); - assert(memb->flags == 0 || memb->flags == READONLY); - descr = PyDescr_NewMember(type, memb); - if (unlikely(!descr)) - return -1; - if (unlikely(PyDict_SetItem(type->tp_dict, PyDescr_NAME(descr), descr) < 0)) { - Py_DECREF(descr); - return -1; - } - Py_DECREF(descr); - changed = 1; - } -#endif - } - memb++; - } - if (changed) - 
PyType_Modified(type); - } -#endif - return 0; -} -#endif - -/* PyObjectCallNoArg */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { - PyObject *arg[2] = {NULL, NULL}; - return __Pyx_PyObject_FastCall(func, arg + 1, 0 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); -} - -/* PyObjectGetMethod */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { - PyObject *attr; -#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP - __Pyx_TypeName type_name; - PyTypeObject *tp = Py_TYPE(obj); - PyObject *descr; - descrgetfunc f = NULL; - PyObject **dictptr, *dict; - int meth_found = 0; - assert (*method == NULL); - if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; - } - if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { - return 0; - } - descr = _PyType_Lookup(tp, name); - if (likely(descr != NULL)) { - Py_INCREF(descr); -#if defined(Py_TPFLAGS_METHOD_DESCRIPTOR) && Py_TPFLAGS_METHOD_DESCRIPTOR - if (__Pyx_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)) -#elif PY_MAJOR_VERSION >= 3 - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type))) - #endif -#else - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr))) - #endif -#endif - { - meth_found = 1; - } else { - f = Py_TYPE(descr)->tp_descr_get; - if (f != NULL && PyDescr_IsData(descr)) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - } - } - dictptr = _PyObject_GetDictPtr(obj); - if (dictptr != NULL && (dict = *dictptr) != NULL) { - Py_INCREF(dict); - attr = __Pyx_PyDict_GetItemStr(dict, name); - if (attr != NULL) { - Py_INCREF(attr); - Py_DECREF(dict); - Py_XDECREF(descr); - goto try_unpack; - } - Py_DECREF(dict); - } - if (meth_found) { - *method = descr; - return 1; - } - if (f != NULL) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - if (likely(descr != NULL)) { - *method = descr; - return 0; - } - type_name = __Pyx_PyType_GetName(tp); - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", - type_name, name); -#else - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", - type_name, PyString_AS_STRING(name)); -#endif - __Pyx_DECREF_TypeName(type_name); - return 0; -#else - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; -#endif -try_unpack: -#if CYTHON_UNPACK_METHODS - if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { - PyObject *function = PyMethod_GET_FUNCTION(attr); - Py_INCREF(function); - Py_DECREF(attr); - *method = function; - return 1; - } -#endif - *method = attr; - return 0; -} - -/* PyObjectCallMethod0 */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { - PyObject *method = NULL, *result = NULL; - int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); - if (likely(is_method)) { - result = __Pyx_PyObject_CallOneArg(method, obj); - Py_DECREF(method); - return result; - } - if (unlikely(!method)) goto bad; - result = __Pyx_PyObject_CallNoArg(method); - Py_DECREF(method); -bad: - return result; -} - -/* 
ValidateBasesTuple */ -#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS -static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases) { - Py_ssize_t i, n; -#if CYTHON_ASSUME_SAFE_MACROS - n = PyTuple_GET_SIZE(bases); -#else - n = PyTuple_Size(bases); - if (n < 0) return -1; -#endif - for (i = 1; i < n; i++) - { -#if CYTHON_AVOID_BORROWED_REFS - PyObject *b0 = PySequence_GetItem(bases, i); - if (!b0) return -1; -#elif CYTHON_ASSUME_SAFE_MACROS - PyObject *b0 = PyTuple_GET_ITEM(bases, i); -#else - PyObject *b0 = PyTuple_GetItem(bases, i); - if (!b0) return -1; -#endif - PyTypeObject *b; -#if PY_MAJOR_VERSION < 3 - if (PyClass_Check(b0)) - { - PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class", - PyString_AS_STRING(((PyClassObject*)b0)->cl_name)); -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } -#endif - b = (PyTypeObject*) b0; - if (!__Pyx_PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE)) - { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); - PyErr_Format(PyExc_TypeError, - "base class '" __Pyx_FMT_TYPENAME "' is not a heap type", b_name); - __Pyx_DECREF_TypeName(b_name); -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } - if (dictoffset == 0) - { - Py_ssize_t b_dictoffset = 0; -#if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY - b_dictoffset = b->tp_dictoffset; -#else - PyObject *py_b_dictoffset = PyObject_GetAttrString((PyObject*)b, "__dictoffset__"); - if (!py_b_dictoffset) goto dictoffset_return; - b_dictoffset = PyLong_AsSsize_t(py_b_dictoffset); - Py_DECREF(py_b_dictoffset); - if (b_dictoffset == -1 && PyErr_Occurred()) goto dictoffset_return; -#endif - if (b_dictoffset) { - { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); - PyErr_Format(PyExc_TypeError, - "extension type '%.200s' has no __dict__ slot, " - "but base type '" __Pyx_FMT_TYPENAME "' has: " - "either add 'cdef dict __dict__' to the extension type " - "or add '__slots__ = [...]' to the base type", - type_name, b_name); - __Pyx_DECREF_TypeName(b_name); - } -#if !(CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY) - dictoffset_return: -#endif -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } - } -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - } - return 0; -} -#endif - -/* PyType_Ready */ -static int __Pyx_PyType_Ready(PyTypeObject *t) { -#if CYTHON_USE_TYPE_SPECS || !(CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API) || defined(PYSTON_MAJOR_VERSION) - (void)__Pyx_PyObject_CallMethod0; -#if CYTHON_USE_TYPE_SPECS - (void)__Pyx_validate_bases_tuple; -#endif - return PyType_Ready(t); -#else - int r; - PyObject *bases = __Pyx_PyType_GetSlot(t, tp_bases, PyObject*); - if (bases && unlikely(__Pyx_validate_bases_tuple(t->tp_name, t->tp_dictoffset, bases) == -1)) - return -1; -#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) - { - int gc_was_enabled; - #if PY_VERSION_HEX >= 0x030A00b1 - gc_was_enabled = PyGC_Disable(); - (void)__Pyx_PyObject_CallMethod0; - #else - PyObject *ret, *py_status; - PyObject *gc = NULL; - #if PY_VERSION_HEX >= 0x030700a1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM+0 >= 0x07030400) - gc = PyImport_GetModule(__pyx_kp_u_gc); - #endif - if (unlikely(!gc)) gc = PyImport_Import(__pyx_kp_u_gc); - if (unlikely(!gc)) return -1; - py_status = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_isenabled); - if (unlikely(!py_status)) { - Py_DECREF(gc); - return -1; - } - gc_was_enabled = 
__Pyx_PyObject_IsTrue(py_status); - Py_DECREF(py_status); - if (gc_was_enabled > 0) { - ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_disable); - if (unlikely(!ret)) { - Py_DECREF(gc); - return -1; - } - Py_DECREF(ret); - } else if (unlikely(gc_was_enabled == -1)) { - Py_DECREF(gc); - return -1; - } - #endif - t->tp_flags |= Py_TPFLAGS_HEAPTYPE; -#if PY_VERSION_HEX >= 0x030A0000 - t->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; -#endif -#else - (void)__Pyx_PyObject_CallMethod0; -#endif - r = PyType_Ready(t); -#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) - t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; - #if PY_VERSION_HEX >= 0x030A00b1 - if (gc_was_enabled) - PyGC_Enable(); - #else - if (gc_was_enabled) { - PyObject *tp, *v, *tb; - PyErr_Fetch(&tp, &v, &tb); - ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_enable); - if (likely(ret || r == -1)) { - Py_XDECREF(ret); - PyErr_Restore(tp, v, tb); - } else { - Py_XDECREF(tp); - Py_XDECREF(v); - Py_XDECREF(tb); - r = -1; - } - } - Py_DECREF(gc); - #endif - } -#endif - return r; -#endif -} - -/* PyObject_GenericGetAttrNoDict */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { - __Pyx_TypeName type_name = __Pyx_PyType_GetName(tp); - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", - type_name, attr_name); -#else - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", - type_name, PyString_AS_STRING(attr_name)); -#endif - __Pyx_DECREF_TypeName(type_name); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { - PyObject *descr; - PyTypeObject *tp = Py_TYPE(obj); - if (unlikely(!PyString_Check(attr_name))) { - return PyObject_GenericGetAttr(obj, attr_name); - } - assert(!tp->tp_dictoffset); - descr = _PyType_Lookup(tp, attr_name); - if (unlikely(!descr)) { - return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); - } - Py_INCREF(descr); - #if PY_MAJOR_VERSION < 3 - if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) - #endif - { - descrgetfunc f = Py_TYPE(descr)->tp_descr_get; - if (unlikely(f)) { - PyObject *res = f(descr, obj, (PyObject *)tp); - Py_DECREF(descr); - return res; - } - } - return descr; -} -#endif - -/* PyObject_GenericGetAttr */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { - if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { - return PyObject_GenericGetAttr(obj, attr_name); - } - return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); -} -#endif - -/* SetupReduce */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { - int ret; - PyObject *name_attr; - name_attr = __Pyx_PyObject_GetAttrStrNoError(meth, __pyx_n_s_name); - if (likely(name_attr)) { - ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); - } else { - ret = -1; - } - if (unlikely(ret < 0)) { - PyErr_Clear(); - ret = 0; - } - Py_XDECREF(name_attr); - return ret; -} -static int __Pyx_setup_reduce(PyObject* type_obj) { - int ret = 0; - PyObject *object_reduce = NULL; - PyObject *object_getstate = NULL; - PyObject *object_reduce_ex = NULL; - PyObject *reduce = NULL; - PyObject *reduce_ex = NULL; - PyObject *reduce_cython = NULL; - PyObject *setstate = NULL; - PyObject *setstate_cython = 
NULL; - PyObject *getstate = NULL; -#if CYTHON_USE_PYTYPE_LOOKUP - getstate = _PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate); -#else - getstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_getstate); - if (!getstate && PyErr_Occurred()) { - goto __PYX_BAD; - } -#endif - if (getstate) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_getstate = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_getstate); -#else - object_getstate = __Pyx_PyObject_GetAttrStrNoError((PyObject*)&PyBaseObject_Type, __pyx_n_s_getstate); - if (!object_getstate && PyErr_Occurred()) { - goto __PYX_BAD; - } -#endif - if (object_getstate != getstate) { - goto __PYX_GOOD; - } - } -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#else - object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#endif - reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; - if (reduce_ex == object_reduce_ex) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#else - object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#endif - reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; - if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { - reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); - if (likely(reduce_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (reduce == object_reduce || PyErr_Occurred()) { - goto __PYX_BAD; - } - setstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate); - if (!setstate) PyErr_Clear(); - if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { - setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); - if (likely(setstate_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (!setstate || PyErr_Occurred()) { - goto __PYX_BAD; - } - } - PyType_Modified((PyTypeObject*)type_obj); - } - } - goto __PYX_GOOD; -__PYX_BAD: - if (!PyErr_Occurred()) { - __Pyx_TypeName type_obj_name = - __Pyx_PyType_GetName((PyTypeObject*)type_obj); - PyErr_Format(PyExc_RuntimeError, - "Unable to initialize pickling for " __Pyx_FMT_TYPENAME, type_obj_name); - __Pyx_DECREF_TypeName(type_obj_name); - } - ret = -1; -__PYX_GOOD: -#if !CYTHON_USE_PYTYPE_LOOKUP - Py_XDECREF(object_reduce); - Py_XDECREF(object_reduce_ex); - Py_XDECREF(object_getstate); - Py_XDECREF(getstate); -#endif - Py_XDECREF(reduce); - Py_XDECREF(reduce_ex); - Py_XDECREF(reduce_cython); - Py_XDECREF(setstate); - Py_XDECREF(setstate_cython); - return ret; -} -#endif - -/* FetchSharedCythonModule */ -static PyObject *__Pyx_FetchSharedCythonABIModule(void) { - return __Pyx_PyImport_AddModuleRef((char*) 
__PYX_ABI_MODULE_NAME); -} - -/* FetchCommonType */ -static int __Pyx_VerifyCachedType(PyObject *cached_type, - const char *name, - Py_ssize_t basicsize, - Py_ssize_t expected_basicsize) { - if (!PyType_Check(cached_type)) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s is not a type object", name); - return -1; - } - if (basicsize != expected_basicsize) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s has the wrong size, try recompiling", - name); - return -1; - } - return 0; -} -#if !CYTHON_USE_TYPE_SPECS -static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { - PyObject* abi_module; - const char* object_name; - PyTypeObject *cached_type = NULL; - abi_module = __Pyx_FetchSharedCythonABIModule(); - if (!abi_module) return NULL; - object_name = strrchr(type->tp_name, '.'); - object_name = object_name ? object_name+1 : type->tp_name; - cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); - if (cached_type) { - if (__Pyx_VerifyCachedType( - (PyObject *)cached_type, - object_name, - cached_type->tp_basicsize, - type->tp_basicsize) < 0) { - goto bad; - } - goto done; - } - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; - PyErr_Clear(); - if (PyType_Ready(type) < 0) goto bad; - if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) - goto bad; - Py_INCREF(type); - cached_type = type; -done: - Py_DECREF(abi_module); - return cached_type; -bad: - Py_XDECREF(cached_type); - cached_type = NULL; - goto done; -} -#else -static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { - PyObject *abi_module, *cached_type = NULL; - const char* object_name = strrchr(spec->name, '.'); - object_name = object_name ? object_name+1 : spec->name; - abi_module = __Pyx_FetchSharedCythonABIModule(); - if (!abi_module) return NULL; - cached_type = PyObject_GetAttrString(abi_module, object_name); - if (cached_type) { - Py_ssize_t basicsize; -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *py_basicsize; - py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); - if (unlikely(!py_basicsize)) goto bad; - basicsize = PyLong_AsSsize_t(py_basicsize); - Py_DECREF(py_basicsize); - py_basicsize = 0; - if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; -#else - basicsize = likely(PyType_Check(cached_type)) ? 
((PyTypeObject*) cached_type)->tp_basicsize : -1; -#endif - if (__Pyx_VerifyCachedType( - cached_type, - object_name, - basicsize, - spec->basicsize) < 0) { - goto bad; - } - goto done; - } - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; - PyErr_Clear(); - CYTHON_UNUSED_VAR(module); - cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); - if (unlikely(!cached_type)) goto bad; - if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; - if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; -done: - Py_DECREF(abi_module); - assert(cached_type == NULL || PyType_Check(cached_type)); - return (PyTypeObject *) cached_type; -bad: - Py_XDECREF(cached_type); - cached_type = NULL; - goto done; -} -#endif - -/* PyVectorcallFastCallDict */ -#if CYTHON_METH_FASTCALL -static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) -{ - PyObject *res = NULL; - PyObject *kwnames; - PyObject **newargs; - PyObject **kwvalues; - Py_ssize_t i, pos; - size_t j; - PyObject *key, *value; - unsigned long keys_are_strings; - Py_ssize_t nkw = PyDict_GET_SIZE(kw); - newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); - if (unlikely(newargs == NULL)) { - PyErr_NoMemory(); - return NULL; - } - for (j = 0; j < nargs; j++) newargs[j] = args[j]; - kwnames = PyTuple_New(nkw); - if (unlikely(kwnames == NULL)) { - PyMem_Free(newargs); - return NULL; - } - kwvalues = newargs + nargs; - pos = i = 0; - keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; - while (PyDict_Next(kw, &pos, &key, &value)) { - keys_are_strings &= Py_TYPE(key)->tp_flags; - Py_INCREF(key); - Py_INCREF(value); - PyTuple_SET_ITEM(kwnames, i, key); - kwvalues[i] = value; - i++; - } - if (unlikely(!keys_are_strings)) { - PyErr_SetString(PyExc_TypeError, "keywords must be strings"); - goto cleanup; - } - res = vc(func, newargs, nargs, kwnames); -cleanup: - Py_DECREF(kwnames); - for (i = 0; i < nkw; i++) - Py_DECREF(kwvalues[i]); - PyMem_Free(newargs); - return res; -} -static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) -{ - if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { - return vc(func, args, nargs, NULL); - } - return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); -} -#endif - -/* CythonFunctionShared */ -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { - if (__Pyx_CyFunction_Check(func)) { - return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc; - } else if (PyCFunction_Check(func)) { - return PyCFunction_GetFunction(func) == (PyCFunction) cfunc; - } - return 0; -} -#else -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { - return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; -} -#endif -static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - __Pyx_Py_XDECREF_SET( - __Pyx_CyFunction_GetClassObj(f), - ((classobj) ? __Pyx_NewRef(classobj) : NULL)); -#else - __Pyx_Py_XDECREF_SET( - ((PyCMethodObject *) (f))->mm_class, - (PyTypeObject*)((classobj) ? 
__Pyx_NewRef(classobj) : NULL)); -#endif -} -static PyObject * -__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) -{ - CYTHON_UNUSED_VAR(closure); - if (unlikely(op->func_doc == NULL)) { -#if CYTHON_COMPILING_IN_LIMITED_API - op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); - if (unlikely(!op->func_doc)) return NULL; -#else - if (((PyCFunctionObject*)op)->m_ml->ml_doc) { -#if PY_MAJOR_VERSION >= 3 - op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); -#else - op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); -#endif - if (unlikely(op->func_doc == NULL)) - return NULL; - } else { - Py_INCREF(Py_None); - return Py_None; - } -#endif - } - Py_INCREF(op->func_doc); - return op->func_doc; -} -static int -__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (value == NULL) { - value = Py_None; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_doc, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(op->func_name == NULL)) { -#if CYTHON_COMPILING_IN_LIMITED_API - op->func_name = PyObject_GetAttrString(op->func, "__name__"); -#elif PY_MAJOR_VERSION >= 3 - op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); -#else - op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); -#endif - if (unlikely(op->func_name == NULL)) - return NULL; - } - Py_INCREF(op->func_name); - return op->func_name; -} -static int -__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__name__ must be set to a string object"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_name, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - Py_INCREF(op->func_qualname); - return op->func_qualname; -} -static int -__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__qualname__ must be set to a string object"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_qualname, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(op->func_dict == NULL)) { - op->func_dict = PyDict_New(); - if (unlikely(op->func_dict == NULL)) - return NULL; - } - Py_INCREF(op->func_dict); - return op->func_dict; -} -static int -__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(value == NULL)) { - PyErr_SetString(PyExc_TypeError, - "function's dictionary may not be deleted"); - return -1; - } - if (unlikely(!PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "setting function's dictionary to a non-dict"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_dict, value); - 
return 0; -} -static PyObject * -__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - Py_INCREF(op->func_globals); - return op->func_globals; -} -static PyObject * -__Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(op); - CYTHON_UNUSED_VAR(context); - Py_INCREF(Py_None); - return Py_None; -} -static PyObject * -__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) -{ - PyObject* result = (op->func_code) ? op->func_code : Py_None; - CYTHON_UNUSED_VAR(context); - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { - int result = 0; - PyObject *res = op->defaults_getter((PyObject *) op); - if (unlikely(!res)) - return -1; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - op->defaults_tuple = PyTuple_GET_ITEM(res, 0); - Py_INCREF(op->defaults_tuple); - op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); - Py_INCREF(op->defaults_kwdict); - #else - op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); - if (unlikely(!op->defaults_tuple)) result = -1; - else { - op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); - if (unlikely(!op->defaults_kwdict)) result = -1; - } - #endif - Py_DECREF(res); - return result; -} -static int -__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value) { - value = Py_None; - } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__defaults__ must be set to a tuple object"); - return -1; - } - PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " - "currently affect the values used in function calls", 1); - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->defaults_tuple; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - if (op->defaults_getter) { - if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; - result = op->defaults_tuple; - } else { - result = Py_None; - } - } - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value) { - value = Py_None; - } else if (unlikely(value != Py_None && !PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__kwdefaults__ must be set to a dict object"); - return -1; - } - PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " - "currently affect the values used in function calls", 1); - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->defaults_kwdict; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - if (op->defaults_getter) { - if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; - result = op->defaults_kwdict; - } else { - result = Py_None; - } - } - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value || value == Py_None) { - value = NULL; - } else if (unlikely(!PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__annotations__ 
must be set to a dict object"); - return -1; - } - Py_XINCREF(value); - __Pyx_Py_XDECREF_SET(op->func_annotations, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->func_annotations; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - result = PyDict_New(); - if (unlikely(!result)) return NULL; - op->func_annotations = result; - } - Py_INCREF(result); - return result; -} -static PyObject * -__Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { - int is_coroutine; - CYTHON_UNUSED_VAR(context); - if (op->func_is_coroutine) { - return __Pyx_NewRef(op->func_is_coroutine); - } - is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; -#if PY_VERSION_HEX >= 0x03050000 - if (is_coroutine) { - PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; - fromlist = PyList_New(1); - if (unlikely(!fromlist)) return NULL; - Py_INCREF(marker); -#if CYTHON_ASSUME_SAFE_MACROS - PyList_SET_ITEM(fromlist, 0, marker); -#else - if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { - Py_DECREF(marker); - Py_DECREF(fromlist); - return NULL; - } -#endif - module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); - Py_DECREF(fromlist); - if (unlikely(!module)) goto ignore; - op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); - Py_DECREF(module); - if (likely(op->func_is_coroutine)) { - return __Pyx_NewRef(op->func_is_coroutine); - } -ignore: - PyErr_Clear(); - } -#endif - op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); - return __Pyx_NewRef(op->func_is_coroutine); -} -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject * -__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { - CYTHON_UNUSED_VAR(context); - return PyObject_GetAttrString(op->func, "__module__"); -} -static int -__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - return PyObject_SetAttrString(op->func, "__module__", value); -} -#endif -static PyGetSetDef __pyx_CyFunction_getsets[] = { - {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, - {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, - {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, - {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, - {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, - {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, - {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, - {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, - {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, - {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, - {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, - {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, - {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, - {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, - {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, - {(char *) 
"__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, - {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, - {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, -#if CYTHON_COMPILING_IN_LIMITED_API - {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, -#endif - {0, 0, 0, 0, 0} -}; -static PyMemberDef __pyx_CyFunction_members[] = { -#if !CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, -#endif -#if CYTHON_USE_TYPE_SPECS - {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, -#if CYTHON_METH_FASTCALL -#if CYTHON_BACKPORT_VECTORCALL - {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, -#else -#if !CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, -#endif -#endif -#endif -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, -#else - {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, -#endif -#endif - {0, 0, 0, 0, 0} -}; -static PyObject * -__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) -{ - CYTHON_UNUSED_VAR(args); -#if PY_MAJOR_VERSION >= 3 - Py_INCREF(m->func_qualname); - return m->func_qualname; -#else - return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); -#endif -} -static PyMethodDef __pyx_CyFunction_methods[] = { - {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, - {0, 0, 0, 0} -}; -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API -#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) -#else -#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) -#endif -static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { -#if !CYTHON_COMPILING_IN_LIMITED_API - PyCFunctionObject *cf = (PyCFunctionObject*) op; -#endif - if (unlikely(op == NULL)) - return NULL; -#if CYTHON_COMPILING_IN_LIMITED_API - op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); - if (unlikely(!op->func)) return NULL; -#endif - op->flags = flags; - __Pyx_CyFunction_weakreflist(op) = NULL; -#if !CYTHON_COMPILING_IN_LIMITED_API - cf->m_ml = ml; - cf->m_self = (PyObject *) op; -#endif - Py_XINCREF(closure); - op->func_closure = closure; -#if !CYTHON_COMPILING_IN_LIMITED_API - Py_XINCREF(module); - cf->m_module = module; -#endif - op->func_dict = NULL; - op->func_name = NULL; - Py_INCREF(qualname); - op->func_qualname = qualname; - op->func_doc = NULL; -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - op->func_classobj = NULL; -#else - ((PyCMethodObject*)op)->mm_class = NULL; -#endif - op->func_globals = globals; - Py_INCREF(op->func_globals); - Py_XINCREF(code); - op->func_code = code; - op->defaults_pyobjects = 0; - op->defaults_size = 0; - op->defaults = NULL; - op->defaults_tuple = NULL; - op->defaults_kwdict = NULL; - op->defaults_getter = NULL; - op->func_annotations = NULL; - op->func_is_coroutine = NULL; -#if 
CYTHON_METH_FASTCALL - switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { - case METH_NOARGS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; - break; - case METH_O: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; - break; - case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; - break; - case METH_FASTCALL | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS; - break; - case METH_VARARGS | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = NULL; - break; - default: - PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); - Py_DECREF(op); - return NULL; - } -#endif - return (PyObject *) op; -} -static int -__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m) -{ - Py_CLEAR(m->func_closure); -#if CYTHON_COMPILING_IN_LIMITED_API - Py_CLEAR(m->func); -#else - Py_CLEAR(((PyCFunctionObject*)m)->m_module); -#endif - Py_CLEAR(m->func_dict); - Py_CLEAR(m->func_name); - Py_CLEAR(m->func_qualname); - Py_CLEAR(m->func_doc); - Py_CLEAR(m->func_globals); - Py_CLEAR(m->func_code); -#if !CYTHON_COMPILING_IN_LIMITED_API -#if PY_VERSION_HEX < 0x030900B1 - Py_CLEAR(__Pyx_CyFunction_GetClassObj(m)); -#else - { - PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class; - ((PyCMethodObject *) (m))->mm_class = NULL; - Py_XDECREF(cls); - } -#endif -#endif - Py_CLEAR(m->defaults_tuple); - Py_CLEAR(m->defaults_kwdict); - Py_CLEAR(m->func_annotations); - Py_CLEAR(m->func_is_coroutine); - if (m->defaults) { - PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); - int i; - for (i = 0; i < m->defaults_pyobjects; i++) - Py_XDECREF(pydefaults[i]); - PyObject_Free(m->defaults); - m->defaults = NULL; - } - return 0; -} -static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m) -{ - if (__Pyx_CyFunction_weakreflist(m) != NULL) - PyObject_ClearWeakRefs((PyObject *) m); - __Pyx_CyFunction_clear(m); - __Pyx_PyHeapTypeObject_GC_Del(m); -} -static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m) -{ - PyObject_GC_UnTrack(m); - __Pyx__CyFunction_dealloc(m); -} -static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg) -{ - Py_VISIT(m->func_closure); -#if CYTHON_COMPILING_IN_LIMITED_API - Py_VISIT(m->func); -#else - Py_VISIT(((PyCFunctionObject*)m)->m_module); -#endif - Py_VISIT(m->func_dict); - Py_VISIT(m->func_name); - Py_VISIT(m->func_qualname); - Py_VISIT(m->func_doc); - Py_VISIT(m->func_globals); - Py_VISIT(m->func_code); -#if !CYTHON_COMPILING_IN_LIMITED_API - Py_VISIT(__Pyx_CyFunction_GetClassObj(m)); -#endif - Py_VISIT(m->defaults_tuple); - Py_VISIT(m->defaults_kwdict); - Py_VISIT(m->func_is_coroutine); - if (m->defaults) { - PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); - int i; - for (i = 0; i < m->defaults_pyobjects; i++) - Py_VISIT(pydefaults[i]); - } - return 0; -} -static PyObject* -__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op) -{ -#if PY_MAJOR_VERSION >= 3 - return PyUnicode_FromFormat("", - op->func_qualname, (void *)op); -#else - return PyString_FromFormat("", - PyString_AsString(op->func_qualname), (void *)op); -#endif -} -static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) { -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *f = ((__pyx_CyFunctionObject*)func)->func; - PyObject *py_name = NULL; - 
PyCFunction meth; - int flags; - meth = PyCFunction_GetFunction(f); - if (unlikely(!meth)) return NULL; - flags = PyCFunction_GetFlags(f); - if (unlikely(flags < 0)) return NULL; -#else - PyCFunctionObject* f = (PyCFunctionObject*)func; - PyCFunction meth = f->m_ml->ml_meth; - int flags = f->m_ml->ml_flags; -#endif - Py_ssize_t size; - switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { - case METH_VARARGS: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) - return (*meth)(self, arg); - break; - case METH_VARARGS | METH_KEYWORDS: - return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); - case METH_NOARGS: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) { -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(arg); -#else - size = PyTuple_Size(arg); - if (unlikely(size < 0)) return NULL; -#endif - if (likely(size == 0)) - return (*meth)(self, NULL); -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, - "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - py_name, size); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, - "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - f->m_ml->ml_name, size); -#endif - return NULL; - } - break; - case METH_O: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) { -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(arg); -#else - size = PyTuple_Size(arg); - if (unlikely(size < 0)) return NULL; -#endif - if (likely(size == 1)) { - PyObject *result, *arg0; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - arg0 = PyTuple_GET_ITEM(arg, 0); - #else - arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; - #endif - result = (*meth)(self, arg0); - #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) - Py_DECREF(arg0); - #endif - return result; - } -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, - "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - py_name, size); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, - "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - f->m_ml->ml_name, size); -#endif - return NULL; - } - break; - default: - PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); - return NULL; - } -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", - py_name); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", - f->m_ml->ml_name); -#endif - return NULL; -} -static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *self, *result; -#if CYTHON_COMPILING_IN_LIMITED_API - self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); - if (unlikely(!self) && PyErr_Occurred()) return NULL; -#else - self = ((PyCFunctionObject*)func)->m_self; -#endif - result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); - return result; -} -static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { - PyObject *result; - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; -#if CYTHON_METH_FASTCALL - 
__pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); - if (vc) { -#if CYTHON_ASSUME_SAFE_MACROS - return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); -#else - (void) &__Pyx_PyVectorcall_FastCallDict; - return PyVectorcall_Call(func, args, kw); -#endif - } -#endif - if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { - Py_ssize_t argc; - PyObject *new_args; - PyObject *self; -#if CYTHON_ASSUME_SAFE_MACROS - argc = PyTuple_GET_SIZE(args); -#else - argc = PyTuple_Size(args); - if (unlikely(!argc) < 0) return NULL; -#endif - new_args = PyTuple_GetSlice(args, 1, argc); - if (unlikely(!new_args)) - return NULL; - self = PyTuple_GetItem(args, 0); - if (unlikely(!self)) { - Py_DECREF(new_args); -#if PY_MAJOR_VERSION > 2 - PyErr_Format(PyExc_TypeError, - "unbound method %.200S() needs an argument", - cyfunc->func_qualname); -#else - PyErr_SetString(PyExc_TypeError, - "unbound method needs an argument"); -#endif - return NULL; - } - result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); - Py_DECREF(new_args); - } else { - result = __Pyx_CyFunction_Call(func, args, kw); - } - return result; -} -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames) -{ - int ret = 0; - if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { - if (unlikely(nargs < 1)) { - PyErr_Format(PyExc_TypeError, "%.200s() needs an argument", - ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); - return -1; - } - ret = 1; - } - if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); - return -1; - } - return ret; -} -static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - if (unlikely(nargs != 0)) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - def->ml_name, nargs); - return NULL; - } - return def->ml_meth(self, NULL); -} -static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - if (unlikely(nargs != 1)) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - def->ml_name, 
nargs); - return NULL; - } - return def->ml_meth(self, args[0]); -} -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); -} -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; - PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); -} -#endif -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_CyFunctionType_slots[] = { - {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, - {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, - {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, - {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, - {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, - {Py_tp_methods, (void *)__pyx_CyFunction_methods}, - {Py_tp_members, (void *)__pyx_CyFunction_members}, - {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, - {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, - {0, 0}, -}; -static PyType_Spec __pyx_CyFunctionType_spec = { - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - __pyx_CyFunctionType_slots -}; -#else -static PyTypeObject __pyx_CyFunctionType_type = { - PyVarObject_HEAD_INIT(0, 0) - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, - (destructor) __Pyx_CyFunction_dealloc, -#if !CYTHON_METH_FASTCALL - 0, -#elif CYTHON_BACKPORT_VECTORCALL - (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), -#else - offsetof(PyCFunctionObject, vectorcall), -#endif - 0, - 0, -#if PY_MAJOR_VERSION < 3 - 0, -#else - 0, -#endif - (reprfunc) __Pyx_CyFunction_repr, - 0, - 0, - 0, - 0, - __Pyx_CyFunction_CallAsMethod, - 0, - 0, - 0, - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - 0, - (traverseproc) __Pyx_CyFunction_traverse, - (inquiry) 
__Pyx_CyFunction_clear, - 0, -#if PY_VERSION_HEX < 0x030500A0 - offsetof(__pyx_CyFunctionObject, func_weakreflist), -#else - offsetof(PyCFunctionObject, m_weakreflist), -#endif - 0, - 0, - __pyx_CyFunction_methods, - __pyx_CyFunction_members, - __pyx_CyFunction_getsets, - 0, - 0, - __Pyx_PyMethod_New, - 0, - offsetof(__pyx_CyFunctionObject, func_dict), - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, -#if PY_VERSION_HEX >= 0x030400a1 - 0, -#endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, -#endif -#if __PYX_NEED_TP_PRINT_SLOT - 0, -#endif -#if PY_VERSION_HEX >= 0x030C0000 - 0, -#endif -#if PY_VERSION_HEX >= 0x030d00A4 - 0, -#endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, -#endif -}; -#endif -static int __pyx_CyFunction_init(PyObject *module) { -#if CYTHON_USE_TYPE_SPECS - __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); -#else - CYTHON_UNUSED_VAR(module); - __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); -#endif - if (unlikely(__pyx_CyFunctionType == NULL)) { - return -1; - } - return 0; -} -static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults = PyObject_Malloc(size); - if (unlikely(!m->defaults)) - return PyErr_NoMemory(); - memset(m->defaults, 0, size); - m->defaults_pyobjects = pyobjects; - m->defaults_size = size; - return m->defaults; -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_tuple = tuple; - Py_INCREF(tuple); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_kwdict = dict; - Py_INCREF(dict); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->func_annotations = dict; - Py_INCREF(dict); -} - -/* CythonFunction */ -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { - PyObject *op = __Pyx_CyFunction_Init( - PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), - ml, flags, qualname, closure, module, globals, code - ); - if (likely(op)) { - PyObject_GC_Track(op); - } - return op; -} - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - CYTHON_MAYBE_UNUSED_VAR(tstate); - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = 
PyObject_Not(use_cline_obj) ? Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} -#endif - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API - #ifndef Py_BUILD_CORE - #define Py_BUILD_CORE 1 - #endif - #include "internal/pycore_frame.h" -#endif -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, - PyObject *firstlineno, PyObject *name) { - PyObject 
*replace = NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; - replace = PyObject_GetAttrString(code, "replace"); - if (likely(replace)) { - PyObject *result; - result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); - Py_DECREF(replace); - return result; - } - PyErr_Clear(); - #if __PYX_LIMITED_VERSION_HEX < 0x030780000 - { - PyObject *compiled = NULL, *result = NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; - compiled = Py_CompileString( - "out = type(code)(\n" - " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" - " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" - " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" - " code.co_lnotab)\n", "", Py_file_input); - if (!compiled) return NULL; - result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); - Py_DECREF(compiled); - if (!result) PyErr_Print(); - Py_DECREF(result); - result = PyDict_GetItemString(scratch_dict, "out"); - if (result) Py_INCREF(result); - return result; - } - #else - return NULL; - #endif -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; - PyObject *replace = NULL, *getframe = NULL, *frame = NULL; - PyObject *exc_type, *exc_value, *exc_traceback; - int success = 0; - if (c_line) { - (void) __pyx_cfilenm; - (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); - } - PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); - code_object = Py_CompileString("_getframe()", filename, Py_eval_input); - if (unlikely(!code_object)) goto bad; - py_py_line = PyLong_FromLong(py_line); - if (unlikely(!py_py_line)) goto bad; - py_funcname = PyUnicode_FromString(funcname); - if (unlikely(!py_funcname)) goto bad; - dict = PyDict_New(); - if (unlikely(!dict)) goto bad; - { - PyObject *old_code_object = code_object; - code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); - Py_DECREF(old_code_object); - } - if (unlikely(!code_object)) goto bad; - getframe = PySys_GetObject("_getframe"); - if (unlikely(!getframe)) goto bad; - if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad; - frame = PyEval_EvalCode(code_object, dict, dict); - if (unlikely(!frame) || frame == Py_None) goto bad; - success = 1; - bad: - PyErr_Restore(exc_type, exc_value, exc_traceback); - Py_XDECREF(code_object); - Py_XDECREF(py_py_line); - Py_XDECREF(py_funcname); - Py_XDECREF(dict); - Py_XDECREF(replace); - if (success) { - PyTraceBack_Here( - (struct _frame*)frame); - } - Py_XDECREF(frame); -} -#else -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = NULL; - PyObject *py_funcname = NULL; - #if PY_MAJOR_VERSION < 3 - PyObject *py_srcfile = NULL; - py_srcfile = PyString_FromString(filename); - if (!py_srcfile) goto bad; - #endif - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto bad; - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto 
bad; - funcname = PyUnicode_AsUTF8(py_funcname); - if (!funcname) goto bad; - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - if (!py_funcname) goto bad; - #endif - } - #if PY_MAJOR_VERSION < 3 - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - #else - py_code = PyCode_NewEmpty(filename, funcname, py_line); - #endif - Py_XDECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_funcname); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_srcfile); - #endif - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject *ptype, *pvalue, *ptraceback; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) { - /* If the code object creation fails, then we should clear the - fetched exception references and propagate the new exception */ - Py_XDECREF(ptype); - Py_XDECREF(pvalue); - Py_XDECREF(ptraceback); - goto bad; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} -#endif - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(long) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned 
long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(long) <= sizeof(unsigned long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | 
(long)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } - } -#endif - if ((sizeof(long) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - long val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (long) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (long) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (long) -1; - } else { - stepval = v; - } - v = NULL; - val = (long) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((long) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 
0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((long) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (long) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - unsigned char *bytes = (unsigned char *)&value; -#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 - if (is_unsigned) { - return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); - } else { - return PyLong_FromNativeBytes(bytes, sizeof(value), -1); - } -#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 - int one = 1; int little = (int)*(unsigned char *)&one; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); -#else - int one = 1; int little = (int)*(unsigned char *)&one; - PyObject *from_bytes, *result = NULL; - PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; - from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); - if (!from_bytes) return NULL; - py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); - if (!py_bytes) goto limited_bad; - order_str = PyUnicode_FromString(little ? 
"little" : "big"); - if (!order_str) goto limited_bad; - arg_tuple = PyTuple_Pack(2, py_bytes, order_str); - if (!arg_tuple) goto limited_bad; - if (!is_unsigned) { - kwds = PyDict_New(); - if (!kwds) goto limited_bad; - if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; - } - result = PyObject_Call(from_bytes, arg_tuple, kwds); - limited_bad: - Py_XDECREF(kwds); - Py_XDECREF(arg_tuple); - Py_XDECREF(order_str); - Py_XDECREF(py_bytes); - Py_XDECREF(from_bytes); - return result; -#endif - } -} - -/* FormatTypeName */ -#if CYTHON_COMPILING_IN_LIMITED_API -static __Pyx_TypeName -__Pyx_PyType_GetName(PyTypeObject* tp) -{ - PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, - __pyx_n_s_name); - if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { - PyErr_Clear(); - Py_XDECREF(name); - name = __Pyx_NewRef(__pyx_n_s__19); - } - return name; -} -#endif - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const int neg_one = (int) -1, const_zero = (int) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(int) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; 
- } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(int) <= sizeof(unsigned long))) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned 
long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } - } -#endif - if ((sizeof(int) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - int val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (int) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (int) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (int) -1; - } else { - stepval = v; - } - v = NULL; - val = (int) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((int) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 
0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((int) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (int) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (cls == a || cls == b) return 1; - mro = cls->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - PyObject *base = PyTuple_GET_ITEM(mro, i); - if (base == (PyObject *)a || base == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - if (exc_type1) { - return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); - } else { - return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i= 0x030B00A4 - return Py_Version & ~0xFFUL; -#else - const char* rt_version = Py_GetVersion(); - unsigned long version = 0; - unsigned long factor = 0x01000000UL; - unsigned int digit = 0; - int i = 0; - while (factor) { - while ('0' <= rt_version[i] && rt_version[i] <= '9') { - digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); - ++i; - } - version += factor * digit; - if (rt_version[i] != '.') - break; - digit = 0; - factor >>= 8; - ++i; - } - return version; -#endif -} -static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { - const unsigned long MAJOR_MINOR = 0xFFFF0000UL; - if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) - return 0; - if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) - return 1; - { - char message[200]; - PyOS_snprintf(message, sizeof(message), - "compile time Python version %d.%d " - "of module '%.100s' " - "%s " - "runtime version %d.%d", - (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), - __Pyx_MODULE_NAME, - (allow_newer) ? 
"was newer than" : "does not match", - (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) - ); - return PyErr_WarnEx(NULL, message, 1); - } -} - -/* InitStrings */ -#if PY_MAJOR_VERSION >= 3 -static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { - if (t.is_unicode | t.is_str) { - if (t.intern) { - *str = PyUnicode_InternFromString(t.s); - } else if (t.encoding) { - *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); - } else { - *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); - } - } else { - *str = PyBytes_FromStringAndSize(t.s, t.n - 1); - } - if (!*str) - return -1; - if (PyObject_Hash(*str) == -1) - return -1; - return 0; -} -#endif -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { - while (t->p) { - #if PY_MAJOR_VERSION >= 3 - __Pyx_InitString(*t, t->p); - #else - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - #endif - ++t; - } - return 0; -} - -#include -static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { - size_t len = strlen(s); - if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { - PyErr_SetString(PyExc_OverflowError, "byte string is too long"); - return -1; - } - return (Py_ssize_t) len; -} -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return __Pyx_PyUnicode_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return PyByteArray_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if 
(!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { - __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). " - "The ability to return an instance of a strict subclass of int is deprecated, " - "and may be removed in a future version of Python.", - result_type_name)) { - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; - } - __Pyx_DECREF_TypeName(result_type_name); - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", - type_name, type_name, result_type_name); - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - if (likely(__Pyx_PyLong_IsCompact(b))) { - return __Pyx_PyLong_CompactValue(b); - } else { - const digit* digits = __Pyx_PyLong_Digits(b); - const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * 
PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { - if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { - return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); -#if PY_MAJOR_VERSION < 3 - } else if (likely(PyInt_CheckExact(o))) { - return PyInt_AS_LONG(o); -#endif - } else { - Py_ssize_t ival; - PyObject *x; - x = PyNumber_Index(o); - if (!x) return -1; - ival = PyInt_AsLong(x); - Py_DECREF(x); - return ival; - } -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -/* #### Code section: utility_code_pragmas_end ### */ -#ifdef _MSC_VER -#pragma warning( pop ) -#endif - - - -/* #### Code section: end ### */ -#endif /* Py_PYTHON_H */ diff --git a/lib/sqlalchemy/engine/_row_cy.c b/lib/sqlalchemy/engine/_row_cy.c deleted file mode 100644 index 1690f68a817..00000000000 --- a/lib/sqlalchemy/engine/_row_cy.c +++ /dev/null @@ -1,11171 +0,0 @@ -/* Generated by Cython 3.0.11 */ - -/* BEGIN: Cython Metadata -{ - "distutils": { - "name": "sqlalchemy.engine._row_cy", - "sources": [ - "lib/sqlalchemy/engine/_row_cy.py" - ] - }, - "module_name": "sqlalchemy.engine._row_cy" -} -END: Cython Metadata */ - -#ifndef PY_SSIZE_T_CLEAN -#define PY_SSIZE_T_CLEAN -#endif /* PY_SSIZE_T_CLEAN */ -#if defined(CYTHON_LIMITED_API) && 0 - #ifndef Py_LIMITED_API - #if CYTHON_LIMITED_API+0 > 0x03030000 - #define Py_LIMITED_API CYTHON_LIMITED_API - #else - #define Py_LIMITED_API 0x03030000 - #endif - #endif -#endif - -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.7+ or Python 3.3+. -#else -#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API -#define __PYX_EXTRA_ABI_MODULE_NAME "limited" -#else -#define __PYX_EXTRA_ABI_MODULE_NAME "" -#endif -#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME -#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI -#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." 
-#define CYTHON_HEX_VERSION 0x03000BF0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #define HAVE_LONG_LONG -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX -#if defined(GRAALVM_PYTHON) - /* For very preliminary testing purposes. Most variables are set the same as PyPy. - The existence of this section does not imply that anything works or is even tested */ - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 1 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(PYPY_VERSION) - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - 
#undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - #endif - #if PY_VERSION_HEX < 0x03090000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(CYTHON_LIMITED_API) - #ifdef Py_LIMITED_API - #undef __PYX_LIMITED_VERSION_HEX - #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API - #endif - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 1 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_CLINE_IN_TRACEBACK - #define CYTHON_CLINE_IN_TRACEBACK 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 1 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #endif - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 1 - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef 
CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #ifndef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 1 - #endif - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 - #endif -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #ifndef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define 
CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) - #endif - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #if PY_VERSION_HEX < 0x030400a1 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #elif !defined(CYTHON_USE_TP_FINALIZE) - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #if PY_VERSION_HEX < 0x030600B1 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #elif !defined(CYTHON_USE_DICT_VERSIONS) - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) - #endif - #if PY_VERSION_HEX < 0x030700A3 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #elif !defined(CYTHON_USE_EXC_INFO_STACK) - #define CYTHON_USE_EXC_INFO_STACK 1 - #endif - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 1 - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if !defined(CYTHON_VECTORCALL) -#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) -#endif -#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) -#if CYTHON_USE_PYLONG_INTERNALS - #if PY_MAJOR_VERSION < 3 - #include "longintrepr.h" - #endif - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(maybe_unused) - #define CYTHON_UNUSED [[maybe_unused]] - #endif - #endif - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef 
CYTHON_MAYBE_UNUSED_VAR - #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_USE_CPP_STD_MOVE - #if defined(__cplusplus) && (\ - __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) - #define CYTHON_USE_CPP_STD_MOVE 1 - #else - #define CYTHON_USE_CPP_STD_MOVE 0 - #endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned short uint16_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 uint32_t; - #endif - #endif - #if _MSC_VER < 1300 - #ifdef _WIN64 - typedef unsigned long long __pyx_uintptr_t; - #else - typedef unsigned int __pyx_uintptr_t; - #endif - #else - #ifdef _WIN64 - typedef unsigned __int64 __pyx_uintptr_t; - #else - typedef unsigned __int32 __pyx_uintptr_t; - #endif - #endif -#else - #include - typedef uintptr_t __pyx_uintptr_t; -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif -#ifdef __cplusplus - template - struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; - #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) -#else - #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) -#endif -#if CYTHON_COMPILING_IN_PYPY == 1 - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) -#else - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) -#endif -#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_DefaultClassType PyClass_Type - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - 
#define __Pyx_BUILTIN_MODULE_NAME "builtins" - #define __Pyx_DefaultClassType PyType_Type -#if CYTHON_COMPILING_IN_LIMITED_API - static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, - PyObject *code, PyObject *c, PyObject* n, PyObject *v, - PyObject *fv, PyObject *cell, PyObject* fn, - PyObject *name, int fline, PyObject *lnos) { - PyObject *exception_table = NULL; - PyObject *types_module=NULL, *code_type=NULL, *result=NULL; - #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 - PyObject *version_info; - PyObject *py_minor_version = NULL; - #endif - long minor_version = 0; - PyObject *type, *value, *traceback; - PyErr_Fetch(&type, &value, &traceback); - #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 - minor_version = 11; - #else - if (!(version_info = PySys_GetObject("version_info"))) goto end; - if (!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; - minor_version = PyLong_AsLong(py_minor_version); - Py_DECREF(py_minor_version); - if (minor_version == -1 && PyErr_Occurred()) goto end; - #endif - if (!(types_module = PyImport_ImportModule("types"))) goto end; - if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; - if (minor_version <= 7) { - (void)p; - result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, - c, n, v, fn, name, fline, lnos, fv, cell); - } else if (minor_version <= 10) { - result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, - c, n, v, fn, name, fline, lnos, fv, cell); - } else { - if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; - result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, - c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); - } - end: - Py_XDECREF(code_type); - Py_XDECREF(exception_table); - Py_XDECREF(types_module); - if (type) { - PyErr_Restore(type, value, traceback); - } - return result; - } - #ifndef CO_OPTIMIZED - #define CO_OPTIMIZED 0x0001 - #endif - #ifndef CO_NEWLOCALS - #define CO_NEWLOCALS 0x0002 - #endif - #ifndef CO_VARARGS - #define CO_VARARGS 0x0004 - #endif - #ifndef CO_VARKEYWORDS - #define CO_VARKEYWORDS 0x0008 - #endif - #ifndef CO_ASYNC_GENERATOR - #define CO_ASYNC_GENERATOR 0x0200 - #endif - #ifndef CO_GENERATOR - #define CO_GENERATOR 0x0020 - #endif - #ifndef CO_COROUTINE - #define CO_COROUTINE 0x0080 - #endif -#elif PY_VERSION_HEX >= 0x030B0000 - static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, - PyObject *code, PyObject *c, PyObject* n, PyObject *v, - PyObject *fv, PyObject *cell, PyObject* fn, - PyObject *name, int fline, PyObject *lnos) { - PyCodeObject *result; - PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); - if (!empty_bytes) return NULL; - result = - #if PY_VERSION_HEX >= 0x030C0000 - PyUnstable_Code_NewWithPosOnlyArgs - #else - PyCode_NewWithPosOnlyArgs - #endif - (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); - Py_DECREF(empty_bytes); - return result; - } -#elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#endif -#endif -#if PY_VERSION_HEX >= 0x030900A4 || 
defined(Py_IS_TYPE) - #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) -#else - #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) - #define __Pyx_Py_Is(x, y) Py_Is(x, y) -#else - #define __Pyx_Py_Is(x, y) ((x) == (y)) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) - #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) -#else - #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) - #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) -#else - #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) - #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) -#else - #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) -#endif -#define __Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) -#if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) -#else - #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) -#endif -#ifndef CO_COROUTINE - #define CO_COROUTINE 0x80 -#endif -#ifndef CO_ASYNC_GENERATOR - #define CO_ASYNC_GENERATOR 0x200 -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef Py_TPFLAGS_SEQUENCE - #define Py_TPFLAGS_SEQUENCE 0 -#endif -#ifndef Py_TPFLAGS_MAPPING - #define Py_TPFLAGS_MAPPING 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #if PY_VERSION_HEX >= 0x030d00A4 - # define __Pyx_PyCFunctionFast PyCFunctionFast - # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords - #else - # define __Pyx_PyCFunctionFast _PyCFunctionFast - # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords - #endif -#endif -#if CYTHON_METH_FASTCALL - #define __Pyx_METH_FASTCALL METH_FASTCALL - #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast - #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords -#else - #define __Pyx_METH_FASTCALL METH_VARARGS - #define __Pyx_PyCFunction_FastCall PyCFunction - #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords -#endif -#if CYTHON_VECTORCALL - #define __pyx_vectorcallfunc vectorcallfunc - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET - #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) -#elif CYTHON_BACKPORT_VECTORCALL - typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, - size_t nargsf, PyObject *kwnames); - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) - #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) -#else - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 - #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) -#endif -#if PY_MAJOR_VERSION >= 0x030900B1 -#define 
__Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) -#else -#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) -#endif -#define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) -#elif !CYTHON_COMPILING_IN_LIMITED_API -#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) -#endif -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) -static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { - return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? NULL : ((PyCFunctionObject*)func)->m_self; -} -#endif -static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) { -#if CYTHON_COMPILING_IN_LIMITED_API - return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; -#else - return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; -#endif -} -#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) -#if __PYX_LIMITED_VERSION_HEX < 0x030900B1 - #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) - typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); -#else - #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) - #define __Pyx_PyCMethod PyCMethod -#endif -#ifndef METH_METHOD - #define METH_METHOD 0x200 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyThreadState_Current PyThreadState_Get() -#elif !CYTHON_FAST_THREAD_STATE - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x030d00A1 - #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) -{ - void *result; - result = PyModule_GetState(op); - if (!result) - Py_FatalError("Couldn't find the module state"); - return result; -} -#endif -#define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) -#else - #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = 
PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} -#endif -#if PY_MAJOR_VERSION < 3 - #if CYTHON_COMPILING_IN_PYPY - #if PYPY_VERSION_NUM < 0x07030600 - #if defined(__cplusplus) && __cplusplus >= 201402L - [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] - #elif defined(__GNUC__) || defined(__clang__) - __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) - #elif defined(_MSC_VER) - __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) - #endif - static CYTHON_INLINE int PyGILState_Check(void) { - return 0; - } - #else // PYPY_VERSION_NUM < 0x07030600 - #endif // PYPY_VERSION_NUM < 0x07030600 - #else - static CYTHON_INLINE int PyGILState_Check(void) { - PyThreadState * tstate = _PyThreadState_Current; - return tstate && (tstate == PyGILState_GetThisThreadState()); - } - #endif -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { - PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); - if (res == NULL) PyErr_Clear(); - return res; -} -#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) -#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError -#define __Pyx_PyDict_GetItemStr PyDict_GetItem -#else -static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { -#if CYTHON_COMPILING_IN_PYPY - return PyDict_GetItem(dict, name); -#else - PyDictEntry *ep; - PyDictObject *mp = (PyDictObject*) dict; - long hash = ((PyStringObject *) name)->ob_shash; - assert(hash != -1); - ep = (mp->ma_lookup)(mp, name, hash); - if (ep == NULL) { - return NULL; - } - return ep->me_value; -#endif -} -#define __Pyx_PyDict_GetItemStr PyDict_GetItem -#endif -#if CYTHON_USE_TYPE_SLOTS - #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) - #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) - #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) -#else - #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) - #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) - #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) -#else - #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) -#endif -#if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 -#define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ - PyTypeObject *type = Py_TYPE((PyObject*)obj);\ - assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE));\ - PyObject_GC_Del(obj);\ - Py_DECREF(type);\ -} -#else -#define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) - #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) - #define __Pyx_PyUnicode_DATA(u) ((void*)u) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) -#elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #if PY_VERSION_HEX >= 0x030C0000 - #define __Pyx_PyUnicode_READY(op) (0) - #else - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #endif - #define __Pyx_PyUnicode_GET_LENGTH(u) 
PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) - #if PY_VERSION_HEX >= 0x030C0000 - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #else - #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #endif - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535U : 1114111U) - #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #if !defined(PyUnicode_DecodeUnicodeEscape) - #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) - #endif - #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) - #undef PyUnicode_Contains - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) - #endif - #if !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) - #endif - #if !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) - #endif -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#if CYTHON_COMPILING_IN_CPYTHON - #define __Pyx_PySequence_ListKeepNew(obj)\ - (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? __Pyx_NewRef(obj) : PySequence_List(obj)) -#else - #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) - #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) - #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) - #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) - #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) - #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) - #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) - #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) -#else - #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) - #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) - #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) - #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) - #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) - #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) - #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) - #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) -#endif -#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 - #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) -#else - static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { - PyObject *module = PyImport_AddModule(name); - Py_XINCREF(module); - return module; - } -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define __Pyx_Py3Int_Check(op) PyLong_Check(op) - #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define 
PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#else - #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) - #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) - #if !defined(_USE_MATH_DEFINES) - #define _USE_MATH_DEFINES - #endif -#endif -#include -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - -#define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } -#define __PYX_ERR(f_index, lineno, Ln_error) \ - { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } - -#ifdef CYTHON_EXTERN_C - #undef __PYX_EXTERN_C - #define __PYX_EXTERN_C CYTHON_EXTERN_C -#elif defined(__PYX_EXTERN_C) - #ifdef _MSC_VER - #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") - #else - #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead. 
- #endif -#else - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__sqlalchemy__engine___row_cy -#define __PYX_HAVE_API__sqlalchemy__engine___row_cy -/* Early includes */ -#ifdef _OPENMP -#include -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define __Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define __Pyx_sst_abs(value) __builtin_llabs(value) -#else - #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) -#endif -static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s); -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char*); -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -#define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #if PY_VERSION_HEX >= 0x030C00A7 - #ifndef _PyLong_SIGN_MASK - #define _PyLong_SIGN_MASK 3 - #endif - #ifndef _PyLong_NON_SIZE_BITS - #define _PyLong_NON_SIZE_BITS 3 - #endif - #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) - #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) - #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) - #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) - #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) - #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) - #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) - #define __Pyx_PyLong_SignedDigitCount(x)\ - ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) - #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) - #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) - #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) - #else - #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) - #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) - #endif - typedef Py_ssize_t __Pyx_compact_pylong; - typedef size_t __Pyx_compact_upylong; - #else - #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) - #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) - #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) - #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) - #define __Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) - #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) - #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) - #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) - #define __Pyx_PyLong_CompactValue(x)\ - ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? 
-(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0])) - typedef sdigit __Pyx_compact_pylong; - typedef digit __Pyx_compact_upylong; - #endif - #if PY_VERSION_HEX >= 0x030C00A5 - #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit) - #else - #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit) - #endif -#endif -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -#include -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = (char) c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#include -static char* __PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ -static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -#if !CYTHON_USE_MODULE_STATE 
-static PyObject *__pyx_m = NULL; -#endif -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm = __FILE__; -static const char *__pyx_filename; - -/* #### Code section: filename_table ### */ - -static const char *__pyx_f[] = { - "lib/sqlalchemy/engine/_row_cy.py", -}; -/* #### Code section: utility_code_proto_before_types ### */ -/* ForceInitThreads.proto */ -#ifndef __PYX_FORCE_INIT_THREADS - #define __PYX_FORCE_INIT_THREADS 0 -#endif - -/* #### Code section: numeric_typedefs ### */ -/* #### Code section: complex_type_declarations ### */ -/* #### Code section: type_declarations ### */ - -/*--- Type declarations ---*/ -struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow; - -/* "sqlalchemy/engine/_row_cy.py":45 - * - * @cython.cclass - * class BaseRow: # <<<<<<<<<<<<<< - * __slots__ = ("_parent", "_data", "_key_to_index") - * - */ -struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow { - PyObject_HEAD - struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_vtab; - PyObject *_parent; - PyObject *_key_to_index; - PyObject *_data; -}; - - - -struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow { - PyObject *(*_set_attrs)(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *, PyObject *, PyObject *, PyObject *); - PyObject *(*_get_by_key_impl)(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *, PyObject *, int); -}; -static struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_vtabptr_10sqlalchemy_6engine_7_row_cy_BaseRow; -static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *, PyObject *, PyObject *, PyObject *); -/* #### Code section: utility_code_proto ### */ - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, Py_ssize_t); - void (*DECREF)(void*, PyObject*, Py_ssize_t); - void (*GOTREF)(void*, PyObject*, Py_ssize_t); - void (*GIVEREF)(void*, PyObject*, Py_ssize_t); - void* (*SetupContext)(const char*, Py_ssize_t, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ - } - #define __Pyx_RefNannyFinishContextNogil() {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __Pyx_RefNannyFinishContext();\ - PyGILState_Release(__pyx_gilstate_save);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)) - #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext() -#endif - #define __Pyx_RefNannyFinishContextNogil() {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __Pyx_RefNannyFinishContext();\ - PyGILState_Release(__pyx_gilstate_save);\ - } - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define 
__Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) - #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) - #define __Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContextNogil() - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - #define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_Py_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; Py_XDECREF(tmp);\ - } while (0) -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while (0) -#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* PyErrExceptionMatches.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); -#else -#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) -#endif - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#if PY_VERSION_HEX >= 0x030C00A6 -#define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) -#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? 
(PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL) -#else -#define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL) -#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type) -#endif -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL) -#define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred() -#endif - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6 -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* PyObjectGetAttrStrNoError.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* TupleAndListFromArray.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n); -static CYTHON_INLINE PyObject* __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n); -#endif - -/* IncludeStringH.proto */ -#include - -/* BytesEquals.proto */ -static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); - -/* UnicodeEquals.proto */ -static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); - -/* fastcall.proto */ -#if CYTHON_AVOID_BORROWED_REFS - #define __Pyx_Arg_VARARGS(args, i) PySequence_GetItem(args, i) -#elif CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i) -#else - #define __Pyx_Arg_VARARGS(args, i) PyTuple_GetItem(args, i) -#endif -#if CYTHON_AVOID_BORROWED_REFS - #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) - #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) -#else - #define __Pyx_Arg_NewRef_VARARGS(arg) arg - #define 
__Pyx_Arg_XDECREF_VARARGS(arg) -#endif -#define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) -#define __Pyx_KwValues_VARARGS(args, nargs) NULL -#define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s) -#define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw) -#if CYTHON_METH_FASTCALL - #define __Pyx_Arg_FASTCALL(args, i) args[i] - #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) - #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) - static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 - CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); - #else - #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) - #endif - #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs - to have the same reference counting */ - #define __Pyx_Arg_XDECREF_FASTCALL(arg) -#else - #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS - #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS - #define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS - #define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS - #define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS - #define __Pyx_Arg_NewRef_FASTCALL(arg) __Pyx_Arg_NewRef_VARARGS(arg) - #define __Pyx_Arg_XDECREF_FASTCALL(arg) __Pyx_Arg_XDECREF_VARARGS(arg) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS -#define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start) -#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start) -#else -#define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop) -#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop) -#endif - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues, - PyObject **argnames[], - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, - const char* function_name); - -/* ArgTypeTest.proto */ -#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ - ((likely(__Pyx_IS_TYPE(obj, type) | (none_allowed && (obj == Py_None)))) ? 
1 :\ - __Pyx__ArgTypeTest(obj, type, name, exact)) -static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); - -/* KeywordStringCheck.proto */ -static int __Pyx_CheckKeywordStrings(PyObject *kw, const char* function_name, int kw_allowed); - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = __PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* GetModuleGlobalName.proto */ -#if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) do {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ - (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ - __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} while(0) -#define __Pyx_GetModuleGlobalNameUncached(var, name) do {\ - PY_UINT64_T __pyx_dict_version;\ - PyObject *__pyx_dict_cached_value;\ - (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} while(0) -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); -#else -#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) -#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); -#endif - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#if !CYTHON_VECTORCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); -#endif -#define __Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif -#if !CYTHON_VECTORCALL -#if PY_VERSION_HEX >= 0x03080000 - #include "frameobject.h" -#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API - #ifndef Py_BUILD_CORE - #define Py_BUILD_CORE 1 - #endif - #include "internal/pycore_frame.h" -#endif - #define __Pxy_PyFrame_Initialize_Offsets() - #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) -#else - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif -#endif -#endif - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectFastCall.proto */ -#define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) -static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); - -/* DictGetItem.proto */ -#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY -static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); -#define __Pyx_PyObject_Dict_GetItem(obj, name)\ - (likely(PyDict_CheckExact(obj)) ?\ - __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) -#else -#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) -#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) -#endif - -/* RaiseUnexpectedTypeError.proto */ -static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); - -/* GetItemInt.proto */ -#define 
__Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ - (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ - __Pyx_GetItemInt_Generic(o, to_py_func(i)))) -#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, - int is_list, int wraparound, int boundscheck); - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* ObjectGetItem.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject *key); -#else -#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key) -#endif - -/* dict_getitem_default.proto */ -static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value); - -/* UnpackUnboundCMethod.proto */ -typedef struct { - PyObject *type; - PyObject **method_name; - PyCFunction func; - PyObject *method; - int flag; -} __Pyx_CachedCFunction; - -/* CallUnboundCMethod1.proto */ -static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); -#else -#define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg) -#endif - -/* CallUnboundCMethod2.proto */ -static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2); -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 -static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2); -#else -#define __Pyx_CallUnboundCMethod2(cfunc, self, arg1, arg2) __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2) -#endif - -/* AssertionsEnabled.proto */ -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) - #define __Pyx_init_assertions_enabled() (0) - #define __pyx_assertions_enabled() (1) -#elif CYTHON_COMPILING_IN_LIMITED_API || (CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030C0000) - static int __pyx_assertions_enabled_flag; - #define __pyx_assertions_enabled() (__pyx_assertions_enabled_flag) - static int __Pyx_init_assertions_enabled(void) { - PyObject *builtins, *debug, *debug_str; - int flag; - builtins = PyEval_GetBuiltins(); - if 
(!builtins) goto bad; - debug_str = PyUnicode_FromStringAndSize("__debug__", 9); - if (!debug_str) goto bad; - debug = PyObject_GetItem(builtins, debug_str); - Py_DECREF(debug_str); - if (!debug) goto bad; - flag = PyObject_IsTrue(debug); - Py_DECREF(debug); - if (flag == -1) goto bad; - __pyx_assertions_enabled_flag = flag; - return 0; - bad: - __pyx_assertions_enabled_flag = 1; - return -1; - } -#else - #define __Pyx_init_assertions_enabled() (0) - #define __pyx_assertions_enabled() (!Py_OptimizeFlag) -#endif - -/* RaiseException.proto */ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); - -/* SetItemInt.proto */ -#define __Pyx_SetItemInt(o, i, v, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_SetItemInt_Fast(o, (Py_ssize_t)i, v, is_list, wraparound, boundscheck) :\ - (is_list ? (PyErr_SetString(PyExc_IndexError, "list assignment index out of range"), -1) :\ - __Pyx_SetItemInt_Generic(o, to_py_func(i), v))) -static int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v); -static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObject *v, - int is_list, int wraparound, int boundscheck); - -/* PyObject_GenericGetAttrNoDict.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr -#endif - -/* PyObject_GenericGetAttr.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr -#endif - -/* IncludeStructmemberH.proto */ -#include - -/* FixUpExtensionType.proto */ -#if CYTHON_USE_TYPE_SPECS -static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type); -#endif - -/* PyObjectCallNoArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); - -/* PyObjectGetMethod.proto */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); - -/* PyObjectCallMethod0.proto */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); - -/* ValidateBasesTuple.proto */ -#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS -static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases); -#endif - -/* PyType_Ready.proto */ -CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t); - -/* SetVTable.proto */ -static int __Pyx_SetVtable(PyTypeObject* typeptr , void* vtable); - -/* GetVTable.proto */ -static void* __Pyx_GetVtable(PyTypeObject *type); - -/* MergeVTables.proto */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_MergeVtables(PyTypeObject *type); -#endif - -/* Import.proto */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); - -/* ImportFrom.proto */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); - -/* FetchSharedCythonModule.proto */ -static PyObject *__Pyx_FetchSharedCythonABIModule(void); - -/* FetchCommonType.proto */ -#if !CYTHON_USE_TYPE_SPECS -static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); -#else -static PyTypeObject* __Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, 
PyObject *bases); -#endif - -/* PyMethodNew.proto */ -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { - PyObject *typesModule=NULL, *methodType=NULL, *result=NULL; - CYTHON_UNUSED_VAR(typ); - if (!self) - return __Pyx_NewRef(func); - typesModule = PyImport_ImportModule("types"); - if (!typesModule) return NULL; - methodType = PyObject_GetAttrString(typesModule, "MethodType"); - Py_DECREF(typesModule); - if (!methodType) return NULL; - result = PyObject_CallFunctionObjArgs(methodType, func, self, NULL); - Py_DECREF(methodType); - return result; -} -#elif PY_MAJOR_VERSION >= 3 -static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { - CYTHON_UNUSED_VAR(typ); - if (!self) - return __Pyx_NewRef(func); - return PyMethod_New(func, self); -} -#else - #define __Pyx_PyMethod_New PyMethod_New -#endif - -/* PyVectorcallFastCallDict.proto */ -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw); -#endif - -/* CythonFunctionShared.proto */ -#define __Pyx_CyFunction_USED -#define __Pyx_CYFUNCTION_STATICMETHOD 0x01 -#define __Pyx_CYFUNCTION_CLASSMETHOD 0x02 -#define __Pyx_CYFUNCTION_CCLASS 0x04 -#define __Pyx_CYFUNCTION_COROUTINE 0x08 -#define __Pyx_CyFunction_GetClosure(f)\ - (((__pyx_CyFunctionObject *) (f))->func_closure) -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_CyFunction_GetClassObj(f)\ - (((__pyx_CyFunctionObject *) (f))->func_classobj) -#else - #define __Pyx_CyFunction_GetClassObj(f)\ - ((PyObject*) ((PyCMethodObject *) (f))->mm_class) -#endif -#define __Pyx_CyFunction_SetClassObj(f, classobj)\ - __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj)) -#define __Pyx_CyFunction_Defaults(type, f)\ - ((type *)(((__pyx_CyFunctionObject *) (f))->defaults)) -#define __Pyx_CyFunction_SetDefaultsGetter(f, g)\ - ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g) -typedef struct { -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject_HEAD - PyObject *func; -#elif PY_VERSION_HEX < 0x030900B1 - PyCFunctionObject func; -#else - PyCMethodObject func; -#endif -#if CYTHON_BACKPORT_VECTORCALL - __pyx_vectorcallfunc func_vectorcall; -#endif -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API - PyObject *func_weakreflist; -#endif - PyObject *func_dict; - PyObject *func_name; - PyObject *func_qualname; - PyObject *func_doc; - PyObject *func_globals; - PyObject *func_code; - PyObject *func_closure; -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - PyObject *func_classobj; -#endif - void *defaults; - int defaults_pyobjects; - size_t defaults_size; - int flags; - PyObject *defaults_tuple; - PyObject *defaults_kwdict; - PyObject *(*defaults_getter)(PyObject *); - PyObject *func_annotations; - PyObject *func_is_coroutine; -} __pyx_CyFunctionObject; -#undef __Pyx_CyOrPyCFunction_Check -#define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType) -#define __Pyx_CyOrPyCFunction_Check(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type) -#define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType) -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc); -#undef __Pyx_IsSameCFunction -#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCyOrCFunction(func, cfunc) -static PyObject 
*__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, - int flags, PyObject* qualname, - PyObject *closure, - PyObject *module, PyObject *globals, - PyObject* code); -static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj); -static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, - size_t size, - int pyobjects); -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, - PyObject *tuple); -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, - PyObject *dict); -static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, - PyObject *dict); -static int __pyx_CyFunction_init(PyObject *module); -#if CYTHON_METH_FASTCALL -static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -#if CYTHON_BACKPORT_VECTORCALL -#define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall) -#else -#define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall) -#endif -#endif - -/* CythonFunction.proto */ -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, - int flags, PyObject* qualname, - PyObject *closure, - PyObject *module, PyObject *globals, - PyObject* code); - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -#if !CYTHON_COMPILING_IN_LIMITED_API -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); -#endif - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* FormatTypeName.proto */ -#if CYTHON_COMPILING_IN_LIMITED_API -typedef PyObject *__Pyx_TypeName; -#define __Pyx_FMT_TYPENAME "%U" -static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp); -#define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) -#else -typedef const char *__Pyx_TypeName; -#define __Pyx_FMT_TYPENAME "%.200s" -#define __Pyx_PyType_GetName(tp) ((tp)->tp_name) -#define __Pyx_DECREF_TypeName(obj) -#endif - -/* GCCDiagnostics.proto */ -#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) -#define __Pyx_HAS_GCC_DIAGNOSTIC -#endif - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -#define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* CheckBinaryVersion.proto */ -static unsigned long __Pyx_get_runtime_version(void); -static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - -/* #### Code section: module_declarations ### */ -static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject 
*__pyx_v_parent, PyObject *__pyx_v_key_to_index, PyObject *__pyx_v_data); /* proto*/ -static PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__get_by_key_impl(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key, int __pyx_v_attr_err); /* proto*/ - -/* Module declarations from "cython" */ - -/* Module declarations from "sqlalchemy.engine._row_cy" */ -static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy__apply_processors(PyObject *, PyObject *); /*proto*/ -/* #### Code section: typeinfo ### */ -/* #### Code section: before_global_var ### */ -#define __Pyx_MODULE_NAME "sqlalchemy.engine._row_cy" -extern int __pyx_module_is_main_sqlalchemy__engine___row_cy; -int __pyx_module_is_main_sqlalchemy__engine___row_cy = 0; - -/* Implementation of "sqlalchemy.engine._row_cy" */ -/* #### Code section: global_var ### */ -static PyObject *__pyx_builtin_AssertionError; -static PyObject *__pyx_builtin_range; -/* #### Code section: string_decls ### */ -static const char __pyx_k_[] = "."; -static const char __pyx_k_gc[] = "gc"; -static const char __pyx_k_Any[] = "Any"; -static const char __pyx_k__15[] = "?"; -static const char __pyx_k_cls[] = "cls"; -static const char __pyx_k_get[] = "get"; -static const char __pyx_k_key[] = "key"; -static const char __pyx_k_new[] = "__new__"; -static const char __pyx_k_obj[] = "obj"; -static const char __pyx_k_Dict[] = "Dict"; -static const char __pyx_k_List[] = "List"; -static const char __pyx_k_None[] = "None"; -static const char __pyx_k_Type[] = "Type"; -static const char __pyx_k_bool[] = "bool"; -static const char __pyx_k_data[] = "data"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_self[] = "self"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_Tuple[] = "Tuple"; -static const char __pyx_k_class[] = "__class__"; -static const char __pyx_k_range[] = "range"; -static const char __pyx_k_slots[] = "__slots__"; -static const char __pyx_k_state[] = "state"; -static const char __pyx_k_data_2[] = "_data"; -static const char __pyx_k_enable[] = "enable"; -static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_parent[] = "parent"; -static const char __pyx_k_reduce[] = "__reduce__"; -static const char __pyx_k_result[] = "result"; -static const char __pyx_k_return[] = "return"; -static const char __pyx_k_typing[] = "typing"; -static const char __pyx_k_BaseRow[] = "BaseRow"; -static const char __pyx_k_KeyType[] = "_KeyType"; -static const char __pyx_k_disable[] = "disable"; -static const char __pyx_k_Iterator[] = "Iterator"; -static const char __pyx_k_List_Any[] = "List[Any]"; -static const char __pyx_k_Optional[] = "Optional"; -static const char __pyx_k_Sequence[] = "Sequence"; -static const char __pyx_k_getstate[] = "__getstate__"; -static const char __pyx_k_parent_2[] = "_parent"; -static const char __pyx_k_setstate[] = "__setstate__"; -static const char __pyx_k_Tuple_Any[] = "Tuple[Any, ...]"; -static const char __pyx_k_isenabled[] = "isenabled"; -static const char __pyx_k_processors[] = "processors"; -static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; -static const char __pyx_k_is_compiled[] = "_is_compiled"; -static const char __pyx_k_values_impl[] = "_values_impl"; -static const char __pyx_k_Dict_str_Any[] = "Dict[str, Any]"; -static const char __pyx_k_Type_BaseRow[] = "Type[BaseRow]"; -static const char __pyx_k_is_coroutine[] = "_is_coroutine"; -static const char 
__pyx_k_key_to_index[] = "key_to_index"; -static const char __pyx_k_TYPE_CHECKING[] = "TYPE_CHECKING"; -static const char __pyx_k_Tuple_Any_Any[] = "Tuple[Any, Any]"; -static const char __pyx_k_class_getitem[] = "__class_getitem__"; -static const char __pyx_k_key_not_found[] = "_key_not_found"; -static const char __pyx_k_AssertionError[] = "AssertionError"; -static const char __pyx_k_ProcessorsType[] = "_ProcessorsType"; -static const char __pyx_k_ResultMetaData[] = "ResultMetaData"; -static const char __pyx_k_key_to_index_2[] = "_key_to_index"; -static const char __pyx_k_BaseRow___reduce[] = "BaseRow.__reduce__"; -static const char __pyx_k_to_tuple_instance[] = "_to_tuple_instance"; -static const char __pyx_k_BaseRow___getstate[] = "BaseRow.__getstate__"; -static const char __pyx_k_BaseRow___setstate[] = "BaseRow.__setstate__"; -static const char __pyx_k_asyncio_coroutines[] = "asyncio.coroutines"; -static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; -static const char __pyx_k_BaseRow__values_impl[] = "BaseRow._values_impl"; -static const char __pyx_k_rowproxy_reconstructor[] = "rowproxy_reconstructor"; -static const char __pyx_k_get_by_key_impl_mapping[] = "_get_by_key_impl_mapping"; -static const char __pyx_k_sqlalchemy_engine__row_cy[] = "sqlalchemy.engine._row_cy"; -static const char __pyx_k_BaseRow__to_tuple_instance[] = "BaseRow._to_tuple_instance"; -static const char __pyx_k_BaseRow__get_by_key_impl_mapping[] = "BaseRow._get_by_key_impl_mapping"; -static const char __pyx_k_lib_sqlalchemy_engine__row_cy_py[] = "lib/sqlalchemy/engine/_row_cy.py"; -/* #### Code section: decls ### */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ -static int __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_parent, PyObject *__pyx_v_processors, PyObject *__pyx_v_key_to_index, PyObject *__pyx_v_data); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_2__reduce__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_4__getstate__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_6__setstate__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_state); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_8_values_impl(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_10__iter__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ -static Py_ssize_t __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_12__len__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ -static Py_hash_t __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_14__hash__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_16__getitem__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_18_get_by_key_impl_mapping(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key); /* proto 
*/ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_20__getattr__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_name); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_22_to_tuple_instance(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_2rowproxy_reconstructor(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_cls, PyObject *__pyx_v_state); /* proto */ -static PyObject *__pyx_tp_new_10sqlalchemy_6engine_7_row_cy_BaseRow(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ -static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_get = {0, 0, 0, 0, 0}; -/* #### Code section: late_includes ### */ -/* #### Code section: module_state ### */ -typedef struct { - PyObject *__pyx_d; - PyObject *__pyx_b; - PyObject *__pyx_cython_runtime; - PyObject *__pyx_empty_tuple; - PyObject *__pyx_empty_bytes; - PyObject *__pyx_empty_unicode; - #ifdef __Pyx_CyFunction_USED - PyTypeObject *__pyx_CyFunctionType; - #endif - #ifdef __Pyx_FusedFunction_USED - PyTypeObject *__pyx_FusedFunctionType; - #endif - #ifdef __Pyx_Generator_USED - PyTypeObject *__pyx_GeneratorType; - #endif - #ifdef __Pyx_IterableCoroutine_USED - PyTypeObject *__pyx_IterableCoroutineType; - #endif - #ifdef __Pyx_Coroutine_USED - PyTypeObject *__pyx_CoroutineAwaitType; - #endif - #ifdef __Pyx_Coroutine_USED - PyTypeObject *__pyx_CoroutineType; - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - PyObject *__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow; - #endif - PyTypeObject *__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow; - PyObject *__pyx_kp_u_; - PyObject *__pyx_n_s_Any; - PyObject *__pyx_n_s_AssertionError; - PyObject *__pyx_n_s_BaseRow; - PyObject *__pyx_n_s_BaseRow___getstate; - PyObject *__pyx_n_s_BaseRow___reduce; - PyObject *__pyx_n_s_BaseRow___setstate; - PyObject *__pyx_n_s_BaseRow__get_by_key_impl_mapping; - PyObject *__pyx_n_s_BaseRow__to_tuple_instance; - PyObject *__pyx_n_s_BaseRow__values_impl; - PyObject *__pyx_n_s_Dict; - PyObject *__pyx_kp_s_Dict_str_Any; - PyObject *__pyx_n_s_Iterator; - PyObject *__pyx_n_s_KeyType; - PyObject *__pyx_n_s_List; - PyObject *__pyx_kp_s_List_Any; - PyObject *__pyx_n_s_None; - PyObject *__pyx_n_s_Optional; - PyObject *__pyx_n_s_ProcessorsType; - PyObject *__pyx_n_s_ResultMetaData; - PyObject *__pyx_n_s_Sequence; - PyObject *__pyx_n_s_TYPE_CHECKING; - PyObject *__pyx_n_s_Tuple; - PyObject *__pyx_kp_s_Tuple_Any; - PyObject *__pyx_kp_s_Tuple_Any_Any; - PyObject *__pyx_n_s_Type; - PyObject *__pyx_kp_s_Type_BaseRow; - PyObject *__pyx_n_s__15; - PyObject *__pyx_n_s_asyncio_coroutines; - PyObject *__pyx_n_s_bool; - PyObject *__pyx_n_s_class; - PyObject *__pyx_n_s_class_getitem; - PyObject *__pyx_n_s_cline_in_traceback; - PyObject *__pyx_n_s_cls; - PyObject *__pyx_n_s_data; - PyObject *__pyx_n_u_data_2; - PyObject *__pyx_kp_u_disable; - PyObject *__pyx_kp_u_enable; - PyObject *__pyx_kp_u_gc; - 
PyObject *__pyx_n_s_get; - PyObject *__pyx_n_s_get_by_key_impl_mapping; - PyObject *__pyx_n_s_getstate; - PyObject *__pyx_n_s_import; - PyObject *__pyx_n_s_is_compiled; - PyObject *__pyx_n_s_is_coroutine; - PyObject *__pyx_kp_u_isenabled; - PyObject *__pyx_n_s_key; - PyObject *__pyx_n_s_key_not_found; - PyObject *__pyx_n_s_key_to_index; - PyObject *__pyx_n_s_key_to_index_2; - PyObject *__pyx_n_u_key_to_index_2; - PyObject *__pyx_kp_s_lib_sqlalchemy_engine__row_cy_py; - PyObject *__pyx_n_s_main; - PyObject *__pyx_n_s_name; - PyObject *__pyx_n_s_new; - PyObject *__pyx_n_s_obj; - PyObject *__pyx_n_s_parent; - PyObject *__pyx_n_u_parent_2; - PyObject *__pyx_n_s_processors; - PyObject *__pyx_n_s_pyx_vtable; - PyObject *__pyx_n_s_range; - PyObject *__pyx_n_s_reduce; - PyObject *__pyx_n_s_result; - PyObject *__pyx_n_s_return; - PyObject *__pyx_n_s_rowproxy_reconstructor; - PyObject *__pyx_n_s_self; - PyObject *__pyx_n_s_setstate; - PyObject *__pyx_n_s_slots; - PyObject *__pyx_n_s_sqlalchemy_engine__row_cy; - PyObject *__pyx_n_s_state; - PyObject *__pyx_n_s_test; - PyObject *__pyx_n_s_to_tuple_instance; - PyObject *__pyx_n_s_typing; - PyObject *__pyx_n_s_values_impl; - PyObject *__pyx_tuple__3; - PyObject *__pyx_tuple__4; - PyObject *__pyx_tuple__7; - PyObject *__pyx_tuple__10; - PyObject *__pyx_tuple__13; - PyObject *__pyx_codeobj__2; - PyObject *__pyx_codeobj__5; - PyObject *__pyx_codeobj__6; - PyObject *__pyx_codeobj__8; - PyObject *__pyx_codeobj__9; - PyObject *__pyx_codeobj__11; - PyObject *__pyx_codeobj__12; - PyObject *__pyx_codeobj__14; -} __pyx_mstate; - -#if CYTHON_USE_MODULE_STATE -#ifdef __cplusplus -namespace { - extern struct PyModuleDef __pyx_moduledef; -} /* anonymous namespace */ -#else -static struct PyModuleDef __pyx_moduledef; -#endif - -#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) - -#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) - -#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) -#else -static __pyx_mstate __pyx_mstate_global_static = -#ifdef __cplusplus - {}; -#else - {0}; -#endif -static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; -#endif -/* #### Code section: module_state_clear ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_clear(PyObject *m) { - __pyx_mstate *clear_module_state = __pyx_mstate(m); - if (!clear_module_state) return 0; - Py_CLEAR(clear_module_state->__pyx_d); - Py_CLEAR(clear_module_state->__pyx_b); - Py_CLEAR(clear_module_state->__pyx_cython_runtime); - Py_CLEAR(clear_module_state->__pyx_empty_tuple); - Py_CLEAR(clear_module_state->__pyx_empty_bytes); - Py_CLEAR(clear_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_CLEAR(clear_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); - #endif - Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); - Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow); - Py_CLEAR(clear_module_state->__pyx_kp_u_); - Py_CLEAR(clear_module_state->__pyx_n_s_Any); - Py_CLEAR(clear_module_state->__pyx_n_s_AssertionError); - Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow); - Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow___getstate); - Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow___reduce); - Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow___setstate); - Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow__get_by_key_impl_mapping); - 
Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow__to_tuple_instance); - Py_CLEAR(clear_module_state->__pyx_n_s_BaseRow__values_impl); - Py_CLEAR(clear_module_state->__pyx_n_s_Dict); - Py_CLEAR(clear_module_state->__pyx_kp_s_Dict_str_Any); - Py_CLEAR(clear_module_state->__pyx_n_s_Iterator); - Py_CLEAR(clear_module_state->__pyx_n_s_KeyType); - Py_CLEAR(clear_module_state->__pyx_n_s_List); - Py_CLEAR(clear_module_state->__pyx_kp_s_List_Any); - Py_CLEAR(clear_module_state->__pyx_n_s_None); - Py_CLEAR(clear_module_state->__pyx_n_s_Optional); - Py_CLEAR(clear_module_state->__pyx_n_s_ProcessorsType); - Py_CLEAR(clear_module_state->__pyx_n_s_ResultMetaData); - Py_CLEAR(clear_module_state->__pyx_n_s_Sequence); - Py_CLEAR(clear_module_state->__pyx_n_s_TYPE_CHECKING); - Py_CLEAR(clear_module_state->__pyx_n_s_Tuple); - Py_CLEAR(clear_module_state->__pyx_kp_s_Tuple_Any); - Py_CLEAR(clear_module_state->__pyx_kp_s_Tuple_Any_Any); - Py_CLEAR(clear_module_state->__pyx_n_s_Type); - Py_CLEAR(clear_module_state->__pyx_kp_s_Type_BaseRow); - Py_CLEAR(clear_module_state->__pyx_n_s__15); - Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); - Py_CLEAR(clear_module_state->__pyx_n_s_bool); - Py_CLEAR(clear_module_state->__pyx_n_s_class); - Py_CLEAR(clear_module_state->__pyx_n_s_class_getitem); - Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); - Py_CLEAR(clear_module_state->__pyx_n_s_cls); - Py_CLEAR(clear_module_state->__pyx_n_s_data); - Py_CLEAR(clear_module_state->__pyx_n_u_data_2); - Py_CLEAR(clear_module_state->__pyx_kp_u_disable); - Py_CLEAR(clear_module_state->__pyx_kp_u_enable); - Py_CLEAR(clear_module_state->__pyx_kp_u_gc); - Py_CLEAR(clear_module_state->__pyx_n_s_get); - Py_CLEAR(clear_module_state->__pyx_n_s_get_by_key_impl_mapping); - Py_CLEAR(clear_module_state->__pyx_n_s_getstate); - Py_CLEAR(clear_module_state->__pyx_n_s_import); - Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); - Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); - Py_CLEAR(clear_module_state->__pyx_kp_u_isenabled); - Py_CLEAR(clear_module_state->__pyx_n_s_key); - Py_CLEAR(clear_module_state->__pyx_n_s_key_not_found); - Py_CLEAR(clear_module_state->__pyx_n_s_key_to_index); - Py_CLEAR(clear_module_state->__pyx_n_s_key_to_index_2); - Py_CLEAR(clear_module_state->__pyx_n_u_key_to_index_2); - Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_engine__row_cy_py); - Py_CLEAR(clear_module_state->__pyx_n_s_main); - Py_CLEAR(clear_module_state->__pyx_n_s_name); - Py_CLEAR(clear_module_state->__pyx_n_s_new); - Py_CLEAR(clear_module_state->__pyx_n_s_obj); - Py_CLEAR(clear_module_state->__pyx_n_s_parent); - Py_CLEAR(clear_module_state->__pyx_n_u_parent_2); - Py_CLEAR(clear_module_state->__pyx_n_s_processors); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_vtable); - Py_CLEAR(clear_module_state->__pyx_n_s_range); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce); - Py_CLEAR(clear_module_state->__pyx_n_s_result); - Py_CLEAR(clear_module_state->__pyx_n_s_return); - Py_CLEAR(clear_module_state->__pyx_n_s_rowproxy_reconstructor); - Py_CLEAR(clear_module_state->__pyx_n_s_self); - Py_CLEAR(clear_module_state->__pyx_n_s_setstate); - Py_CLEAR(clear_module_state->__pyx_n_s_slots); - Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_engine__row_cy); - Py_CLEAR(clear_module_state->__pyx_n_s_state); - Py_CLEAR(clear_module_state->__pyx_n_s_test); - Py_CLEAR(clear_module_state->__pyx_n_s_to_tuple_instance); - Py_CLEAR(clear_module_state->__pyx_n_s_typing); - Py_CLEAR(clear_module_state->__pyx_n_s_values_impl); - 
Py_CLEAR(clear_module_state->__pyx_tuple__3); - Py_CLEAR(clear_module_state->__pyx_tuple__4); - Py_CLEAR(clear_module_state->__pyx_tuple__7); - Py_CLEAR(clear_module_state->__pyx_tuple__10); - Py_CLEAR(clear_module_state->__pyx_tuple__13); - Py_CLEAR(clear_module_state->__pyx_codeobj__2); - Py_CLEAR(clear_module_state->__pyx_codeobj__5); - Py_CLEAR(clear_module_state->__pyx_codeobj__6); - Py_CLEAR(clear_module_state->__pyx_codeobj__8); - Py_CLEAR(clear_module_state->__pyx_codeobj__9); - Py_CLEAR(clear_module_state->__pyx_codeobj__11); - Py_CLEAR(clear_module_state->__pyx_codeobj__12); - Py_CLEAR(clear_module_state->__pyx_codeobj__14); - return 0; -} -#endif -/* #### Code section: module_state_traverse ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { - __pyx_mstate *traverse_module_state = __pyx_mstate(m); - if (!traverse_module_state) return 0; - Py_VISIT(traverse_module_state->__pyx_d); - Py_VISIT(traverse_module_state->__pyx_b); - Py_VISIT(traverse_module_state->__pyx_cython_runtime); - Py_VISIT(traverse_module_state->__pyx_empty_tuple); - Py_VISIT(traverse_module_state->__pyx_empty_bytes); - Py_VISIT(traverse_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_VISIT(traverse_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); - #endif - Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); - Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow); - Py_VISIT(traverse_module_state->__pyx_kp_u_); - Py_VISIT(traverse_module_state->__pyx_n_s_Any); - Py_VISIT(traverse_module_state->__pyx_n_s_AssertionError); - Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow); - Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow___getstate); - Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow___reduce); - Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow___setstate); - Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow__get_by_key_impl_mapping); - Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow__to_tuple_instance); - Py_VISIT(traverse_module_state->__pyx_n_s_BaseRow__values_impl); - Py_VISIT(traverse_module_state->__pyx_n_s_Dict); - Py_VISIT(traverse_module_state->__pyx_kp_s_Dict_str_Any); - Py_VISIT(traverse_module_state->__pyx_n_s_Iterator); - Py_VISIT(traverse_module_state->__pyx_n_s_KeyType); - Py_VISIT(traverse_module_state->__pyx_n_s_List); - Py_VISIT(traverse_module_state->__pyx_kp_s_List_Any); - Py_VISIT(traverse_module_state->__pyx_n_s_None); - Py_VISIT(traverse_module_state->__pyx_n_s_Optional); - Py_VISIT(traverse_module_state->__pyx_n_s_ProcessorsType); - Py_VISIT(traverse_module_state->__pyx_n_s_ResultMetaData); - Py_VISIT(traverse_module_state->__pyx_n_s_Sequence); - Py_VISIT(traverse_module_state->__pyx_n_s_TYPE_CHECKING); - Py_VISIT(traverse_module_state->__pyx_n_s_Tuple); - Py_VISIT(traverse_module_state->__pyx_kp_s_Tuple_Any); - Py_VISIT(traverse_module_state->__pyx_kp_s_Tuple_Any_Any); - Py_VISIT(traverse_module_state->__pyx_n_s_Type); - Py_VISIT(traverse_module_state->__pyx_kp_s_Type_BaseRow); - Py_VISIT(traverse_module_state->__pyx_n_s__15); - Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); - Py_VISIT(traverse_module_state->__pyx_n_s_bool); - Py_VISIT(traverse_module_state->__pyx_n_s_class); - Py_VISIT(traverse_module_state->__pyx_n_s_class_getitem); - Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); - 
Py_VISIT(traverse_module_state->__pyx_n_s_cls); - Py_VISIT(traverse_module_state->__pyx_n_s_data); - Py_VISIT(traverse_module_state->__pyx_n_u_data_2); - Py_VISIT(traverse_module_state->__pyx_kp_u_disable); - Py_VISIT(traverse_module_state->__pyx_kp_u_enable); - Py_VISIT(traverse_module_state->__pyx_kp_u_gc); - Py_VISIT(traverse_module_state->__pyx_n_s_get); - Py_VISIT(traverse_module_state->__pyx_n_s_get_by_key_impl_mapping); - Py_VISIT(traverse_module_state->__pyx_n_s_getstate); - Py_VISIT(traverse_module_state->__pyx_n_s_import); - Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); - Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); - Py_VISIT(traverse_module_state->__pyx_kp_u_isenabled); - Py_VISIT(traverse_module_state->__pyx_n_s_key); - Py_VISIT(traverse_module_state->__pyx_n_s_key_not_found); - Py_VISIT(traverse_module_state->__pyx_n_s_key_to_index); - Py_VISIT(traverse_module_state->__pyx_n_s_key_to_index_2); - Py_VISIT(traverse_module_state->__pyx_n_u_key_to_index_2); - Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_engine__row_cy_py); - Py_VISIT(traverse_module_state->__pyx_n_s_main); - Py_VISIT(traverse_module_state->__pyx_n_s_name); - Py_VISIT(traverse_module_state->__pyx_n_s_new); - Py_VISIT(traverse_module_state->__pyx_n_s_obj); - Py_VISIT(traverse_module_state->__pyx_n_s_parent); - Py_VISIT(traverse_module_state->__pyx_n_u_parent_2); - Py_VISIT(traverse_module_state->__pyx_n_s_processors); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_vtable); - Py_VISIT(traverse_module_state->__pyx_n_s_range); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce); - Py_VISIT(traverse_module_state->__pyx_n_s_result); - Py_VISIT(traverse_module_state->__pyx_n_s_return); - Py_VISIT(traverse_module_state->__pyx_n_s_rowproxy_reconstructor); - Py_VISIT(traverse_module_state->__pyx_n_s_self); - Py_VISIT(traverse_module_state->__pyx_n_s_setstate); - Py_VISIT(traverse_module_state->__pyx_n_s_slots); - Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_engine__row_cy); - Py_VISIT(traverse_module_state->__pyx_n_s_state); - Py_VISIT(traverse_module_state->__pyx_n_s_test); - Py_VISIT(traverse_module_state->__pyx_n_s_to_tuple_instance); - Py_VISIT(traverse_module_state->__pyx_n_s_typing); - Py_VISIT(traverse_module_state->__pyx_n_s_values_impl); - Py_VISIT(traverse_module_state->__pyx_tuple__3); - Py_VISIT(traverse_module_state->__pyx_tuple__4); - Py_VISIT(traverse_module_state->__pyx_tuple__7); - Py_VISIT(traverse_module_state->__pyx_tuple__10); - Py_VISIT(traverse_module_state->__pyx_tuple__13); - Py_VISIT(traverse_module_state->__pyx_codeobj__2); - Py_VISIT(traverse_module_state->__pyx_codeobj__5); - Py_VISIT(traverse_module_state->__pyx_codeobj__6); - Py_VISIT(traverse_module_state->__pyx_codeobj__8); - Py_VISIT(traverse_module_state->__pyx_codeobj__9); - Py_VISIT(traverse_module_state->__pyx_codeobj__11); - Py_VISIT(traverse_module_state->__pyx_codeobj__12); - Py_VISIT(traverse_module_state->__pyx_codeobj__14); - return 0; -} -#endif -/* #### Code section: module_state_defines ### */ -#define __pyx_d __pyx_mstate_global->__pyx_d -#define __pyx_b __pyx_mstate_global->__pyx_b -#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime -#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple -#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes -#define __pyx_empty_unicode __pyx_mstate_global->__pyx_empty_unicode -#ifdef __Pyx_CyFunction_USED -#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType -#endif -#ifdef 
__Pyx_FusedFunction_USED -#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType -#endif -#ifdef __Pyx_Generator_USED -#define __pyx_GeneratorType __pyx_mstate_global->__pyx_GeneratorType -#endif -#ifdef __Pyx_IterableCoroutine_USED -#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#define __pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow __pyx_mstate_global->__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow -#endif -#define __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow __pyx_mstate_global->__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow -#define __pyx_kp_u_ __pyx_mstate_global->__pyx_kp_u_ -#define __pyx_n_s_Any __pyx_mstate_global->__pyx_n_s_Any -#define __pyx_n_s_AssertionError __pyx_mstate_global->__pyx_n_s_AssertionError -#define __pyx_n_s_BaseRow __pyx_mstate_global->__pyx_n_s_BaseRow -#define __pyx_n_s_BaseRow___getstate __pyx_mstate_global->__pyx_n_s_BaseRow___getstate -#define __pyx_n_s_BaseRow___reduce __pyx_mstate_global->__pyx_n_s_BaseRow___reduce -#define __pyx_n_s_BaseRow___setstate __pyx_mstate_global->__pyx_n_s_BaseRow___setstate -#define __pyx_n_s_BaseRow__get_by_key_impl_mapping __pyx_mstate_global->__pyx_n_s_BaseRow__get_by_key_impl_mapping -#define __pyx_n_s_BaseRow__to_tuple_instance __pyx_mstate_global->__pyx_n_s_BaseRow__to_tuple_instance -#define __pyx_n_s_BaseRow__values_impl __pyx_mstate_global->__pyx_n_s_BaseRow__values_impl -#define __pyx_n_s_Dict __pyx_mstate_global->__pyx_n_s_Dict -#define __pyx_kp_s_Dict_str_Any __pyx_mstate_global->__pyx_kp_s_Dict_str_Any -#define __pyx_n_s_Iterator __pyx_mstate_global->__pyx_n_s_Iterator -#define __pyx_n_s_KeyType __pyx_mstate_global->__pyx_n_s_KeyType -#define __pyx_n_s_List __pyx_mstate_global->__pyx_n_s_List -#define __pyx_kp_s_List_Any __pyx_mstate_global->__pyx_kp_s_List_Any -#define __pyx_n_s_None __pyx_mstate_global->__pyx_n_s_None -#define __pyx_n_s_Optional __pyx_mstate_global->__pyx_n_s_Optional -#define __pyx_n_s_ProcessorsType __pyx_mstate_global->__pyx_n_s_ProcessorsType -#define __pyx_n_s_ResultMetaData __pyx_mstate_global->__pyx_n_s_ResultMetaData -#define __pyx_n_s_Sequence __pyx_mstate_global->__pyx_n_s_Sequence -#define __pyx_n_s_TYPE_CHECKING __pyx_mstate_global->__pyx_n_s_TYPE_CHECKING -#define __pyx_n_s_Tuple __pyx_mstate_global->__pyx_n_s_Tuple -#define __pyx_kp_s_Tuple_Any __pyx_mstate_global->__pyx_kp_s_Tuple_Any -#define __pyx_kp_s_Tuple_Any_Any __pyx_mstate_global->__pyx_kp_s_Tuple_Any_Any -#define __pyx_n_s_Type __pyx_mstate_global->__pyx_n_s_Type -#define __pyx_kp_s_Type_BaseRow __pyx_mstate_global->__pyx_kp_s_Type_BaseRow -#define __pyx_n_s__15 __pyx_mstate_global->__pyx_n_s__15 -#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines -#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool -#define __pyx_n_s_class __pyx_mstate_global->__pyx_n_s_class -#define __pyx_n_s_class_getitem __pyx_mstate_global->__pyx_n_s_class_getitem -#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback -#define __pyx_n_s_cls __pyx_mstate_global->__pyx_n_s_cls -#define __pyx_n_s_data __pyx_mstate_global->__pyx_n_s_data -#define __pyx_n_u_data_2 __pyx_mstate_global->__pyx_n_u_data_2 -#define 
__pyx_kp_u_disable __pyx_mstate_global->__pyx_kp_u_disable -#define __pyx_kp_u_enable __pyx_mstate_global->__pyx_kp_u_enable -#define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc -#define __pyx_n_s_get __pyx_mstate_global->__pyx_n_s_get -#define __pyx_n_s_get_by_key_impl_mapping __pyx_mstate_global->__pyx_n_s_get_by_key_impl_mapping -#define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate -#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import -#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled -#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine -#define __pyx_kp_u_isenabled __pyx_mstate_global->__pyx_kp_u_isenabled -#define __pyx_n_s_key __pyx_mstate_global->__pyx_n_s_key -#define __pyx_n_s_key_not_found __pyx_mstate_global->__pyx_n_s_key_not_found -#define __pyx_n_s_key_to_index __pyx_mstate_global->__pyx_n_s_key_to_index -#define __pyx_n_s_key_to_index_2 __pyx_mstate_global->__pyx_n_s_key_to_index_2 -#define __pyx_n_u_key_to_index_2 __pyx_mstate_global->__pyx_n_u_key_to_index_2 -#define __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_engine__row_cy_py -#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main -#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name -#define __pyx_n_s_new __pyx_mstate_global->__pyx_n_s_new -#define __pyx_n_s_obj __pyx_mstate_global->__pyx_n_s_obj -#define __pyx_n_s_parent __pyx_mstate_global->__pyx_n_s_parent -#define __pyx_n_u_parent_2 __pyx_mstate_global->__pyx_n_u_parent_2 -#define __pyx_n_s_processors __pyx_mstate_global->__pyx_n_s_processors -#define __pyx_n_s_pyx_vtable __pyx_mstate_global->__pyx_n_s_pyx_vtable -#define __pyx_n_s_range __pyx_mstate_global->__pyx_n_s_range -#define __pyx_n_s_reduce __pyx_mstate_global->__pyx_n_s_reduce -#define __pyx_n_s_result __pyx_mstate_global->__pyx_n_s_result -#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return -#define __pyx_n_s_rowproxy_reconstructor __pyx_mstate_global->__pyx_n_s_rowproxy_reconstructor -#define __pyx_n_s_self __pyx_mstate_global->__pyx_n_s_self -#define __pyx_n_s_setstate __pyx_mstate_global->__pyx_n_s_setstate -#define __pyx_n_s_slots __pyx_mstate_global->__pyx_n_s_slots -#define __pyx_n_s_sqlalchemy_engine__row_cy __pyx_mstate_global->__pyx_n_s_sqlalchemy_engine__row_cy -#define __pyx_n_s_state __pyx_mstate_global->__pyx_n_s_state -#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test -#define __pyx_n_s_to_tuple_instance __pyx_mstate_global->__pyx_n_s_to_tuple_instance -#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing -#define __pyx_n_s_values_impl __pyx_mstate_global->__pyx_n_s_values_impl -#define __pyx_tuple__3 __pyx_mstate_global->__pyx_tuple__3 -#define __pyx_tuple__4 __pyx_mstate_global->__pyx_tuple__4 -#define __pyx_tuple__7 __pyx_mstate_global->__pyx_tuple__7 -#define __pyx_tuple__10 __pyx_mstate_global->__pyx_tuple__10 -#define __pyx_tuple__13 __pyx_mstate_global->__pyx_tuple__13 -#define __pyx_codeobj__2 __pyx_mstate_global->__pyx_codeobj__2 -#define __pyx_codeobj__5 __pyx_mstate_global->__pyx_codeobj__5 -#define __pyx_codeobj__6 __pyx_mstate_global->__pyx_codeobj__6 -#define __pyx_codeobj__8 __pyx_mstate_global->__pyx_codeobj__8 -#define __pyx_codeobj__9 __pyx_mstate_global->__pyx_codeobj__9 -#define __pyx_codeobj__11 __pyx_mstate_global->__pyx_codeobj__11 -#define __pyx_codeobj__12 __pyx_mstate_global->__pyx_codeobj__12 -#define __pyx_codeobj__14 __pyx_mstate_global->__pyx_codeobj__14 -/* #### Code section: module_code ### */ - -/* 
"sqlalchemy/engine/_row_cy.py":36 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -PyDoc_STRVAR(__pyx_doc_10sqlalchemy_6engine_7_row_cy__is_compiled, "Utility function to indicate if this module is compiled or not."); -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_6engine_7_row_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_6engine_7_row_cy__is_compiled}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy__is_compiled(__pyx_self); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled", 1); - - /* "sqlalchemy/engine/_row_cy.py":38 - * def _is_compiled() -> bool: - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(Py_True); - __pyx_r = Py_True; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":36 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":55 - * _data: Tuple[Any, ...] 
= cython.declare(tuple, visibility="readonly") - * - * def __init__( # <<<<<<<<<<<<<< - * self, - * parent: ResultMetaData, - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -PyDoc_STRVAR(__pyx_doc_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__, "Row objects are constructed by CursorResult objects."); -#if CYTHON_UPDATE_DESCRIPTOR_DOC -struct wrapperbase __pyx_wrapperbase_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__; -#endif -static int __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_parent = 0; - PyObject *__pyx_v_processors = 0; - PyObject *__pyx_v_key_to_index = 0; - PyObject *__pyx_v_data = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[4] = {0,0,0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return -1; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_parent,&__pyx_n_s_processors,&__pyx_n_s_key_to_index,&__pyx_n_s_data,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 4: values[3] = __Pyx_Arg_VARARGS(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = __Pyx_Arg_VARARGS(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = __Pyx_Arg_VARARGS(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_VARARGS(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_parent)) != 0)) { - (void)__Pyx_Arg_NewRef_VARARGS(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 55, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_processors)) != 0)) { - (void)__Pyx_Arg_NewRef_VARARGS(values[1]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 55, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 4, 4, 1); __PYX_ERR(0, 55, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key_to_index)) != 0)) { - (void)__Pyx_Arg_NewRef_VARARGS(values[2]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 55, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 4, 4, 2); __PYX_ERR(0, 55, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 3: - if (likely((values[3] = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_data)) != 0)) { - (void)__Pyx_Arg_NewRef_VARARGS(values[3]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 55, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 4, 4, 3); __PYX_ERR(0, 55, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if 
(unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 55, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 4)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); - values[1] = __Pyx_Arg_VARARGS(__pyx_args, 1); - values[2] = __Pyx_Arg_VARARGS(__pyx_args, 2); - values[3] = __Pyx_Arg_VARARGS(__pyx_args, 3); - } - __pyx_v_parent = values[0]; - __pyx_v_processors = values[1]; - __pyx_v_key_to_index = ((PyObject*)values[2]); - __pyx_v_data = values[3]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 4, 4, __pyx_nargs); __PYX_ERR(0, 55, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_key_to_index), (&PyDict_Type), 0, "key_to_index", 1))) __PYX_ERR(0, 59, __pyx_L1_error) - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self), __pyx_v_parent, __pyx_v_processors, __pyx_v_key_to_index, __pyx_v_data); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = -1; - __pyx_L0:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_parent, PyObject *__pyx_v_processors, PyObject *__pyx_v_key_to_index, PyObject *__pyx_v_data) { - PyObject *__pyx_v_data_tuple = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 1); - - /* "sqlalchemy/engine/_row_cy.py":66 - * data_tuple: Tuple[Any, ...] = ( - * _apply_processors(processors, data) - * if processors is not None # <<<<<<<<<<<<<< - * else tuple(data) - * ) - */ - __pyx_t_2 = (__pyx_v_processors != Py_None); - if (__pyx_t_2) { - - /* "sqlalchemy/engine/_row_cy.py":65 - * - * data_tuple: Tuple[Any, ...] 
= ( - * _apply_processors(processors, data) # <<<<<<<<<<<<<< - * if processors is not None - * else tuple(data) - */ - __pyx_t_3 = __pyx_f_10sqlalchemy_6engine_7_row_cy__apply_processors(__pyx_v_processors, __pyx_v_data); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 65, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __pyx_t_3; - __pyx_t_3 = 0; - } else { - - /* "sqlalchemy/engine/_row_cy.py":67 - * _apply_processors(processors, data) - * if processors is not None - * else tuple(data) # <<<<<<<<<<<<<< - * ) - * self._set_attrs(parent, key_to_index, data_tuple) - */ - __pyx_t_3 = __Pyx_PySequence_Tuple(__pyx_v_data); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __pyx_t_3; - __pyx_t_3 = 0; - } - __pyx_v_data_tuple = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/engine/_row_cy.py":69 - * else tuple(data) - * ) - * self._set_attrs(parent, key_to_index, data_tuple) # <<<<<<<<<<<<<< - * - * @cython.cfunc - */ - __pyx_t_1 = __pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs(__pyx_v_self, __pyx_v_parent, __pyx_v_key_to_index, __pyx_v_data_tuple); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 69, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/engine/_row_cy.py":55 - * _data: Tuple[Any, ...] = cython.declare(tuple, visibility="readonly") - * - * def __init__( # <<<<<<<<<<<<<< - * self, - * parent: ResultMetaData, - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_data_tuple); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":71 - * self._set_attrs(parent, key_to_index, data_tuple) - * - * @cython.cfunc # <<<<<<<<<<<<<< - * @cython.inline - * def _set_attrs( # type: ignore[no-untyped-def] # cython crashes - */ - -static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_parent, PyObject *__pyx_v_key_to_index, PyObject *__pyx_v_data) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_set_attrs", 1); - - /* "sqlalchemy/engine/_row_cy.py":81 - * if cython.compiled: - * # cython does not use __setattr__ - * self._parent = parent # <<<<<<<<<<<<<< - * self._key_to_index = key_to_index - * self._data = data - */ - __Pyx_INCREF(__pyx_v_parent); - __Pyx_GIVEREF(__pyx_v_parent); - __Pyx_GOTREF(__pyx_v_self->_parent); - __Pyx_DECREF(__pyx_v_self->_parent); - __pyx_v_self->_parent = __pyx_v_parent; - - /* "sqlalchemy/engine/_row_cy.py":82 - * # cython does not use __setattr__ - * self._parent = parent - * self._key_to_index = key_to_index # <<<<<<<<<<<<<< - * self._data = data - * else: - */ - __Pyx_INCREF(__pyx_v_key_to_index); - __Pyx_GIVEREF(__pyx_v_key_to_index); - __Pyx_GOTREF(__pyx_v_self->_key_to_index); - __Pyx_DECREF(__pyx_v_self->_key_to_index); - __pyx_v_self->_key_to_index = __pyx_v_key_to_index; - - /* "sqlalchemy/engine/_row_cy.py":83 - * self._parent = parent - * self._key_to_index = key_to_index - * self._data = data # <<<<<<<<<<<<<< - * else: - * # python does, so use object.__setattr__ - */ - __Pyx_INCREF(__pyx_v_data); - __Pyx_GIVEREF(__pyx_v_data); - __Pyx_GOTREF(__pyx_v_self->_data); - 
__Pyx_DECREF(__pyx_v_self->_data); - __pyx_v_self->_data = __pyx_v_data; - - /* "sqlalchemy/engine/_row_cy.py":71 - * self._set_attrs(parent, key_to_index, data_tuple) - * - * @cython.cfunc # <<<<<<<<<<<<<< - * @cython.inline - * def _set_attrs( # type: ignore[no-untyped-def] # cython crashes - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":90 - * object.__setattr__(self, "_data", data) - * - * def __reduce__(self) -> Tuple[Any, Any]: # <<<<<<<<<<<<<< - * return ( - * rowproxy_reconstructor, - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__ = {"__reduce__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("__reduce__", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce__", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_2__reduce__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_2__reduce__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - unsigned int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce__", 1); - - /* "sqlalchemy/engine/_row_cy.py":91 - * - * def __reduce__(self) -> Tuple[Any, Any]: - * return ( # <<<<<<<<<<<<<< - * rowproxy_reconstructor, - * (self.__class__, self.__getstate__()), - */ - __Pyx_XDECREF(__pyx_r); - - /* "sqlalchemy/engine/_row_cy.py":92 - * def __reduce__(self) -> Tuple[Any, Any]: - * return ( - * rowproxy_reconstructor, # <<<<<<<<<<<<<< - * (self.__class__, self.__getstate__()), - * ) - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_rowproxy_reconstructor); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 92, 
__pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - - /* "sqlalchemy/engine/_row_cy.py":93 - * return ( - * rowproxy_reconstructor, - * (self.__class__, self.__getstate__()), # <<<<<<<<<<<<<< - * ) - * - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_getstate); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = NULL; - __pyx_t_6 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - __pyx_t_6 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_5, NULL}; - __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_6, 0+__pyx_t_6); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - } - __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_2); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2)) __PYX_ERR(0, 93, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_3); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_3)) __PYX_ERR(0, 93, __pyx_L1_error); - __pyx_t_2 = 0; - __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_row_cy.py":92 - * def __reduce__(self) -> Tuple[Any, Any]: - * return ( - * rowproxy_reconstructor, # <<<<<<<<<<<<<< - * (self.__class__, self.__getstate__()), - * ) - */ - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 92, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1)) __PYX_ERR(0, 92, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_4); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_4)) __PYX_ERR(0, 92, __pyx_L1_error); - __pyx_t_1 = 0; - __pyx_t_4 = 0; - __pyx_r = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":90 - * object.__setattr__(self, "_data", data) - * - * def __reduce__(self) -> Tuple[Any, Any]: # <<<<<<<<<<<<<< - * return ( - * rowproxy_reconstructor, - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__reduce__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":96 - * ) - * - * def __getstate__(self) -> Dict[str, Any]: # <<<<<<<<<<<<<< - * return {"_parent": self._parent, "_data": self._data} - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__ = {"__getstate__", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__getstate__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("__getstate__", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__getstate__", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_4__getstate__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_4__getstate__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__getstate__", 1); - - /* "sqlalchemy/engine/_row_cy.py":97 - * - * def __getstate__(self) -> Dict[str, Any]: - * return {"_parent": self._parent, "_data": self._data} # <<<<<<<<<<<<<< - * - * def __setstate__(self, state: Dict[str, Any]) -> None: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 97, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_t_1, __pyx_n_u_parent_2, __pyx_v_self->_parent) < 0) __PYX_ERR(0, 97, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_1, __pyx_n_u_data_2, __pyx_v_self->_data) < 0) __PYX_ERR(0, 97, __pyx_L1_error) - __pyx_r = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":96 - * ) - * - * def __getstate__(self) -> Dict[str, Any]: # <<<<<<<<<<<<<< - * return {"_parent": self._parent, "_data": self._data} - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__getstate__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":99 - * return {"_parent": self._parent, "_data": self._data} - * - * def __setstate__(self, state: Dict[str, Any]) -> None: # <<<<<<<<<<<<<< - * parent = state["_parent"] - * self._set_attrs(parent, parent._key_to_index, state["_data"]) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject 
*__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__ = {"__setstate__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_state,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 99, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate__") < 0)) __PYX_ERR(0, 99, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_state = ((PyObject*)values[0]); - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__setstate__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 99, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__setstate__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_state), (&PyDict_Type), 0, "state", 1))) __PYX_ERR(0, 99, __pyx_L1_error) - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_6__setstate__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self), __pyx_v_state); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = NULL; - __pyx_L0:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return 
__pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_6__setstate__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_state) { - PyObject *__pyx_v_parent = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate__", 1); - - /* "sqlalchemy/engine/_row_cy.py":100 - * - * def __setstate__(self, state: Dict[str, Any]) -> None: - * parent = state["_parent"] # <<<<<<<<<<<<<< - * self._set_attrs(parent, parent._key_to_index, state["_data"]) - * - */ - __pyx_t_1 = __Pyx_PyDict_GetItem(__pyx_v_state, __pyx_n_u_parent_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 100, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_parent = __pyx_t_1; - __pyx_t_1 = 0; - - /* "sqlalchemy/engine/_row_cy.py":101 - * def __setstate__(self, state: Dict[str, Any]) -> None: - * parent = state["_parent"] - * self._set_attrs(parent, parent._key_to_index, state["_data"]) # <<<<<<<<<<<<<< - * - * def _values_impl(self) -> List[Any]: - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_parent, __pyx_n_s_key_to_index_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 101, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(PyDict_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || __Pyx_RaiseUnexpectedTypeError("dict", __pyx_t_1))) __PYX_ERR(0, 101, __pyx_L1_error) - __pyx_t_2 = __Pyx_PyDict_GetItem(__pyx_v_state, __pyx_n_u_data_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 101, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (!(likely(PyTuple_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_t_2))) __PYX_ERR(0, 101, __pyx_L1_error) - __pyx_t_3 = __pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs(__pyx_v_self, __pyx_v_parent, ((PyObject*)__pyx_t_1), ((PyObject*)__pyx_t_2)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 101, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_row_cy.py":99 - * return {"_parent": self._parent, "_data": self._data} - * - * def __setstate__(self, state: Dict[str, Any]) -> None: # <<<<<<<<<<<<<< - * parent = state["_parent"] - * self._set_attrs(parent, parent._key_to_index, state["_data"]) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__setstate__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_parent); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":103 - * self._set_attrs(parent, parent._key_to_index, state["_data"]) - * - * def _values_impl(self) -> List[Any]: # <<<<<<<<<<<<<< - * return list(self._data) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl = 
{"_values_impl", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_values_impl (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("_values_impl", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "_values_impl", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_8_values_impl(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_8_values_impl(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_values_impl", 1); - - /* "sqlalchemy/engine/_row_cy.py":104 - * - * def _values_impl(self) -> List[Any]: - * return list(self._data) # <<<<<<<<<<<<<< - * - * def __iter__(self) -> Iterator[Any]: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PySequence_List(__pyx_v_self->_data); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":103 - * self._set_attrs(parent, parent._key_to_index, state["_data"]) - * - * def _values_impl(self) -> List[Any]: # <<<<<<<<<<<<<< - * return list(self._data) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow._values_impl", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":106 - * return list(self._data) - * - * def __iter__(self) -> Iterator[Any]: # <<<<<<<<<<<<<< - * return iter(self._data) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_11__iter__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_11__iter__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_10__iter__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow 
*)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_10__iter__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__iter__", 1); - - /* "sqlalchemy/engine/_row_cy.py":107 - * - * def __iter__(self) -> Iterator[Any]: - * return iter(self._data) # <<<<<<<<<<<<<< - * - * def __len__(self) -> int: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_v_self->_data; - __Pyx_INCREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 107, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":106 - * return list(self._data) - * - * def __iter__(self) -> Iterator[Any]: # <<<<<<<<<<<<<< - * return iter(self._data) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":109 - * return iter(self._data) - * - * def __len__(self) -> int: # <<<<<<<<<<<<<< - * return len(self._data) - * - */ - -/* Python wrapper */ -static Py_ssize_t __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__(PyObject *__pyx_v_self); /*proto*/ -static Py_ssize_t __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - Py_ssize_t __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_12__len__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static Py_ssize_t __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_12__len__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { - Py_ssize_t __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__len__", 1); - - /* "sqlalchemy/engine/_row_cy.py":110 - * - * def __len__(self) -> int: - * return len(self._data) # <<<<<<<<<<<<<< - * - * def __hash__(self) -> int: - */ - __pyx_t_1 = __pyx_v_self->_data; - __Pyx_INCREF(__pyx_t_1); - if (unlikely(__pyx_t_1 == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(0, 110, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_PyTuple_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 110, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":109 - * return iter(self._data) - * - * def __len__(self) -> int: # <<<<<<<<<<<<<< - * return len(self._data) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - 
__Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__len__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":112 - * return len(self._data) - * - * def __hash__(self) -> int: # <<<<<<<<<<<<<< - * return hash(self._data) - * - */ - -/* Python wrapper */ -static Py_hash_t __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_15__hash__(PyObject *__pyx_v_self); /*proto*/ -static Py_hash_t __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_15__hash__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - Py_hash_t __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__hash__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_14__hash__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static Py_hash_t __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_14__hash__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { - Py_hash_t __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_hash_t __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__hash__", 1); - - /* "sqlalchemy/engine/_row_cy.py":113 - * - * def __hash__(self) -> int: - * return hash(self._data) # <<<<<<<<<<<<<< - * - * def __getitem__(self, key: Any) -> Any: - */ - __pyx_t_1 = __pyx_v_self->_data; - __Pyx_INCREF(__pyx_t_1); - __pyx_t_2 = PyObject_Hash(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_hash_t)-1))) __PYX_ERR(0, 113, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":112 - * return len(self._data) - * - * def __hash__(self) -> int: # <<<<<<<<<<<<<< - * return hash(self._data) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__hash__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - if (unlikely(__pyx_r == -1) && !PyErr_Occurred()) __pyx_r = -2; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":115 - * return hash(self._data) - * - * def __getitem__(self, key: Any) -> Any: # <<<<<<<<<<<<<< - * return self._data[key] - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_17__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_17__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_16__getitem__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self), ((PyObject *)__pyx_v_key)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_16__getitem__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject 
*__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__getitem__", 1); - - /* "sqlalchemy/engine/_row_cy.py":116 - * - * def __getitem__(self, key: Any) -> Any: - * return self._data[key] # <<<<<<<<<<<<<< - * - * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_self->_data == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 116, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyObject_GetItem(__pyx_v_self->_data, __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 116, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":115 - * return hash(self._data) - * - * def __getitem__(self, key: Any) -> Any: # <<<<<<<<<<<<<< - * return self._data[key] - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":118 - * return self._data[key] - * - * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: # <<<<<<<<<<<<<< - * return self._get_by_key_impl(key, False) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping = {"_get_by_key_impl_mapping", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_key = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_get_by_key_impl_mapping (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - 
else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 118, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "_get_by_key_impl_mapping") < 0)) __PYX_ERR(0, 118, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_key = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("_get_by_key_impl_mapping", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 118, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow._get_by_key_impl_mapping", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_18_get_by_key_impl_mapping(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self), __pyx_v_key); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_18_get_by_key_impl_mapping(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_get_by_key_impl_mapping", 1); - - /* "sqlalchemy/engine/_row_cy.py":119 - * - * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: - * return self._get_by_key_impl(key, False) # <<<<<<<<<<<<<< - * - * @cython.cfunc - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self->__pyx_vtab)->_get_by_key_impl(__pyx_v_self, __pyx_v_key, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 119, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":118 - * return self._data[key] - * - * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: # <<<<<<<<<<<<<< - * return self._get_by_key_impl(key, False) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow._get_by_key_impl_mapping", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":121 - * return self._get_by_key_impl(key, False) - * - * @cython.cfunc # <<<<<<<<<<<<<< - * def _get_by_key_impl(self, key: _KeyType, attr_err: cython.bint) -> object: - * index: Optional[int] = self._key_to_index.get(key) - */ - -static PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__get_by_key_impl(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_key, int 
__pyx_v_attr_err) { - PyObject *__pyx_v_index = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - unsigned int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_get_by_key_impl", 1); - - /* "sqlalchemy/engine/_row_cy.py":123 - * @cython.cfunc - * def _get_by_key_impl(self, key: _KeyType, attr_err: cython.bint) -> object: - * index: Optional[int] = self._key_to_index.get(key) # <<<<<<<<<<<<<< - * if index is not None: - * return self._data[index] - */ - if (unlikely(__pyx_v_self->_key_to_index == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); - __PYX_ERR(0, 123, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyDict_GetItemDefault(__pyx_v_self->_key_to_index, __pyx_v_key, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 123, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(__Pyx_Py3Int_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || __Pyx_RaiseUnexpectedTypeError("int", __pyx_t_1))) __PYX_ERR(0, 123, __pyx_L1_error) - __pyx_v_index = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/engine/_row_cy.py":124 - * def _get_by_key_impl(self, key: _KeyType, attr_err: cython.bint) -> object: - * index: Optional[int] = self._key_to_index.get(key) - * if index is not None: # <<<<<<<<<<<<<< - * return self._data[index] - * self._parent._key_not_found(key, attr_err) - */ - __pyx_t_2 = (__pyx_v_index != ((PyObject*)Py_None)); - if (__pyx_t_2) { - - /* "sqlalchemy/engine/_row_cy.py":125 - * index: Optional[int] = self._key_to_index.get(key) - * if index is not None: - * return self._data[index] # <<<<<<<<<<<<<< - * self._parent._key_not_found(key, attr_err) - * - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_self->_data == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 125, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyObject_GetItem(__pyx_v_self->_data, __pyx_v_index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 125, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":124 - * def _get_by_key_impl(self, key: _KeyType, attr_err: cython.bint) -> object: - * index: Optional[int] = self._key_to_index.get(key) - * if index is not None: # <<<<<<<<<<<<<< - * return self._data[index] - * self._parent._key_not_found(key, attr_err) - */ - } - - /* "sqlalchemy/engine/_row_cy.py":126 - * if index is not None: - * return self._data[index] - * self._parent._key_not_found(key, attr_err) # <<<<<<<<<<<<<< - * - * @cython.annotation_typing(False) - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_parent, __pyx_n_s_key_not_found); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 126, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_attr_err); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 126, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = NULL; - __pyx_t_6 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_6 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[3] = {__pyx_t_5, __pyx_v_key, __pyx_t_4}; 
- __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_6, 2+__pyx_t_6); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 126, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/engine/_row_cy.py":121 - * return self._get_by_key_impl(key, False) - * - * @cython.cfunc # <<<<<<<<<<<<<< - * def _get_by_key_impl(self, key: _KeyType, attr_err: cython.bint) -> object: - * index: Optional[int] = self._key_to_index.get(key) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow._get_by_key_impl", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_index); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":128 - * self._parent._key_not_found(key, attr_err) - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def __getattr__(self, name: str) -> Any: - * return self._get_by_key_impl(name, True) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_21__getattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_name); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_21__getattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_name) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__getattr__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_20__getattr__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self), ((PyObject *)__pyx_v_name)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_20__getattr__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self, PyObject *__pyx_v_name) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__getattr__", 1); - - /* "sqlalchemy/engine/_row_cy.py":130 - * @cython.annotation_typing(False) - * def __getattr__(self, name: str) -> Any: - * return self._get_by_key_impl(name, True) # <<<<<<<<<<<<<< - * - * def _to_tuple_instance(self) -> Tuple[Any, ...]: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self->__pyx_vtab)->_get_by_key_impl(__pyx_v_self, __pyx_v_name, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 130, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":128 - * self._parent._key_not_found(key, attr_err) - * - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def __getattr__(self, name: str) -> Any: - * return self._get_by_key_impl(name, True) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.BaseRow.__getattr__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - 
__pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":132 - * return self._get_by_key_impl(name, True) - * - * def _to_tuple_instance(self) -> Tuple[Any, ...]: # <<<<<<<<<<<<<< - * return self._data - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance = {"_to_tuple_instance", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_to_tuple_instance (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("_to_tuple_instance", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "_to_tuple_instance", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_22_to_tuple_instance(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_22_to_tuple_instance(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_to_tuple_instance", 1); - - /* "sqlalchemy/engine/_row_cy.py":133 - * - * def _to_tuple_instance(self) -> Tuple[Any, ...]: - * return self._data # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->_data); - __pyx_r = __pyx_v_self->_data; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":132 - * return self._get_by_key_impl(name, True) - * - * def _to_tuple_instance(self) -> Tuple[Any, ...]: # <<<<<<<<<<<<<< - * return self._data - * - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":49 - * - * if cython.compiled: - * _parent: ResultMetaData = cython.declare(object, visibility="readonly") # <<<<<<<<<<<<<< - * _key_to_index: Dict[_KeyType, int] = cython.declare( - * dict, visibility="readonly" - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent_1__get__(PyObject 
*__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent___get__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 1); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->_parent); - __pyx_r = __pyx_v_self->_parent; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":50 - * if cython.compiled: - * _parent: ResultMetaData = cython.declare(object, visibility="readonly") - * _key_to_index: Dict[_KeyType, int] = cython.declare( # <<<<<<<<<<<<<< - * dict, visibility="readonly" - * ) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index_1__get__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index___get__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 1); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->_key_to_index); - __pyx_r = __pyx_v_self->_key_to_index; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":53 - * dict, visibility="readonly" - * ) - * _data: Tuple[Any, ...] 
= cython.declare(tuple, visibility="readonly") # <<<<<<<<<<<<<< - * - * def __init__( - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data_1__get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data_1__get__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data___get__(((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data___get__(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__get__", 1); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_self->_data); - __pyx_r = __pyx_v_self->_data; - goto __pyx_L0; - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":136 - * - * - * @cython.inline # <<<<<<<<<<<<<< - * @cython.cfunc - * def _apply_processors( - */ - -static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_6engine_7_row_cy__apply_processors(PyObject *__pyx_v_proc, PyObject *__pyx_v_data) { - PyObject *__pyx_v_res = 0; - Py_ssize_t __pyx_v_proc_size; - Py_ssize_t __pyx_v_i; - PyObject *__pyx_v_p = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - int __pyx_t_3; - Py_ssize_t __pyx_t_4; - Py_ssize_t __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - unsigned int __pyx_t_9; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_apply_processors", 1); - - /* "sqlalchemy/engine/_row_cy.py":141 - * proc: _ProcessorsType, data: Sequence[Any] - * ) -> Tuple[Any, ...]: - * res: List[Any] = list(data) # <<<<<<<<<<<<<< - * proc_size: cython.Py_ssize_t = len(proc) - * # TODO: would be nice to do this only on the fist row - */ - __pyx_t_1 = PySequence_List(__pyx_v_data); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 141, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_res = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/engine/_row_cy.py":142 - * ) -> Tuple[Any, ...]: - * res: List[Any] = list(data) - * proc_size: cython.Py_ssize_t = len(proc) # <<<<<<<<<<<<<< - * # TODO: would be nice to do this only on the fist row - * assert len(res) == proc_size - */ - __pyx_t_2 = PyObject_Length(__pyx_v_proc); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 142, __pyx_L1_error) - __pyx_v_proc_size = __pyx_t_2; - - /* "sqlalchemy/engine/_row_cy.py":144 - * proc_size: cython.Py_ssize_t = len(proc) - * # TODO: would be nice to do this only on the fist row - * assert len(res) == proc_size # <<<<<<<<<<<<<< - * for i in range(proc_size): - * p = proc[i] - */ - #ifndef CYTHON_WITHOUT_ASSERTIONS - if (unlikely(__pyx_assertions_enabled())) { - __pyx_t_2 = __Pyx_PyList_GET_SIZE(__pyx_v_res); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 144, __pyx_L1_error) - __pyx_t_3 = (__pyx_t_2 == __pyx_v_proc_size); - if (unlikely(!__pyx_t_3)) { - 
__Pyx_Raise(__pyx_builtin_AssertionError, 0, 0, 0); - __PYX_ERR(0, 144, __pyx_L1_error) - } - } - #else - if ((1)); else __PYX_ERR(0, 144, __pyx_L1_error) - #endif - - /* "sqlalchemy/engine/_row_cy.py":145 - * # TODO: would be nice to do this only on the fist row - * assert len(res) == proc_size - * for i in range(proc_size): # <<<<<<<<<<<<<< - * p = proc[i] - * if p is not None: - */ - __pyx_t_2 = __pyx_v_proc_size; - __pyx_t_4 = __pyx_t_2; - for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { - __pyx_v_i = __pyx_t_5; - - /* "sqlalchemy/engine/_row_cy.py":146 - * assert len(res) == proc_size - * for i in range(proc_size): - * p = proc[i] # <<<<<<<<<<<<<< - * if p is not None: - * res[i] = p(res[i]) - */ - __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_proc, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 146, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_XDECREF_SET(__pyx_v_p, __pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/engine/_row_cy.py":147 - * for i in range(proc_size): - * p = proc[i] - * if p is not None: # <<<<<<<<<<<<<< - * res[i] = p(res[i]) - * return tuple(res) - */ - __pyx_t_3 = (__pyx_v_p != Py_None); - if (__pyx_t_3) { - - /* "sqlalchemy/engine/_row_cy.py":148 - * p = proc[i] - * if p is not None: - * res[i] = p(res[i]) # <<<<<<<<<<<<<< - * return tuple(res) - * - */ - __pyx_t_6 = __Pyx_GetItemInt_List(__pyx_v_res, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 148, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_INCREF(__pyx_v_p); - __pyx_t_7 = __pyx_v_p; __pyx_t_8 = NULL; - __pyx_t_9 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_7))) { - __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); - if (likely(__pyx_t_8)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); - __Pyx_INCREF(__pyx_t_8); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_7, function); - __pyx_t_9 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_8, __pyx_t_6}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_7, __pyx_callargs+1-__pyx_t_9, 1+__pyx_t_9); - __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 148, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - } - if (unlikely((__Pyx_SetItemInt(__pyx_v_res, __pyx_v_i, __pyx_t_1, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1) < 0))) __PYX_ERR(0, 148, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/engine/_row_cy.py":147 - * for i in range(proc_size): - * p = proc[i] - * if p is not None: # <<<<<<<<<<<<<< - * res[i] = p(res[i]) - * return tuple(res) - */ - } - } - - /* "sqlalchemy/engine/_row_cy.py":149 - * if p is not None: - * res[i] = p(res[i]) - * return tuple(res) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PyList_AsTuple(__pyx_v_res); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 149, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":136 - * - * - * @cython.inline # <<<<<<<<<<<<<< - * @cython.cfunc - * def _apply_processors( - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy._apply_processors", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_res); - 
__Pyx_XDECREF(__pyx_v_p); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_row_cy.py":156 - * # Turn off annotation typing so the compiled version accepts the python - * # class too. - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def rowproxy_reconstructor( - * cls: Type[BaseRow], state: Dict[str, Any] - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_3rowproxy_reconstructor(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_7_row_cy_3rowproxy_reconstructor = {"rowproxy_reconstructor", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_3rowproxy_reconstructor, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_7_row_cy_3rowproxy_reconstructor(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_cls = 0; - PyObject *__pyx_v_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[2] = {0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("rowproxy_reconstructor (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_cls,&__pyx_n_s_state,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_cls)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 156, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 156, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("rowproxy_reconstructor", 1, 2, 2, 1); __PYX_ERR(0, 156, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "rowproxy_reconstructor") < 0)) __PYX_ERR(0, 156, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 2)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - } - __pyx_v_cls = values[0]; - __pyx_v_state = 
values[1]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("rowproxy_reconstructor", 1, 2, 2, __pyx_nargs); __PYX_ERR(0, 156, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.rowproxy_reconstructor", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_7_row_cy_2rowproxy_reconstructor(__pyx_self, __pyx_v_cls, __pyx_v_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_7_row_cy_2rowproxy_reconstructor(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_cls, PyObject *__pyx_v_state) { - PyObject *__pyx_v_obj = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("rowproxy_reconstructor", 1); - - /* "sqlalchemy/engine/_row_cy.py":160 - * cls: Type[BaseRow], state: Dict[str, Any] - * ) -> BaseRow: - * obj = cls.__new__(cls) # <<<<<<<<<<<<<< - * obj.__setstate__(state) - * return obj - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_cls, __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 160, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_cls}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 160, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __pyx_v_obj = __pyx_t_1; - __pyx_t_1 = 0; - - /* "sqlalchemy/engine/_row_cy.py":161 - * ) -> BaseRow: - * obj = cls.__new__(cls) - * obj.__setstate__(state) # <<<<<<<<<<<<<< - * return obj - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_obj, __pyx_n_s_setstate); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 161, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_state}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) 
__PYX_ERR(0, 161, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/engine/_row_cy.py":162 - * obj = cls.__new__(cls) - * obj.__setstate__(state) - * return obj # <<<<<<<<<<<<<< - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_obj); - __pyx_r = __pyx_v_obj; - goto __pyx_L0; - - /* "sqlalchemy/engine/_row_cy.py":156 - * # Turn off annotation typing so the compiled version accepts the python - * # class too. - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def rowproxy_reconstructor( - * cls: Type[BaseRow], state: Dict[str, Any] - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.engine._row_cy.rowproxy_reconstructor", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_obj); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} -static struct __pyx_vtabstruct_10sqlalchemy_6engine_7_row_cy_BaseRow __pyx_vtable_10sqlalchemy_6engine_7_row_cy_BaseRow; - -static PyObject *__pyx_tp_new_10sqlalchemy_6engine_7_row_cy_BaseRow(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *p; - PyObject *o; - #if CYTHON_COMPILING_IN_LIMITED_API - allocfunc alloc_func = (allocfunc)PyType_GetSlot(t, Py_tp_alloc); - o = alloc_func(t, 0); - #else - if (likely(!__Pyx_PyType_HasFeature(t, Py_TPFLAGS_IS_ABSTRACT))) { - o = (*t->tp_alloc)(t, 0); - } else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - #endif - p = ((struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)o); - p->__pyx_vtab = __pyx_vtabptr_10sqlalchemy_6engine_7_row_cy_BaseRow; - p->_parent = Py_None; Py_INCREF(Py_None); - p->_key_to_index = ((PyObject*)Py_None); Py_INCREF(Py_None); - p->_data = ((PyObject*)Py_None); Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_10sqlalchemy_6engine_7_row_cy_BaseRow(PyObject *o) { - struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *p = (struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely((PY_VERSION_HEX >= 0x03080000 || __Pyx_PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE)) && __Pyx_PyObject_GetSlot(o, tp_finalize, destructor)) && !__Pyx_PyObject_GC_IsFinalized(o)) { - if (__Pyx_PyObject_GetSlot(o, tp_dealloc, destructor) == __pyx_tp_dealloc_10sqlalchemy_6engine_7_row_cy_BaseRow) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - } - #endif - PyObject_GC_UnTrack(o); - Py_CLEAR(p->_parent); - Py_CLEAR(p->_key_to_index); - Py_CLEAR(p->_data); - #if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY - (*Py_TYPE(o)->tp_free)(o); - #else - { - freefunc tp_free = (freefunc)PyType_GetSlot(Py_TYPE(o), Py_tp_free); - if (tp_free) tp_free(o); - } - #endif -} - -static int __pyx_tp_traverse_10sqlalchemy_6engine_7_row_cy_BaseRow(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *p = (struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)o; - if (p->_parent) { - e = (*v)(p->_parent, a); if (e) return e; - } - if (p->_key_to_index) { - e = (*v)(p->_key_to_index, a); if (e) return e; - } - if (p->_data) { - e = (*v)(p->_data, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_10sqlalchemy_6engine_7_row_cy_BaseRow(PyObject *o) { - 
PyObject* tmp; - struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *p = (struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *)o; - tmp = ((PyObject*)p->_parent); - p->_parent = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_key_to_index); - p->_key_to_index = ((PyObject*)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->_data); - p->_data = ((PyObject*)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} -static PyObject *__pyx_sq_item_10sqlalchemy_6engine_7_row_cy_BaseRow(PyObject *o, Py_ssize_t i) { - PyObject *r; - PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0; - r = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, x); - Py_DECREF(x); - return r; -} - -static PyObject *__pyx_tp_getattro_10sqlalchemy_6engine_7_row_cy_BaseRow(PyObject *o, PyObject *n) { - PyObject *v = __Pyx_PyObject_GenericGetAttr(o, n); - if (!v && PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - v = __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_21__getattr__(o, n); - } - return v; -} - -static PyObject *__pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__parent(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7_parent_1__get__(o); -} - -static PyObject *__pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__key_to_index(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13_key_to_index_1__get__(o); -} - -static PyObject *__pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__data(PyObject *o, CYTHON_UNUSED void *x) { - return __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5_data_1__get__(o); -} - -static PyMethodDef __pyx_methods_10sqlalchemy_6engine_7_row_cy_BaseRow[] = { - {"__reduce__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__getstate__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__setstate__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"_values_impl", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"_get_by_key_impl_mapping", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__getattr__", (PyCFunction)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_21__getattr__, METH_O|METH_COEXIST, 0}, - {"_to_tuple_instance", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {0, 0, 0, 0} -}; - -static struct PyGetSetDef __pyx_getsets_10sqlalchemy_6engine_7_row_cy_BaseRow[] = { - {(char *)"_parent", __pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__parent, 0, (char *)0, 0}, - {(char *)"_key_to_index", __pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__key_to_index, 0, (char *)0, 0}, - {(char *)"_data", __pyx_getprop_10sqlalchemy_6engine_7_row_cy_7BaseRow__data, 0, (char *)0, 0}, - {0, 0, 0, 0, 0} -}; -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow_slots[] = { - {Py_tp_dealloc, 
(void *)__pyx_tp_dealloc_10sqlalchemy_6engine_7_row_cy_BaseRow}, - {Py_sq_length, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__}, - {Py_sq_item, (void *)__pyx_sq_item_10sqlalchemy_6engine_7_row_cy_BaseRow}, - {Py_mp_length, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__}, - {Py_mp_subscript, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_17__getitem__}, - {Py_tp_hash, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_15__hash__}, - {Py_tp_getattro, (void *)__pyx_tp_getattro_10sqlalchemy_6engine_7_row_cy_BaseRow}, - {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_6engine_7_row_cy_BaseRow}, - {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_6engine_7_row_cy_BaseRow}, - {Py_tp_iter, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_11__iter__}, - {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_6engine_7_row_cy_BaseRow}, - {Py_tp_getset, (void *)__pyx_getsets_10sqlalchemy_6engine_7_row_cy_BaseRow}, - {Py_tp_init, (void *)__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_1__init__}, - {Py_tp_new, (void *)__pyx_tp_new_10sqlalchemy_6engine_7_row_cy_BaseRow}, - {0, 0}, -}; -static PyType_Spec __pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow_spec = { - "sqlalchemy.engine._row_cy.BaseRow", - sizeof(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow), - 0, - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, - __pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow_slots, -}; -#else - -static PySequenceMethods __pyx_tp_as_sequence_BaseRow = { - __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__, /*sq_length*/ - 0, /*sq_concat*/ - 0, /*sq_repeat*/ - __pyx_sq_item_10sqlalchemy_6engine_7_row_cy_BaseRow, /*sq_item*/ - 0, /*sq_slice*/ - 0, /*sq_ass_item*/ - 0, /*sq_ass_slice*/ - 0, /*sq_contains*/ - 0, /*sq_inplace_concat*/ - 0, /*sq_inplace_repeat*/ -}; - -static PyMappingMethods __pyx_tp_as_mapping_BaseRow = { - __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_13__len__, /*mp_length*/ - __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_17__getitem__, /*mp_subscript*/ - 0, /*mp_ass_subscript*/ -}; - -static PyTypeObject __pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow = { - PyVarObject_HEAD_INIT(0, 0) - "sqlalchemy.engine._row_cy.""BaseRow", /*tp_name*/ - sizeof(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - &__pyx_tp_as_sequence_BaseRow, /*tp_as_sequence*/ - &__pyx_tp_as_mapping_BaseRow, /*tp_as_mapping*/ - __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_15__hash__, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - __pyx_tp_getattro_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_traverse*/ - __pyx_tp_clear_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 
__pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_11__iter__, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_methods*/ - 0, /*tp_members*/ - __pyx_getsets_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - #if !CYTHON_USE_TYPE_SPECS - 0, /*tp_dictoffset*/ - #endif - __pyx_pw_10sqlalchemy_6engine_7_row_cy_7BaseRow_1__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_10sqlalchemy_6engine_7_row_cy_BaseRow, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - #if CYTHON_USE_TP_FINALIZE - 0, /*tp_finalize*/ - #else - NULL, /*tp_finalize*/ - #endif - #endif - #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, /*tp_vectorcall*/ - #endif - #if __PYX_NEED_TP_PRINT_SLOT == 1 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030C0000 - 0, /*tp_watched*/ - #endif - #if PY_VERSION_HEX >= 0x030d00A4 - 0, /*tp_versions_used*/ - #endif - #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, /*tp_pypy_flags*/ - #endif -}; -#endif - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif -/* #### Code section: pystring_table ### */ - -static int __Pyx_CreateStringTabAndInitStrings(void) { - __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, - {&__pyx_n_s_Any, __pyx_k_Any, sizeof(__pyx_k_Any), 0, 0, 1, 1}, - {&__pyx_n_s_AssertionError, __pyx_k_AssertionError, sizeof(__pyx_k_AssertionError), 0, 0, 1, 1}, - {&__pyx_n_s_BaseRow, __pyx_k_BaseRow, sizeof(__pyx_k_BaseRow), 0, 0, 1, 1}, - {&__pyx_n_s_BaseRow___getstate, __pyx_k_BaseRow___getstate, sizeof(__pyx_k_BaseRow___getstate), 0, 0, 1, 1}, - {&__pyx_n_s_BaseRow___reduce, __pyx_k_BaseRow___reduce, sizeof(__pyx_k_BaseRow___reduce), 0, 0, 1, 1}, - {&__pyx_n_s_BaseRow___setstate, __pyx_k_BaseRow___setstate, sizeof(__pyx_k_BaseRow___setstate), 0, 0, 1, 1}, - {&__pyx_n_s_BaseRow__get_by_key_impl_mapping, __pyx_k_BaseRow__get_by_key_impl_mapping, sizeof(__pyx_k_BaseRow__get_by_key_impl_mapping), 0, 0, 1, 1}, - {&__pyx_n_s_BaseRow__to_tuple_instance, __pyx_k_BaseRow__to_tuple_instance, sizeof(__pyx_k_BaseRow__to_tuple_instance), 0, 0, 1, 1}, - {&__pyx_n_s_BaseRow__values_impl, __pyx_k_BaseRow__values_impl, sizeof(__pyx_k_BaseRow__values_impl), 0, 0, 1, 1}, - {&__pyx_n_s_Dict, __pyx_k_Dict, sizeof(__pyx_k_Dict), 0, 0, 1, 1}, - {&__pyx_kp_s_Dict_str_Any, __pyx_k_Dict_str_Any, sizeof(__pyx_k_Dict_str_Any), 0, 0, 1, 0}, - {&__pyx_n_s_Iterator, __pyx_k_Iterator, sizeof(__pyx_k_Iterator), 0, 0, 1, 1}, - {&__pyx_n_s_KeyType, __pyx_k_KeyType, sizeof(__pyx_k_KeyType), 0, 0, 1, 1}, - {&__pyx_n_s_List, __pyx_k_List, sizeof(__pyx_k_List), 0, 0, 1, 1}, - {&__pyx_kp_s_List_Any, __pyx_k_List_Any, sizeof(__pyx_k_List_Any), 0, 0, 1, 0}, - {&__pyx_n_s_None, __pyx_k_None, sizeof(__pyx_k_None), 0, 0, 1, 1}, - {&__pyx_n_s_Optional, __pyx_k_Optional, sizeof(__pyx_k_Optional), 0, 0, 1, 1}, - {&__pyx_n_s_ProcessorsType, __pyx_k_ProcessorsType, sizeof(__pyx_k_ProcessorsType), 0, 0, 1, 1}, - 
{&__pyx_n_s_ResultMetaData, __pyx_k_ResultMetaData, sizeof(__pyx_k_ResultMetaData), 0, 0, 1, 1}, - {&__pyx_n_s_Sequence, __pyx_k_Sequence, sizeof(__pyx_k_Sequence), 0, 0, 1, 1}, - {&__pyx_n_s_TYPE_CHECKING, __pyx_k_TYPE_CHECKING, sizeof(__pyx_k_TYPE_CHECKING), 0, 0, 1, 1}, - {&__pyx_n_s_Tuple, __pyx_k_Tuple, sizeof(__pyx_k_Tuple), 0, 0, 1, 1}, - {&__pyx_kp_s_Tuple_Any, __pyx_k_Tuple_Any, sizeof(__pyx_k_Tuple_Any), 0, 0, 1, 0}, - {&__pyx_kp_s_Tuple_Any_Any, __pyx_k_Tuple_Any_Any, sizeof(__pyx_k_Tuple_Any_Any), 0, 0, 1, 0}, - {&__pyx_n_s_Type, __pyx_k_Type, sizeof(__pyx_k_Type), 0, 0, 1, 1}, - {&__pyx_kp_s_Type_BaseRow, __pyx_k_Type_BaseRow, sizeof(__pyx_k_Type_BaseRow), 0, 0, 1, 0}, - {&__pyx_n_s__15, __pyx_k__15, sizeof(__pyx_k__15), 0, 0, 1, 1}, - {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, - {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, - {&__pyx_n_s_class, __pyx_k_class, sizeof(__pyx_k_class), 0, 0, 1, 1}, - {&__pyx_n_s_class_getitem, __pyx_k_class_getitem, sizeof(__pyx_k_class_getitem), 0, 0, 1, 1}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_cls, __pyx_k_cls, sizeof(__pyx_k_cls), 0, 0, 1, 1}, - {&__pyx_n_s_data, __pyx_k_data, sizeof(__pyx_k_data), 0, 0, 1, 1}, - {&__pyx_n_u_data_2, __pyx_k_data_2, sizeof(__pyx_k_data_2), 0, 1, 0, 1}, - {&__pyx_kp_u_disable, __pyx_k_disable, sizeof(__pyx_k_disable), 0, 1, 0, 0}, - {&__pyx_kp_u_enable, __pyx_k_enable, sizeof(__pyx_k_enable), 0, 1, 0, 0}, - {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, - {&__pyx_n_s_get, __pyx_k_get, sizeof(__pyx_k_get), 0, 0, 1, 1}, - {&__pyx_n_s_get_by_key_impl_mapping, __pyx_k_get_by_key_impl_mapping, sizeof(__pyx_k_get_by_key_impl_mapping), 0, 0, 1, 1}, - {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, - {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, - {&__pyx_kp_u_isenabled, __pyx_k_isenabled, sizeof(__pyx_k_isenabled), 0, 1, 0, 0}, - {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, - {&__pyx_n_s_key_not_found, __pyx_k_key_not_found, sizeof(__pyx_k_key_not_found), 0, 0, 1, 1}, - {&__pyx_n_s_key_to_index, __pyx_k_key_to_index, sizeof(__pyx_k_key_to_index), 0, 0, 1, 1}, - {&__pyx_n_s_key_to_index_2, __pyx_k_key_to_index_2, sizeof(__pyx_k_key_to_index_2), 0, 0, 1, 1}, - {&__pyx_n_u_key_to_index_2, __pyx_k_key_to_index_2, sizeof(__pyx_k_key_to_index_2), 0, 1, 0, 1}, - {&__pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_k_lib_sqlalchemy_engine__row_cy_py, sizeof(__pyx_k_lib_sqlalchemy_engine__row_cy_py), 0, 0, 1, 0}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, - {&__pyx_n_s_obj, __pyx_k_obj, sizeof(__pyx_k_obj), 0, 0, 1, 1}, - {&__pyx_n_s_parent, __pyx_k_parent, sizeof(__pyx_k_parent), 0, 0, 1, 1}, - {&__pyx_n_u_parent_2, __pyx_k_parent_2, sizeof(__pyx_k_parent_2), 0, 1, 0, 1}, - {&__pyx_n_s_processors, __pyx_k_processors, sizeof(__pyx_k_processors), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, - {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, - 
{&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, - {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, - {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, - {&__pyx_n_s_rowproxy_reconstructor, __pyx_k_rowproxy_reconstructor, sizeof(__pyx_k_rowproxy_reconstructor), 0, 0, 1, 1}, - {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, - {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_slots, __pyx_k_slots, sizeof(__pyx_k_slots), 0, 0, 1, 1}, - {&__pyx_n_s_sqlalchemy_engine__row_cy, __pyx_k_sqlalchemy_engine__row_cy, sizeof(__pyx_k_sqlalchemy_engine__row_cy), 0, 0, 1, 1}, - {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_n_s_to_tuple_instance, __pyx_k_to_tuple_instance, sizeof(__pyx_k_to_tuple_instance), 0, 0, 1, 1}, - {&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, - {&__pyx_n_s_values_impl, __pyx_k_values_impl, sizeof(__pyx_k_values_impl), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} - }; - return __Pyx_InitStrings(__pyx_string_tab); -} -/* #### Code section: cached_builtins ### */ -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_AssertionError = __Pyx_GetBuiltinName(__pyx_n_s_AssertionError); if (!__pyx_builtin_AssertionError) __PYX_ERR(0, 144, __pyx_L1_error) - __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 145, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: cached_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "sqlalchemy/engine/_row_cy.py":36 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_is_compiled, 36, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 36, __pyx_L1_error) - - /* "sqlalchemy/engine/_row_cy.py":46 - * @cython.cclass - * class BaseRow: - * __slots__ = ("_parent", "_data", "_key_to_index") # <<<<<<<<<<<<<< - * - * if cython.compiled: - */ - __pyx_tuple__3 = PyTuple_Pack(3, __pyx_n_u_parent_2, __pyx_n_u_data_2, __pyx_n_u_key_to_index_2); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 46, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__3); - __Pyx_GIVEREF(__pyx_tuple__3); - - /* "sqlalchemy/engine/_row_cy.py":90 - * object.__setattr__(self, "_data", data) - * - * def __reduce__(self) -> Tuple[Any, Any]: # <<<<<<<<<<<<<< - * return ( - * rowproxy_reconstructor, - */ - __pyx_tuple__4 = PyTuple_Pack(1, __pyx_n_s_self); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 90, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__4); - __Pyx_GIVEREF(__pyx_tuple__4); - __pyx_codeobj__5 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__4, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_reduce, 90, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__5)) __PYX_ERR(0, 90, 
__pyx_L1_error) - - /* "sqlalchemy/engine/_row_cy.py":96 - * ) - * - * def __getstate__(self) -> Dict[str, Any]: # <<<<<<<<<<<<<< - * return {"_parent": self._parent, "_data": self._data} - * - */ - __pyx_codeobj__6 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__4, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_getstate, 96, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__6)) __PYX_ERR(0, 96, __pyx_L1_error) - - /* "sqlalchemy/engine/_row_cy.py":99 - * return {"_parent": self._parent, "_data": self._data} - * - * def __setstate__(self, state: Dict[str, Any]) -> None: # <<<<<<<<<<<<<< - * parent = state["_parent"] - * self._set_attrs(parent, parent._key_to_index, state["_data"]) - */ - __pyx_tuple__7 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_parent); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(0, 99, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__7); - __Pyx_GIVEREF(__pyx_tuple__7); - __pyx_codeobj__8 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__7, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_setstate, 99, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__8)) __PYX_ERR(0, 99, __pyx_L1_error) - - /* "sqlalchemy/engine/_row_cy.py":103 - * self._set_attrs(parent, parent._key_to_index, state["_data"]) - * - * def _values_impl(self) -> List[Any]: # <<<<<<<<<<<<<< - * return list(self._data) - * - */ - __pyx_codeobj__9 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__4, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_values_impl, 103, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__9)) __PYX_ERR(0, 103, __pyx_L1_error) - - /* "sqlalchemy/engine/_row_cy.py":118 - * return self._data[key] - * - * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: # <<<<<<<<<<<<<< - * return self._get_by_key_impl(key, False) - * - */ - __pyx_tuple__10 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_key); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(0, 118, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__10); - __Pyx_GIVEREF(__pyx_tuple__10); - __pyx_codeobj__11 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__10, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_get_by_key_impl_mapping, 118, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__11)) __PYX_ERR(0, 118, __pyx_L1_error) - - /* "sqlalchemy/engine/_row_cy.py":132 - * return self._get_by_key_impl(name, True) - * - * def _to_tuple_instance(self) -> Tuple[Any, ...]: # <<<<<<<<<<<<<< - * return self._data - * - */ - __pyx_codeobj__12 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__4, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_to_tuple_instance, 132, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__12)) __PYX_ERR(0, 132, __pyx_L1_error) - - /* "sqlalchemy/engine/_row_cy.py":156 - * # Turn off annotation typing so the compiled version accepts the python - * # class too. 
- * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def rowproxy_reconstructor( - * cls: Type[BaseRow], state: Dict[str, Any] - */ - __pyx_tuple__13 = PyTuple_Pack(3, __pyx_n_s_cls, __pyx_n_s_state, __pyx_n_s_obj); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(0, 156, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__13); - __Pyx_GIVEREF(__pyx_tuple__13); - __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__13, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__row_cy_py, __pyx_n_s_rowproxy_reconstructor, 156, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(0, 156, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} -/* #### Code section: init_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { - __pyx_umethod_PyDict_Type_get.type = (PyObject*)&PyDict_Type; - __pyx_umethod_PyDict_Type_get.method_name = &__pyx_n_s_get; - if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: init_globals ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - /* AssertionsEnabled.init */ - if (likely(__Pyx_init_assertions_enabled() == 0)); else - -if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L1_error) - - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: init_module ### */ - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - __pyx_vtabptr_10sqlalchemy_6engine_7_row_cy_BaseRow = &__pyx_vtable_10sqlalchemy_6engine_7_row_cy_BaseRow; - __pyx_vtable_10sqlalchemy_6engine_7_row_cy_BaseRow._set_attrs = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *, PyObject *, PyObject *, PyObject *))__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__set_attrs; - __pyx_vtable_10sqlalchemy_6engine_7_row_cy_BaseRow._get_by_key_impl = (PyObject *(*)(struct 
__pyx_obj_10sqlalchemy_6engine_7_row_cy_BaseRow *, PyObject *, int))__pyx_f_10sqlalchemy_6engine_7_row_cy_7BaseRow__get_by_key_impl; - #if CYTHON_USE_TYPE_SPECS - __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow_spec, NULL); if (unlikely(!__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow)) __PYX_ERR(0, 45, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow_spec, __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow) < 0) __PYX_ERR(0, 45, __pyx_L1_error) - #else - __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow = &__pyx_type_10sqlalchemy_6engine_7_row_cy_BaseRow; - #endif - #if !CYTHON_COMPILING_IN_LIMITED_API - #endif - #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow) < 0) __PYX_ERR(0, 45, __pyx_L1_error) - #endif - #if PY_MAJOR_VERSION < 3 - __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow->tp_print = 0; - #endif - #if CYTHON_UPDATE_DESCRIPTOR_DOC - { - PyObject *wrapper = PyObject_GetAttrString((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, "__init__"); if (unlikely(!wrapper)) __PYX_ERR(0, 45, __pyx_L1_error) - if (__Pyx_IS_TYPE(wrapper, &PyWrapperDescr_Type)) { - __pyx_wrapperbase_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__ = *((PyWrapperDescrObject *)wrapper)->d_base; - __pyx_wrapperbase_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__.doc = __pyx_doc_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__; - ((PyWrapperDescrObject *)wrapper)->d_base = &__pyx_wrapperbase_10sqlalchemy_6engine_7_row_cy_7BaseRow___init__; - } - } - #endif - if (__Pyx_SetVtable(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_vtabptr_10sqlalchemy_6engine_7_row_cy_BaseRow) < 0) __PYX_ERR(0, 45, __pyx_L1_error) - #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_MergeVtables(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow) < 0) __PYX_ERR(0, 45, __pyx_L1_error) - #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_BaseRow, (PyObject *) __pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow) < 0) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_type_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); - /*--- Type import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); - /*--- Variable import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); - /*--- Function import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ -static int __pyx_pymod_exec__row_cy(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec__row_cy}, - {0, NULL} -}; -#endif - -#ifdef __cplusplus -namespace { - struct PyModuleDef __pyx_moduledef = - #else - static struct PyModuleDef __pyx_moduledef = - #endif - { - PyModuleDef_HEAD_INIT, - 
"_row_cy", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #elif CYTHON_USE_MODULE_STATE - sizeof(__pyx_mstate), /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - #if CYTHON_USE_MODULE_STATE - __pyx_m_traverse, /* m_traverse */ - __pyx_m_clear, /* m_clear */ - NULL /* m_free */ - #else - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ - #endif - }; - #ifdef __cplusplus -} /* anonymous namespace */ -#endif -#endif - -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else -#define __Pyx_PyMODINIT_FUNC void -#endif -#else -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else -#define __Pyx_PyMODINIT_FUNC PyObject * -#endif -#endif - - -#if PY_MAJOR_VERSION < 3 -__Pyx_PyMODINIT_FUNC init_row_cy(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC init_row_cy(void) -#else -__Pyx_PyMODINIT_FUNC PyInit__row_cy(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC PyInit__row_cy(void) -#if CYTHON_PEP489_MULTI_PHASE_INIT -{ - return PyModuleDef_Init(&__pyx_moduledef); -} -static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { - #if PY_VERSION_HEX >= 0x030700A1 - static PY_INT64_T main_interpreter_id = -1; - PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); - if (main_interpreter_id == -1) { - main_interpreter_id = current_id; - return (unlikely(current_id == -1)) ? -1 : 0; - } else if (unlikely(main_interpreter_id != current_id)) - #else - static PyInterpreterState *main_interpreter = NULL; - PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; - if (!main_interpreter) { - main_interpreter = current_interpreter; - } else if (unlikely(main_interpreter != current_interpreter)) - #endif - { - PyErr_SetString( - PyExc_ImportError, - "Interpreter change detected - this module can only be loaded into one interpreter per process."); - return -1; - } - return 0; -} -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) -#else -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) -#endif -{ - PyObject *value = PyObject_GetAttrString(spec, from_name); - int result = 0; - if (likely(value)) { - if (allow_none || value != Py_None) { -#if CYTHON_COMPILING_IN_LIMITED_API - result = PyModule_AddObject(module, to_name, value); -#else - result = PyDict_SetItemString(moddict, to_name, value); -#endif - } - Py_DECREF(value); - } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - } else { - result = -1; - } - return result; -} -static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { - PyObject *module = NULL, *moddict, *modname; - CYTHON_UNUSED_VAR(def); - if (__Pyx_check_single_interpreter()) - return NULL; - if (__pyx_m) - return __Pyx_NewRef(__pyx_m); - modname = PyObject_GetAttrString(spec, "name"); - if (unlikely(!modname)) goto bad; - module = PyModule_NewObject(modname); - Py_DECREF(modname); - if (unlikely(!module)) goto bad; -#if CYTHON_COMPILING_IN_LIMITED_API - moddict = module; -#else - moddict = PyModule_GetDict(module); - 
if (unlikely(!moddict)) goto bad; -#endif - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; - return module; -bad: - Py_XDECREF(module); - return NULL; -} - - -static CYTHON_SMALL_CODE int __pyx_pymod_exec__row_cy(PyObject *__pyx_pyinit_module) -#endif -#endif -{ - int stringtab_initialized = 0; - #if CYTHON_USE_MODULE_STATE - int pystate_addmodule_run = 0; - #endif - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - #if CYTHON_PEP489_MULTI_PHASE_INIT - if (__pyx_m) { - if (__pyx_m == __pyx_pyinit_module) return 0; - PyErr_SetString(PyExc_RuntimeError, "Module '_row_cy' has already been imported. Re-initialisation is not supported."); - return -1; - } - #elif PY_MAJOR_VERSION >= 3 - if (__pyx_m) return __Pyx_NewRef(__pyx_m); - #endif - /*--- Module creation code ---*/ - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_m = __pyx_pyinit_module; - Py_INCREF(__pyx_m); - #else - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4("_row_cy", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #elif CYTHON_USE_MODULE_STATE - __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) - { - int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); - __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_row_cy" pseudovariable */ - if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - pystate_addmodule_run = 1; - } - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #endif - CYTHON_UNUSED_VAR(__pyx_t_1); - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_d); - __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if CYTHON_REFNANNY -__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); -if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); -} -#endif - __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__row_cy(void)", 0); - if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pxy_PyFrame_Initialize_Offsets - __Pxy_PyFrame_Initialize_Offsets(); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, 
__pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Generator_USED - if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - PyEval_InitThreads(); - #endif - /*--- Initialize various global constants etc. ---*/ - if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - stringtab_initialized = 1; - if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) - if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - if (__pyx_module_is_main_sqlalchemy__engine___row_cy) { - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - } - #if PY_MAJOR_VERSION >= 3 - { - PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) - if (!PyDict_GetItemString(modules, "sqlalchemy.engine._row_cy")) { - if (unlikely((PyDict_SetItemString(modules, "sqlalchemy.engine._row_cy", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - } - } - #endif - /*--- Builtin init code ---*/ - if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Constants init code ---*/ - if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Global type/function init code ---*/ - (void)__Pyx_modinit_global_init_code(); - (void)__Pyx_modinit_variable_export_code(); - (void)__Pyx_modinit_function_export_code(); - if (unlikely((__Pyx_modinit_type_init_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - (void)__Pyx_modinit_type_import_code(); - (void)__Pyx_modinit_variable_import_code(); - (void)__Pyx_modinit_function_import_code(); - /*--- Execution code ---*/ - #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - - /* "sqlalchemy/engine/_row_cy.py":10 - * from __future__ import annotations - * - * from typing import Any # <<<<<<<<<<<<<< - * from typing import Dict - * from typing import Iterator - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Any); - __Pyx_GIVEREF(__pyx_n_s_Any); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Any)) __PYX_ERR(0, 10, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Any); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, 
__pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Any, __pyx_t_2) < 0) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_row_cy.py":11 - * - * from typing import Any - * from typing import Dict # <<<<<<<<<<<<<< - * from typing import Iterator - * from typing import List - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Dict); - __Pyx_GIVEREF(__pyx_n_s_Dict); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Dict)) __PYX_ERR(0, 11, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Dict); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Dict, __pyx_t_3) < 0) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_row_cy.py":12 - * from typing import Any - * from typing import Dict - * from typing import Iterator # <<<<<<<<<<<<<< - * from typing import List - * from typing import Optional - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Iterator); - __Pyx_GIVEREF(__pyx_n_s_Iterator); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Iterator)) __PYX_ERR(0, 12, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Iterator); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Iterator, __pyx_t_2) < 0) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_row_cy.py":13 - * from typing import Dict - * from typing import Iterator - * from typing import List # <<<<<<<<<<<<<< - * from typing import Optional - * from typing import Sequence - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_List); - __Pyx_GIVEREF(__pyx_n_s_List); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_List)) __PYX_ERR(0, 13, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_List); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_List, __pyx_t_3) < 0) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_row_cy.py":14 - * from typing import Iterator - * from typing import List - * from typing import Optional # <<<<<<<<<<<<<< - * from typing import Sequence - * from typing import Tuple - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - 
__Pyx_INCREF(__pyx_n_s_Optional); - __Pyx_GIVEREF(__pyx_n_s_Optional); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Optional)) __PYX_ERR(0, 14, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Optional); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Optional, __pyx_t_2) < 0) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_row_cy.py":15 - * from typing import List - * from typing import Optional - * from typing import Sequence # <<<<<<<<<<<<<< - * from typing import Tuple - * from typing import Type - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Sequence); - __Pyx_GIVEREF(__pyx_n_s_Sequence); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Sequence)) __PYX_ERR(0, 15, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Sequence); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Sequence, __pyx_t_3) < 0) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_row_cy.py":16 - * from typing import Optional - * from typing import Sequence - * from typing import Tuple # <<<<<<<<<<<<<< - * from typing import Type - * from typing import TYPE_CHECKING - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Tuple); - __Pyx_GIVEREF(__pyx_n_s_Tuple); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Tuple)) __PYX_ERR(0, 16, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Tuple); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Tuple, __pyx_t_2) < 0) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_row_cy.py":17 - * from typing import Sequence - * from typing import Tuple - * from typing import Type # <<<<<<<<<<<<<< - * from typing import TYPE_CHECKING - * - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Type); - __Pyx_GIVEREF(__pyx_n_s_Type); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Type)) __PYX_ERR(0, 17, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Type); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, 
__pyx_n_s_Type, __pyx_t_3) < 0) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_row_cy.py":18 - * from typing import Tuple - * from typing import Type - * from typing import TYPE_CHECKING # <<<<<<<<<<<<<< - * - * if TYPE_CHECKING: - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_TYPE_CHECKING); - __Pyx_GIVEREF(__pyx_n_s_TYPE_CHECKING); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_TYPE_CHECKING)) __PYX_ERR(0, 18, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_TYPE_CHECKING); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_TYPE_CHECKING, __pyx_t_2) < 0) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_row_cy.py":20 - * from typing import TYPE_CHECKING - * - * if TYPE_CHECKING: # <<<<<<<<<<<<<< - * from .result import _KeyType - * from .result import _ProcessorsType - */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_TYPE_CHECKING); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 20, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely((__pyx_t_4 < 0))) __PYX_ERR(0, 20, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_4) { - - /* "sqlalchemy/engine/_row_cy.py":21 - * - * if TYPE_CHECKING: - * from .result import _KeyType # <<<<<<<<<<<<<< - * from .result import _ProcessorsType - * from .result import ResultMetaData - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_KeyType); - __Pyx_GIVEREF(__pyx_n_s_KeyType); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_KeyType)) __PYX_ERR(0, 21, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_result, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_KeyType); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_KeyType, __pyx_t_3) < 0) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_row_cy.py":22 - * if TYPE_CHECKING: - * from .result import _KeyType - * from .result import _ProcessorsType # <<<<<<<<<<<<<< - * from .result import ResultMetaData - * - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_ProcessorsType); - __Pyx_GIVEREF(__pyx_n_s_ProcessorsType); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_ProcessorsType)) __PYX_ERR(0, 22, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_result, __pyx_t_2, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_ProcessorsType); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if 
(PyDict_SetItem(__pyx_d, __pyx_n_s_ProcessorsType, __pyx_t_2) < 0) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_row_cy.py":23 - * from .result import _KeyType - * from .result import _ProcessorsType - * from .result import ResultMetaData # <<<<<<<<<<<<<< - * - * # START GENERATED CYTHON IMPORT - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_ResultMetaData); - __Pyx_GIVEREF(__pyx_n_s_ResultMetaData); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_ResultMetaData)) __PYX_ERR(0, 23, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_result, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_ResultMetaData); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_ResultMetaData, __pyx_t_3) < 0) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_row_cy.py":20 - * from typing import TYPE_CHECKING - * - * if TYPE_CHECKING: # <<<<<<<<<<<<<< - * from .result import _KeyType - * from .result import _ProcessorsType - */ - } - - /* "sqlalchemy/engine/_row_cy.py":27 - * # START GENERATED CYTHON IMPORT - * # This section is automatically generated by the script tools/cython_imports.py - * try: # <<<<<<<<<<<<<< - * # NOTE: the cython compiler needs this "import cython" in the file, it - * # can't be only "from sqlalchemy.util import cython" with the fallback - */ - { - (void)__pyx_t_1; (void)__pyx_t_5; (void)__pyx_t_6; /* mark used */ - /*try:*/ { - - /* "sqlalchemy/engine/_row_cy.py":31 - * # can't be only "from sqlalchemy.util import cython" with the fallback - * # in that module - * import cython # <<<<<<<<<<<<<< - * except ModuleNotFoundError: - * from sqlalchemy.util import cython - */ - } - } - - /* "sqlalchemy/engine/_row_cy.py":36 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 36, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_bool) < 0) __PYX_ERR(0, 36, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_1_is_compiled, 0, __pyx_n_s_is_compiled, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__2)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 36, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_compiled, __pyx_t_3) < 0) __PYX_ERR(0, 36, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_row_cy.py":46 - * @cython.cclass - * class BaseRow: - * __slots__ = ("_parent", "_data", "_key_to_index") # <<<<<<<<<<<<<< - * - * if cython.compiled: - */ - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_slots, __pyx_tuple__3) < 0) __PYX_ERR(0, 46, __pyx_L1_error) - PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); - - /* 
"sqlalchemy/engine/_row_cy.py":90 - * object.__setattr__(self, "_data", data) - * - * def __reduce__(self) -> Tuple[Any, Any]: # <<<<<<<<<<<<<< - * return ( - * rowproxy_reconstructor, - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 90, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_Tuple_Any_Any) < 0) __PYX_ERR(0, 90, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_3__reduce__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow___reduce, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__5)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 90, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_reduce, __pyx_t_2) < 0) __PYX_ERR(0, 90, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); - - /* "sqlalchemy/engine/_row_cy.py":96 - * ) - * - * def __getstate__(self) -> Dict[str, Any]: # <<<<<<<<<<<<<< - * return {"_parent": self._parent, "_data": self._data} - * - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 96, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_Dict_str_Any) < 0) __PYX_ERR(0, 96, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_5__getstate__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow___getstate, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__6)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 96, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_getstate, __pyx_t_3) < 0) __PYX_ERR(0, 96, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); - - /* "sqlalchemy/engine/_row_cy.py":99 - * return {"_parent": self._parent, "_data": self._data} - * - * def __setstate__(self, state: Dict[str, Any]) -> None: # <<<<<<<<<<<<<< - * parent = state["_parent"] - * self._set_attrs(parent, parent._key_to_index, state["_data"]) - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 99, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_state, __pyx_kp_s_Dict_str_Any) < 0) __PYX_ERR(0, 99, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 99, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_7__setstate__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow___setstate, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__8)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 99, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_setstate, __pyx_t_2) < 0) __PYX_ERR(0, 99, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - 
PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); - - /* "sqlalchemy/engine/_row_cy.py":103 - * self._set_attrs(parent, parent._key_to_index, state["_data"]) - * - * def _values_impl(self) -> List[Any]: # <<<<<<<<<<<<<< - * return list(self._data) - * - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 103, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_List_Any) < 0) __PYX_ERR(0, 103, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_9_values_impl, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow__values_impl, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__9)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 103, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_values_impl, __pyx_t_3) < 0) __PYX_ERR(0, 103, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); - - /* "sqlalchemy/engine/_row_cy.py":118 - * return self._data[key] - * - * def _get_by_key_impl_mapping(self, key: _KeyType) -> Any: # <<<<<<<<<<<<<< - * return self._get_by_key_impl(key, False) - * - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 118, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_key, __pyx_n_s_KeyType) < 0) __PYX_ERR(0, 118, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_Any) < 0) __PYX_ERR(0, 118, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_19_get_by_key_impl_mapping, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow__get_by_key_impl_mapping, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__11)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 118, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_get_by_key_impl_mapping, __pyx_t_2) < 0) __PYX_ERR(0, 118, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); - - /* "sqlalchemy/engine/_row_cy.py":132 - * return self._get_by_key_impl(name, True) - * - * def _to_tuple_instance(self) -> Tuple[Any, ...]: # <<<<<<<<<<<<<< - * return self._data - * - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 132, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_Tuple_Any) < 0) __PYX_ERR(0, 132, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_7BaseRow_23_to_tuple_instance, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_BaseRow__to_tuple_instance, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__12)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 132, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow, __pyx_n_s_to_tuple_instance, __pyx_t_3) < 0) __PYX_ERR(0, 
132, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_6engine_7_row_cy_BaseRow); - - /* "sqlalchemy/engine/_row_cy.py":156 - * # Turn off annotation typing so the compiled version accepts the python - * # class too. - * @cython.annotation_typing(False) # <<<<<<<<<<<<<< - * def rowproxy_reconstructor( - * cls: Type[BaseRow], state: Dict[str, Any] - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 156, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_cls, __pyx_kp_s_Type_BaseRow) < 0) __PYX_ERR(0, 156, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_state, __pyx_kp_s_Dict_str_Any) < 0) __PYX_ERR(0, 156, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_BaseRow) < 0) __PYX_ERR(0, 156, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_7_row_cy_3rowproxy_reconstructor, 0, __pyx_n_s_rowproxy_reconstructor, NULL, __pyx_n_s_sqlalchemy_engine__row_cy, __pyx_d, ((PyObject *)__pyx_codeobj__14)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 156, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_rowproxy_reconstructor, __pyx_t_2) < 0) __PYX_ERR(0, 156, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_row_cy.py":1 - * # engine/_row_cy.py # <<<<<<<<<<<<<< - * # Copyright (C) 2010-2025 the SQLAlchemy authors and contributors - * # - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /*--- Wrapped vars code ---*/ - - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - if (__pyx_m) { - if (__pyx_d && stringtab_initialized) { - __Pyx_AddTraceback("init sqlalchemy.engine._row_cy", __pyx_clineno, __pyx_lineno, __pyx_filename); - } - #if !CYTHON_USE_MODULE_STATE - Py_CLEAR(__pyx_m); - #else - Py_DECREF(__pyx_m); - if (pystate_addmodule_run) { - PyObject *tp, *value, *tb; - PyErr_Fetch(&tp, &value, &tb); - PyState_RemoveModule(&__pyx_moduledef); - PyErr_Restore(tp, value, tb); - } - #endif - } else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "init sqlalchemy.engine._row_cy"); - } - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - #if CYTHON_PEP489_MULTI_PHASE_INIT - return (__pyx_m != NULL) ? 
0 : -1; - #elif PY_MAJOR_VERSION >= 3 - return __pyx_m; - #else - return; - #endif -} -/* #### Code section: cleanup_globals ### */ -/* #### Code section: cleanup_module ### */ -/* #### Code section: main_method ### */ -/* #### Code section: utility_code_pragmas ### */ -#ifdef _MSC_VER -#pragma warning( push ) -/* Warning 4127: conditional expression is constant - * Cython uses constant conditional expressions to allow in inline functions to be optimized at - * compile-time, so this warning is not useful - */ -#pragma warning( disable : 4127 ) -#endif - - - -/* #### Code section: utility_code_def ### */ - -/* --- Runtime support code --- */ -/* Refnanny */ -#if CYTHON_REFNANNY -static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule(modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, "RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); -end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; -} -#endif - -/* PyErrExceptionMatches */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i= 0x030C00A6 - PyObject *current_exception = tstate->current_exception; - if (unlikely(!current_exception)) return 0; - exc_type = (PyObject*) Py_TYPE(current_exception); - if (exc_type == err) return 1; -#else - exc_type = tstate->curexc_type; - if (exc_type == err) return 1; - if (unlikely(!exc_type)) return 0; -#endif - #if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(exc_type); - #endif - if (unlikely(PyTuple_Check(err))) { - result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); - } else { - result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err); - } - #if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(exc_type); - #endif - return result; -} -#endif - -/* PyErrFetchRestore */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { -#if PY_VERSION_HEX >= 0x030C00A6 - PyObject *tmp_value; - assert(type == NULL || (value != NULL && type == (PyObject*) Py_TYPE(value))); - if (value) { - #if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(((PyBaseExceptionObject*) value)->traceback != tb)) - #endif - PyException_SetTraceback(value, tb); - } - tmp_value = tstate->current_exception; - tstate->current_exception = value; - Py_XDECREF(tmp_value); - Py_XDECREF(type); - Py_XDECREF(tb); -#else - PyObject *tmp_type, *tmp_value, *tmp_tb; - tmp_type = tstate->curexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -#endif -} -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { -#if PY_VERSION_HEX >= 0x030C00A6 - PyObject* exc_value; - exc_value = tstate->current_exception; - tstate->current_exception = 0; - *value = exc_value; - *type = NULL; - *tb = NULL; - if (exc_value) { - *type = (PyObject*) Py_TYPE(exc_value); - Py_INCREF(*type); - #if CYTHON_COMPILING_IN_CPYTHON - *tb = ((PyBaseExceptionObject*) exc_value)->traceback; - Py_XINCREF(*tb); - #else - *tb = PyException_GetTraceback(exc_value); - #endif - } -#else - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb 
= tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -#endif -} -#endif - -/* PyObjectGetAttrStr */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro)) - return tp->tp_getattro(obj, attr_name); -#if PY_MAJOR_VERSION < 3 - if (likely(tp->tp_getattr)) - return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); -#endif - return PyObject_GetAttr(obj, attr_name); -} -#endif - -/* PyObjectGetAttrStrNoError */ -#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 -static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - __Pyx_PyErr_Clear(); -} -#endif -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { - PyObject *result; -#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 - (void) PyObject_GetOptionalAttr(obj, attr_name, &result); - return result; -#else -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { - return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); - } -#endif - result = __Pyx_PyObject_GetAttrStr(obj, attr_name); - if (unlikely(!result)) { - __Pyx_PyObject_GetAttrStr_ClearAttributeError(); - } - return result; -#endif -} - -/* GetBuiltinName */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name) { - PyObject* result = __Pyx_PyObject_GetAttrStrNoError(__pyx_b, name); - if (unlikely(!result) && !PyErr_Occurred()) { - PyErr_Format(PyExc_NameError, -#if PY_MAJOR_VERSION >= 3 - "name '%U' is not defined", name); -#else - "name '%.200s' is not defined", PyString_AS_STRING(name)); -#endif - } - return result; -} - -/* TupleAndListFromArray */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE void __Pyx_copy_object_array(PyObject *const *CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { - PyObject *v; - Py_ssize_t i; - for (i = 0; i < length; i++) { - v = dest[i] = src[i]; - Py_INCREF(v); - } -} -static CYTHON_INLINE PyObject * -__Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) -{ - PyObject *res; - if (n <= 0) { - Py_INCREF(__pyx_empty_tuple); - return __pyx_empty_tuple; - } - res = PyTuple_New(n); - if (unlikely(res == NULL)) return NULL; - __Pyx_copy_object_array(src, ((PyTupleObject*)res)->ob_item, n); - return res; -} -static CYTHON_INLINE PyObject * -__Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n) -{ - PyObject *res; - if (n <= 0) { - return PyList_New(0); - } - res = PyList_New(n); - if (unlikely(res == NULL)) return NULL; - __Pyx_copy_object_array(src, ((PyListObject*)res)->ob_item, n); - return res; -} -#endif - -/* BytesEquals */ -static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { -#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API - return PyObject_RichCompareBool(s1, s2, equals); -#else - if (s1 == s2) { - return (equals == Py_EQ); - } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { - const char *ps1, *ps2; - Py_ssize_t length = PyBytes_GET_SIZE(s1); - if (length != PyBytes_GET_SIZE(s2)) - return (equals == Py_NE); - ps1 = PyBytes_AS_STRING(s1); - ps2 = PyBytes_AS_STRING(s2); - if (ps1[0] != ps2[0]) { - return (equals == Py_NE); - } else if (length == 1) { - 
return (equals == Py_EQ); - } else { - int result; -#if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) - Py_hash_t hash1, hash2; - hash1 = ((PyBytesObject*)s1)->ob_shash; - hash2 = ((PyBytesObject*)s2)->ob_shash; - if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { - return (equals == Py_NE); - } -#endif - result = memcmp(ps1, ps2, (size_t)length); - return (equals == Py_EQ) ? (result == 0) : (result != 0); - } - } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { - return (equals == Py_NE); - } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { - return (equals == Py_NE); - } else { - int result; - PyObject* py_result = PyObject_RichCompare(s1, s2, equals); - if (!py_result) - return -1; - result = __Pyx_PyObject_IsTrue(py_result); - Py_DECREF(py_result); - return result; - } -#endif -} - -/* UnicodeEquals */ -static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { -#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API - return PyObject_RichCompareBool(s1, s2, equals); -#else -#if PY_MAJOR_VERSION < 3 - PyObject* owned_ref = NULL; -#endif - int s1_is_unicode, s2_is_unicode; - if (s1 == s2) { - goto return_eq; - } - s1_is_unicode = PyUnicode_CheckExact(s1); - s2_is_unicode = PyUnicode_CheckExact(s2); -#if PY_MAJOR_VERSION < 3 - if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { - owned_ref = PyUnicode_FromObject(s2); - if (unlikely(!owned_ref)) - return -1; - s2 = owned_ref; - s2_is_unicode = 1; - } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { - owned_ref = PyUnicode_FromObject(s1); - if (unlikely(!owned_ref)) - return -1; - s1 = owned_ref; - s1_is_unicode = 1; - } else if (((!s2_is_unicode) & (!s1_is_unicode))) { - return __Pyx_PyBytes_Equals(s1, s2, equals); - } -#endif - if (s1_is_unicode & s2_is_unicode) { - Py_ssize_t length; - int kind; - void *data1, *data2; - if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) - return -1; - length = __Pyx_PyUnicode_GET_LENGTH(s1); - if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { - goto return_ne; - } -#if CYTHON_USE_UNICODE_INTERNALS - { - Py_hash_t hash1, hash2; - #if CYTHON_PEP393_ENABLED - hash1 = ((PyASCIIObject*)s1)->hash; - hash2 = ((PyASCIIObject*)s2)->hash; - #else - hash1 = ((PyUnicodeObject*)s1)->hash; - hash2 = ((PyUnicodeObject*)s2)->hash; - #endif - if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { - goto return_ne; - } - } -#endif - kind = __Pyx_PyUnicode_KIND(s1); - if (kind != __Pyx_PyUnicode_KIND(s2)) { - goto return_ne; - } - data1 = __Pyx_PyUnicode_DATA(s1); - data2 = __Pyx_PyUnicode_DATA(s2); - if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { - goto return_ne; - } else if (length == 1) { - goto return_eq; - } else { - int result = memcmp(data1, data2, (size_t)(length * kind)); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_EQ) ? 
(result == 0) : (result != 0); - } - } else if ((s1 == Py_None) & s2_is_unicode) { - goto return_ne; - } else if ((s2 == Py_None) & s1_is_unicode) { - goto return_ne; - } else { - int result; - PyObject* py_result = PyObject_RichCompare(s1, s2, equals); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - if (!py_result) - return -1; - result = __Pyx_PyObject_IsTrue(py_result); - Py_DECREF(py_result); - return result; - } -return_eq: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_EQ); -return_ne: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_NE); -#endif -} - -/* fastcall */ -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s) -{ - Py_ssize_t i, n = PyTuple_GET_SIZE(kwnames); - for (i = 0; i < n; i++) - { - if (s == PyTuple_GET_ITEM(kwnames, i)) return kwvalues[i]; - } - for (i = 0; i < n; i++) - { - int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); - if (unlikely(eq != 0)) { - if (unlikely(eq < 0)) return NULL; - return kwvalues[i]; - } - } - return NULL; -} -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 -CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { - Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); - PyObject *dict; - dict = PyDict_New(); - if (unlikely(!dict)) - return NULL; - for (i=0; i= 3 - "%s() got multiple values for keyword argument '%U'", func_name, kw_name); - #else - "%s() got multiple values for keyword argument '%s'", func_name, - PyString_AsString(kw_name)); - #endif -} - -/* ParseKeywords */ -static int __Pyx_ParseOptionalKeywords( - PyObject *kwds, - PyObject *const *kwvalues, - PyObject **argnames[], - PyObject *kwds2, - PyObject *values[], - Py_ssize_t num_pos_args, - const char* function_name) -{ - PyObject *key = 0, *value = 0; - Py_ssize_t pos = 0; - PyObject*** name; - PyObject*** first_kw_arg = argnames + num_pos_args; - int kwds_is_tuple = CYTHON_METH_FASTCALL && likely(PyTuple_Check(kwds)); - while (1) { - Py_XDECREF(key); key = NULL; - Py_XDECREF(value); value = NULL; - if (kwds_is_tuple) { - Py_ssize_t size; -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(kwds); -#else - size = PyTuple_Size(kwds); - if (size < 0) goto bad; -#endif - if (pos >= size) break; -#if CYTHON_AVOID_BORROWED_REFS - key = __Pyx_PySequence_ITEM(kwds, pos); - if (!key) goto bad; -#elif CYTHON_ASSUME_SAFE_MACROS - key = PyTuple_GET_ITEM(kwds, pos); -#else - key = PyTuple_GetItem(kwds, pos); - if (!key) goto bad; -#endif - value = kwvalues[pos]; - pos++; - } - else - { - if (!PyDict_Next(kwds, &pos, &key, &value)) break; -#if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(key); -#endif - } - name = first_kw_arg; - while (*name && (**name != key)) name++; - if (*name) { - values[name-argnames] = value; -#if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(value); - Py_DECREF(key); -#endif - key = NULL; - value = NULL; - continue; - } -#if !CYTHON_AVOID_BORROWED_REFS - Py_INCREF(key); -#endif - Py_INCREF(value); - name = first_kw_arg; - #if PY_MAJOR_VERSION < 3 - if (likely(PyString_Check(key))) { - while (*name) { - if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) - && _PyString_Eq(**name, key)) { - values[name-argnames] = value; -#if CYTHON_AVOID_BORROWED_REFS - value = NULL; -#endif - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while 
(argname != first_kw_arg) { - if ((**argname == key) || ( - (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) - && _PyString_Eq(**argname, key))) { - goto arg_passed_twice; - } - argname++; - } - } - } else - #endif - if (likely(PyUnicode_Check(key))) { - while (*name) { - int cmp = ( - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**name, key) - ); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) { - values[name-argnames] = value; -#if CYTHON_AVOID_BORROWED_REFS - value = NULL; -#endif - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - int cmp = (**argname == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**argname, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) goto arg_passed_twice; - argname++; - } - } - } else - goto invalid_keyword_type; - if (kwds2) { - if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; - } else { - goto invalid_keyword; - } - } - Py_XDECREF(key); - Py_XDECREF(value); - return 0; -arg_passed_twice: - __Pyx_RaiseDoubleKeywordsError(function_name, key); - goto bad; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - goto bad; -invalid_keyword: - #if PY_MAJOR_VERSION < 3 - PyErr_Format(PyExc_TypeError, - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - PyErr_Format(PyExc_TypeError, - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif -bad: - Py_XDECREF(key); - Py_XDECREF(value); - return -1; -} - -/* ArgTypeTest */ -static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) -{ - __Pyx_TypeName type_name; - __Pyx_TypeName obj_type_name; - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - else if (exact) { - #if PY_MAJOR_VERSION == 2 - if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; - #endif - } - else { - if (likely(__Pyx_TypeCheck(obj, type))) return 1; - } - type_name = __Pyx_PyType_GetName(type); - obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); - PyErr_Format(PyExc_TypeError, - "Argument '%.200s' has incorrect type (expected " __Pyx_FMT_TYPENAME - ", got " __Pyx_FMT_TYPENAME ")", name, type_name, obj_type_name); - __Pyx_DECREF_TypeName(type_name); - __Pyx_DECREF_TypeName(obj_type_name); - return 0; -} - -/* KeywordStringCheck */ -static int __Pyx_CheckKeywordStrings( - PyObject *kw, - const char* function_name, - int kw_allowed) -{ - PyObject* key = 0; - Py_ssize_t pos = 0; -#if CYTHON_COMPILING_IN_PYPY - if (!kw_allowed && PyDict_Next(kw, &pos, &key, 0)) - goto invalid_keyword; - return 1; -#else - if (CYTHON_METH_FASTCALL && likely(PyTuple_Check(kw))) { - Py_ssize_t kwsize; -#if CYTHON_ASSUME_SAFE_MACROS - kwsize = PyTuple_GET_SIZE(kw); -#else - kwsize = PyTuple_Size(kw); - if (kwsize < 0) return 0; -#endif - if (unlikely(kwsize == 0)) - return 1; - if (!kw_allowed) { -#if CYTHON_ASSUME_SAFE_MACROS - key = PyTuple_GET_ITEM(kw, 0); -#else - key = PyTuple_GetItem(kw, pos); - if (!key) return 0; -#endif - goto invalid_keyword; - } -#if PY_VERSION_HEX < 0x03090000 - for (pos 
= 0; pos < kwsize; pos++) { -#if CYTHON_ASSUME_SAFE_MACROS - key = PyTuple_GET_ITEM(kw, pos); -#else - key = PyTuple_GetItem(kw, pos); - if (!key) return 0; -#endif - if (unlikely(!PyUnicode_Check(key))) - goto invalid_keyword_type; - } -#endif - return 1; - } - while (PyDict_Next(kw, &pos, &key, 0)) { - #if PY_MAJOR_VERSION < 3 - if (unlikely(!PyString_Check(key))) - #endif - if (unlikely(!PyUnicode_Check(key))) - goto invalid_keyword_type; - } - if (!kw_allowed && unlikely(key)) - goto invalid_keyword; - return 1; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - return 0; -#endif -invalid_keyword: - #if PY_MAJOR_VERSION < 3 - PyErr_Format(PyExc_TypeError, - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - PyErr_Format(PyExc_TypeError, - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif - return 0; -} - -/* PyDictVersioning */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { - PyObject **dictptr = NULL; - Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; - if (offset) { -#if CYTHON_COMPILING_IN_CPYTHON - dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); -#else - dictptr = _PyObject_GetDictPtr(obj); -#endif - } - return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; -} -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) - return 0; - return obj_dict_version == __Pyx_get_object_dict_version(obj); -} -#endif - -/* GetModuleGlobalName */ -#if CYTHON_USE_DICT_VERSIONS -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) -#else -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) -#endif -{ - PyObject *result; -#if !CYTHON_AVOID_BORROWED_REFS -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 - result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } else if (unlikely(PyErr_Occurred())) { - return NULL; - } -#elif CYTHON_COMPILING_IN_LIMITED_API - if (unlikely(!__pyx_m)) { - return NULL; - } - result = PyObject_GetAttr(__pyx_m, name); - if (likely(result)) { - return result; - } -#else - result = PyDict_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } -#endif -#else - result = PyObject_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } - PyErr_Clear(); -#endif - return __Pyx_GetBuiltinName(name); -} - -/* PyFunctionFastCall */ -#if CYTHON_FAST_PYCALL && !CYTHON_VECTORCALL -static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, - PyObject *globals) { - PyFrameObject 
*f; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject **fastlocals; - Py_ssize_t i; - PyObject *result; - assert(globals != NULL); - /* XXX Perhaps we should create a specialized - PyFrame_New() that doesn't take locals, but does - take builtins without sanity checking them. - */ - assert(tstate != NULL); - f = PyFrame_New(tstate, co, globals, NULL); - if (f == NULL) { - return NULL; - } - fastlocals = __Pyx_PyFrame_GetLocalsplus(f); - for (i = 0; i < na; i++) { - Py_INCREF(*args); - fastlocals[i] = *args++; - } - result = PyEval_EvalFrameEx(f,0); - ++tstate->recursion_depth; - Py_DECREF(f); - --tstate->recursion_depth; - return result; -} -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { - PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); - PyObject *globals = PyFunction_GET_GLOBALS(func); - PyObject *argdefs = PyFunction_GET_DEFAULTS(func); - PyObject *closure; -#if PY_MAJOR_VERSION >= 3 - PyObject *kwdefs; -#endif - PyObject *kwtuple, **k; - PyObject **d; - Py_ssize_t nd; - Py_ssize_t nk; - PyObject *result; - assert(kwargs == NULL || PyDict_Check(kwargs)); - nk = kwargs ? PyDict_Size(kwargs) : 0; - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) { - return NULL; - } - #else - if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) { - return NULL; - } - #endif - if ( -#if PY_MAJOR_VERSION >= 3 - co->co_kwonlyargcount == 0 && -#endif - likely(kwargs == NULL || nk == 0) && - co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { - if (argdefs == NULL && co->co_argcount == nargs) { - result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); - goto done; - } - else if (nargs == 0 && argdefs != NULL - && co->co_argcount == Py_SIZE(argdefs)) { - /* function called with no arguments, but all parameters have - a default value: use default values as arguments .*/ - args = &PyTuple_GET_ITEM(argdefs, 0); - result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); - goto done; - } - } - if (kwargs != NULL) { - Py_ssize_t pos, i; - kwtuple = PyTuple_New(2 * nk); - if (kwtuple == NULL) { - result = NULL; - goto done; - } - k = &PyTuple_GET_ITEM(kwtuple, 0); - pos = i = 0; - while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { - Py_INCREF(k[i]); - Py_INCREF(k[i+1]); - i += 2; - } - nk = i / 2; - } - else { - kwtuple = NULL; - k = NULL; - } - closure = PyFunction_GET_CLOSURE(func); -#if PY_MAJOR_VERSION >= 3 - kwdefs = PyFunction_GET_KW_DEFAULTS(func); -#endif - if (argdefs != NULL) { - d = &PyTuple_GET_ITEM(argdefs, 0); - nd = Py_SIZE(argdefs); - } - else { - d = NULL; - nd = 0; - } -#if PY_MAJOR_VERSION >= 3 - result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, kwdefs, closure); -#else - result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, closure); -#endif - Py_XDECREF(kwtuple); -done: - Py_LeaveRecursiveCall(); - return result; -} -#endif - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = Py_TYPE(func)->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - #else - if (unlikely(Py_EnterRecursiveCall(" while 
calling a Python object"))) - return NULL; - #endif - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCallMethO */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { - PyObject *self, *result; - PyCFunction cfunc; - cfunc = __Pyx_CyOrPyCFunction_GET_FUNCTION(func); - self = __Pyx_CyOrPyCFunction_GET_SELF(func); - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - #else - if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) - return NULL; - #endif - result = cfunc(self, arg); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectFastCall */ -#if PY_VERSION_HEX < 0x03090000 || CYTHON_COMPILING_IN_LIMITED_API -static PyObject* __Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs) { - PyObject *argstuple; - PyObject *result = 0; - size_t i; - argstuple = PyTuple_New((Py_ssize_t)nargs); - if (unlikely(!argstuple)) return NULL; - for (i = 0; i < nargs; i++) { - Py_INCREF(args[i]); - if (__Pyx_PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]) < 0) goto bad; - } - result = __Pyx_PyObject_Call(func, argstuple, kwargs); - bad: - Py_DECREF(argstuple); - return result; -} -#endif -static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t _nargs, PyObject *kwargs) { - Py_ssize_t nargs = __Pyx_PyVectorcall_NARGS(_nargs); -#if CYTHON_COMPILING_IN_CPYTHON - if (nargs == 0 && kwargs == NULL) { - if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_NOARGS)) - return __Pyx_PyObject_CallMethO(func, NULL); - } - else if (nargs == 1 && kwargs == NULL) { - if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_O)) - return __Pyx_PyObject_CallMethO(func, args[0]); - } -#endif - #if PY_VERSION_HEX < 0x030800B1 - #if CYTHON_FAST_PYCCALL - if (PyCFunction_Check(func)) { - if (kwargs) { - return _PyCFunction_FastCallDict(func, args, nargs, kwargs); - } else { - return _PyCFunction_FastCallKeywords(func, args, nargs, NULL); - } - } - #if PY_VERSION_HEX >= 0x030700A1 - if (!kwargs && __Pyx_IS_TYPE(func, &PyMethodDescr_Type)) { - return _PyMethodDescr_FastCallKeywords(func, args, nargs, NULL); - } - #endif - #endif - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs); - } - #endif - #endif - if (kwargs == NULL) { - #if CYTHON_VECTORCALL - #if PY_VERSION_HEX < 0x03090000 - vectorcallfunc f = _PyVectorcall_Function(func); - #else - vectorcallfunc f = PyVectorcall_Function(func); - #endif - if (f) { - return f(func, args, (size_t)nargs, NULL); - } - #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL - if (__Pyx_CyFunction_CheckExact(func)) { - __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func); - if (f) return f(func, args, (size_t)nargs, NULL); - } - #endif - } - if (nargs == 0) { - return __Pyx_PyObject_Call(func, __pyx_empty_tuple, kwargs); - } - #if PY_VERSION_HEX >= 0x03090000 && !CYTHON_COMPILING_IN_LIMITED_API - return 
PyObject_VectorcallDict(func, args, (size_t)nargs, kwargs); - #else - return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs); - #endif -} - -/* DictGetItem */ -#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY -static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { - PyObject *value; - value = PyDict_GetItemWithError(d, key); - if (unlikely(!value)) { - if (!PyErr_Occurred()) { - if (unlikely(PyTuple_Check(key))) { - PyObject* args = PyTuple_Pack(1, key); - if (likely(args)) { - PyErr_SetObject(PyExc_KeyError, args); - Py_DECREF(args); - } - } else { - PyErr_SetObject(PyExc_KeyError, key); - } - } - return NULL; - } - Py_INCREF(value); - return value; -} -#endif - -/* RaiseUnexpectedTypeError */ -static int -__Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) -{ - __Pyx_TypeName obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); - PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, - expected, obj_type_name); - __Pyx_DECREF_TypeName(obj_type_name); - return 0; -} - -/* GetItemInt */ -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { - PyObject *r; - if (unlikely(!j)) return NULL; - r = PyObject_GetItem(o, j); - Py_DECREF(j); - return r; -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyList_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { - PyObject *r = PyList_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyTuple_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS - if (is_list || PyList_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); - if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { - PyObject *r = PyList_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } - else if (PyTuple_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); - if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } else { - PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; - PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; - if (mm && mm->mp_subscript) { - PyObject *r, *key = PyInt_FromSsize_t(i); - if (unlikely(!key)) return NULL; - r = mm->mp_subscript(o, key); - Py_DECREF(key); - return r; - } - if (likely(sm && sm->sq_item)) { - if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { - Py_ssize_t l = sm->sq_length(o); - if (likely(l >= 0)) { - i += l; - } else { - if (!PyErr_ExceptionMatches(PyExc_OverflowError)) - return NULL; - PyErr_Clear(); - } - } - return sm->sq_item(o, i); - } - } -#else - if (is_list || !PyMapping_Check(o)) { - return PySequence_GetItem(o, i); - } -#endif - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -} - -/* PyObjectCallOneArg */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *args[2] = {NULL, arg}; - return __Pyx_PyObject_FastCall(func, args+1, 1 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); -} - -/* ObjectGetItem */ -#if CYTHON_USE_TYPE_SLOTS -static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject *index) { - PyObject *runerr = NULL; - Py_ssize_t key_value; - key_value = __Pyx_PyIndex_AsSsize_t(index); - if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) { - return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1); - } - if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) { - __Pyx_TypeName index_type_name = __Pyx_PyType_GetName(Py_TYPE(index)); - PyErr_Clear(); - PyErr_Format(PyExc_IndexError, - "cannot fit '" __Pyx_FMT_TYPENAME "' into an index-sized integer", index_type_name); - __Pyx_DECREF_TypeName(index_type_name); - } - return NULL; -} -static PyObject *__Pyx_PyObject_GetItem_Slow(PyObject *obj, PyObject *key) { - __Pyx_TypeName obj_type_name; - if (likely(PyType_Check(obj))) { - PyObject *meth = __Pyx_PyObject_GetAttrStrNoError(obj, __pyx_n_s_class_getitem); - if (!meth) { - PyErr_Clear(); - } else { - PyObject *result = __Pyx_PyObject_CallOneArg(meth, key); - Py_DECREF(meth); - return result; - } - } - obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); - PyErr_Format(PyExc_TypeError, - "'" __Pyx_FMT_TYPENAME "' object is not subscriptable", obj_type_name); - __Pyx_DECREF_TypeName(obj_type_name); - return NULL; -} -static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject *key) { - PyTypeObject *tp = Py_TYPE(obj); - PyMappingMethods *mm = tp->tp_as_mapping; - PySequenceMethods *sm = tp->tp_as_sequence; - if (likely(mm && mm->mp_subscript)) { - return mm->mp_subscript(obj, key); - } - if (likely(sm && sm->sq_item)) { - return __Pyx_PyObject_GetIndex(obj, key); - } - return __Pyx_PyObject_GetItem_Slow(obj, key); -} -#endif - -/* UnpackUnboundCMethod */ -static PyObject *__Pyx_SelflessCall(PyObject *method, PyObject *args, PyObject *kwargs) { - PyObject *result; - PyObject *selfless_args = PyTuple_GetSlice(args, 1, PyTuple_Size(args)); - if (unlikely(!selfless_args)) return NULL; - result = PyObject_Call(method, selfless_args, kwargs); - Py_DECREF(selfless_args); - return result; -} -static PyMethodDef __Pyx_UnboundCMethod_Def = { - "CythonUnboundCMethod", - __PYX_REINTERPRET_FUNCION(PyCFunction, __Pyx_SelflessCall), - METH_VARARGS | METH_KEYWORDS, - NULL -}; -static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) { - PyObject *method; - method = 
__Pyx_PyObject_GetAttrStr(target->type, *target->method_name); - if (unlikely(!method)) - return -1; - target->method = method; -#if CYTHON_COMPILING_IN_CPYTHON - #if PY_MAJOR_VERSION >= 3 - if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type))) - #else - if (likely(!__Pyx_CyOrPyCFunction_Check(method))) - #endif - { - PyMethodDescrObject *descr = (PyMethodDescrObject*) method; - target->func = descr->d_method->ml_meth; - target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_STACKLESS); - } else -#endif -#if CYTHON_COMPILING_IN_PYPY -#else - if (PyCFunction_Check(method)) -#endif - { - PyObject *self; - int self_found; -#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY - self = PyObject_GetAttrString(method, "__self__"); - if (!self) { - PyErr_Clear(); - } -#else - self = PyCFunction_GET_SELF(method); -#endif - self_found = (self && self != Py_None); -#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY - Py_XDECREF(self); -#endif - if (self_found) { - PyObject *unbound_method = PyCFunction_New(&__Pyx_UnboundCMethod_Def, method); - if (unlikely(!unbound_method)) return -1; - Py_DECREF(method); - target->method = unbound_method; - } - } - return 0; -} - -/* CallUnboundCMethod1 */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) { - if (likely(cfunc->func)) { - int flag = cfunc->flag; - if (flag == METH_O) { - return (*(cfunc->func))(self, arg); - } else if ((PY_VERSION_HEX >= 0x030600B1) && flag == METH_FASTCALL) { - #if PY_VERSION_HEX >= 0x030700A0 - return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1); - #else - return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); - #endif - } else if ((PY_VERSION_HEX >= 0x030700A0) && flag == (METH_FASTCALL | METH_KEYWORDS)) { - return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); - } - } - return __Pyx__CallUnboundCMethod1(cfunc, self, arg); -} -#endif -static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){ - PyObject *args, *result = NULL; - if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; -#if CYTHON_COMPILING_IN_CPYTHON - if (cfunc->func && (cfunc->flag & METH_VARARGS)) { - args = PyTuple_New(1); - if (unlikely(!args)) goto bad; - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 0, arg); - if (cfunc->flag & METH_KEYWORDS) - result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); - else - result = (*cfunc->func)(self, args); - } else { - args = PyTuple_New(2); - if (unlikely(!args)) goto bad; - Py_INCREF(self); - PyTuple_SET_ITEM(args, 0, self); - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 1, arg); - result = __Pyx_PyObject_Call(cfunc->method, args, NULL); - } -#else - args = PyTuple_Pack(2, self, arg); - if (unlikely(!args)) goto bad; - result = __Pyx_PyObject_Call(cfunc->method, args, NULL); -#endif -bad: - Py_XDECREF(args); - return result; -} - -/* CallUnboundCMethod2 */ -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 -static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2) { - if (likely(cfunc->func)) { - PyObject *args[2] = {arg1, arg2}; - if (cfunc->flag == METH_FASTCALL) { - #if PY_VERSION_HEX >= 0x030700A0 - return 
(*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, args, 2); - #else - return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); - #endif - } - #if PY_VERSION_HEX >= 0x030700A0 - if (cfunc->flag == (METH_FASTCALL | METH_KEYWORDS)) - return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL); - #endif - } - return __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2); -} -#endif -static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2){ - PyObject *args, *result = NULL; - if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; -#if CYTHON_COMPILING_IN_CPYTHON - if (cfunc->func && (cfunc->flag & METH_VARARGS)) { - args = PyTuple_New(2); - if (unlikely(!args)) goto bad; - Py_INCREF(arg1); - PyTuple_SET_ITEM(args, 0, arg1); - Py_INCREF(arg2); - PyTuple_SET_ITEM(args, 1, arg2); - if (cfunc->flag & METH_KEYWORDS) - result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); - else - result = (*cfunc->func)(self, args); - } else { - args = PyTuple_New(3); - if (unlikely(!args)) goto bad; - Py_INCREF(self); - PyTuple_SET_ITEM(args, 0, self); - Py_INCREF(arg1); - PyTuple_SET_ITEM(args, 1, arg1); - Py_INCREF(arg2); - PyTuple_SET_ITEM(args, 2, arg2); - result = __Pyx_PyObject_Call(cfunc->method, args, NULL); - } -#else - args = PyTuple_Pack(3, self, arg1, arg2); - if (unlikely(!args)) goto bad; - result = __Pyx_PyObject_Call(cfunc->method, args, NULL); -#endif -bad: - Py_XDECREF(args); - return result; -} - -/* dict_getitem_default */ -static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) { - PyObject* value; -#if PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) - value = PyDict_GetItemWithError(d, key); - if (unlikely(!value)) { - if (unlikely(PyErr_Occurred())) - return NULL; - value = default_value; - } - Py_INCREF(value); - if ((1)); -#else - if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) { - value = PyDict_GetItem(d, key); - if (unlikely(!value)) { - value = default_value; - } - Py_INCREF(value); - } -#endif - else { - if (default_value == Py_None) - value = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyDict_Type_get, d, key); - else - value = __Pyx_CallUnboundCMethod2(&__pyx_umethod_PyDict_Type_get, d, key, default_value); - } - return value; -} - -/* RaiseException */ -#if PY_MAJOR_VERSION < 3 -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - __Pyx_PyThreadState_declare - CYTHON_UNUSED_VAR(cause); - Py_XINCREF(type); - if (!value || value == Py_None) - value = NULL; - else - Py_INCREF(value); - if (!tb || tb == Py_None) - tb = NULL; - else { - Py_INCREF(tb); - if (!PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto raise_error; - } - } - if (PyType_Check(type)) { -#if CYTHON_COMPILING_IN_PYPY - if (!value) { - Py_INCREF(Py_None); - value = Py_None; - } -#endif - PyErr_NormalizeException(&type, &value, &tb); - } else { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto raise_error; - } - value = type; - type = (PyObject*) Py_TYPE(type); - Py_INCREF(type); - if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { - PyErr_SetString(PyExc_TypeError, - 
"raise: exception class must be a subclass of BaseException"); - goto raise_error; - } - } - __Pyx_PyThreadState_assign - __Pyx_ErrRestore(type, value, tb); - return; -raise_error: - Py_XDECREF(value); - Py_XDECREF(type); - Py_XDECREF(tb); - return; -} -#else -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - PyObject* owned_instance = NULL; - if (tb == Py_None) { - tb = 0; - } else if (tb && !PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto bad; - } - if (value == Py_None) - value = 0; - if (PyExceptionInstance_Check(type)) { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto bad; - } - value = type; - type = (PyObject*) Py_TYPE(value); - } else if (PyExceptionClass_Check(type)) { - PyObject *instance_class = NULL; - if (value && PyExceptionInstance_Check(value)) { - instance_class = (PyObject*) Py_TYPE(value); - if (instance_class != type) { - int is_subclass = PyObject_IsSubclass(instance_class, type); - if (!is_subclass) { - instance_class = NULL; - } else if (unlikely(is_subclass == -1)) { - goto bad; - } else { - type = instance_class; - } - } - } - if (!instance_class) { - PyObject *args; - if (!value) - args = PyTuple_New(0); - else if (PyTuple_Check(value)) { - Py_INCREF(value); - args = value; - } else - args = PyTuple_Pack(1, value); - if (!args) - goto bad; - owned_instance = PyObject_Call(type, args, NULL); - Py_DECREF(args); - if (!owned_instance) - goto bad; - value = owned_instance; - if (!PyExceptionInstance_Check(value)) { - PyErr_Format(PyExc_TypeError, - "calling %R should have returned an instance of " - "BaseException, not %R", - type, Py_TYPE(value)); - goto bad; - } - } - } else { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto bad; - } - if (cause) { - PyObject *fixed_cause; - if (cause == Py_None) { - fixed_cause = NULL; - } else if (PyExceptionClass_Check(cause)) { - fixed_cause = PyObject_CallObject(cause, NULL); - if (fixed_cause == NULL) - goto bad; - } else if (PyExceptionInstance_Check(cause)) { - fixed_cause = cause; - Py_INCREF(fixed_cause); - } else { - PyErr_SetString(PyExc_TypeError, - "exception causes must derive from " - "BaseException"); - goto bad; - } - PyException_SetCause(value, fixed_cause); - } - PyErr_SetObject(type, value); - if (tb) { - #if PY_VERSION_HEX >= 0x030C00A6 - PyException_SetTraceback(value, tb); - #elif CYTHON_FAST_THREAD_STATE - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* tmp_tb = tstate->curexc_traceback; - if (tb != tmp_tb) { - Py_INCREF(tb); - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_tb); - } -#else - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); - Py_INCREF(tb); - PyErr_Restore(tmp_type, tmp_value, tb); - Py_XDECREF(tmp_tb); -#endif - } -bad: - Py_XDECREF(owned_instance); - return; -} -#endif - -/* SetItemInt */ -static int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v) { - int r; - if (unlikely(!j)) return -1; - r = PyObject_SetItem(o, j, v); - Py_DECREF(j); - return r; -} -static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObject *v, int is_list, - CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS - if (is_list || PyList_CheckExact(o)) { - Py_ssize_t n = (!wraparound) ? 
i : ((likely(i >= 0)) ? i : i + PyList_GET_SIZE(o)); - if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o)))) { - PyObject* old = PyList_GET_ITEM(o, n); - Py_INCREF(v); - PyList_SET_ITEM(o, n, v); - Py_DECREF(old); - return 1; - } - } else { - PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; - PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; - if (mm && mm->mp_ass_subscript) { - int r; - PyObject *key = PyInt_FromSsize_t(i); - if (unlikely(!key)) return -1; - r = mm->mp_ass_subscript(o, key, v); - Py_DECREF(key); - return r; - } - if (likely(sm && sm->sq_ass_item)) { - if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { - Py_ssize_t l = sm->sq_length(o); - if (likely(l >= 0)) { - i += l; - } else { - if (!PyErr_ExceptionMatches(PyExc_OverflowError)) - return -1; - PyErr_Clear(); - } - } - return sm->sq_ass_item(o, i, v); - } - } -#else - if (is_list || !PyMapping_Check(o)) - { - return PySequence_SetItem(o, i, v); - } -#endif - return __Pyx_SetItemInt_Generic(o, PyInt_FromSsize_t(i), v); -} - -/* PyObject_GenericGetAttrNoDict */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { - __Pyx_TypeName type_name = __Pyx_PyType_GetName(tp); - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", - type_name, attr_name); -#else - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", - type_name, PyString_AS_STRING(attr_name)); -#endif - __Pyx_DECREF_TypeName(type_name); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { - PyObject *descr; - PyTypeObject *tp = Py_TYPE(obj); - if (unlikely(!PyString_Check(attr_name))) { - return PyObject_GenericGetAttr(obj, attr_name); - } - assert(!tp->tp_dictoffset); - descr = _PyType_Lookup(tp, attr_name); - if (unlikely(!descr)) { - return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); - } - Py_INCREF(descr); - #if PY_MAJOR_VERSION < 3 - if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) - #endif - { - descrgetfunc f = Py_TYPE(descr)->tp_descr_get; - if (unlikely(f)) { - PyObject *res = f(descr, obj, (PyObject *)tp); - Py_DECREF(descr); - return res; - } - } - return descr; -} -#endif - -/* PyObject_GenericGetAttr */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { - if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { - return PyObject_GenericGetAttr(obj, attr_name); - } - return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); -} -#endif - -/* FixUpExtensionType */ -#if CYTHON_USE_TYPE_SPECS -static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { -#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - CYTHON_UNUSED_VAR(spec); - CYTHON_UNUSED_VAR(type); -#else - const PyType_Slot *slot = spec->slots; - while (slot && slot->slot && slot->slot != Py_tp_members) - slot++; - if (slot && slot->slot == Py_tp_members) { - int changed = 0; -#if !(PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON) - const -#endif - PyMemberDef *memb = (PyMemberDef*) slot->pfunc; - while (memb && memb->name) { - if (memb->name[0] == '_' && memb->name[1] == '_') { -#if PY_VERSION_HEX < 0x030900b1 - if (strcmp(memb->name, "__weaklistoffset__") == 0) { - assert(memb->type == T_PYSSIZET); 
- assert(memb->flags == READONLY); - type->tp_weaklistoffset = memb->offset; - changed = 1; - } - else if (strcmp(memb->name, "__dictoffset__") == 0) { - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); - type->tp_dictoffset = memb->offset; - changed = 1; - } -#if CYTHON_METH_FASTCALL - else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); -#if PY_VERSION_HEX >= 0x030800b4 - type->tp_vectorcall_offset = memb->offset; -#else - type->tp_print = (printfunc) memb->offset; -#endif - changed = 1; - } -#endif -#else - if ((0)); -#endif -#if PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON - else if (strcmp(memb->name, "__module__") == 0) { - PyObject *descr; - assert(memb->type == T_OBJECT); - assert(memb->flags == 0 || memb->flags == READONLY); - descr = PyDescr_NewMember(type, memb); - if (unlikely(!descr)) - return -1; - if (unlikely(PyDict_SetItem(type->tp_dict, PyDescr_NAME(descr), descr) < 0)) { - Py_DECREF(descr); - return -1; - } - Py_DECREF(descr); - changed = 1; - } -#endif - } - memb++; - } - if (changed) - PyType_Modified(type); - } -#endif - return 0; -} -#endif - -/* PyObjectCallNoArg */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { - PyObject *arg[2] = {NULL, NULL}; - return __Pyx_PyObject_FastCall(func, arg + 1, 0 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); -} - -/* PyObjectGetMethod */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { - PyObject *attr; -#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP - __Pyx_TypeName type_name; - PyTypeObject *tp = Py_TYPE(obj); - PyObject *descr; - descrgetfunc f = NULL; - PyObject **dictptr, *dict; - int meth_found = 0; - assert (*method == NULL); - if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; - } - if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { - return 0; - } - descr = _PyType_Lookup(tp, name); - if (likely(descr != NULL)) { - Py_INCREF(descr); -#if defined(Py_TPFLAGS_METHOD_DESCRIPTOR) && Py_TPFLAGS_METHOD_DESCRIPTOR - if (__Pyx_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)) -#elif PY_MAJOR_VERSION >= 3 - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type))) - #endif -#else - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr))) - #endif -#endif - { - meth_found = 1; - } else { - f = Py_TYPE(descr)->tp_descr_get; - if (f != NULL && PyDescr_IsData(descr)) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - } - } - dictptr = _PyObject_GetDictPtr(obj); - if (dictptr != NULL && (dict = *dictptr) != NULL) { - Py_INCREF(dict); - attr = __Pyx_PyDict_GetItemStr(dict, name); - if (attr != NULL) { - Py_INCREF(attr); - Py_DECREF(dict); - Py_XDECREF(descr); - goto try_unpack; - } - Py_DECREF(dict); - } - if (meth_found) { - *method = descr; - return 1; - } - if (f != NULL) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - if (likely(descr != NULL)) { - *method = descr; - return 0; - } - type_name = __Pyx_PyType_GetName(tp); - PyErr_Format(PyExc_AttributeError, 
-#if PY_MAJOR_VERSION >= 3 - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", - type_name, name); -#else - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", - type_name, PyString_AS_STRING(name)); -#endif - __Pyx_DECREF_TypeName(type_name); - return 0; -#else - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; -#endif -try_unpack: -#if CYTHON_UNPACK_METHODS - if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { - PyObject *function = PyMethod_GET_FUNCTION(attr); - Py_INCREF(function); - Py_DECREF(attr); - *method = function; - return 1; - } -#endif - *method = attr; - return 0; -} - -/* PyObjectCallMethod0 */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { - PyObject *method = NULL, *result = NULL; - int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); - if (likely(is_method)) { - result = __Pyx_PyObject_CallOneArg(method, obj); - Py_DECREF(method); - return result; - } - if (unlikely(!method)) goto bad; - result = __Pyx_PyObject_CallNoArg(method); - Py_DECREF(method); -bad: - return result; -} - -/* ValidateBasesTuple */ -#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS -static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases) { - Py_ssize_t i, n; -#if CYTHON_ASSUME_SAFE_MACROS - n = PyTuple_GET_SIZE(bases); -#else - n = PyTuple_Size(bases); - if (n < 0) return -1; -#endif - for (i = 1; i < n; i++) - { -#if CYTHON_AVOID_BORROWED_REFS - PyObject *b0 = PySequence_GetItem(bases, i); - if (!b0) return -1; -#elif CYTHON_ASSUME_SAFE_MACROS - PyObject *b0 = PyTuple_GET_ITEM(bases, i); -#else - PyObject *b0 = PyTuple_GetItem(bases, i); - if (!b0) return -1; -#endif - PyTypeObject *b; -#if PY_MAJOR_VERSION < 3 - if (PyClass_Check(b0)) - { - PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class", - PyString_AS_STRING(((PyClassObject*)b0)->cl_name)); -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } -#endif - b = (PyTypeObject*) b0; - if (!__Pyx_PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE)) - { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); - PyErr_Format(PyExc_TypeError, - "base class '" __Pyx_FMT_TYPENAME "' is not a heap type", b_name); - __Pyx_DECREF_TypeName(b_name); -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } - if (dictoffset == 0) - { - Py_ssize_t b_dictoffset = 0; -#if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY - b_dictoffset = b->tp_dictoffset; -#else - PyObject *py_b_dictoffset = PyObject_GetAttrString((PyObject*)b, "__dictoffset__"); - if (!py_b_dictoffset) goto dictoffset_return; - b_dictoffset = PyLong_AsSsize_t(py_b_dictoffset); - Py_DECREF(py_b_dictoffset); - if (b_dictoffset == -1 && PyErr_Occurred()) goto dictoffset_return; -#endif - if (b_dictoffset) { - { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); - PyErr_Format(PyExc_TypeError, - "extension type '%.200s' has no __dict__ slot, " - "but base type '" __Pyx_FMT_TYPENAME "' has: " - "either add 'cdef dict __dict__' to the extension type " - "or add '__slots__ = [...]' to the base type", - type_name, b_name); - __Pyx_DECREF_TypeName(b_name); - } -#if !(CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY) - dictoffset_return: -#endif -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } - } -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - } - return 0; -} -#endif - -/* PyType_Ready */ -static int 
__Pyx_PyType_Ready(PyTypeObject *t) { -#if CYTHON_USE_TYPE_SPECS || !(CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API) || defined(PYSTON_MAJOR_VERSION) - (void)__Pyx_PyObject_CallMethod0; -#if CYTHON_USE_TYPE_SPECS - (void)__Pyx_validate_bases_tuple; -#endif - return PyType_Ready(t); -#else - int r; - PyObject *bases = __Pyx_PyType_GetSlot(t, tp_bases, PyObject*); - if (bases && unlikely(__Pyx_validate_bases_tuple(t->tp_name, t->tp_dictoffset, bases) == -1)) - return -1; -#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) - { - int gc_was_enabled; - #if PY_VERSION_HEX >= 0x030A00b1 - gc_was_enabled = PyGC_Disable(); - (void)__Pyx_PyObject_CallMethod0; - #else - PyObject *ret, *py_status; - PyObject *gc = NULL; - #if PY_VERSION_HEX >= 0x030700a1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM+0 >= 0x07030400) - gc = PyImport_GetModule(__pyx_kp_u_gc); - #endif - if (unlikely(!gc)) gc = PyImport_Import(__pyx_kp_u_gc); - if (unlikely(!gc)) return -1; - py_status = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_isenabled); - if (unlikely(!py_status)) { - Py_DECREF(gc); - return -1; - } - gc_was_enabled = __Pyx_PyObject_IsTrue(py_status); - Py_DECREF(py_status); - if (gc_was_enabled > 0) { - ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_disable); - if (unlikely(!ret)) { - Py_DECREF(gc); - return -1; - } - Py_DECREF(ret); - } else if (unlikely(gc_was_enabled == -1)) { - Py_DECREF(gc); - return -1; - } - #endif - t->tp_flags |= Py_TPFLAGS_HEAPTYPE; -#if PY_VERSION_HEX >= 0x030A0000 - t->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; -#endif -#else - (void)__Pyx_PyObject_CallMethod0; -#endif - r = PyType_Ready(t); -#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) - t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; - #if PY_VERSION_HEX >= 0x030A00b1 - if (gc_was_enabled) - PyGC_Enable(); - #else - if (gc_was_enabled) { - PyObject *tp, *v, *tb; - PyErr_Fetch(&tp, &v, &tb); - ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_enable); - if (likely(ret || r == -1)) { - Py_XDECREF(ret); - PyErr_Restore(tp, v, tb); - } else { - Py_XDECREF(tp); - Py_XDECREF(v); - Py_XDECREF(tb); - r = -1; - } - } - Py_DECREF(gc); - #endif - } -#endif - return r; -#endif -} - -/* SetVTable */ -static int __Pyx_SetVtable(PyTypeObject *type, void *vtable) { - PyObject *ob = PyCapsule_New(vtable, 0, 0); - if (unlikely(!ob)) - goto bad; -#if CYTHON_COMPILING_IN_LIMITED_API - if (unlikely(PyObject_SetAttr((PyObject *) type, __pyx_n_s_pyx_vtable, ob) < 0)) -#else - if (unlikely(PyDict_SetItem(type->tp_dict, __pyx_n_s_pyx_vtable, ob) < 0)) -#endif - goto bad; - Py_DECREF(ob); - return 0; -bad: - Py_XDECREF(ob); - return -1; -} - -/* GetVTable */ -static void* __Pyx_GetVtable(PyTypeObject *type) { - void* ptr; -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *ob = PyObject_GetAttr((PyObject *)type, __pyx_n_s_pyx_vtable); -#else - PyObject *ob = PyObject_GetItem(type->tp_dict, __pyx_n_s_pyx_vtable); -#endif - if (!ob) - goto bad; - ptr = PyCapsule_GetPointer(ob, 0); - if (!ptr && !PyErr_Occurred()) - PyErr_SetString(PyExc_RuntimeError, "invalid vtable found for imported type"); - Py_DECREF(ob); - return ptr; -bad: - Py_XDECREF(ob); - return NULL; -} - -/* MergeVTables */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_MergeVtables(PyTypeObject *type) { - int i; - void** base_vtables; - __Pyx_TypeName tp_base_name; - __Pyx_TypeName base_name; - void* unknown = (void*)-1; - PyObject* bases = type->tp_bases; - int base_depth = 0; - { - PyTypeObject* base = type->tp_base; - while (base) { - base_depth += 
1; - base = base->tp_base; - } - } - base_vtables = (void**) malloc(sizeof(void*) * (size_t)(base_depth + 1)); - base_vtables[0] = unknown; - for (i = 1; i < PyTuple_GET_SIZE(bases); i++) { - void* base_vtable = __Pyx_GetVtable(((PyTypeObject*)PyTuple_GET_ITEM(bases, i))); - if (base_vtable != NULL) { - int j; - PyTypeObject* base = type->tp_base; - for (j = 0; j < base_depth; j++) { - if (base_vtables[j] == unknown) { - base_vtables[j] = __Pyx_GetVtable(base); - base_vtables[j + 1] = unknown; - } - if (base_vtables[j] == base_vtable) { - break; - } else if (base_vtables[j] == NULL) { - goto bad; - } - base = base->tp_base; - } - } - } - PyErr_Clear(); - free(base_vtables); - return 0; -bad: - tp_base_name = __Pyx_PyType_GetName(type->tp_base); - base_name = __Pyx_PyType_GetName((PyTypeObject*)PyTuple_GET_ITEM(bases, i)); - PyErr_Format(PyExc_TypeError, - "multiple bases have vtable conflict: '" __Pyx_FMT_TYPENAME "' and '" __Pyx_FMT_TYPENAME "'", tp_base_name, base_name); - __Pyx_DECREF_TypeName(tp_base_name); - __Pyx_DECREF_TypeName(base_name); - free(base_vtables); - return -1; -} -#endif - -/* Import */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { - PyObject *module = 0; - PyObject *empty_dict = 0; - PyObject *empty_list = 0; - #if PY_MAJOR_VERSION < 3 - PyObject *py_import; - py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); - if (unlikely(!py_import)) - goto bad; - if (!from_list) { - empty_list = PyList_New(0); - if (unlikely(!empty_list)) - goto bad; - from_list = empty_list; - } - #endif - empty_dict = PyDict_New(); - if (unlikely(!empty_dict)) - goto bad; - { - #if PY_MAJOR_VERSION >= 3 - if (level == -1) { - if (strchr(__Pyx_MODULE_NAME, '.') != NULL) { - module = PyImport_ImportModuleLevelObject( - name, __pyx_d, empty_dict, from_list, 1); - if (unlikely(!module)) { - if (unlikely(!PyErr_ExceptionMatches(PyExc_ImportError))) - goto bad; - PyErr_Clear(); - } - } - level = 0; - } - #endif - if (!module) { - #if PY_MAJOR_VERSION < 3 - PyObject *py_level = PyInt_FromLong(level); - if (unlikely(!py_level)) - goto bad; - module = PyObject_CallFunctionObjArgs(py_import, - name, __pyx_d, empty_dict, from_list, py_level, (PyObject *)NULL); - Py_DECREF(py_level); - #else - module = PyImport_ImportModuleLevelObject( - name, __pyx_d, empty_dict, from_list, level); - #endif - } - } -bad: - Py_XDECREF(empty_dict); - Py_XDECREF(empty_list); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_import); - #endif - return module; -} - -/* ImportFrom */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { - PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); - if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { - const char* module_name_str = 0; - PyObject* module_name = 0; - PyObject* module_dot = 0; - PyObject* full_name = 0; - PyErr_Clear(); - module_name_str = PyModule_GetName(module); - if (unlikely(!module_name_str)) { goto modbad; } - module_name = PyUnicode_FromString(module_name_str); - if (unlikely(!module_name)) { goto modbad; } - module_dot = PyUnicode_Concat(module_name, __pyx_kp_u_); - if (unlikely(!module_dot)) { goto modbad; } - full_name = PyUnicode_Concat(module_dot, name); - if (unlikely(!full_name)) { goto modbad; } - #if PY_VERSION_HEX < 0x030700A1 || (CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM < 0x07030400) - { - PyObject *modules = PyImport_GetModuleDict(); - if (unlikely(!modules)) - goto modbad; - value = PyObject_GetItem(modules, full_name); - } - #else - value = 
PyImport_GetModule(full_name); - #endif - modbad: - Py_XDECREF(full_name); - Py_XDECREF(module_dot); - Py_XDECREF(module_name); - } - if (unlikely(!value)) { - PyErr_Format(PyExc_ImportError, - #if PY_MAJOR_VERSION < 3 - "cannot import name %.230s", PyString_AS_STRING(name)); - #else - "cannot import name %S", name); - #endif - } - return value; -} - -/* FetchSharedCythonModule */ -static PyObject *__Pyx_FetchSharedCythonABIModule(void) { - return __Pyx_PyImport_AddModuleRef((char*) __PYX_ABI_MODULE_NAME); -} - -/* FetchCommonType */ -static int __Pyx_VerifyCachedType(PyObject *cached_type, - const char *name, - Py_ssize_t basicsize, - Py_ssize_t expected_basicsize) { - if (!PyType_Check(cached_type)) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s is not a type object", name); - return -1; - } - if (basicsize != expected_basicsize) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s has the wrong size, try recompiling", - name); - return -1; - } - return 0; -} -#if !CYTHON_USE_TYPE_SPECS -static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { - PyObject* abi_module; - const char* object_name; - PyTypeObject *cached_type = NULL; - abi_module = __Pyx_FetchSharedCythonABIModule(); - if (!abi_module) return NULL; - object_name = strrchr(type->tp_name, '.'); - object_name = object_name ? object_name+1 : type->tp_name; - cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); - if (cached_type) { - if (__Pyx_VerifyCachedType( - (PyObject *)cached_type, - object_name, - cached_type->tp_basicsize, - type->tp_basicsize) < 0) { - goto bad; - } - goto done; - } - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; - PyErr_Clear(); - if (PyType_Ready(type) < 0) goto bad; - if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) - goto bad; - Py_INCREF(type); - cached_type = type; -done: - Py_DECREF(abi_module); - return cached_type; -bad: - Py_XDECREF(cached_type); - cached_type = NULL; - goto done; -} -#else -static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { - PyObject *abi_module, *cached_type = NULL; - const char* object_name = strrchr(spec->name, '.'); - object_name = object_name ? object_name+1 : spec->name; - abi_module = __Pyx_FetchSharedCythonABIModule(); - if (!abi_module) return NULL; - cached_type = PyObject_GetAttrString(abi_module, object_name); - if (cached_type) { - Py_ssize_t basicsize; -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *py_basicsize; - py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); - if (unlikely(!py_basicsize)) goto bad; - basicsize = PyLong_AsSsize_t(py_basicsize); - Py_DECREF(py_basicsize); - py_basicsize = 0; - if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; -#else - basicsize = likely(PyType_Check(cached_type)) ? 
((PyTypeObject*) cached_type)->tp_basicsize : -1; -#endif - if (__Pyx_VerifyCachedType( - cached_type, - object_name, - basicsize, - spec->basicsize) < 0) { - goto bad; - } - goto done; - } - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; - PyErr_Clear(); - CYTHON_UNUSED_VAR(module); - cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); - if (unlikely(!cached_type)) goto bad; - if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; - if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; -done: - Py_DECREF(abi_module); - assert(cached_type == NULL || PyType_Check(cached_type)); - return (PyTypeObject *) cached_type; -bad: - Py_XDECREF(cached_type); - cached_type = NULL; - goto done; -} -#endif - -/* PyVectorcallFastCallDict */ -#if CYTHON_METH_FASTCALL -static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) -{ - PyObject *res = NULL; - PyObject *kwnames; - PyObject **newargs; - PyObject **kwvalues; - Py_ssize_t i, pos; - size_t j; - PyObject *key, *value; - unsigned long keys_are_strings; - Py_ssize_t nkw = PyDict_GET_SIZE(kw); - newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); - if (unlikely(newargs == NULL)) { - PyErr_NoMemory(); - return NULL; - } - for (j = 0; j < nargs; j++) newargs[j] = args[j]; - kwnames = PyTuple_New(nkw); - if (unlikely(kwnames == NULL)) { - PyMem_Free(newargs); - return NULL; - } - kwvalues = newargs + nargs; - pos = i = 0; - keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; - while (PyDict_Next(kw, &pos, &key, &value)) { - keys_are_strings &= Py_TYPE(key)->tp_flags; - Py_INCREF(key); - Py_INCREF(value); - PyTuple_SET_ITEM(kwnames, i, key); - kwvalues[i] = value; - i++; - } - if (unlikely(!keys_are_strings)) { - PyErr_SetString(PyExc_TypeError, "keywords must be strings"); - goto cleanup; - } - res = vc(func, newargs, nargs, kwnames); -cleanup: - Py_DECREF(kwnames); - for (i = 0; i < nkw; i++) - Py_DECREF(kwvalues[i]); - PyMem_Free(newargs); - return res; -} -static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) -{ - if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { - return vc(func, args, nargs, NULL); - } - return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); -} -#endif - -/* CythonFunctionShared */ -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { - if (__Pyx_CyFunction_Check(func)) { - return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc; - } else if (PyCFunction_Check(func)) { - return PyCFunction_GetFunction(func) == (PyCFunction) cfunc; - } - return 0; -} -#else -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { - return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; -} -#endif -static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - __Pyx_Py_XDECREF_SET( - __Pyx_CyFunction_GetClassObj(f), - ((classobj) ? __Pyx_NewRef(classobj) : NULL)); -#else - __Pyx_Py_XDECREF_SET( - ((PyCMethodObject *) (f))->mm_class, - (PyTypeObject*)((classobj) ? 
__Pyx_NewRef(classobj) : NULL)); -#endif -} -static PyObject * -__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) -{ - CYTHON_UNUSED_VAR(closure); - if (unlikely(op->func_doc == NULL)) { -#if CYTHON_COMPILING_IN_LIMITED_API - op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); - if (unlikely(!op->func_doc)) return NULL; -#else - if (((PyCFunctionObject*)op)->m_ml->ml_doc) { -#if PY_MAJOR_VERSION >= 3 - op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); -#else - op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); -#endif - if (unlikely(op->func_doc == NULL)) - return NULL; - } else { - Py_INCREF(Py_None); - return Py_None; - } -#endif - } - Py_INCREF(op->func_doc); - return op->func_doc; -} -static int -__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (value == NULL) { - value = Py_None; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_doc, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(op->func_name == NULL)) { -#if CYTHON_COMPILING_IN_LIMITED_API - op->func_name = PyObject_GetAttrString(op->func, "__name__"); -#elif PY_MAJOR_VERSION >= 3 - op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); -#else - op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); -#endif - if (unlikely(op->func_name == NULL)) - return NULL; - } - Py_INCREF(op->func_name); - return op->func_name; -} -static int -__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__name__ must be set to a string object"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_name, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - Py_INCREF(op->func_qualname); - return op->func_qualname; -} -static int -__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__qualname__ must be set to a string object"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_qualname, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(op->func_dict == NULL)) { - op->func_dict = PyDict_New(); - if (unlikely(op->func_dict == NULL)) - return NULL; - } - Py_INCREF(op->func_dict); - return op->func_dict; -} -static int -__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(value == NULL)) { - PyErr_SetString(PyExc_TypeError, - "function's dictionary may not be deleted"); - return -1; - } - if (unlikely(!PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "setting function's dictionary to a non-dict"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_dict, value); - 
return 0; -} -static PyObject * -__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - Py_INCREF(op->func_globals); - return op->func_globals; -} -static PyObject * -__Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(op); - CYTHON_UNUSED_VAR(context); - Py_INCREF(Py_None); - return Py_None; -} -static PyObject * -__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) -{ - PyObject* result = (op->func_code) ? op->func_code : Py_None; - CYTHON_UNUSED_VAR(context); - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { - int result = 0; - PyObject *res = op->defaults_getter((PyObject *) op); - if (unlikely(!res)) - return -1; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - op->defaults_tuple = PyTuple_GET_ITEM(res, 0); - Py_INCREF(op->defaults_tuple); - op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); - Py_INCREF(op->defaults_kwdict); - #else - op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); - if (unlikely(!op->defaults_tuple)) result = -1; - else { - op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); - if (unlikely(!op->defaults_kwdict)) result = -1; - } - #endif - Py_DECREF(res); - return result; -} -static int -__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value) { - value = Py_None; - } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__defaults__ must be set to a tuple object"); - return -1; - } - PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " - "currently affect the values used in function calls", 1); - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->defaults_tuple; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - if (op->defaults_getter) { - if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; - result = op->defaults_tuple; - } else { - result = Py_None; - } - } - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value) { - value = Py_None; - } else if (unlikely(value != Py_None && !PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__kwdefaults__ must be set to a dict object"); - return -1; - } - PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " - "currently affect the values used in function calls", 1); - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->defaults_kwdict; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - if (op->defaults_getter) { - if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; - result = op->defaults_kwdict; - } else { - result = Py_None; - } - } - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value || value == Py_None) { - value = NULL; - } else if (unlikely(!PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__annotations__ 
must be set to a dict object"); - return -1; - } - Py_XINCREF(value); - __Pyx_Py_XDECREF_SET(op->func_annotations, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->func_annotations; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - result = PyDict_New(); - if (unlikely(!result)) return NULL; - op->func_annotations = result; - } - Py_INCREF(result); - return result; -} -static PyObject * -__Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { - int is_coroutine; - CYTHON_UNUSED_VAR(context); - if (op->func_is_coroutine) { - return __Pyx_NewRef(op->func_is_coroutine); - } - is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; -#if PY_VERSION_HEX >= 0x03050000 - if (is_coroutine) { - PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; - fromlist = PyList_New(1); - if (unlikely(!fromlist)) return NULL; - Py_INCREF(marker); -#if CYTHON_ASSUME_SAFE_MACROS - PyList_SET_ITEM(fromlist, 0, marker); -#else - if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { - Py_DECREF(marker); - Py_DECREF(fromlist); - return NULL; - } -#endif - module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); - Py_DECREF(fromlist); - if (unlikely(!module)) goto ignore; - op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); - Py_DECREF(module); - if (likely(op->func_is_coroutine)) { - return __Pyx_NewRef(op->func_is_coroutine); - } -ignore: - PyErr_Clear(); - } -#endif - op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); - return __Pyx_NewRef(op->func_is_coroutine); -} -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject * -__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { - CYTHON_UNUSED_VAR(context); - return PyObject_GetAttrString(op->func, "__module__"); -} -static int -__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - return PyObject_SetAttrString(op->func, "__module__", value); -} -#endif -static PyGetSetDef __pyx_CyFunction_getsets[] = { - {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, - {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, - {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, - {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, - {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, - {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, - {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, - {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, - {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, - {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, - {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, - {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, - {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, - {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, - {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, - {(char *) 
"__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, - {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, - {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, -#if CYTHON_COMPILING_IN_LIMITED_API - {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, -#endif - {0, 0, 0, 0, 0} -}; -static PyMemberDef __pyx_CyFunction_members[] = { -#if !CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, -#endif -#if CYTHON_USE_TYPE_SPECS - {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, -#if CYTHON_METH_FASTCALL -#if CYTHON_BACKPORT_VECTORCALL - {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, -#else -#if !CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, -#endif -#endif -#endif -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, -#else - {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, -#endif -#endif - {0, 0, 0, 0, 0} -}; -static PyObject * -__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) -{ - CYTHON_UNUSED_VAR(args); -#if PY_MAJOR_VERSION >= 3 - Py_INCREF(m->func_qualname); - return m->func_qualname; -#else - return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); -#endif -} -static PyMethodDef __pyx_CyFunction_methods[] = { - {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, - {0, 0, 0, 0} -}; -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API -#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) -#else -#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) -#endif -static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { -#if !CYTHON_COMPILING_IN_LIMITED_API - PyCFunctionObject *cf = (PyCFunctionObject*) op; -#endif - if (unlikely(op == NULL)) - return NULL; -#if CYTHON_COMPILING_IN_LIMITED_API - op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); - if (unlikely(!op->func)) return NULL; -#endif - op->flags = flags; - __Pyx_CyFunction_weakreflist(op) = NULL; -#if !CYTHON_COMPILING_IN_LIMITED_API - cf->m_ml = ml; - cf->m_self = (PyObject *) op; -#endif - Py_XINCREF(closure); - op->func_closure = closure; -#if !CYTHON_COMPILING_IN_LIMITED_API - Py_XINCREF(module); - cf->m_module = module; -#endif - op->func_dict = NULL; - op->func_name = NULL; - Py_INCREF(qualname); - op->func_qualname = qualname; - op->func_doc = NULL; -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - op->func_classobj = NULL; -#else - ((PyCMethodObject*)op)->mm_class = NULL; -#endif - op->func_globals = globals; - Py_INCREF(op->func_globals); - Py_XINCREF(code); - op->func_code = code; - op->defaults_pyobjects = 0; - op->defaults_size = 0; - op->defaults = NULL; - op->defaults_tuple = NULL; - op->defaults_kwdict = NULL; - op->defaults_getter = NULL; - op->func_annotations = NULL; - op->func_is_coroutine = NULL; -#if 
CYTHON_METH_FASTCALL - switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { - case METH_NOARGS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; - break; - case METH_O: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; - break; - case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; - break; - case METH_FASTCALL | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS; - break; - case METH_VARARGS | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = NULL; - break; - default: - PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); - Py_DECREF(op); - return NULL; - } -#endif - return (PyObject *) op; -} -static int -__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m) -{ - Py_CLEAR(m->func_closure); -#if CYTHON_COMPILING_IN_LIMITED_API - Py_CLEAR(m->func); -#else - Py_CLEAR(((PyCFunctionObject*)m)->m_module); -#endif - Py_CLEAR(m->func_dict); - Py_CLEAR(m->func_name); - Py_CLEAR(m->func_qualname); - Py_CLEAR(m->func_doc); - Py_CLEAR(m->func_globals); - Py_CLEAR(m->func_code); -#if !CYTHON_COMPILING_IN_LIMITED_API -#if PY_VERSION_HEX < 0x030900B1 - Py_CLEAR(__Pyx_CyFunction_GetClassObj(m)); -#else - { - PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class; - ((PyCMethodObject *) (m))->mm_class = NULL; - Py_XDECREF(cls); - } -#endif -#endif - Py_CLEAR(m->defaults_tuple); - Py_CLEAR(m->defaults_kwdict); - Py_CLEAR(m->func_annotations); - Py_CLEAR(m->func_is_coroutine); - if (m->defaults) { - PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); - int i; - for (i = 0; i < m->defaults_pyobjects; i++) - Py_XDECREF(pydefaults[i]); - PyObject_Free(m->defaults); - m->defaults = NULL; - } - return 0; -} -static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m) -{ - if (__Pyx_CyFunction_weakreflist(m) != NULL) - PyObject_ClearWeakRefs((PyObject *) m); - __Pyx_CyFunction_clear(m); - __Pyx_PyHeapTypeObject_GC_Del(m); -} -static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m) -{ - PyObject_GC_UnTrack(m); - __Pyx__CyFunction_dealloc(m); -} -static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg) -{ - Py_VISIT(m->func_closure); -#if CYTHON_COMPILING_IN_LIMITED_API - Py_VISIT(m->func); -#else - Py_VISIT(((PyCFunctionObject*)m)->m_module); -#endif - Py_VISIT(m->func_dict); - Py_VISIT(m->func_name); - Py_VISIT(m->func_qualname); - Py_VISIT(m->func_doc); - Py_VISIT(m->func_globals); - Py_VISIT(m->func_code); -#if !CYTHON_COMPILING_IN_LIMITED_API - Py_VISIT(__Pyx_CyFunction_GetClassObj(m)); -#endif - Py_VISIT(m->defaults_tuple); - Py_VISIT(m->defaults_kwdict); - Py_VISIT(m->func_is_coroutine); - if (m->defaults) { - PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); - int i; - for (i = 0; i < m->defaults_pyobjects; i++) - Py_VISIT(pydefaults[i]); - } - return 0; -} -static PyObject* -__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op) -{ -#if PY_MAJOR_VERSION >= 3 - return PyUnicode_FromFormat("", - op->func_qualname, (void *)op); -#else - return PyString_FromFormat("", - PyString_AsString(op->func_qualname), (void *)op); -#endif -} -static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) { -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *f = ((__pyx_CyFunctionObject*)func)->func; - PyObject *py_name = NULL; - 
PyCFunction meth; - int flags; - meth = PyCFunction_GetFunction(f); - if (unlikely(!meth)) return NULL; - flags = PyCFunction_GetFlags(f); - if (unlikely(flags < 0)) return NULL; -#else - PyCFunctionObject* f = (PyCFunctionObject*)func; - PyCFunction meth = f->m_ml->ml_meth; - int flags = f->m_ml->ml_flags; -#endif - Py_ssize_t size; - switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { - case METH_VARARGS: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) - return (*meth)(self, arg); - break; - case METH_VARARGS | METH_KEYWORDS: - return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); - case METH_NOARGS: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) { -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(arg); -#else - size = PyTuple_Size(arg); - if (unlikely(size < 0)) return NULL; -#endif - if (likely(size == 0)) - return (*meth)(self, NULL); -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, - "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - py_name, size); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, - "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - f->m_ml->ml_name, size); -#endif - return NULL; - } - break; - case METH_O: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) { -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(arg); -#else - size = PyTuple_Size(arg); - if (unlikely(size < 0)) return NULL; -#endif - if (likely(size == 1)) { - PyObject *result, *arg0; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - arg0 = PyTuple_GET_ITEM(arg, 0); - #else - arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; - #endif - result = (*meth)(self, arg0); - #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) - Py_DECREF(arg0); - #endif - return result; - } -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, - "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - py_name, size); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, - "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - f->m_ml->ml_name, size); -#endif - return NULL; - } - break; - default: - PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); - return NULL; - } -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", - py_name); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", - f->m_ml->ml_name); -#endif - return NULL; -} -static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *self, *result; -#if CYTHON_COMPILING_IN_LIMITED_API - self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); - if (unlikely(!self) && PyErr_Occurred()) return NULL; -#else - self = ((PyCFunctionObject*)func)->m_self; -#endif - result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); - return result; -} -static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { - PyObject *result; - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; -#if CYTHON_METH_FASTCALL - 
__pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); - if (vc) { -#if CYTHON_ASSUME_SAFE_MACROS - return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); -#else - (void) &__Pyx_PyVectorcall_FastCallDict; - return PyVectorcall_Call(func, args, kw); -#endif - } -#endif - if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { - Py_ssize_t argc; - PyObject *new_args; - PyObject *self; -#if CYTHON_ASSUME_SAFE_MACROS - argc = PyTuple_GET_SIZE(args); -#else - argc = PyTuple_Size(args); - if (unlikely(!argc) < 0) return NULL; -#endif - new_args = PyTuple_GetSlice(args, 1, argc); - if (unlikely(!new_args)) - return NULL; - self = PyTuple_GetItem(args, 0); - if (unlikely(!self)) { - Py_DECREF(new_args); -#if PY_MAJOR_VERSION > 2 - PyErr_Format(PyExc_TypeError, - "unbound method %.200S() needs an argument", - cyfunc->func_qualname); -#else - PyErr_SetString(PyExc_TypeError, - "unbound method needs an argument"); -#endif - return NULL; - } - result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); - Py_DECREF(new_args); - } else { - result = __Pyx_CyFunction_Call(func, args, kw); - } - return result; -} -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames) -{ - int ret = 0; - if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { - if (unlikely(nargs < 1)) { - PyErr_Format(PyExc_TypeError, "%.200s() needs an argument", - ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); - return -1; - } - ret = 1; - } - if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); - return -1; - } - return ret; -} -static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - if (unlikely(nargs != 0)) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - def->ml_name, nargs); - return NULL; - } - return def->ml_meth(self, NULL); -} -static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - if (unlikely(nargs != 1)) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - def->ml_name, 
nargs); - return NULL; - } - return def->ml_meth(self, args[0]); -} -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); -} -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; - PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); -} -#endif -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_CyFunctionType_slots[] = { - {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, - {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, - {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, - {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, - {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, - {Py_tp_methods, (void *)__pyx_CyFunction_methods}, - {Py_tp_members, (void *)__pyx_CyFunction_members}, - {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, - {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, - {0, 0}, -}; -static PyType_Spec __pyx_CyFunctionType_spec = { - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - __pyx_CyFunctionType_slots -}; -#else -static PyTypeObject __pyx_CyFunctionType_type = { - PyVarObject_HEAD_INIT(0, 0) - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, - (destructor) __Pyx_CyFunction_dealloc, -#if !CYTHON_METH_FASTCALL - 0, -#elif CYTHON_BACKPORT_VECTORCALL - (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), -#else - offsetof(PyCFunctionObject, vectorcall), -#endif - 0, - 0, -#if PY_MAJOR_VERSION < 3 - 0, -#else - 0, -#endif - (reprfunc) __Pyx_CyFunction_repr, - 0, - 0, - 0, - 0, - __Pyx_CyFunction_CallAsMethod, - 0, - 0, - 0, - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - 0, - (traverseproc) __Pyx_CyFunction_traverse, - (inquiry) 
__Pyx_CyFunction_clear, - 0, -#if PY_VERSION_HEX < 0x030500A0 - offsetof(__pyx_CyFunctionObject, func_weakreflist), -#else - offsetof(PyCFunctionObject, m_weakreflist), -#endif - 0, - 0, - __pyx_CyFunction_methods, - __pyx_CyFunction_members, - __pyx_CyFunction_getsets, - 0, - 0, - __Pyx_PyMethod_New, - 0, - offsetof(__pyx_CyFunctionObject, func_dict), - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, -#if PY_VERSION_HEX >= 0x030400a1 - 0, -#endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, -#endif -#if __PYX_NEED_TP_PRINT_SLOT - 0, -#endif -#if PY_VERSION_HEX >= 0x030C0000 - 0, -#endif -#if PY_VERSION_HEX >= 0x030d00A4 - 0, -#endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, -#endif -}; -#endif -static int __pyx_CyFunction_init(PyObject *module) { -#if CYTHON_USE_TYPE_SPECS - __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); -#else - CYTHON_UNUSED_VAR(module); - __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); -#endif - if (unlikely(__pyx_CyFunctionType == NULL)) { - return -1; - } - return 0; -} -static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults = PyObject_Malloc(size); - if (unlikely(!m->defaults)) - return PyErr_NoMemory(); - memset(m->defaults, 0, size); - m->defaults_pyobjects = pyobjects; - m->defaults_size = size; - return m->defaults; -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_tuple = tuple; - Py_INCREF(tuple); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_kwdict = dict; - Py_INCREF(dict); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->func_annotations = dict; - Py_INCREF(dict); -} - -/* CythonFunction */ -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { - PyObject *op = __Pyx_CyFunction_Init( - PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), - ml, flags, qualname, closure, module, globals, code - ); - if (likely(op)) { - PyObject_GC_Track(op); - } - return op; -} - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - CYTHON_MAYBE_UNUSED_VAR(tstate); - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = 
PyObject_Not(use_cline_obj) ? Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} -#endif - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API - #ifndef Py_BUILD_CORE - #define Py_BUILD_CORE 1 - #endif - #include "internal/pycore_frame.h" -#endif -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, - PyObject *firstlineno, PyObject *name) { - PyObject 
*replace = NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; - replace = PyObject_GetAttrString(code, "replace"); - if (likely(replace)) { - PyObject *result; - result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); - Py_DECREF(replace); - return result; - } - PyErr_Clear(); - #if __PYX_LIMITED_VERSION_HEX < 0x030780000 - { - PyObject *compiled = NULL, *result = NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; - compiled = Py_CompileString( - "out = type(code)(\n" - " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" - " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" - " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" - " code.co_lnotab)\n", "", Py_file_input); - if (!compiled) return NULL; - result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); - Py_DECREF(compiled); - if (!result) PyErr_Print(); - Py_DECREF(result); - result = PyDict_GetItemString(scratch_dict, "out"); - if (result) Py_INCREF(result); - return result; - } - #else - return NULL; - #endif -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; - PyObject *replace = NULL, *getframe = NULL, *frame = NULL; - PyObject *exc_type, *exc_value, *exc_traceback; - int success = 0; - if (c_line) { - (void) __pyx_cfilenm; - (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); - } - PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); - code_object = Py_CompileString("_getframe()", filename, Py_eval_input); - if (unlikely(!code_object)) goto bad; - py_py_line = PyLong_FromLong(py_line); - if (unlikely(!py_py_line)) goto bad; - py_funcname = PyUnicode_FromString(funcname); - if (unlikely(!py_funcname)) goto bad; - dict = PyDict_New(); - if (unlikely(!dict)) goto bad; - { - PyObject *old_code_object = code_object; - code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); - Py_DECREF(old_code_object); - } - if (unlikely(!code_object)) goto bad; - getframe = PySys_GetObject("_getframe"); - if (unlikely(!getframe)) goto bad; - if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad; - frame = PyEval_EvalCode(code_object, dict, dict); - if (unlikely(!frame) || frame == Py_None) goto bad; - success = 1; - bad: - PyErr_Restore(exc_type, exc_value, exc_traceback); - Py_XDECREF(code_object); - Py_XDECREF(py_py_line); - Py_XDECREF(py_funcname); - Py_XDECREF(dict); - Py_XDECREF(replace); - if (success) { - PyTraceBack_Here( - (struct _frame*)frame); - } - Py_XDECREF(frame); -} -#else -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = NULL; - PyObject *py_funcname = NULL; - #if PY_MAJOR_VERSION < 3 - PyObject *py_srcfile = NULL; - py_srcfile = PyString_FromString(filename); - if (!py_srcfile) goto bad; - #endif - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto bad; - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto 
bad; - funcname = PyUnicode_AsUTF8(py_funcname); - if (!funcname) goto bad; - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - if (!py_funcname) goto bad; - #endif - } - #if PY_MAJOR_VERSION < 3 - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - #else - py_code = PyCode_NewEmpty(filename, funcname, py_line); - #endif - Py_XDECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_funcname); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_srcfile); - #endif - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject *ptype, *pvalue, *ptraceback; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) { - /* If the code object creation fails, then we should clear the - fetched exception references and propagate the new exception */ - Py_XDECREF(ptype); - Py_XDECREF(pvalue); - Py_XDECREF(ptraceback); - goto bad; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} -#endif - -/* FormatTypeName */ -#if CYTHON_COMPILING_IN_LIMITED_API -static __Pyx_TypeName -__Pyx_PyType_GetName(PyTypeObject* tp) -{ - PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, - __pyx_n_s_name); - if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { - PyErr_Clear(); - Py_XDECREF(name); - name = __Pyx_NewRef(__pyx_n_s__15); - } - return name; -} -#endif - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - unsigned char *bytes = (unsigned char *)&value; -#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 - if (is_unsigned) { - return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); - } else { - return PyLong_FromNativeBytes(bytes, sizeof(value), -1); - } -#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 - int one = 1; int little = (int)*(unsigned char *)&one; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); -#else - int one = 1; int little = (int)*(unsigned char *)&one; - PyObject *from_bytes, *result = NULL; - PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; - from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); - if (!from_bytes) return NULL; - py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); - if (!py_bytes) goto limited_bad; - order_str = PyUnicode_FromString(little ? 
"little" : "big"); - if (!order_str) goto limited_bad; - arg_tuple = PyTuple_Pack(2, py_bytes, order_str); - if (!arg_tuple) goto limited_bad; - if (!is_unsigned) { - kwds = PyDict_New(); - if (!kwds) goto limited_bad; - if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; - } - result = PyObject_Call(from_bytes, arg_tuple, kwds); - limited_bad: - Py_XDECREF(kwds); - Py_XDECREF(arg_tuple); - Py_XDECREF(order_str); - Py_XDECREF(py_bytes); - Py_XDECREF(from_bytes); - return result; -#endif - } -} - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(long) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * 
PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(long) <= sizeof(unsigned long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * 
PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } - } -#endif - if ((sizeof(long) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - long val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (long) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (long) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (long) -1; - } else { - stepval = v; - } - v = NULL; - val = (long) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((long) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 
0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((long) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (long) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const int neg_one = (int) -1, const_zero = (int) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(int) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return 
(int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(int) <= sizeof(unsigned long))) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } - } 
-#endif - if ((sizeof(int) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - int val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (int) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (int) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (int) -1; - } else { - stepval = v; - } - v = NULL; - val = (int) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((int) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 
0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((int) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (int) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (cls == a || cls == b) return 1; - mro = cls->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - PyObject *base = PyTuple_GET_ITEM(mro, i); - if (base == (PyObject *)a || base == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - if (exc_type1) { - return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); - } else { - return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i= 0x030B00A4 - return Py_Version & ~0xFFUL; -#else - const char* rt_version = Py_GetVersion(); - unsigned long version = 0; - unsigned long factor = 0x01000000UL; - unsigned int digit = 0; - int i = 0; - while (factor) { - while ('0' <= rt_version[i] && rt_version[i] <= '9') { - digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); - ++i; - } - version += factor * digit; - if (rt_version[i] != '.') - break; - digit = 0; - factor >>= 8; - ++i; - } - return version; -#endif -} -static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { - const unsigned long MAJOR_MINOR = 0xFFFF0000UL; - if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) - return 0; - if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) - return 1; - { - char message[200]; - PyOS_snprintf(message, sizeof(message), - "compile time Python version %d.%d " - "of module '%.100s' " - "%s " - "runtime version %d.%d", - (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), - __Pyx_MODULE_NAME, - (allow_newer) ? 
"was newer than" : "does not match", - (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) - ); - return PyErr_WarnEx(NULL, message, 1); - } -} - -/* InitStrings */ -#if PY_MAJOR_VERSION >= 3 -static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { - if (t.is_unicode | t.is_str) { - if (t.intern) { - *str = PyUnicode_InternFromString(t.s); - } else if (t.encoding) { - *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); - } else { - *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); - } - } else { - *str = PyBytes_FromStringAndSize(t.s, t.n - 1); - } - if (!*str) - return -1; - if (PyObject_Hash(*str) == -1) - return -1; - return 0; -} -#endif -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { - while (t->p) { - #if PY_MAJOR_VERSION >= 3 - __Pyx_InitString(*t, t->p); - #else - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - #endif - ++t; - } - return 0; -} - -#include -static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { - size_t len = strlen(s); - if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { - PyErr_SetString(PyExc_OverflowError, "byte string is too long"); - return -1; - } - return (Py_ssize_t) len; -} -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return __Pyx_PyUnicode_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return PyByteArray_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if 
(!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { - __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). " - "The ability to return an instance of a strict subclass of int is deprecated, " - "and may be removed in a future version of Python.", - result_type_name)) { - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; - } - __Pyx_DECREF_TypeName(result_type_name); - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", - type_name, type_name, result_type_name); - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - if (likely(__Pyx_PyLong_IsCompact(b))) { - return __Pyx_PyLong_CompactValue(b); - } else { - const digit* digits = __Pyx_PyLong_Digits(b); - const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * 
PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { - if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { - return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); -#if PY_MAJOR_VERSION < 3 - } else if (likely(PyInt_CheckExact(o))) { - return PyInt_AS_LONG(o); -#endif - } else { - Py_ssize_t ival; - PyObject *x; - x = PyNumber_Index(o); - if (!x) return -1; - ival = PyInt_AsLong(x); - Py_DECREF(x); - return ival; - } -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -/* #### Code section: utility_code_pragmas_end ### */ -#ifdef _MSC_VER -#pragma warning( pop ) -#endif - - - -/* #### Code section: end ### */ -#endif /* Py_PYTHON_H */ diff --git a/lib/sqlalchemy/engine/_util_cy.c b/lib/sqlalchemy/engine/_util_cy.c deleted file mode 100644 index bab84947303..00000000000 --- a/lib/sqlalchemy/engine/_util_cy.c +++ /dev/null @@ -1,8853 +0,0 @@ -/* Generated by Cython 3.0.11 */ - -/* BEGIN: Cython Metadata -{ - "distutils": { - "name": "sqlalchemy.engine._util_cy", - "sources": [ - "lib/sqlalchemy/engine/_util_cy.py" - ] - }, - "module_name": "sqlalchemy.engine._util_cy" -} -END: Cython Metadata */ - -#ifndef PY_SSIZE_T_CLEAN -#define PY_SSIZE_T_CLEAN -#endif /* PY_SSIZE_T_CLEAN */ -#if defined(CYTHON_LIMITED_API) && 0 - #ifndef Py_LIMITED_API - #if CYTHON_LIMITED_API+0 > 0x03030000 - #define Py_LIMITED_API CYTHON_LIMITED_API - #else - #define Py_LIMITED_API 0x03030000 - #endif - #endif -#endif - -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.7+ or Python 3.3+. -#else -#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API -#define __PYX_EXTRA_ABI_MODULE_NAME "limited" -#else -#define __PYX_EXTRA_ABI_MODULE_NAME "" -#endif -#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME -#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI -#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." 
-#define CYTHON_HEX_VERSION 0x03000BF0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #define HAVE_LONG_LONG -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX -#if defined(GRAALVM_PYTHON) - /* For very preliminary testing purposes. Most variables are set the same as PyPy. - The existence of this section does not imply that anything works or is even tested */ - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 1 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(PYPY_VERSION) - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - 
#undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - #endif - #if PY_VERSION_HEX < 0x03090000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(CYTHON_LIMITED_API) - #ifdef Py_LIMITED_API - #undef __PYX_LIMITED_VERSION_HEX - #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API - #endif - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 1 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_CLINE_IN_TRACEBACK - #define CYTHON_CLINE_IN_TRACEBACK 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 1 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #endif - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 1 - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef 
CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #ifndef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 1 - #endif - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 - #endif -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #ifndef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define 
CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) - #endif - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #if PY_VERSION_HEX < 0x030400a1 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #elif !defined(CYTHON_USE_TP_FINALIZE) - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #if PY_VERSION_HEX < 0x030600B1 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #elif !defined(CYTHON_USE_DICT_VERSIONS) - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) - #endif - #if PY_VERSION_HEX < 0x030700A3 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #elif !defined(CYTHON_USE_EXC_INFO_STACK) - #define CYTHON_USE_EXC_INFO_STACK 1 - #endif - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 1 - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if !defined(CYTHON_VECTORCALL) -#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) -#endif -#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) -#if CYTHON_USE_PYLONG_INTERNALS - #if PY_MAJOR_VERSION < 3 - #include "longintrepr.h" - #endif - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(maybe_unused) - #define CYTHON_UNUSED [[maybe_unused]] - #endif - #endif - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef 
CYTHON_MAYBE_UNUSED_VAR - #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_USE_CPP_STD_MOVE - #if defined(__cplusplus) && (\ - __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) - #define CYTHON_USE_CPP_STD_MOVE 1 - #else - #define CYTHON_USE_CPP_STD_MOVE 0 - #endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned short uint16_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 uint32_t; - #endif - #endif - #if _MSC_VER < 1300 - #ifdef _WIN64 - typedef unsigned long long __pyx_uintptr_t; - #else - typedef unsigned int __pyx_uintptr_t; - #endif - #else - #ifdef _WIN64 - typedef unsigned __int64 __pyx_uintptr_t; - #else - typedef unsigned __int32 __pyx_uintptr_t; - #endif - #endif -#else - #include - typedef uintptr_t __pyx_uintptr_t; -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif -#ifdef __cplusplus - template - struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; - #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) -#else - #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) -#endif -#if CYTHON_COMPILING_IN_PYPY == 1 - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) -#else - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) -#endif -#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_DefaultClassType PyClass_Type - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - 
[Deleted hunk continues: the remainder of the Cython-generated C source for sqlalchemy.engine._util_cy (built from lib/sqlalchemy/engine/_util_cy.py). The removed lines consist of Cython's boilerplate: CPython version-compatibility macros (__Pyx_PyCode_New, Py_Is*/Py_SET_* shims, METH_FASTCALL and vectorcall backports, PEP 393 unicode and PyLong-internal helpers), default-encoding and string-conversion helpers, refnanny/error-handling and fastcall utility prototypes, CythonFunction and traceback-support declarations, and the module-state struct with its interned-string table and function prototypes for _is_compiled, _distill_params_20, _distill_raw_params, and tuplegetter.]
*__pyx_kp_u_mapping_or_sequence_expected_for; - PyObject *__pyx_n_s_max_index; - PyObject *__pyx_n_s_name; - PyObject *__pyx_n_s_operator; - PyObject *__pyx_n_s_params; - PyObject *__pyx_n_s_range; - PyObject *__pyx_n_s_result; - PyObject *__pyx_n_s_return; - PyObject *__pyx_n_s_spec; - PyObject *__pyx_n_s_sqlalchemy_engine__util_cy; - PyObject *__pyx_n_s_test; - PyObject *__pyx_n_s_tuplegetter; - PyObject *__pyx_n_s_typing; - PyObject *__pyx_n_s_util; - PyObject *__pyx_n_s_warn_deprecated; - PyObject *__pyx_int_1; - PyObject *__pyx_tuple_; - PyObject *__pyx_tuple__6; - PyObject *__pyx_tuple__9; - PyObject *__pyx_codeobj__5; - PyObject *__pyx_codeobj__7; - PyObject *__pyx_codeobj__8; - PyObject *__pyx_codeobj__10; -} __pyx_mstate; - -#if CYTHON_USE_MODULE_STATE -#ifdef __cplusplus -namespace { - extern struct PyModuleDef __pyx_moduledef; -} /* anonymous namespace */ -#else -static struct PyModuleDef __pyx_moduledef; -#endif - -#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) - -#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) - -#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) -#else -static __pyx_mstate __pyx_mstate_global_static = -#ifdef __cplusplus - {}; -#else - {0}; -#endif -static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; -#endif -/* #### Code section: module_state_clear ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_clear(PyObject *m) { - __pyx_mstate *clear_module_state = __pyx_mstate(m); - if (!clear_module_state) return 0; - Py_CLEAR(clear_module_state->__pyx_d); - Py_CLEAR(clear_module_state->__pyx_b); - Py_CLEAR(clear_module_state->__pyx_cython_runtime); - Py_CLEAR(clear_module_state->__pyx_empty_tuple); - Py_CLEAR(clear_module_state->__pyx_empty_bytes); - Py_CLEAR(clear_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_CLEAR(clear_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); - #endif - Py_CLEAR(clear_module_state->__pyx_kp_u_2_1); - Py_CLEAR(clear_module_state->__pyx_n_s_Any); - Py_CLEAR(clear_module_state->__pyx_n_s_ArgumentError); - Py_CLEAR(clear_module_state->__pyx_n_s_CoreAnyExecuteParams); - Py_CLEAR(clear_module_state->__pyx_n_s_CoreMultiExecuteParams); - Py_CLEAR(clear_module_state->__pyx_n_s_DBAPIAnyExecuteParams); - Py_CLEAR(clear_module_state->__pyx_n_s_DBAPIMultiExecuteParams); - Py_CLEAR(clear_module_state->__pyx_kp_u_Empty_parameter_sequence_passed); - Py_CLEAR(clear_module_state->__pyx_kp_u_List_argument_must_consist_only); - Py_CLEAR(clear_module_state->__pyx_n_s_Mapping); - Py_CLEAR(clear_module_state->__pyx_n_s_Optional); - Py_CLEAR(clear_module_state->__pyx_kp_s_Optional__CoreAnyExecuteParams); - Py_CLEAR(clear_module_state->__pyx_kp_s_Optional__DBAPIAnyExecuteParams); - Py_CLEAR(clear_module_state->__pyx_n_s_TYPE_CHECKING); - Py_CLEAR(clear_module_state->__pyx_n_s_Tuple); - Py_CLEAR(clear_module_state->__pyx_n_s_TupleGetterType); - Py_CLEAR(clear_module_state->__pyx_n_s__11); - Py_CLEAR(clear_module_state->__pyx_kp_u__2); - Py_CLEAR(clear_module_state->__pyx_n_s__3); - Py_CLEAR(clear_module_state->__pyx_n_s__4); - Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); - Py_CLEAR(clear_module_state->__pyx_n_s_bool); - Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); - Py_CLEAR(clear_module_state->__pyx_n_s_collections_abc); - Py_CLEAR(clear_module_state->__pyx_n_s_distill_params_20); - 
Py_CLEAR(clear_module_state->__pyx_n_s_distill_raw_params); - Py_CLEAR(clear_module_state->__pyx_n_s_exc); - Py_CLEAR(clear_module_state->__pyx_n_s_import); - Py_CLEAR(clear_module_state->__pyx_n_s_indexes); - Py_CLEAR(clear_module_state->__pyx_n_s_initializing); - Py_CLEAR(clear_module_state->__pyx_n_s_int); - Py_CLEAR(clear_module_state->__pyx_n_s_interfaces); - Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); - Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); - Py_CLEAR(clear_module_state->__pyx_n_s_itemgetter); - Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_engine__util_cy_p); - Py_CLEAR(clear_module_state->__pyx_n_s_main); - Py_CLEAR(clear_module_state->__pyx_kp_u_mapping_or_list_expected_for_par); - Py_CLEAR(clear_module_state->__pyx_kp_u_mapping_or_sequence_expected_for); - Py_CLEAR(clear_module_state->__pyx_n_s_max_index); - Py_CLEAR(clear_module_state->__pyx_n_s_name); - Py_CLEAR(clear_module_state->__pyx_n_s_operator); - Py_CLEAR(clear_module_state->__pyx_n_s_params); - Py_CLEAR(clear_module_state->__pyx_n_s_range); - Py_CLEAR(clear_module_state->__pyx_n_s_result); - Py_CLEAR(clear_module_state->__pyx_n_s_return); - Py_CLEAR(clear_module_state->__pyx_n_s_spec); - Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_engine__util_cy); - Py_CLEAR(clear_module_state->__pyx_n_s_test); - Py_CLEAR(clear_module_state->__pyx_n_s_tuplegetter); - Py_CLEAR(clear_module_state->__pyx_n_s_typing); - Py_CLEAR(clear_module_state->__pyx_n_s_util); - Py_CLEAR(clear_module_state->__pyx_n_s_warn_deprecated); - Py_CLEAR(clear_module_state->__pyx_int_1); - Py_CLEAR(clear_module_state->__pyx_tuple_); - Py_CLEAR(clear_module_state->__pyx_tuple__6); - Py_CLEAR(clear_module_state->__pyx_tuple__9); - Py_CLEAR(clear_module_state->__pyx_codeobj__5); - Py_CLEAR(clear_module_state->__pyx_codeobj__7); - Py_CLEAR(clear_module_state->__pyx_codeobj__8); - Py_CLEAR(clear_module_state->__pyx_codeobj__10); - return 0; -} -#endif -/* #### Code section: module_state_traverse ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { - __pyx_mstate *traverse_module_state = __pyx_mstate(m); - if (!traverse_module_state) return 0; - Py_VISIT(traverse_module_state->__pyx_d); - Py_VISIT(traverse_module_state->__pyx_b); - Py_VISIT(traverse_module_state->__pyx_cython_runtime); - Py_VISIT(traverse_module_state->__pyx_empty_tuple); - Py_VISIT(traverse_module_state->__pyx_empty_bytes); - Py_VISIT(traverse_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_VISIT(traverse_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); - #endif - Py_VISIT(traverse_module_state->__pyx_kp_u_2_1); - Py_VISIT(traverse_module_state->__pyx_n_s_Any); - Py_VISIT(traverse_module_state->__pyx_n_s_ArgumentError); - Py_VISIT(traverse_module_state->__pyx_n_s_CoreAnyExecuteParams); - Py_VISIT(traverse_module_state->__pyx_n_s_CoreMultiExecuteParams); - Py_VISIT(traverse_module_state->__pyx_n_s_DBAPIAnyExecuteParams); - Py_VISIT(traverse_module_state->__pyx_n_s_DBAPIMultiExecuteParams); - Py_VISIT(traverse_module_state->__pyx_kp_u_Empty_parameter_sequence_passed); - Py_VISIT(traverse_module_state->__pyx_kp_u_List_argument_must_consist_only); - Py_VISIT(traverse_module_state->__pyx_n_s_Mapping); - Py_VISIT(traverse_module_state->__pyx_n_s_Optional); - Py_VISIT(traverse_module_state->__pyx_kp_s_Optional__CoreAnyExecuteParams); - 
Py_VISIT(traverse_module_state->__pyx_kp_s_Optional__DBAPIAnyExecuteParams); - Py_VISIT(traverse_module_state->__pyx_n_s_TYPE_CHECKING); - Py_VISIT(traverse_module_state->__pyx_n_s_Tuple); - Py_VISIT(traverse_module_state->__pyx_n_s_TupleGetterType); - Py_VISIT(traverse_module_state->__pyx_n_s__11); - Py_VISIT(traverse_module_state->__pyx_kp_u__2); - Py_VISIT(traverse_module_state->__pyx_n_s__3); - Py_VISIT(traverse_module_state->__pyx_n_s__4); - Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); - Py_VISIT(traverse_module_state->__pyx_n_s_bool); - Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); - Py_VISIT(traverse_module_state->__pyx_n_s_collections_abc); - Py_VISIT(traverse_module_state->__pyx_n_s_distill_params_20); - Py_VISIT(traverse_module_state->__pyx_n_s_distill_raw_params); - Py_VISIT(traverse_module_state->__pyx_n_s_exc); - Py_VISIT(traverse_module_state->__pyx_n_s_import); - Py_VISIT(traverse_module_state->__pyx_n_s_indexes); - Py_VISIT(traverse_module_state->__pyx_n_s_initializing); - Py_VISIT(traverse_module_state->__pyx_n_s_int); - Py_VISIT(traverse_module_state->__pyx_n_s_interfaces); - Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); - Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); - Py_VISIT(traverse_module_state->__pyx_n_s_itemgetter); - Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_engine__util_cy_p); - Py_VISIT(traverse_module_state->__pyx_n_s_main); - Py_VISIT(traverse_module_state->__pyx_kp_u_mapping_or_list_expected_for_par); - Py_VISIT(traverse_module_state->__pyx_kp_u_mapping_or_sequence_expected_for); - Py_VISIT(traverse_module_state->__pyx_n_s_max_index); - Py_VISIT(traverse_module_state->__pyx_n_s_name); - Py_VISIT(traverse_module_state->__pyx_n_s_operator); - Py_VISIT(traverse_module_state->__pyx_n_s_params); - Py_VISIT(traverse_module_state->__pyx_n_s_range); - Py_VISIT(traverse_module_state->__pyx_n_s_result); - Py_VISIT(traverse_module_state->__pyx_n_s_return); - Py_VISIT(traverse_module_state->__pyx_n_s_spec); - Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_engine__util_cy); - Py_VISIT(traverse_module_state->__pyx_n_s_test); - Py_VISIT(traverse_module_state->__pyx_n_s_tuplegetter); - Py_VISIT(traverse_module_state->__pyx_n_s_typing); - Py_VISIT(traverse_module_state->__pyx_n_s_util); - Py_VISIT(traverse_module_state->__pyx_n_s_warn_deprecated); - Py_VISIT(traverse_module_state->__pyx_int_1); - Py_VISIT(traverse_module_state->__pyx_tuple_); - Py_VISIT(traverse_module_state->__pyx_tuple__6); - Py_VISIT(traverse_module_state->__pyx_tuple__9); - Py_VISIT(traverse_module_state->__pyx_codeobj__5); - Py_VISIT(traverse_module_state->__pyx_codeobj__7); - Py_VISIT(traverse_module_state->__pyx_codeobj__8); - Py_VISIT(traverse_module_state->__pyx_codeobj__10); - return 0; -} -#endif -/* #### Code section: module_state_defines ### */ -#define __pyx_d __pyx_mstate_global->__pyx_d -#define __pyx_b __pyx_mstate_global->__pyx_b -#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime -#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple -#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes -#define __pyx_empty_unicode __pyx_mstate_global->__pyx_empty_unicode -#ifdef __Pyx_CyFunction_USED -#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType -#endif -#ifdef __Pyx_FusedFunction_USED -#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType -#endif -#ifdef __Pyx_Generator_USED -#define __pyx_GeneratorType 
__pyx_mstate_global->__pyx_GeneratorType -#endif -#ifdef __Pyx_IterableCoroutine_USED -#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#define __pyx_kp_u_2_1 __pyx_mstate_global->__pyx_kp_u_2_1 -#define __pyx_n_s_Any __pyx_mstate_global->__pyx_n_s_Any -#define __pyx_n_s_ArgumentError __pyx_mstate_global->__pyx_n_s_ArgumentError -#define __pyx_n_s_CoreAnyExecuteParams __pyx_mstate_global->__pyx_n_s_CoreAnyExecuteParams -#define __pyx_n_s_CoreMultiExecuteParams __pyx_mstate_global->__pyx_n_s_CoreMultiExecuteParams -#define __pyx_n_s_DBAPIAnyExecuteParams __pyx_mstate_global->__pyx_n_s_DBAPIAnyExecuteParams -#define __pyx_n_s_DBAPIMultiExecuteParams __pyx_mstate_global->__pyx_n_s_DBAPIMultiExecuteParams -#define __pyx_kp_u_Empty_parameter_sequence_passed __pyx_mstate_global->__pyx_kp_u_Empty_parameter_sequence_passed -#define __pyx_kp_u_List_argument_must_consist_only __pyx_mstate_global->__pyx_kp_u_List_argument_must_consist_only -#define __pyx_n_s_Mapping __pyx_mstate_global->__pyx_n_s_Mapping -#define __pyx_n_s_Optional __pyx_mstate_global->__pyx_n_s_Optional -#define __pyx_kp_s_Optional__CoreAnyExecuteParams __pyx_mstate_global->__pyx_kp_s_Optional__CoreAnyExecuteParams -#define __pyx_kp_s_Optional__DBAPIAnyExecuteParams __pyx_mstate_global->__pyx_kp_s_Optional__DBAPIAnyExecuteParams -#define __pyx_n_s_TYPE_CHECKING __pyx_mstate_global->__pyx_n_s_TYPE_CHECKING -#define __pyx_n_s_Tuple __pyx_mstate_global->__pyx_n_s_Tuple -#define __pyx_n_s_TupleGetterType __pyx_mstate_global->__pyx_n_s_TupleGetterType -#define __pyx_n_s__11 __pyx_mstate_global->__pyx_n_s__11 -#define __pyx_kp_u__2 __pyx_mstate_global->__pyx_kp_u__2 -#define __pyx_n_s__3 __pyx_mstate_global->__pyx_n_s__3 -#define __pyx_n_s__4 __pyx_mstate_global->__pyx_n_s__4 -#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines -#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool -#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback -#define __pyx_n_s_collections_abc __pyx_mstate_global->__pyx_n_s_collections_abc -#define __pyx_n_s_distill_params_20 __pyx_mstate_global->__pyx_n_s_distill_params_20 -#define __pyx_n_s_distill_raw_params __pyx_mstate_global->__pyx_n_s_distill_raw_params -#define __pyx_n_s_exc __pyx_mstate_global->__pyx_n_s_exc -#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import -#define __pyx_n_s_indexes __pyx_mstate_global->__pyx_n_s_indexes -#define __pyx_n_s_initializing __pyx_mstate_global->__pyx_n_s_initializing -#define __pyx_n_s_int __pyx_mstate_global->__pyx_n_s_int -#define __pyx_n_s_interfaces __pyx_mstate_global->__pyx_n_s_interfaces -#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled -#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine -#define __pyx_n_s_itemgetter __pyx_mstate_global->__pyx_n_s_itemgetter -#define __pyx_kp_s_lib_sqlalchemy_engine__util_cy_p __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_engine__util_cy_p -#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main -#define __pyx_kp_u_mapping_or_list_expected_for_par __pyx_mstate_global->__pyx_kp_u_mapping_or_list_expected_for_par -#define 
__pyx_kp_u_mapping_or_sequence_expected_for __pyx_mstate_global->__pyx_kp_u_mapping_or_sequence_expected_for -#define __pyx_n_s_max_index __pyx_mstate_global->__pyx_n_s_max_index -#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name -#define __pyx_n_s_operator __pyx_mstate_global->__pyx_n_s_operator -#define __pyx_n_s_params __pyx_mstate_global->__pyx_n_s_params -#define __pyx_n_s_range __pyx_mstate_global->__pyx_n_s_range -#define __pyx_n_s_result __pyx_mstate_global->__pyx_n_s_result -#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return -#define __pyx_n_s_spec __pyx_mstate_global->__pyx_n_s_spec -#define __pyx_n_s_sqlalchemy_engine__util_cy __pyx_mstate_global->__pyx_n_s_sqlalchemy_engine__util_cy -#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test -#define __pyx_n_s_tuplegetter __pyx_mstate_global->__pyx_n_s_tuplegetter -#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing -#define __pyx_n_s_util __pyx_mstate_global->__pyx_n_s_util -#define __pyx_n_s_warn_deprecated __pyx_mstate_global->__pyx_n_s_warn_deprecated -#define __pyx_int_1 __pyx_mstate_global->__pyx_int_1 -#define __pyx_tuple_ __pyx_mstate_global->__pyx_tuple_ -#define __pyx_tuple__6 __pyx_mstate_global->__pyx_tuple__6 -#define __pyx_tuple__9 __pyx_mstate_global->__pyx_tuple__9 -#define __pyx_codeobj__5 __pyx_mstate_global->__pyx_codeobj__5 -#define __pyx_codeobj__7 __pyx_mstate_global->__pyx_codeobj__7 -#define __pyx_codeobj__8 __pyx_mstate_global->__pyx_codeobj__8 -#define __pyx_codeobj__10 __pyx_mstate_global->__pyx_codeobj__10 -/* #### Code section: module_code ### */ - -/* "sqlalchemy/engine/_util_cy.py":38 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -PyDoc_STRVAR(__pyx_doc_10sqlalchemy_6engine_8_util_cy__is_compiled, "Utility function to indicate if this module is compiled or not."); -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_8_util_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_6engine_8_util_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_6engine_8_util_cy__is_compiled}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_6engine_8_util_cy__is_compiled(__pyx_self); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_8_util_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled", 1); - - /* "sqlalchemy/engine/_util_cy.py":40 - * def _is_compiled() -> bool: - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(Py_True); - __pyx_r = Py_True; - goto __pyx_L0; - - /* "sqlalchemy/engine/_util_cy.py":38 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate 
if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_util_cy.py":48 - * - * - * @cython.inline # <<<<<<<<<<<<<< - * @cython.cfunc - * def _is_mapping_or_tuple(value: object, /) -> cython.bint: - */ - -static CYTHON_INLINE int __pyx_f_10sqlalchemy_6engine_8_util_cy__is_mapping_or_tuple(PyObject *__pyx_v_value) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_is_mapping_or_tuple", 1); - - /* "sqlalchemy/engine/_util_cy.py":52 - * def _is_mapping_or_tuple(value: object, /) -> cython.bint: - * return ( - * isinstance(value, dict) # <<<<<<<<<<<<<< - * or isinstance(value, tuple) - * or isinstance(value, Mapping) - */ - __pyx_t_2 = PyDict_Check(__pyx_v_value); - if (!__pyx_t_2) { - } else { - __pyx_t_1 = __pyx_t_2; - goto __pyx_L3_bool_binop_done; - } - - /* "sqlalchemy/engine/_util_cy.py":53 - * return ( - * isinstance(value, dict) - * or isinstance(value, tuple) # <<<<<<<<<<<<<< - * or isinstance(value, Mapping) - * # only do immutabledict or abc.__instancecheck__ for Mapping after - */ - __pyx_t_2 = PyTuple_Check(__pyx_v_value); - if (!__pyx_t_2) { - } else { - __pyx_t_1 = __pyx_t_2; - goto __pyx_L3_bool_binop_done; - } - - /* "sqlalchemy/engine/_util_cy.py":54 - * isinstance(value, dict) - * or isinstance(value, tuple) - * or isinstance(value, Mapping) # <<<<<<<<<<<<<< - * # only do immutabledict or abc.__instancecheck__ for Mapping after - * # we've checked for plain dictionaries and would otherwise raise - */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 54, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyObject_IsInstance(__pyx_v_value, __pyx_t_3); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 54, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_1 = __pyx_t_2; - __pyx_L3_bool_binop_done:; - __pyx_r = __pyx_t_1; - goto __pyx_L0; - - /* "sqlalchemy/engine/_util_cy.py":48 - * - * - * @cython.inline # <<<<<<<<<<<<<< - * @cython.cfunc - * def _is_mapping_or_tuple(value: object, /) -> cython.bint: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.engine._util_cy._is_mapping_or_tuple", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_util_cy.py":61 - * - * # _is_mapping_or_tuple could be inlined if pure python perf is a problem - * def _distill_params_20( # <<<<<<<<<<<<<< - * params: Optional[_CoreAnyExecuteParams], - * ) -> _CoreMultiExecuteParams: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_3_distill_params_20(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_8_util_cy_3_distill_params_20 = {"_distill_params_20", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_8_util_cy_3_distill_params_20, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject 
*__pyx_pw_10sqlalchemy_6engine_8_util_cy_3_distill_params_20(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_params = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_distill_params_20 (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_params,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_params)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 61, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "_distill_params_20") < 0)) __PYX_ERR(0, 61, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_params = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("_distill_params_20", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 61, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._util_cy._distill_params_20", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_8_util_cy_2_distill_params_20(__pyx_self, __pyx_v_params); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_8_util_cy_2_distill_params_20(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_params) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - Py_ssize_t __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - unsigned int __pyx_t_7; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_distill_params_20", 1); - - /* "sqlalchemy/engine/_util_cy.py":64 - * 
params: Optional[_CoreAnyExecuteParams], - * ) -> _CoreMultiExecuteParams: - * if params is None: # <<<<<<<<<<<<<< - * return _Empty_Tuple - * # Assume list is more likely than tuple - */ - __pyx_t_1 = (__pyx_v_params == Py_None); - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_util_cy.py":65 - * ) -> _CoreMultiExecuteParams: - * if params is None: - * return _Empty_Tuple # <<<<<<<<<<<<<< - * # Assume list is more likely than tuple - * elif isinstance(params, list) or isinstance(params, tuple): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple); - __pyx_r = __pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple; - goto __pyx_L0; - - /* "sqlalchemy/engine/_util_cy.py":64 - * params: Optional[_CoreAnyExecuteParams], - * ) -> _CoreMultiExecuteParams: - * if params is None: # <<<<<<<<<<<<<< - * return _Empty_Tuple - * # Assume list is more likely than tuple - */ - } - - /* "sqlalchemy/engine/_util_cy.py":67 - * return _Empty_Tuple - * # Assume list is more likely than tuple - * elif isinstance(params, list) or isinstance(params, tuple): # <<<<<<<<<<<<<< - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - * if len(params) == 0: - */ - __pyx_t_2 = PyList_Check(__pyx_v_params); - if (!__pyx_t_2) { - } else { - __pyx_t_1 = __pyx_t_2; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_2 = PyTuple_Check(__pyx_v_params); - __pyx_t_1 = __pyx_t_2; - __pyx_L4_bool_binop_done:; - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_util_cy.py":69 - * elif isinstance(params, list) or isinstance(params, tuple): - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - * if len(params) == 0: # <<<<<<<<<<<<<< - * warn_deprecated( - * "Empty parameter sequence passed to execute(). " - */ - __pyx_t_3 = PyObject_Length(__pyx_v_params); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(0, 69, __pyx_L1_error) - __pyx_t_1 = (__pyx_t_3 == 0); - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_util_cy.py":70 - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - * if len(params) == 0: - * warn_deprecated( # <<<<<<<<<<<<<< - * "Empty parameter sequence passed to execute(). " - * "This use is deprecated and will raise an exception in a " - */ - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_warn_deprecated); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - - /* "sqlalchemy/engine/_util_cy.py":69 - * elif isinstance(params, list) or isinstance(params, tuple): - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - * if len(params) == 0: # <<<<<<<<<<<<<< - * warn_deprecated( - * "Empty parameter sequence passed to execute(). 
" - */ - goto __pyx_L6; - } - - /* "sqlalchemy/engine/_util_cy.py":76 - * "2.1", - * ) - * elif not _is_mapping_or_tuple(params[0]): # <<<<<<<<<<<<<< - * raise exc.ArgumentError( - * "List argument must consist only of tuples or dictionaries" - */ - __pyx_t_5 = __Pyx_GetItemInt(__pyx_v_params, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 76, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_1 = __pyx_f_10sqlalchemy_6engine_8_util_cy__is_mapping_or_tuple(__pyx_t_5); if (unlikely(__pyx_t_1 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 76, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_2 = (!__pyx_t_1); - if (unlikely(__pyx_t_2)) { - - /* "sqlalchemy/engine/_util_cy.py":77 - * ) - * elif not _is_mapping_or_tuple(params[0]): - * raise exc.ArgumentError( # <<<<<<<<<<<<<< - * "List argument must consist only of tuples or dictionaries" - * ) - */ - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_exc); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 77, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_ArgumentError); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 77, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_t_4 = NULL; - __pyx_t_7 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - __pyx_t_7 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_kp_u_List_argument_must_consist_only}; - __pyx_t_5 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_7, 1+__pyx_t_7); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 77, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __Pyx_Raise(__pyx_t_5, 0, 0, 0); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __PYX_ERR(0, 77, __pyx_L1_error) - - /* "sqlalchemy/engine/_util_cy.py":76 - * "2.1", - * ) - * elif not _is_mapping_or_tuple(params[0]): # <<<<<<<<<<<<<< - * raise exc.ArgumentError( - * "List argument must consist only of tuples or dictionaries" - */ - } - __pyx_L6:; - - /* "sqlalchemy/engine/_util_cy.py":80 - * "List argument must consist only of tuples or dictionaries" - * ) - * return params # <<<<<<<<<<<<<< - * elif isinstance(params, dict) or isinstance(params, Mapping): - * # only do immutabledict or abc.__instancecheck__ for Mapping after - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_params); - __pyx_r = __pyx_v_params; - goto __pyx_L0; - - /* "sqlalchemy/engine/_util_cy.py":67 - * return _Empty_Tuple - * # Assume list is more likely than tuple - * elif isinstance(params, list) or isinstance(params, tuple): # <<<<<<<<<<<<<< - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - * if len(params) == 0: - */ - } - - /* "sqlalchemy/engine/_util_cy.py":81 - * ) - * return params - * elif isinstance(params, dict) or isinstance(params, Mapping): # <<<<<<<<<<<<<< - * # only do immutabledict or abc.__instancecheck__ for Mapping after - * # we've checked for plain dictionaries and would otherwise raise - */ - __pyx_t_1 = PyDict_Check(__pyx_v_params); - if (!__pyx_t_1) { - } else { - __pyx_t_2 = __pyx_t_1; - goto __pyx_L7_bool_binop_done; - } - __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 
81, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_1 = PyObject_IsInstance(__pyx_v_params, __pyx_t_5); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 81, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_2 = __pyx_t_1; - __pyx_L7_bool_binop_done:; - if (likely(__pyx_t_2)) { - - /* "sqlalchemy/engine/_util_cy.py":84 - * # only do immutabledict or abc.__instancecheck__ for Mapping after - * # we've checked for plain dictionaries and would otherwise raise - * return [params] # <<<<<<<<<<<<<< - * else: - * raise exc.ArgumentError("mapping or list expected for parameters") - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_5 = PyList_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 84, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_INCREF(__pyx_v_params); - __Pyx_GIVEREF(__pyx_v_params); - if (__Pyx_PyList_SET_ITEM(__pyx_t_5, 0, __pyx_v_params)) __PYX_ERR(0, 84, __pyx_L1_error); - __pyx_r = __pyx_t_5; - __pyx_t_5 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_util_cy.py":81 - * ) - * return params - * elif isinstance(params, dict) or isinstance(params, Mapping): # <<<<<<<<<<<<<< - * # only do immutabledict or abc.__instancecheck__ for Mapping after - * # we've checked for plain dictionaries and would otherwise raise - */ - } - - /* "sqlalchemy/engine/_util_cy.py":86 - * return [params] - * else: - * raise exc.ArgumentError("mapping or list expected for parameters") # <<<<<<<<<<<<<< - * - * - */ - /*else*/ { - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_exc); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_ArgumentError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_t_6 = NULL; - __pyx_t_7 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_6)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_6); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - __pyx_t_7 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_6, __pyx_kp_u_mapping_or_list_expected_for_par}; - __pyx_t_5 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_7, 1+__pyx_t_7); - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - } - __Pyx_Raise(__pyx_t_5, 0, 0, 0); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __PYX_ERR(0, 86, __pyx_L1_error) - } - - /* "sqlalchemy/engine/_util_cy.py":61 - * - * # _is_mapping_or_tuple could be inlined if pure python perf is a problem - * def _distill_params_20( # <<<<<<<<<<<<<< - * params: Optional[_CoreAnyExecuteParams], - * ) -> _CoreMultiExecuteParams: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("sqlalchemy.engine._util_cy._distill_params_20", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_util_cy.py":89 - * - * - * def _distill_raw_params( # <<<<<<<<<<<<<< - * params: Optional[_DBAPIAnyExecuteParams], - * ) -> _DBAPIMultiExecuteParams: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_5_distill_raw_params(PyObject 
*__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_8_util_cy_5_distill_raw_params = {"_distill_raw_params", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_6engine_8_util_cy_5_distill_raw_params, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_5_distill_raw_params(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_params = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_distill_raw_params (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_params,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_params)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 89, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "_distill_raw_params") < 0)) __PYX_ERR(0, 89, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_params = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("_distill_raw_params", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 89, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.engine._util_cy._distill_raw_params", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_8_util_cy_4_distill_raw_params(__pyx_self, __pyx_v_params); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject 
*__pyx_pf_10sqlalchemy_6engine_8_util_cy_4_distill_raw_params(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_params) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - Py_ssize_t __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - unsigned int __pyx_t_8; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_distill_raw_params", 1); - - /* "sqlalchemy/engine/_util_cy.py":92 - * params: Optional[_DBAPIAnyExecuteParams], - * ) -> _DBAPIMultiExecuteParams: - * if params is None: # <<<<<<<<<<<<<< - * return _Empty_Tuple - * elif isinstance(params, list): - */ - __pyx_t_1 = (__pyx_v_params == Py_None); - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_util_cy.py":93 - * ) -> _DBAPIMultiExecuteParams: - * if params is None: - * return _Empty_Tuple # <<<<<<<<<<<<<< - * elif isinstance(params, list): - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple); - __pyx_r = __pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple; - goto __pyx_L0; - - /* "sqlalchemy/engine/_util_cy.py":92 - * params: Optional[_DBAPIAnyExecuteParams], - * ) -> _DBAPIMultiExecuteParams: - * if params is None: # <<<<<<<<<<<<<< - * return _Empty_Tuple - * elif isinstance(params, list): - */ - } - - /* "sqlalchemy/engine/_util_cy.py":94 - * if params is None: - * return _Empty_Tuple - * elif isinstance(params, list): # <<<<<<<<<<<<<< - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - * if len(params) > 0 and not _is_mapping_or_tuple(params[0]): - */ - __pyx_t_1 = PyList_Check(__pyx_v_params); - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_util_cy.py":96 - * elif isinstance(params, list): - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - * if len(params) > 0 and not _is_mapping_or_tuple(params[0]): # <<<<<<<<<<<<<< - * raise exc.ArgumentError( - * "List argument must consist only of tuples or dictionaries" - */ - __pyx_t_2 = PyObject_Length(__pyx_v_params); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 96, __pyx_L1_error) - __pyx_t_3 = (__pyx_t_2 > 0); - if (__pyx_t_3) { - } else { - __pyx_t_1 = __pyx_t_3; - goto __pyx_L5_bool_binop_done; - } - __pyx_t_4 = __Pyx_GetItemInt(__pyx_v_params, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 96, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_3 = __pyx_f_10sqlalchemy_6engine_8_util_cy__is_mapping_or_tuple(__pyx_t_4); if (unlikely(__pyx_t_3 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 96, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_t_5 = (!__pyx_t_3); - __pyx_t_1 = __pyx_t_5; - __pyx_L5_bool_binop_done:; - if (unlikely(__pyx_t_1)) { - - /* "sqlalchemy/engine/_util_cy.py":97 - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - * if len(params) > 0 and not _is_mapping_or_tuple(params[0]): - * raise exc.ArgumentError( # <<<<<<<<<<<<<< - * "List argument must consist only of tuples or dictionaries" - * ) - */ - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_exc); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 97, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_ArgumentError); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 97, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_t_6 = NULL; - 
__pyx_t_8 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_7))) { - __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_7); - if (likely(__pyx_t_6)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); - __Pyx_INCREF(__pyx_t_6); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_7, function); - __pyx_t_8 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_6, __pyx_kp_u_List_argument_must_consist_only}; - __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_7, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 97, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - } - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(0, 97, __pyx_L1_error) - - /* "sqlalchemy/engine/_util_cy.py":96 - * elif isinstance(params, list): - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - * if len(params) > 0 and not _is_mapping_or_tuple(params[0]): # <<<<<<<<<<<<<< - * raise exc.ArgumentError( - * "List argument must consist only of tuples or dictionaries" - */ - } - - /* "sqlalchemy/engine/_util_cy.py":100 - * "List argument must consist only of tuples or dictionaries" - * ) - * return params # <<<<<<<<<<<<<< - * elif _is_mapping_or_tuple(params): - * return [params] # type: ignore[return-value] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_params); - __pyx_r = __pyx_v_params; - goto __pyx_L0; - - /* "sqlalchemy/engine/_util_cy.py":94 - * if params is None: - * return _Empty_Tuple - * elif isinstance(params, list): # <<<<<<<<<<<<<< - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - * if len(params) > 0 and not _is_mapping_or_tuple(params[0]): - */ - } - - /* "sqlalchemy/engine/_util_cy.py":101 - * ) - * return params - * elif _is_mapping_or_tuple(params): # <<<<<<<<<<<<<< - * return [params] # type: ignore[return-value] - * else: - */ - __pyx_t_1 = __pyx_f_10sqlalchemy_6engine_8_util_cy__is_mapping_or_tuple(__pyx_v_params); if (unlikely(__pyx_t_1 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 101, __pyx_L1_error) - if (likely(__pyx_t_1)) { - - /* "sqlalchemy/engine/_util_cy.py":102 - * return params - * elif _is_mapping_or_tuple(params): - * return [params] # type: ignore[return-value] # <<<<<<<<<<<<<< - * else: - * raise exc.ArgumentError("mapping or sequence expected for parameters") - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = PyList_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 102, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_INCREF(__pyx_v_params); - __Pyx_GIVEREF(__pyx_v_params); - if (__Pyx_PyList_SET_ITEM(__pyx_t_4, 0, __pyx_v_params)) __PYX_ERR(0, 102, __pyx_L1_error); - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_util_cy.py":101 - * ) - * return params - * elif _is_mapping_or_tuple(params): # <<<<<<<<<<<<<< - * return [params] # type: ignore[return-value] - * else: - */ - } - - /* "sqlalchemy/engine/_util_cy.py":104 - * return [params] # type: ignore[return-value] - * else: - * raise exc.ArgumentError("mapping or sequence expected for parameters") # <<<<<<<<<<<<<< - * - * - */ - /*else*/ { - __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_exc); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 104, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_ArgumentError); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 104, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __pyx_t_7 = 
NULL; - __pyx_t_8 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - __pyx_t_8 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_kp_u_mapping_or_sequence_expected_for}; - __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 104, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __Pyx_Raise(__pyx_t_4, 0, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __PYX_ERR(0, 104, __pyx_L1_error) - } - - /* "sqlalchemy/engine/_util_cy.py":89 - * - * - * def _distill_raw_params( # <<<<<<<<<<<<<< - * params: Optional[_DBAPIAnyExecuteParams], - * ) -> _DBAPIMultiExecuteParams: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("sqlalchemy.engine._util_cy._distill_raw_params", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_util_cy.py":107 - * - * - * @cython.cfunc # <<<<<<<<<<<<<< - * def _is_contiguous(indexes: Tuple[int, ...]) -> cython.bint: - * i: cython.Py_ssize_t - */ - -static int __pyx_f_10sqlalchemy_6engine_8_util_cy__is_contiguous(PyObject *__pyx_v_indexes) { - Py_ssize_t __pyx_v_i; - Py_ssize_t __pyx_v_prev; - Py_ssize_t __pyx_v_curr; - int __pyx_r; - __Pyx_RefNannyDeclarations - Py_ssize_t __pyx_t_1; - Py_ssize_t __pyx_t_2; - Py_ssize_t __pyx_t_3; - Py_ssize_t __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_is_contiguous", 1); - - /* "sqlalchemy/engine/_util_cy.py":112 - * prev: cython.Py_ssize_t - * curr: cython.Py_ssize_t - * for i in range(1, len(indexes)): # <<<<<<<<<<<<<< - * prev = indexes[i - 1] - * curr = indexes[i] - */ - __pyx_t_1 = __Pyx_PyTuple_GET_SIZE(__pyx_v_indexes); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 112, __pyx_L1_error) - __pyx_t_2 = __pyx_t_1; - for (__pyx_t_3 = 1; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { - __pyx_v_i = __pyx_t_3; - - /* "sqlalchemy/engine/_util_cy.py":113 - * curr: cython.Py_ssize_t - * for i in range(1, len(indexes)): - * prev = indexes[i - 1] # <<<<<<<<<<<<<< - * curr = indexes[i] - * if prev != curr - 1: - */ - __pyx_t_4 = (__pyx_v_i - 1); - __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_indexes, __pyx_t_4, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 113, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_4 = __Pyx_PyIndex_AsSsize_t(__pyx_t_5); if (unlikely((__pyx_t_4 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 113, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_v_prev = __pyx_t_4; - - /* "sqlalchemy/engine/_util_cy.py":114 - * for i in range(1, len(indexes)): - * prev = indexes[i - 1] - * curr = indexes[i] # <<<<<<<<<<<<<< - * if prev != curr - 1: - * return False - */ - __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_indexes, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 114, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_4 
= __Pyx_PyIndex_AsSsize_t(__pyx_t_5); if (unlikely((__pyx_t_4 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 114, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_v_curr = __pyx_t_4; - - /* "sqlalchemy/engine/_util_cy.py":115 - * prev = indexes[i - 1] - * curr = indexes[i] - * if prev != curr - 1: # <<<<<<<<<<<<<< - * return False - * return True - */ - __pyx_t_6 = (__pyx_v_prev != (__pyx_v_curr - 1)); - if (__pyx_t_6) { - - /* "sqlalchemy/engine/_util_cy.py":116 - * curr = indexes[i] - * if prev != curr - 1: - * return False # <<<<<<<<<<<<<< - * return True - * - */ - __pyx_r = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_util_cy.py":115 - * prev = indexes[i - 1] - * curr = indexes[i] - * if prev != curr - 1: # <<<<<<<<<<<<<< - * return False - * return True - */ - } - } - - /* "sqlalchemy/engine/_util_cy.py":117 - * if prev != curr - 1: - * return False - * return True # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = 1; - goto __pyx_L0; - - /* "sqlalchemy/engine/_util_cy.py":107 - * - * - * @cython.cfunc # <<<<<<<<<<<<<< - * def _is_contiguous(indexes: Tuple[int, ...]) -> cython.bint: - * i: cython.Py_ssize_t - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("sqlalchemy.engine._util_cy._is_contiguous", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/engine/_util_cy.py":120 - * - * - * def tuplegetter(*indexes: int) -> _TupleGetterType: # <<<<<<<<<<<<<< - * max_index: int - * if len(indexes) == 1 or _is_contiguous(indexes): - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_7tuplegetter(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_6engine_8_util_cy_7tuplegetter = {"tuplegetter", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_6engine_8_util_cy_7tuplegetter, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_6engine_8_util_cy_7tuplegetter(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_indexes = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("tuplegetter (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "tuplegetter", 0))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_indexes = __pyx_args; - __pyx_r = __pyx_pf_10sqlalchemy_6engine_8_util_cy_6tuplegetter(__pyx_self, __pyx_v_indexes); - - /* function exit code */ - __Pyx_DECREF(__pyx_v_indexes); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_6engine_8_util_cy_6tuplegetter(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_indexes) { - PyObject *__pyx_v_max_index = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - Py_ssize_t __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - unsigned int __pyx_t_9; - int __pyx_lineno = 0; - 
const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("tuplegetter", 1); - - /* "sqlalchemy/engine/_util_cy.py":122 - * def tuplegetter(*indexes: int) -> _TupleGetterType: - * max_index: int - * if len(indexes) == 1 or _is_contiguous(indexes): # <<<<<<<<<<<<<< - * # slice form is faster but returns a list if input is list - * max_index = indexes[-1] - */ - __pyx_t_2 = __Pyx_PyTuple_GET_SIZE(__pyx_v_indexes); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 122, __pyx_L1_error) - __pyx_t_3 = (__pyx_t_2 == 1); - if (!__pyx_t_3) { - } else { - __pyx_t_1 = __pyx_t_3; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_3 = __pyx_f_10sqlalchemy_6engine_8_util_cy__is_contiguous(__pyx_v_indexes); if (unlikely(__pyx_t_3 == ((int)-1) && PyErr_Occurred())) __PYX_ERR(0, 122, __pyx_L1_error) - __pyx_t_1 = __pyx_t_3; - __pyx_L4_bool_binop_done:; - if (__pyx_t_1) { - - /* "sqlalchemy/engine/_util_cy.py":124 - * if len(indexes) == 1 or _is_contiguous(indexes): - * # slice form is faster but returns a list if input is list - * max_index = indexes[-1] # <<<<<<<<<<<<<< - * return operator.itemgetter(slice(indexes[0], max_index + 1)) - * else: - */ - __pyx_t_4 = __Pyx_GetItemInt_Tuple(__pyx_v_indexes, -1L, long, 1, __Pyx_PyInt_From_long, 0, 1, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 124, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - if (!(likely(__Pyx_Py3Int_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None) || __Pyx_RaiseUnexpectedTypeError("int", __pyx_t_4))) __PYX_ERR(0, 124, __pyx_L1_error) - __pyx_v_max_index = ((PyObject*)__pyx_t_4); - __pyx_t_4 = 0; - - /* "sqlalchemy/engine/_util_cy.py":125 - * # slice form is faster but returns a list if input is list - * max_index = indexes[-1] - * return operator.itemgetter(slice(indexes[0], max_index + 1)) # <<<<<<<<<<<<<< - * else: - * return operator.itemgetter(*indexes) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_operator); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 125, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_itemgetter); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 125, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_indexes, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 125, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_7 = PyNumber_Add(__pyx_v_max_index, __pyx_int_1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 125, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_8 = PySlice_New(__pyx_t_5, __pyx_t_7, Py_None); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 125, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __pyx_t_7 = NULL; - __pyx_t_9 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - __pyx_t_9 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_8}; - __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_9, 1+__pyx_t_9); - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 125, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - 
__pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "sqlalchemy/engine/_util_cy.py":122 - * def tuplegetter(*indexes: int) -> _TupleGetterType: - * max_index: int - * if len(indexes) == 1 or _is_contiguous(indexes): # <<<<<<<<<<<<<< - * # slice form is faster but returns a list if input is list - * max_index = indexes[-1] - */ - } - - /* "sqlalchemy/engine/_util_cy.py":127 - * return operator.itemgetter(slice(indexes[0], max_index + 1)) - * else: - * return operator.itemgetter(*indexes) # <<<<<<<<<<<<<< - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_operator); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 127, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_itemgetter); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 127, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_v_indexes, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 127, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - } - - /* "sqlalchemy/engine/_util_cy.py":120 - * - * - * def tuplegetter(*indexes: int) -> _TupleGetterType: # <<<<<<<<<<<<<< - * max_index: int - * if len(indexes) == 1 or _is_contiguous(indexes): - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_AddTraceback("sqlalchemy.engine._util_cy.tuplegetter", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_max_index); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif -/* #### Code section: pystring_table ### */ - -static int __Pyx_CreateStringTabAndInitStrings(void) { - __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_kp_u_2_1, __pyx_k_2_1, sizeof(__pyx_k_2_1), 0, 1, 0, 0}, - {&__pyx_n_s_Any, __pyx_k_Any, sizeof(__pyx_k_Any), 0, 0, 1, 1}, - {&__pyx_n_s_ArgumentError, __pyx_k_ArgumentError, sizeof(__pyx_k_ArgumentError), 0, 0, 1, 1}, - {&__pyx_n_s_CoreAnyExecuteParams, __pyx_k_CoreAnyExecuteParams, sizeof(__pyx_k_CoreAnyExecuteParams), 0, 0, 1, 1}, - {&__pyx_n_s_CoreMultiExecuteParams, __pyx_k_CoreMultiExecuteParams, sizeof(__pyx_k_CoreMultiExecuteParams), 0, 0, 1, 1}, - {&__pyx_n_s_DBAPIAnyExecuteParams, __pyx_k_DBAPIAnyExecuteParams, sizeof(__pyx_k_DBAPIAnyExecuteParams), 0, 0, 1, 1}, - {&__pyx_n_s_DBAPIMultiExecuteParams, __pyx_k_DBAPIMultiExecuteParams, sizeof(__pyx_k_DBAPIMultiExecuteParams), 0, 0, 1, 1}, - {&__pyx_kp_u_Empty_parameter_sequence_passed, __pyx_k_Empty_parameter_sequence_passed, sizeof(__pyx_k_Empty_parameter_sequence_passed), 0, 1, 0, 0}, - {&__pyx_kp_u_List_argument_must_consist_only, __pyx_k_List_argument_must_consist_only, sizeof(__pyx_k_List_argument_must_consist_only), 0, 1, 0, 0}, - {&__pyx_n_s_Mapping, __pyx_k_Mapping, sizeof(__pyx_k_Mapping), 0, 0, 1, 1}, - {&__pyx_n_s_Optional, __pyx_k_Optional, sizeof(__pyx_k_Optional), 0, 0, 1, 1}, - {&__pyx_kp_s_Optional__CoreAnyExecuteParams, 
__pyx_k_Optional__CoreAnyExecuteParams, sizeof(__pyx_k_Optional__CoreAnyExecuteParams), 0, 0, 1, 0}, - {&__pyx_kp_s_Optional__DBAPIAnyExecuteParams, __pyx_k_Optional__DBAPIAnyExecuteParams, sizeof(__pyx_k_Optional__DBAPIAnyExecuteParams), 0, 0, 1, 0}, - {&__pyx_n_s_TYPE_CHECKING, __pyx_k_TYPE_CHECKING, sizeof(__pyx_k_TYPE_CHECKING), 0, 0, 1, 1}, - {&__pyx_n_s_Tuple, __pyx_k_Tuple, sizeof(__pyx_k_Tuple), 0, 0, 1, 1}, - {&__pyx_n_s_TupleGetterType, __pyx_k_TupleGetterType, sizeof(__pyx_k_TupleGetterType), 0, 0, 1, 1}, - {&__pyx_n_s__11, __pyx_k__11, sizeof(__pyx_k__11), 0, 0, 1, 1}, - {&__pyx_kp_u__2, __pyx_k__2, sizeof(__pyx_k__2), 0, 1, 0, 0}, - {&__pyx_n_s__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 0, 1, 1}, - {&__pyx_n_s__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 0, 1, 1}, - {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, - {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_collections_abc, __pyx_k_collections_abc, sizeof(__pyx_k_collections_abc), 0, 0, 1, 1}, - {&__pyx_n_s_distill_params_20, __pyx_k_distill_params_20, sizeof(__pyx_k_distill_params_20), 0, 0, 1, 1}, - {&__pyx_n_s_distill_raw_params, __pyx_k_distill_raw_params, sizeof(__pyx_k_distill_raw_params), 0, 0, 1, 1}, - {&__pyx_n_s_exc, __pyx_k_exc, sizeof(__pyx_k_exc), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_n_s_indexes, __pyx_k_indexes, sizeof(__pyx_k_indexes), 0, 0, 1, 1}, - {&__pyx_n_s_initializing, __pyx_k_initializing, sizeof(__pyx_k_initializing), 0, 0, 1, 1}, - {&__pyx_n_s_int, __pyx_k_int, sizeof(__pyx_k_int), 0, 0, 1, 1}, - {&__pyx_n_s_interfaces, __pyx_k_interfaces, sizeof(__pyx_k_interfaces), 0, 0, 1, 1}, - {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, - {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, - {&__pyx_n_s_itemgetter, __pyx_k_itemgetter, sizeof(__pyx_k_itemgetter), 0, 0, 1, 1}, - {&__pyx_kp_s_lib_sqlalchemy_engine__util_cy_p, __pyx_k_lib_sqlalchemy_engine__util_cy_p, sizeof(__pyx_k_lib_sqlalchemy_engine__util_cy_p), 0, 0, 1, 0}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_kp_u_mapping_or_list_expected_for_par, __pyx_k_mapping_or_list_expected_for_par, sizeof(__pyx_k_mapping_or_list_expected_for_par), 0, 1, 0, 0}, - {&__pyx_kp_u_mapping_or_sequence_expected_for, __pyx_k_mapping_or_sequence_expected_for, sizeof(__pyx_k_mapping_or_sequence_expected_for), 0, 1, 0, 0}, - {&__pyx_n_s_max_index, __pyx_k_max_index, sizeof(__pyx_k_max_index), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_operator, __pyx_k_operator, sizeof(__pyx_k_operator), 0, 0, 1, 1}, - {&__pyx_n_s_params, __pyx_k_params, sizeof(__pyx_k_params), 0, 0, 1, 1}, - {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, - {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, - {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, - {&__pyx_n_s_spec, __pyx_k_spec, sizeof(__pyx_k_spec), 0, 0, 1, 1}, - {&__pyx_n_s_sqlalchemy_engine__util_cy, __pyx_k_sqlalchemy_engine__util_cy, sizeof(__pyx_k_sqlalchemy_engine__util_cy), 0, 0, 1, 1}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_n_s_tuplegetter, __pyx_k_tuplegetter, sizeof(__pyx_k_tuplegetter), 0, 0, 1, 1}, - 
{&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, - {&__pyx_n_s_util, __pyx_k_util, sizeof(__pyx_k_util), 0, 0, 1, 1}, - {&__pyx_n_s_warn_deprecated, __pyx_k_warn_deprecated, sizeof(__pyx_k_warn_deprecated), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} - }; - return __Pyx_InitStrings(__pyx_string_tab); -} -/* #### Code section: cached_builtins ### */ -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 112, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: cached_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "sqlalchemy/engine/_util_cy.py":70 - * # collections_abc.MutableSequence # avoid abc.__instancecheck__ - * if len(params) == 0: - * warn_deprecated( # <<<<<<<<<<<<<< - * "Empty parameter sequence passed to execute(). " - * "This use is deprecated and will raise an exception in a " - */ - __pyx_tuple_ = PyTuple_Pack(2, __pyx_kp_u_Empty_parameter_sequence_passed, __pyx_kp_u_2_1); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 70, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple_); - __Pyx_GIVEREF(__pyx_tuple_); - - /* "sqlalchemy/engine/_util_cy.py":38 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - __pyx_codeobj__5 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__util_cy_p, __pyx_n_s_is_compiled, 38, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__5)) __PYX_ERR(0, 38, __pyx_L1_error) - - /* "sqlalchemy/engine/_util_cy.py":61 - * - * # _is_mapping_or_tuple could be inlined if pure python perf is a problem - * def _distill_params_20( # <<<<<<<<<<<<<< - * params: Optional[_CoreAnyExecuteParams], - * ) -> _CoreMultiExecuteParams: - */ - __pyx_tuple__6 = PyTuple_Pack(1, __pyx_n_s_params); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(0, 61, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__6); - __Pyx_GIVEREF(__pyx_tuple__6); - __pyx_codeobj__7 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__6, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__util_cy_p, __pyx_n_s_distill_params_20, 61, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__7)) __PYX_ERR(0, 61, __pyx_L1_error) - - /* "sqlalchemy/engine/_util_cy.py":89 - * - * - * def _distill_raw_params( # <<<<<<<<<<<<<< - * params: Optional[_DBAPIAnyExecuteParams], - * ) -> _DBAPIMultiExecuteParams: - */ - __pyx_codeobj__8 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__6, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__util_cy_p, __pyx_n_s_distill_raw_params, 89, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__8)) __PYX_ERR(0, 89, __pyx_L1_error) - - /* "sqlalchemy/engine/_util_cy.py":120 - * - * - * def tuplegetter(*indexes: int) -> _TupleGetterType: # <<<<<<<<<<<<<< - * max_index: int - * if len(indexes) == 1 or _is_contiguous(indexes): - */ - __pyx_tuple__9 = PyTuple_Pack(2, __pyx_n_s_indexes, __pyx_n_s_max_index); if 
(unlikely(!__pyx_tuple__9)) __PYX_ERR(0, 120, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__9); - __Pyx_GIVEREF(__pyx_tuple__9); - __pyx_codeobj__10 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__9, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_engine__util_cy_p, __pyx_n_s_tuplegetter, 120, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__10)) __PYX_ERR(0, 120, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} -/* #### Code section: init_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { - if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); - __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: init_globals ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - return 0; -} -/* #### Code section: init_module ### */ - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple = ((PyObject*)Py_None); Py_INCREF(Py_None); - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); - /*--- Type import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); - /*--- Variable import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); - /*--- Function import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ 
-static int __pyx_pymod_exec__util_cy(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec__util_cy}, - {0, NULL} -}; -#endif - -#ifdef __cplusplus -namespace { - struct PyModuleDef __pyx_moduledef = - #else - static struct PyModuleDef __pyx_moduledef = - #endif - { - PyModuleDef_HEAD_INIT, - "_util_cy", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #elif CYTHON_USE_MODULE_STATE - sizeof(__pyx_mstate), /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - #if CYTHON_USE_MODULE_STATE - __pyx_m_traverse, /* m_traverse */ - __pyx_m_clear, /* m_clear */ - NULL /* m_free */ - #else - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ - #endif - }; - #ifdef __cplusplus -} /* anonymous namespace */ -#endif -#endif - -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else -#define __Pyx_PyMODINIT_FUNC void -#endif -#else -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else -#define __Pyx_PyMODINIT_FUNC PyObject * -#endif -#endif - - -#if PY_MAJOR_VERSION < 3 -__Pyx_PyMODINIT_FUNC init_util_cy(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC init_util_cy(void) -#else -__Pyx_PyMODINIT_FUNC PyInit__util_cy(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC PyInit__util_cy(void) -#if CYTHON_PEP489_MULTI_PHASE_INIT -{ - return PyModuleDef_Init(&__pyx_moduledef); -} -static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { - #if PY_VERSION_HEX >= 0x030700A1 - static PY_INT64_T main_interpreter_id = -1; - PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); - if (main_interpreter_id == -1) { - main_interpreter_id = current_id; - return (unlikely(current_id == -1)) ? 
-1 : 0; - } else if (unlikely(main_interpreter_id != current_id)) - #else - static PyInterpreterState *main_interpreter = NULL; - PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; - if (!main_interpreter) { - main_interpreter = current_interpreter; - } else if (unlikely(main_interpreter != current_interpreter)) - #endif - { - PyErr_SetString( - PyExc_ImportError, - "Interpreter change detected - this module can only be loaded into one interpreter per process."); - return -1; - } - return 0; -} -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) -#else -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) -#endif -{ - PyObject *value = PyObject_GetAttrString(spec, from_name); - int result = 0; - if (likely(value)) { - if (allow_none || value != Py_None) { -#if CYTHON_COMPILING_IN_LIMITED_API - result = PyModule_AddObject(module, to_name, value); -#else - result = PyDict_SetItemString(moddict, to_name, value); -#endif - } - Py_DECREF(value); - } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - } else { - result = -1; - } - return result; -} -static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { - PyObject *module = NULL, *moddict, *modname; - CYTHON_UNUSED_VAR(def); - if (__Pyx_check_single_interpreter()) - return NULL; - if (__pyx_m) - return __Pyx_NewRef(__pyx_m); - modname = PyObject_GetAttrString(spec, "name"); - if (unlikely(!modname)) goto bad; - module = PyModule_NewObject(modname); - Py_DECREF(modname); - if (unlikely(!module)) goto bad; -#if CYTHON_COMPILING_IN_LIMITED_API - moddict = module; -#else - moddict = PyModule_GetDict(module); - if (unlikely(!moddict)) goto bad; -#endif - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; - return module; -bad: - Py_XDECREF(module); - return NULL; -} - - -static CYTHON_SMALL_CODE int __pyx_pymod_exec__util_cy(PyObject *__pyx_pyinit_module) -#endif -#endif -{ - int stringtab_initialized = 0; - #if CYTHON_USE_MODULE_STATE - int pystate_addmodule_run = 0; - #endif - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - #if CYTHON_PEP489_MULTI_PHASE_INIT - if (__pyx_m) { - if (__pyx_m == __pyx_pyinit_module) return 0; - PyErr_SetString(PyExc_RuntimeError, "Module '_util_cy' has already been imported. 
Re-initialisation is not supported."); - return -1; - } - #elif PY_MAJOR_VERSION >= 3 - if (__pyx_m) return __Pyx_NewRef(__pyx_m); - #endif - /*--- Module creation code ---*/ - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_m = __pyx_pyinit_module; - Py_INCREF(__pyx_m); - #else - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4("_util_cy", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #elif CYTHON_USE_MODULE_STATE - __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) - { - int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); - __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_util_cy" pseudovariable */ - if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - pystate_addmodule_run = 1; - } - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #endif - CYTHON_UNUSED_VAR(__pyx_t_1); - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_d); - __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if CYTHON_REFNANNY -__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); -if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); -} -#endif - __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__util_cy(void)", 0); - if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pxy_PyFrame_Initialize_Offsets - __Pxy_PyFrame_Initialize_Offsets(); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Generator_USED - if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - PyEval_InitThreads(); - #endif - /*--- Initialize various global constants etc. 
---*/ - if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - stringtab_initialized = 1; - if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) - if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - if (__pyx_module_is_main_sqlalchemy__engine___util_cy) { - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - } - #if PY_MAJOR_VERSION >= 3 - { - PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) - if (!PyDict_GetItemString(modules, "sqlalchemy.engine._util_cy")) { - if (unlikely((PyDict_SetItemString(modules, "sqlalchemy.engine._util_cy", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - } - } - #endif - /*--- Builtin init code ---*/ - if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Constants init code ---*/ - if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Global type/function init code ---*/ - (void)__Pyx_modinit_global_init_code(); - (void)__Pyx_modinit_variable_export_code(); - (void)__Pyx_modinit_function_export_code(); - (void)__Pyx_modinit_type_init_code(); - (void)__Pyx_modinit_type_import_code(); - (void)__Pyx_modinit_variable_import_code(); - (void)__Pyx_modinit_function_import_code(); - /*--- Execution code ---*/ - #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - - /* "sqlalchemy/engine/_util_cy.py":10 - * from __future__ import annotations - * - * from collections.abc import Mapping # <<<<<<<<<<<<<< - * import operator - * from typing import Any - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Mapping); - __Pyx_GIVEREF(__pyx_n_s_Mapping); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Mapping)) __PYX_ERR(0, 10, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_collections_abc, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Mapping, __pyx_t_2) < 0) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_util_cy.py":11 - * - * from collections.abc import Mapping - * import operator # <<<<<<<<<<<<<< - * from typing import Any - * from typing import Optional - */ - __pyx_t_3 = __Pyx_ImportDottedModule(__pyx_n_s_operator, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_operator, __pyx_t_3) < 0) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_util_cy.py":12 - * from collections.abc import Mapping - * import operator - * from typing import Any # <<<<<<<<<<<<<< - * from typing import Optional - * from typing import Tuple - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Any); - __Pyx_GIVEREF(__pyx_n_s_Any); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Any)) 
__PYX_ERR(0, 12, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Any); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Any, __pyx_t_3) < 0) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_util_cy.py":13 - * import operator - * from typing import Any - * from typing import Optional # <<<<<<<<<<<<<< - * from typing import Tuple - * from typing import TYPE_CHECKING - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Optional); - __Pyx_GIVEREF(__pyx_n_s_Optional); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Optional)) __PYX_ERR(0, 13, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Optional); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Optional, __pyx_t_2) < 0) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_util_cy.py":14 - * from typing import Any - * from typing import Optional - * from typing import Tuple # <<<<<<<<<<<<<< - * from typing import TYPE_CHECKING - * - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Tuple); - __Pyx_GIVEREF(__pyx_n_s_Tuple); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Tuple)) __PYX_ERR(0, 14, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Tuple); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Tuple, __pyx_t_3) < 0) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_util_cy.py":15 - * from typing import Optional - * from typing import Tuple - * from typing import TYPE_CHECKING # <<<<<<<<<<<<<< - * - * from .. 
import exc - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_TYPE_CHECKING); - __Pyx_GIVEREF(__pyx_n_s_TYPE_CHECKING); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_TYPE_CHECKING)) __PYX_ERR(0, 15, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_TYPE_CHECKING); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_TYPE_CHECKING, __pyx_t_2) < 0) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_util_cy.py":17 - * from typing import TYPE_CHECKING - * - * from .. import exc # <<<<<<<<<<<<<< - * from ..util import warn_deprecated - * - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_exc); - __Pyx_GIVEREF(__pyx_n_s_exc); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_exc)) __PYX_ERR(0, 17, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s__4, __pyx_t_3, 2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_exc); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_exc, __pyx_t_3) < 0) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_util_cy.py":18 - * - * from .. 
import exc - * from ..util import warn_deprecated # <<<<<<<<<<<<<< - * - * if TYPE_CHECKING: - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_warn_deprecated); - __Pyx_GIVEREF(__pyx_n_s_warn_deprecated); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_warn_deprecated)) __PYX_ERR(0, 18, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_util, __pyx_t_2, 2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_warn_deprecated); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_warn_deprecated, __pyx_t_2) < 0) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_util_cy.py":20 - * from ..util import warn_deprecated - * - * if TYPE_CHECKING: # <<<<<<<<<<<<<< - * from .interfaces import _CoreAnyExecuteParams - * from .interfaces import _CoreMultiExecuteParams - */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_TYPE_CHECKING); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 20, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely((__pyx_t_4 < 0))) __PYX_ERR(0, 20, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_4) { - - /* "sqlalchemy/engine/_util_cy.py":21 - * - * if TYPE_CHECKING: - * from .interfaces import _CoreAnyExecuteParams # <<<<<<<<<<<<<< - * from .interfaces import _CoreMultiExecuteParams - * from .interfaces import _DBAPIAnyExecuteParams - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_CoreAnyExecuteParams); - __Pyx_GIVEREF(__pyx_n_s_CoreAnyExecuteParams); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_CoreAnyExecuteParams)) __PYX_ERR(0, 21, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_interfaces, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_CoreAnyExecuteParams); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_CoreAnyExecuteParams, __pyx_t_3) < 0) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_util_cy.py":22 - * if TYPE_CHECKING: - * from .interfaces import _CoreAnyExecuteParams - * from .interfaces import _CoreMultiExecuteParams # <<<<<<<<<<<<<< - * from .interfaces import _DBAPIAnyExecuteParams - * from .interfaces import _DBAPIMultiExecuteParams - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_CoreMultiExecuteParams); - __Pyx_GIVEREF(__pyx_n_s_CoreMultiExecuteParams); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_CoreMultiExecuteParams)) __PYX_ERR(0, 22, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_interfaces, __pyx_t_2, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_CoreMultiExecuteParams); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_CoreMultiExecuteParams, __pyx_t_2) < 0) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_util_cy.py":23 - * from .interfaces import _CoreAnyExecuteParams - * from .interfaces import _CoreMultiExecuteParams - * from .interfaces import _DBAPIAnyExecuteParams # <<<<<<<<<<<<<< - * from .interfaces import _DBAPIMultiExecuteParams - * from .result import _TupleGetterType - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_DBAPIAnyExecuteParams); - __Pyx_GIVEREF(__pyx_n_s_DBAPIAnyExecuteParams); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_DBAPIAnyExecuteParams)) __PYX_ERR(0, 23, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_interfaces, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_DBAPIAnyExecuteParams); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_DBAPIAnyExecuteParams, __pyx_t_3) < 0) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_util_cy.py":24 - * from .interfaces import _CoreMultiExecuteParams - * from .interfaces import _DBAPIAnyExecuteParams - * from .interfaces import _DBAPIMultiExecuteParams # <<<<<<<<<<<<<< - * from .result import _TupleGetterType - * - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 24, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_DBAPIMultiExecuteParams); - __Pyx_GIVEREF(__pyx_n_s_DBAPIMultiExecuteParams); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_DBAPIMultiExecuteParams)) __PYX_ERR(0, 24, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_interfaces, __pyx_t_2, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 24, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_DBAPIMultiExecuteParams); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 24, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_DBAPIMultiExecuteParams, __pyx_t_2) < 0) __PYX_ERR(0, 24, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_util_cy.py":25 - * from .interfaces import _DBAPIAnyExecuteParams - * from .interfaces import _DBAPIMultiExecuteParams - * from .result import _TupleGetterType # <<<<<<<<<<<<<< - * - * # START GENERATED CYTHON IMPORT - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 25, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_TupleGetterType); - __Pyx_GIVEREF(__pyx_n_s_TupleGetterType); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_TupleGetterType)) __PYX_ERR(0, 25, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_result, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 25, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_TupleGetterType); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 25, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_TupleGetterType, __pyx_t_3) < 0) __PYX_ERR(0, 25, 
__pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_util_cy.py":20 - * from ..util import warn_deprecated - * - * if TYPE_CHECKING: # <<<<<<<<<<<<<< - * from .interfaces import _CoreAnyExecuteParams - * from .interfaces import _CoreMultiExecuteParams - */ - } - - /* "sqlalchemy/engine/_util_cy.py":29 - * # START GENERATED CYTHON IMPORT - * # This section is automatically generated by the script tools/cython_imports.py - * try: # <<<<<<<<<<<<<< - * # NOTE: the cython compiler needs this "import cython" in the file, it - * # can't be only "from sqlalchemy.util import cython" with the fallback - */ - { - (void)__pyx_t_1; (void)__pyx_t_5; (void)__pyx_t_6; /* mark used */ - /*try:*/ { - - /* "sqlalchemy/engine/_util_cy.py":33 - * # can't be only "from sqlalchemy.util import cython" with the fallback - * # in that module - * import cython # <<<<<<<<<<<<<< - * except ModuleNotFoundError: - * from sqlalchemy.util import cython - */ - } - } - - /* "sqlalchemy/engine/_util_cy.py":38 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 38, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_bool) < 0) __PYX_ERR(0, 38, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_8_util_cy_1_is_compiled, 0, __pyx_n_s_is_compiled, NULL, __pyx_n_s_sqlalchemy_engine__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__5)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 38, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_compiled, __pyx_t_3) < 0) __PYX_ERR(0, 38, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_util_cy.py":45 - * # END GENERATED CYTHON IMPORT - * - * _Empty_Tuple: Tuple[Any, ...] 
= cython.declare(tuple, ()) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_INCREF(__pyx_empty_tuple); - __Pyx_XGOTREF(__pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple); - __Pyx_DECREF_SET(__pyx_v_10sqlalchemy_6engine_8_util_cy__Empty_Tuple, __pyx_empty_tuple); - __Pyx_GIVEREF(__pyx_empty_tuple); - - /* "sqlalchemy/engine/_util_cy.py":61 - * - * # _is_mapping_or_tuple could be inlined if pure python perf is a problem - * def _distill_params_20( # <<<<<<<<<<<<<< - * params: Optional[_CoreAnyExecuteParams], - * ) -> _CoreMultiExecuteParams: - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 61, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_params, __pyx_kp_s_Optional__CoreAnyExecuteParams) < 0) __PYX_ERR(0, 61, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_CoreMultiExecuteParams) < 0) __PYX_ERR(0, 61, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_8_util_cy_3_distill_params_20, 0, __pyx_n_s_distill_params_20, NULL, __pyx_n_s_sqlalchemy_engine__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__7)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 61, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_distill_params_20, __pyx_t_2) < 0) __PYX_ERR(0, 61, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_util_cy.py":89 - * - * - * def _distill_raw_params( # <<<<<<<<<<<<<< - * params: Optional[_DBAPIAnyExecuteParams], - * ) -> _DBAPIMultiExecuteParams: - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_params, __pyx_kp_s_Optional__DBAPIAnyExecuteParams) < 0) __PYX_ERR(0, 89, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_DBAPIMultiExecuteParams) < 0) __PYX_ERR(0, 89, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_8_util_cy_5_distill_raw_params, 0, __pyx_n_s_distill_raw_params, NULL, __pyx_n_s_sqlalchemy_engine__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__8)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_distill_raw_params, __pyx_t_3) < 0) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/engine/_util_cy.py":120 - * - * - * def tuplegetter(*indexes: int) -> _TupleGetterType: # <<<<<<<<<<<<<< - * max_index: int - * if len(indexes) == 1 or _is_contiguous(indexes): - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 120, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_indexes, __pyx_n_s_int) < 0) __PYX_ERR(0, 120, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_TupleGetterType) < 0) __PYX_ERR(0, 120, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_6engine_8_util_cy_7tuplegetter, 0, __pyx_n_s_tuplegetter, NULL, __pyx_n_s_sqlalchemy_engine__util_cy, __pyx_d, ((PyObject *)__pyx_codeobj__10)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 120, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - 
if (PyDict_SetItem(__pyx_d, __pyx_n_s_tuplegetter, __pyx_t_2) < 0) __PYX_ERR(0, 120, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/engine/_util_cy.py":1 - * # engine/_util_cy.py # <<<<<<<<<<<<<< - * # Copyright (C) 2010-2025 the SQLAlchemy authors and contributors - * # - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /*--- Wrapped vars code ---*/ - - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - if (__pyx_m) { - if (__pyx_d && stringtab_initialized) { - __Pyx_AddTraceback("init sqlalchemy.engine._util_cy", __pyx_clineno, __pyx_lineno, __pyx_filename); - } - #if !CYTHON_USE_MODULE_STATE - Py_CLEAR(__pyx_m); - #else - Py_DECREF(__pyx_m); - if (pystate_addmodule_run) { - PyObject *tp, *value, *tb; - PyErr_Fetch(&tp, &value, &tb); - PyState_RemoveModule(&__pyx_moduledef); - PyErr_Restore(tp, value, tb); - } - #endif - } else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "init sqlalchemy.engine._util_cy"); - } - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - #if CYTHON_PEP489_MULTI_PHASE_INIT - return (__pyx_m != NULL) ? 0 : -1; - #elif PY_MAJOR_VERSION >= 3 - return __pyx_m; - #else - return; - #endif -} -/* #### Code section: cleanup_globals ### */ -/* #### Code section: cleanup_module ### */ -/* #### Code section: main_method ### */ -/* #### Code section: utility_code_pragmas ### */ -#ifdef _MSC_VER -#pragma warning( push ) -/* Warning 4127: conditional expression is constant - * Cython uses constant conditional expressions to allow in inline functions to be optimized at - * compile-time, so this warning is not useful - */ -#pragma warning( disable : 4127 ) -#endif - - - -/* #### Code section: utility_code_def ### */ - -/* --- Runtime support code --- */ -/* Refnanny */ -#if CYTHON_REFNANNY -static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule(modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, "RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); -end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; -} -#endif - -/* PyErrExceptionMatches */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i= 0x030C00A6 - PyObject *current_exception = tstate->current_exception; - if (unlikely(!current_exception)) return 0; - exc_type = (PyObject*) Py_TYPE(current_exception); - if (exc_type == err) return 1; -#else - exc_type = tstate->curexc_type; - if (exc_type == err) return 1; - if (unlikely(!exc_type)) return 0; -#endif - #if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(exc_type); - #endif - if (unlikely(PyTuple_Check(err))) { - result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); - } else { - result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err); - } - #if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(exc_type); - #endif - return result; -} -#endif - -/* PyErrFetchRestore */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { -#if PY_VERSION_HEX >= 0x030C00A6 - PyObject *tmp_value; - assert(type == 
[Elided: auto-generated Cython C utility code (deleted-line diff hunk) covering the standard helper sections — exception state save/restore (__Pyx_ErrRestoreInState / __Pyx_ErrFetchInState), PyObjectGetAttrStr, PyObjectGetAttrStrNoError, GetBuiltinName, TupleAndListFromArray, BytesEquals, UnicodeEquals, fastcall keyword lookup, dict-version caching, GetModuleGlobalName, RaiseDoubleKeywords, ParseKeywords, RaiseArgTupleInvalid, PyObjectCall, GetItemInt, PyFunctionFastCall, PyObjectCallMethO, PyObjectFastCall, RaiseException, KeywordStringCheck, RaiseUnexpectedTypeError, Import, ImportFrom, ImportDottedModule, FixUpExtensionType, FetchSharedCythonModule, FetchCommonType, PyVectorcallFastCallDict, CythonFunctionShared.]
(__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); -} -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; - PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); -} -#endif -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_CyFunctionType_slots[] = { - {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, - {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, - {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, - {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, - {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, - {Py_tp_methods, (void *)__pyx_CyFunction_methods}, - {Py_tp_members, (void *)__pyx_CyFunction_members}, - {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, - {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, - {0, 0}, -}; -static PyType_Spec __pyx_CyFunctionType_spec = { - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - __pyx_CyFunctionType_slots -}; -#else -static PyTypeObject __pyx_CyFunctionType_type = { - PyVarObject_HEAD_INIT(0, 0) - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, - (destructor) __Pyx_CyFunction_dealloc, -#if !CYTHON_METH_FASTCALL - 0, -#elif CYTHON_BACKPORT_VECTORCALL - (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), -#else - offsetof(PyCFunctionObject, vectorcall), -#endif - 0, - 0, -#if PY_MAJOR_VERSION < 3 - 0, -#else - 0, -#endif - (reprfunc) __Pyx_CyFunction_repr, - 0, - 0, - 0, - 0, - __Pyx_CyFunction_CallAsMethod, - 0, - 0, - 0, - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - 0, - (traverseproc) __Pyx_CyFunction_traverse, - (inquiry) __Pyx_CyFunction_clear, - 0, -#if PY_VERSION_HEX < 0x030500A0 - offsetof(__pyx_CyFunctionObject, func_weakreflist), -#else - offsetof(PyCFunctionObject, m_weakreflist), -#endif - 0, - 0, - __pyx_CyFunction_methods, - __pyx_CyFunction_members, - __pyx_CyFunction_getsets, - 0, - 0, - __Pyx_PyMethod_New, - 0, - offsetof(__pyx_CyFunctionObject, func_dict), - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, -#if PY_VERSION_HEX >= 0x030400a1 - 0, -#endif -#if PY_VERSION_HEX >= 0x030800b1 && 
(!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, -#endif -#if __PYX_NEED_TP_PRINT_SLOT - 0, -#endif -#if PY_VERSION_HEX >= 0x030C0000 - 0, -#endif -#if PY_VERSION_HEX >= 0x030d00A4 - 0, -#endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, -#endif -}; -#endif -static int __pyx_CyFunction_init(PyObject *module) { -#if CYTHON_USE_TYPE_SPECS - __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); -#else - CYTHON_UNUSED_VAR(module); - __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); -#endif - if (unlikely(__pyx_CyFunctionType == NULL)) { - return -1; - } - return 0; -} -static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults = PyObject_Malloc(size); - if (unlikely(!m->defaults)) - return PyErr_NoMemory(); - memset(m->defaults, 0, size); - m->defaults_pyobjects = pyobjects; - m->defaults_size = size; - return m->defaults; -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_tuple = tuple; - Py_INCREF(tuple); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_kwdict = dict; - Py_INCREF(dict); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->func_annotations = dict; - Py_INCREF(dict); -} - -/* CythonFunction */ -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { - PyObject *op = __Pyx_CyFunction_Init( - PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), - ml, flags, qualname, closure, module, globals, code - ); - if (likely(op)) { - PyObject_GC_Track(op); - } - return op; -} - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - CYTHON_MAYBE_UNUSED_VAR(tstate); - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} -#endif - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API - #ifndef Py_BUILD_CORE - #define Py_BUILD_CORE 1 - #endif - #include "internal/pycore_frame.h" -#endif -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, - PyObject *firstlineno, PyObject *name) { - PyObject *replace = NULL; - if 
(unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; - replace = PyObject_GetAttrString(code, "replace"); - if (likely(replace)) { - PyObject *result; - result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); - Py_DECREF(replace); - return result; - } - PyErr_Clear(); - #if __PYX_LIMITED_VERSION_HEX < 0x030780000 - { - PyObject *compiled = NULL, *result = NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; - compiled = Py_CompileString( - "out = type(code)(\n" - " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" - " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" - " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" - " code.co_lnotab)\n", "", Py_file_input); - if (!compiled) return NULL; - result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); - Py_DECREF(compiled); - if (!result) PyErr_Print(); - Py_DECREF(result); - result = PyDict_GetItemString(scratch_dict, "out"); - if (result) Py_INCREF(result); - return result; - } - #else - return NULL; - #endif -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; - PyObject *replace = NULL, *getframe = NULL, *frame = NULL; - PyObject *exc_type, *exc_value, *exc_traceback; - int success = 0; - if (c_line) { - (void) __pyx_cfilenm; - (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); - } - PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); - code_object = Py_CompileString("_getframe()", filename, Py_eval_input); - if (unlikely(!code_object)) goto bad; - py_py_line = PyLong_FromLong(py_line); - if (unlikely(!py_py_line)) goto bad; - py_funcname = PyUnicode_FromString(funcname); - if (unlikely(!py_funcname)) goto bad; - dict = PyDict_New(); - if (unlikely(!dict)) goto bad; - { - PyObject *old_code_object = code_object; - code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); - Py_DECREF(old_code_object); - } - if (unlikely(!code_object)) goto bad; - getframe = PySys_GetObject("_getframe"); - if (unlikely(!getframe)) goto bad; - if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad; - frame = PyEval_EvalCode(code_object, dict, dict); - if (unlikely(!frame) || frame == Py_None) goto bad; - success = 1; - bad: - PyErr_Restore(exc_type, exc_value, exc_traceback); - Py_XDECREF(code_object); - Py_XDECREF(py_py_line); - Py_XDECREF(py_funcname); - Py_XDECREF(dict); - Py_XDECREF(replace); - if (success) { - PyTraceBack_Here( - (struct _frame*)frame); - } - Py_XDECREF(frame); -} -#else -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = NULL; - PyObject *py_funcname = NULL; - #if PY_MAJOR_VERSION < 3 - PyObject *py_srcfile = NULL; - py_srcfile = PyString_FromString(filename); - if (!py_srcfile) goto bad; - #endif - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto bad; - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto bad; - funcname = 
PyUnicode_AsUTF8(py_funcname); - if (!funcname) goto bad; - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - if (!py_funcname) goto bad; - #endif - } - #if PY_MAJOR_VERSION < 3 - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - #else - py_code = PyCode_NewEmpty(filename, funcname, py_line); - #endif - Py_XDECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_funcname); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_srcfile); - #endif - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject *ptype, *pvalue, *ptraceback; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) { - /* If the code object creation fails, then we should clear the - fetched exception references and propagate the new exception */ - Py_XDECREF(ptype); - Py_XDECREF(pvalue); - Py_XDECREF(ptraceback); - goto bad; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} -#endif - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - unsigned char *bytes = (unsigned char *)&value; -#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 - if (is_unsigned) { - return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); - } else { - return PyLong_FromNativeBytes(bytes, sizeof(value), -1); - } -#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 - int one = 1; int little = (int)*(unsigned char *)&one; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); -#else - int one = 1; int little = (int)*(unsigned char *)&one; - PyObject *from_bytes, *result = NULL; - PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; - from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); - if (!from_bytes) return NULL; - py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); - if (!py_bytes) goto limited_bad; - order_str = PyUnicode_FromString(little ? 
"little" : "big"); - if (!order_str) goto limited_bad; - arg_tuple = PyTuple_Pack(2, py_bytes, order_str); - if (!arg_tuple) goto limited_bad; - if (!is_unsigned) { - kwds = PyDict_New(); - if (!kwds) goto limited_bad; - if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; - } - result = PyObject_Call(from_bytes, arg_tuple, kwds); - limited_bad: - Py_XDECREF(kwds); - Py_XDECREF(arg_tuple); - Py_XDECREF(order_str); - Py_XDECREF(py_bytes); - Py_XDECREF(from_bytes); - return result; -#endif - } -} - -/* FormatTypeName */ -#if CYTHON_COMPILING_IN_LIMITED_API -static __Pyx_TypeName -__Pyx_PyType_GetName(PyTypeObject* tp) -{ - PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, - __pyx_n_s_name); - if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { - PyErr_Clear(); - Py_XDECREF(name); - name = __Pyx_NewRef(__pyx_n_s__11); - } - return name; -} -#endif - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(long) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) 
<< PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(long) <= sizeof(unsigned long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: 
- if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } - } -#endif - if ((sizeof(long) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - long val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 
30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (long) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (long) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (long) -1; - } else { - stepval = v; - } - v = NULL; - val = (long) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((long) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((long) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (long) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const int neg_one = (int) -1, const_zero = (int) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(int) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | 
(unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(int) <= sizeof(unsigned long))) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, 
(((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } - } -#endif - if ((sizeof(int) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - int val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 
30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (int) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (int) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (int) -1; - } else { - stepval = v; - } - v = NULL; - val = (int) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((int) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((int) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (int) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (cls == a || cls == b) return 1; - mro = cls->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - PyObject *base = PyTuple_GET_ITEM(mro, i); - if (base == (PyObject *)a || base == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - if (exc_type1) { - return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); - } else { - return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i= 0x030B00A4 - return Py_Version & ~0xFFUL; -#else - const char* rt_version = Py_GetVersion(); - unsigned long version = 0; - unsigned long factor = 0x01000000UL; - unsigned int digit = 0; - int i = 0; - while (factor) { - while ('0' <= rt_version[i] && rt_version[i] <= '9') { - digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); - ++i; - } - version += factor * digit; - if (rt_version[i] != '.') - break; - digit = 0; - factor >>= 8; - ++i; - } - return version; -#endif -} -static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { - const unsigned long MAJOR_MINOR = 0xFFFF0000UL; - if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) - return 0; - if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) - return 1; - { - char message[200]; - PyOS_snprintf(message, sizeof(message), - "compile time Python version %d.%d " - "of module '%.100s' " - "%s " - "runtime version %d.%d", - (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), - __Pyx_MODULE_NAME, - (allow_newer) ? 
"was newer than" : "does not match", - (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) - ); - return PyErr_WarnEx(NULL, message, 1); - } -} - -/* InitStrings */ -#if PY_MAJOR_VERSION >= 3 -static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { - if (t.is_unicode | t.is_str) { - if (t.intern) { - *str = PyUnicode_InternFromString(t.s); - } else if (t.encoding) { - *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); - } else { - *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); - } - } else { - *str = PyBytes_FromStringAndSize(t.s, t.n - 1); - } - if (!*str) - return -1; - if (PyObject_Hash(*str) == -1) - return -1; - return 0; -} -#endif -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { - while (t->p) { - #if PY_MAJOR_VERSION >= 3 - __Pyx_InitString(*t, t->p); - #else - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - #endif - ++t; - } - return 0; -} - -#include -static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { - size_t len = strlen(s); - if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { - PyErr_SetString(PyExc_OverflowError, "byte string is too long"); - return -1; - } - return (Py_ssize_t) len; -} -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return __Pyx_PyUnicode_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return PyByteArray_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if 
(!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { - __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). " - "The ability to return an instance of a strict subclass of int is deprecated, " - "and may be removed in a future version of Python.", - result_type_name)) { - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; - } - __Pyx_DECREF_TypeName(result_type_name); - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", - type_name, type_name, result_type_name); - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - if (likely(__Pyx_PyLong_IsCompact(b))) { - return __Pyx_PyLong_CompactValue(b); - } else { - const digit* digits = __Pyx_PyLong_Digits(b); - const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * 
PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { - if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { - return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); -#if PY_MAJOR_VERSION < 3 - } else if (likely(PyInt_CheckExact(o))) { - return PyInt_AS_LONG(o); -#endif - } else { - Py_ssize_t ival; - PyObject *x; - x = PyNumber_Index(o); - if (!x) return -1; - ival = PyInt_AsLong(x); - Py_DECREF(x); - return ival; - } -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -/* #### Code section: utility_code_pragmas_end ### */ -#ifdef _MSC_VER -#pragma warning( pop ) -#endif - - - -/* #### Code section: end ### */ -#endif /* Py_PYTHON_H */ diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index b04d6d48c28..5e91cdf9e14 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -265,10 +265,10 @@ def _init_global_attributes( @classmethod def create_for_statement( cls, - statement: Executable, - compiler: SQLCompiler, + statement: Union[Select, FromStatement], + compiler: Optional[SQLCompiler], **kw: Any, - ) -> CompileState: + ) -> AbstractORMCompileState: """Create a context for a statement given a :class:`.Compiler`. This method is always invoked in the context of SQLCompiler.process(). @@ -437,30 +437,15 @@ class default_compile_options(CacheableOptions): def __init__(self, *arg, **kw): raise NotImplementedError() - @classmethod - def create_for_statement( - cls, - statement: Executable, - compiler: SQLCompiler, - **kw: Any, - ) -> ORMCompileState: - return cls._create_orm_context( - cast("Union[Select, FromStatement]", statement), - toplevel=not compiler.stack, - compiler=compiler, - **kw, - ) + if TYPE_CHECKING: - @classmethod - def _create_orm_context( - cls, - statement: Union[Select, FromStatement], - *, - toplevel: bool, - compiler: Optional[SQLCompiler], - **kw: Any, - ) -> ORMCompileState: - raise NotImplementedError() + @classmethod + def create_for_statement( + cls, + statement: Union[Select, FromStatement], + compiler: Optional[SQLCompiler], + **kw: Any, + ) -> ORMCompileState: ... 
     def _append_dedupe_col_collection(self, obj, col_collection):
         dedupe = self.dedupe_columns
@@ -770,16 +755,12 @@ class ORMFromStatementCompileState(ORMCompileState):
     eager_joins = _EMPTY_DICT

     @classmethod
-    def _create_orm_context(
+    def create_for_statement(
         cls,
-        statement: Union[Select, FromStatement],
-        *,
-        toplevel: bool,
+        statement_container: Union[Select, FromStatement],
         compiler: Optional[SQLCompiler],
         **kw: Any,
     ) -> ORMFromStatementCompileState:
-        statement_container = statement
-
         assert isinstance(statement_container, FromStatement)

         if compiler is not None and compiler.stack:
@@ -1086,17 +1067,21 @@ class ORMSelectCompileState(ORMCompileState, SelectState):
     _having_criteria = ()

     @classmethod
-    def _create_orm_context(
+    def create_for_statement(
         cls,
         statement: Union[Select, FromStatement],
-        *,
-        toplevel: bool,
         compiler: Optional[SQLCompiler],
         **kw: Any,
     ) -> ORMSelectCompileState:
+        """compiler hook, we arrive here from compiler.visit_select() only."""
         self = cls.__new__(cls)

+        if compiler is not None:
+            toplevel = not compiler.stack
+        else:
+            toplevel = True
+
         select_statement = statement

         # if we are a select() that was never a legacy Query, we won't
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index af496b245f4..4dbb3009b39 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -3340,9 +3340,7 @@ def _compile_state(
             ORMCompileState._get_plugin_class_for_plugin(stmt, "orm"),
         )

-        return compile_state_cls._create_orm_context(
-            stmt, toplevel=True, compiler=None
-        )
+        return compile_state_cls.create_for_statement(stmt, None)

     def _compile_context(self, for_statement: bool = False) -> QueryContext:
         compile_state = self._compile_state(for_statement=for_statement)
diff --git a/lib/sqlalchemy/sql/_util_cy.c b/lib/sqlalchemy/sql/_util_cy.c
deleted file mode 100644
index 70663d86b16..00000000000
--- a/lib/sqlalchemy/sql/_util_cy.c
+++ /dev/null
@@ -1,11241 +0,0 @@
-/* Generated by Cython 3.0.11 */
-
-/* BEGIN: Cython Metadata
-{
-    "distutils": {
-        "name": "sqlalchemy.sql._util_cy",
-        "sources": [
-            "lib/sqlalchemy/sql/_util_cy.py"
-        ]
-    },
-    "module_name": "sqlalchemy.sql._util_cy"
-}
-END: Cython Metadata */
-
-#ifndef PY_SSIZE_T_CLEAN
-#define PY_SSIZE_T_CLEAN
-#endif /* PY_SSIZE_T_CLEAN */
-#if defined(CYTHON_LIMITED_API) && 0
-  #ifndef Py_LIMITED_API
-    #if CYTHON_LIMITED_API+0 > 0x03030000
-      #define Py_LIMITED_API CYTHON_LIMITED_API
-    #else
-      #define Py_LIMITED_API 0x03030000
-    #endif
-  #endif
-#endif
-
-#include "Python.h"
-#ifndef Py_PYTHON_H
-    #error Python headers needed to compile C extensions, please install development version of Python.
-#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
-    #error Cython requires Python 2.7+ or Python 3.3+.
-#else
-#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API
-#define __PYX_EXTRA_ABI_MODULE_NAME "limited"
-#else
-#define __PYX_EXTRA_ABI_MODULE_NAME ""
-#endif
-#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME
-#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI
-#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "."
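For orientation only, not part of the patch itself: the context.py and query.py hunks above move the ORM compile-state entry point back to create_for_statement(), with "toplevel" now derived inside the hook from whether a compiler with a non-empty compiler.stack was passed, while Query._compile_state() invokes the hook as create_for_statement(stmt, None). The sketch below mimics only that derivation; _StubCompiler and _StubCompileState are hypothetical stand-in names, not SQLAlchemy APIs.

# Illustrative sketch, not code from this patch: stand-in classes that mimic
# how the restored create_for_statement() hook derives "toplevel".
from typing import Any, List, Optional


class _StubCompiler:
    """Hypothetical stand-in for SQLCompiler; only .stack matters here."""

    def __init__(self, stack: Optional[List[Any]] = None) -> None:
        self.stack = stack or []


class _StubCompileState:
    """Hypothetical stand-in for an ORM compile-state class."""

    toplevel: bool
    statement: Any

    @classmethod
    def create_for_statement(
        cls, statement: Any, compiler: Optional[_StubCompiler], **kw: Any
    ) -> "_StubCompileState":
        self = cls()
        if compiler is not None:
            # invoked from the SQL compiler: toplevel only when the compiler
            # stack is empty, i.e. this is the outermost statement
            self.toplevel = not compiler.stack
        else:
            # invoked the way Query._compile_state() does in the hunk above,
            # i.e. create_for_statement(stmt, None): always a toplevel compile
            self.toplevel = True
        self.statement = statement
        return self


# toplevel outer compile, nested subquery compile, and a Query-style call
assert _StubCompileState.create_for_statement("stmt", _StubCompiler()).toplevel
assert not _StubCompileState.create_for_statement(
    "stmt", _StubCompiler(stack=["outer select"])
).toplevel
assert _StubCompileState.create_for_statement("stmt", None).toplevel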
-#define CYTHON_HEX_VERSION 0x03000BF0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #define HAVE_LONG_LONG -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX -#if defined(GRAALVM_PYTHON) - /* For very preliminary testing purposes. Most variables are set the same as PyPy. - The existence of this section does not imply that anything works or is even tested */ - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 1 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(PYPY_VERSION) - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - 
#undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - #endif - #if PY_VERSION_HEX < 0x03090000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(CYTHON_LIMITED_API) - #ifdef Py_LIMITED_API - #undef __PYX_LIMITED_VERSION_HEX - #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API - #endif - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 1 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_CLINE_IN_TRACEBACK - #define CYTHON_CLINE_IN_TRACEBACK 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 1 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #endif - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 1 - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef 
CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #ifndef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 1 - #endif - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 - #endif -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #ifndef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define 
CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) - #endif - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #if PY_VERSION_HEX < 0x030400a1 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #elif !defined(CYTHON_USE_TP_FINALIZE) - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #if PY_VERSION_HEX < 0x030600B1 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #elif !defined(CYTHON_USE_DICT_VERSIONS) - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) - #endif - #if PY_VERSION_HEX < 0x030700A3 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #elif !defined(CYTHON_USE_EXC_INFO_STACK) - #define CYTHON_USE_EXC_INFO_STACK 1 - #endif - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 1 - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if !defined(CYTHON_VECTORCALL) -#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) -#endif -#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) -#if CYTHON_USE_PYLONG_INTERNALS - #if PY_MAJOR_VERSION < 3 - #include "longintrepr.h" - #endif - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(maybe_unused) - #define CYTHON_UNUSED [[maybe_unused]] - #endif - #endif - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef 
CYTHON_MAYBE_UNUSED_VAR - #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_USE_CPP_STD_MOVE - #if defined(__cplusplus) && (\ - __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) - #define CYTHON_USE_CPP_STD_MOVE 1 - #else - #define CYTHON_USE_CPP_STD_MOVE 0 - #endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned short uint16_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 uint32_t; - #endif - #endif - #if _MSC_VER < 1300 - #ifdef _WIN64 - typedef unsigned long long __pyx_uintptr_t; - #else - typedef unsigned int __pyx_uintptr_t; - #endif - #else - #ifdef _WIN64 - typedef unsigned __int64 __pyx_uintptr_t; - #else - typedef unsigned __int32 __pyx_uintptr_t; - #endif - #endif -#else - #include - typedef uintptr_t __pyx_uintptr_t; -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif -#ifdef __cplusplus - template - struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; - #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) -#else - #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) -#endif -#if CYTHON_COMPILING_IN_PYPY == 1 - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) -#else - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) -#endif -#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_DefaultClassType PyClass_Type - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - 
#define __Pyx_BUILTIN_MODULE_NAME "builtins" - #define __Pyx_DefaultClassType PyType_Type -#if CYTHON_COMPILING_IN_LIMITED_API - static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, - PyObject *code, PyObject *c, PyObject* n, PyObject *v, - PyObject *fv, PyObject *cell, PyObject* fn, - PyObject *name, int fline, PyObject *lnos) { - PyObject *exception_table = NULL; - PyObject *types_module=NULL, *code_type=NULL, *result=NULL; - #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 - PyObject *version_info; - PyObject *py_minor_version = NULL; - #endif - long minor_version = 0; - PyObject *type, *value, *traceback; - PyErr_Fetch(&type, &value, &traceback); - #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 - minor_version = 11; - #else - if (!(version_info = PySys_GetObject("version_info"))) goto end; - if (!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; - minor_version = PyLong_AsLong(py_minor_version); - Py_DECREF(py_minor_version); - if (minor_version == -1 && PyErr_Occurred()) goto end; - #endif - if (!(types_module = PyImport_ImportModule("types"))) goto end; - if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; - if (minor_version <= 7) { - (void)p; - result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, - c, n, v, fn, name, fline, lnos, fv, cell); - } else if (minor_version <= 10) { - result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, - c, n, v, fn, name, fline, lnos, fv, cell); - } else { - if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; - result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, - c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); - } - end: - Py_XDECREF(code_type); - Py_XDECREF(exception_table); - Py_XDECREF(types_module); - if (type) { - PyErr_Restore(type, value, traceback); - } - return result; - } - #ifndef CO_OPTIMIZED - #define CO_OPTIMIZED 0x0001 - #endif - #ifndef CO_NEWLOCALS - #define CO_NEWLOCALS 0x0002 - #endif - #ifndef CO_VARARGS - #define CO_VARARGS 0x0004 - #endif - #ifndef CO_VARKEYWORDS - #define CO_VARKEYWORDS 0x0008 - #endif - #ifndef CO_ASYNC_GENERATOR - #define CO_ASYNC_GENERATOR 0x0200 - #endif - #ifndef CO_GENERATOR - #define CO_GENERATOR 0x0020 - #endif - #ifndef CO_COROUTINE - #define CO_COROUTINE 0x0080 - #endif -#elif PY_VERSION_HEX >= 0x030B0000 - static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, - PyObject *code, PyObject *c, PyObject* n, PyObject *v, - PyObject *fv, PyObject *cell, PyObject* fn, - PyObject *name, int fline, PyObject *lnos) { - PyCodeObject *result; - PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); - if (!empty_bytes) return NULL; - result = - #if PY_VERSION_HEX >= 0x030C0000 - PyUnstable_Code_NewWithPosOnlyArgs - #else - PyCode_NewWithPosOnlyArgs - #endif - (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); - Py_DECREF(empty_bytes); - return result; - } -#elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#endif -#endif -#if PY_VERSION_HEX >= 0x030900A4 || 
defined(Py_IS_TYPE) - #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) -#else - #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) - #define __Pyx_Py_Is(x, y) Py_Is(x, y) -#else - #define __Pyx_Py_Is(x, y) ((x) == (y)) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) - #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) -#else - #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) - #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) -#else - #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) - #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) -#else - #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) -#endif -#define __Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) -#if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) -#else - #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) -#endif -#ifndef CO_COROUTINE - #define CO_COROUTINE 0x80 -#endif -#ifndef CO_ASYNC_GENERATOR - #define CO_ASYNC_GENERATOR 0x200 -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef Py_TPFLAGS_SEQUENCE - #define Py_TPFLAGS_SEQUENCE 0 -#endif -#ifndef Py_TPFLAGS_MAPPING - #define Py_TPFLAGS_MAPPING 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #if PY_VERSION_HEX >= 0x030d00A4 - # define __Pyx_PyCFunctionFast PyCFunctionFast - # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords - #else - # define __Pyx_PyCFunctionFast _PyCFunctionFast - # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords - #endif -#endif -#if CYTHON_METH_FASTCALL - #define __Pyx_METH_FASTCALL METH_FASTCALL - #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast - #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords -#else - #define __Pyx_METH_FASTCALL METH_VARARGS - #define __Pyx_PyCFunction_FastCall PyCFunction - #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords -#endif -#if CYTHON_VECTORCALL - #define __pyx_vectorcallfunc vectorcallfunc - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET - #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) -#elif CYTHON_BACKPORT_VECTORCALL - typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, - size_t nargsf, PyObject *kwnames); - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) - #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) -#else - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 - #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) -#endif -#if PY_MAJOR_VERSION >= 0x030900B1 -#define 
__Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) -#else -#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) -#endif -#define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) -#elif !CYTHON_COMPILING_IN_LIMITED_API -#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) -#endif -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) -static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { - return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? NULL : ((PyCFunctionObject*)func)->m_self; -} -#endif -static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) { -#if CYTHON_COMPILING_IN_LIMITED_API - return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; -#else - return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; -#endif -} -#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) -#if __PYX_LIMITED_VERSION_HEX < 0x030900B1 - #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) - typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); -#else - #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) - #define __Pyx_PyCMethod PyCMethod -#endif -#ifndef METH_METHOD - #define METH_METHOD 0x200 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyThreadState_Current PyThreadState_Get() -#elif !CYTHON_FAST_THREAD_STATE - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x030d00A1 - #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) -{ - void *result; - result = PyModule_GetState(op); - if (!result) - Py_FatalError("Couldn't find the module state"); - return result; -} -#endif -#define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) -#else - #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = 
PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} -#endif -#if PY_MAJOR_VERSION < 3 - #if CYTHON_COMPILING_IN_PYPY - #if PYPY_VERSION_NUM < 0x07030600 - #if defined(__cplusplus) && __cplusplus >= 201402L - [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] - #elif defined(__GNUC__) || defined(__clang__) - __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) - #elif defined(_MSC_VER) - __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) - #endif - static CYTHON_INLINE int PyGILState_Check(void) { - return 0; - } - #else // PYPY_VERSION_NUM < 0x07030600 - #endif // PYPY_VERSION_NUM < 0x07030600 - #else - static CYTHON_INLINE int PyGILState_Check(void) { - PyThreadState * tstate = _PyThreadState_Current; - return tstate && (tstate == PyGILState_GetThisThreadState()); - } - #endif -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { - PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); - if (res == NULL) PyErr_Clear(); - return res; -} -#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) -#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError -#define __Pyx_PyDict_GetItemStr PyDict_GetItem -#else -static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { -#if CYTHON_COMPILING_IN_PYPY - return PyDict_GetItem(dict, name); -#else - PyDictEntry *ep; - PyDictObject *mp = (PyDictObject*) dict; - long hash = ((PyStringObject *) name)->ob_shash; - assert(hash != -1); - ep = (mp->ma_lookup)(mp, name, hash); - if (ep == NULL) { - return NULL; - } - return ep->me_value; -#endif -} -#define __Pyx_PyDict_GetItemStr PyDict_GetItem -#endif -#if CYTHON_USE_TYPE_SLOTS - #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) - #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) - #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) -#else - #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) - #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) - #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) -#else - #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) -#endif -#if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 -#define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ - PyTypeObject *type = Py_TYPE((PyObject*)obj);\ - assert(__Pyx_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE));\ - PyObject_GC_Del(obj);\ - Py_DECREF(type);\ -} -#else -#define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) - #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) - #define __Pyx_PyUnicode_DATA(u) ((void*)u) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) -#elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #if PY_VERSION_HEX >= 0x030C0000 - #define __Pyx_PyUnicode_READY(op) (0) - #else - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #endif - #define __Pyx_PyUnicode_GET_LENGTH(u) 
PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) - #if PY_VERSION_HEX >= 0x030C0000 - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #else - #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #endif - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535U : 1114111U) - #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #if !defined(PyUnicode_DecodeUnicodeEscape) - #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) - #endif - #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) - #undef PyUnicode_Contains - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) - #endif - #if !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) - #endif - #if !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) - #endif -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#if CYTHON_COMPILING_IN_CPYTHON - #define __Pyx_PySequence_ListKeepNew(obj)\ - (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? __Pyx_NewRef(obj) : PySequence_List(obj)) -#else - #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) - #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) - #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) - #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) - #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) - #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) - #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) - #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) -#else - #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) - #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) - #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) - #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) - #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) - #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) - #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) - #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) -#endif -#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 - #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) -#else - static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { - PyObject *module = PyImport_AddModule(name); - Py_XINCREF(module); - return module; - } -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define __Pyx_Py3Int_Check(op) PyLong_Check(op) - #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define 
PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#else - #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) - #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) - #if !defined(_USE_MATH_DEFINES) - #define _USE_MATH_DEFINES - #endif -#endif -#include -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - -#define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } -#define __PYX_ERR(f_index, lineno, Ln_error) \ - { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } - -#ifdef CYTHON_EXTERN_C - #undef __PYX_EXTERN_C - #define __PYX_EXTERN_C CYTHON_EXTERN_C -#elif defined(__PYX_EXTERN_C) - #ifdef _MSC_VER - #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") - #else - #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead. 
- #endif -#else - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__sqlalchemy__sql___util_cy -#define __PYX_HAVE_API__sqlalchemy__sql___util_cy -/* Early includes */ -#ifdef _OPENMP -#include -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define __Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define __Pyx_sst_abs(value) __builtin_llabs(value) -#else - #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) -#endif -static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s); -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char*); -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -#define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #if PY_VERSION_HEX >= 0x030C00A7 - #ifndef _PyLong_SIGN_MASK - #define _PyLong_SIGN_MASK 3 - #endif - #ifndef _PyLong_NON_SIZE_BITS - #define _PyLong_NON_SIZE_BITS 3 - #endif - #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) - #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) - #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) - #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) - #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) - #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) - #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) - #define __Pyx_PyLong_SignedDigitCount(x)\ - ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) - #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) - #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) - #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) - #else - #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) - #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) - #endif - typedef Py_ssize_t __Pyx_compact_pylong; - typedef size_t __Pyx_compact_upylong; - #else - #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) - #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) - #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) - #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) - #define __Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) - #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) - #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) - #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) - #define __Pyx_PyLong_CompactValue(x)\ - ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? 
-(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0])) - typedef sdigit __Pyx_compact_pylong; - typedef digit __Pyx_compact_upylong; - #endif - #if PY_VERSION_HEX >= 0x030C00A5 - #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit) - #else - #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit) - #endif -#endif -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -#include -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = (char) c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#include -static char* __PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ -static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -#if !CYTHON_USE_MODULE_STATE 
-static PyObject *__pyx_m = NULL; -#endif -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm = __FILE__; -static const char *__pyx_filename; - -/* #### Code section: filename_table ### */ - -static const char *__pyx_f[] = { - "", - "lib/sqlalchemy/sql/_util_cy.py", -}; -/* #### Code section: utility_code_proto_before_types ### */ -/* ForceInitThreads.proto */ -#ifndef __PYX_FORCE_INIT_THREADS - #define __PYX_FORCE_INIT_THREADS 0 -#endif - -/* #### Code section: numeric_typedefs ### */ -/* #### Code section: complex_type_declarations ### */ -/* #### Code section: type_declarations ### */ - -/*--- Type declarations ---*/ -struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map; -struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map; - -/* "sqlalchemy/sql/_util_cy.py":45 - * - * @cython.cclass - * class prefix_anon_map(Dict[str, str]): # <<<<<<<<<<<<<< - * """A map that creates new keys for missing key access. - * - */ -struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map { - PyDictObject __pyx_base; -}; - - -/* "sqlalchemy/sql/_util_cy.py":72 - * - * @cython.cclass - * class anon_map( # <<<<<<<<<<<<<< - * Dict[ - * Union[int, str, "Literal[CacheConst.NO_CACHE]"], - */ -struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map { - PyDictObject __pyx_base; - struct __pyx_vtabstruct_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_vtab; - unsigned int _index; -}; - - - -struct __pyx_vtabstruct_10sqlalchemy_3sql_8_util_cy_anon_map { - PyObject *(*_add_missing)(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *, PyObject *); -}; -static struct __pyx_vtabstruct_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_vtabptr_10sqlalchemy_3sql_8_util_cy_anon_map; -static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *, PyObject *); -/* #### Code section: utility_code_proto ### */ - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, Py_ssize_t); - void (*DECREF)(void*, PyObject*, Py_ssize_t); - void (*GOTREF)(void*, PyObject*, Py_ssize_t); - void (*GIVEREF)(void*, PyObject*, Py_ssize_t); - void* (*SetupContext)(const char*, Py_ssize_t, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ - } - #define __Pyx_RefNannyFinishContextNogil() {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __Pyx_RefNannyFinishContext();\ - PyGILState_Release(__pyx_gilstate_save);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)) - #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext() -#endif - #define __Pyx_RefNannyFinishContextNogil() {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - 
__Pyx_RefNannyFinishContext();\ - PyGILState_Release(__pyx_gilstate_save);\ - } - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) - #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) - #define __Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContextNogil() - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - #define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_Py_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; Py_XDECREF(tmp);\ - } while (0) -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while (0) -#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* PyErrExceptionMatches.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); -#else -#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) -#endif - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#if PY_VERSION_HEX >= 0x030C00A6 -#define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) -#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? 
(PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL) -#else -#define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL) -#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type) -#endif -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL) -#define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred() -#endif - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6 -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* PyObjectGetAttrStrNoError.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* TupleAndListFromArray.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n); -static CYTHON_INLINE PyObject* __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n); -#endif - -/* IncludeStringH.proto */ -#include - -/* BytesEquals.proto */ -static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); - -/* UnicodeEquals.proto */ -static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); - -/* fastcall.proto */ -#if CYTHON_AVOID_BORROWED_REFS - #define __Pyx_Arg_VARARGS(args, i) PySequence_GetItem(args, i) -#elif CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i) -#else - #define __Pyx_Arg_VARARGS(args, i) PyTuple_GetItem(args, i) -#endif -#if CYTHON_AVOID_BORROWED_REFS - #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) - #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) -#else - #define __Pyx_Arg_NewRef_VARARGS(arg) arg - #define 
__Pyx_Arg_XDECREF_VARARGS(arg) -#endif -#define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) -#define __Pyx_KwValues_VARARGS(args, nargs) NULL -#define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s) -#define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw) -#if CYTHON_METH_FASTCALL - #define __Pyx_Arg_FASTCALL(args, i) args[i] - #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) - #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) - static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 - CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); - #else - #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) - #endif - #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs - to have the same reference counting */ - #define __Pyx_Arg_XDECREF_FASTCALL(arg) -#else - #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS - #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS - #define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS - #define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS - #define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS - #define __Pyx_Arg_NewRef_FASTCALL(arg) __Pyx_Arg_NewRef_VARARGS(arg) - #define __Pyx_Arg_XDECREF_FASTCALL(arg) __Pyx_Arg_XDECREF_VARARGS(arg) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS -#define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start) -#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start) -#else -#define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop) -#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop) -#endif - -/* ArgTypeTest.proto */ -#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ - ((likely(__Pyx_IS_TYPE(obj, type) | (none_allowed && (obj == Py_None)))) ? 1 :\ - __Pyx__ArgTypeTest(obj, type, name, exact)) -static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); - -/* GetItemInt.proto */ -#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ - (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ - __Pyx_GetItemInt_Generic(o, to_py_func(i)))) -#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, - int is_list, int wraparound, int boundscheck); - -/* RaiseUnexpectedTypeError.proto */ -static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); - -/* dict_getitem_default.proto */ -static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value); - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* UnpackUnboundCMethod.proto */ -typedef struct { - PyObject *type; - PyObject **method_name; - PyCFunction func; - PyObject *method; - int flag; -} __Pyx_CachedCFunction; - -/* CallUnboundCMethod1.proto */ -static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg); -#else -#define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg) -#endif - -/* CallUnboundCMethod2.proto */ -static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2); -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1 -static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2); -#else -#define __Pyx_CallUnboundCMethod2(cfunc, self, arg1, arg2) __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2) -#endif - -/* PyUnicode_Unicode.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Unicode(PyObject *obj); - -/* PyObjectFormatSimple.proto */ -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ - PyObject_Format(s, f)) -#elif PY_MAJOR_VERSION < 3 - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ - likely(PyString_CheckExact(s)) ? PyUnicode_FromEncodedObject(s, NULL, "strict") :\ - PyObject_Format(s, f)) -#elif CYTHON_USE_TYPE_SLOTS - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ - likely(PyLong_CheckExact(s)) ? PyLong_Type.tp_repr(s) :\ - likely(PyFloat_CheckExact(s)) ? 
PyFloat_Type.tp_repr(s) :\ - PyObject_Format(s, f)) -#else - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ - PyObject_Format(s, f)) -#endif - -/* JoinPyUnicode.proto */ -static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, - Py_UCS4 max_char); - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* KeywordStringCheck.proto */ -static int __Pyx_CheckKeywordStrings(PyObject *kw, const char* function_name, int kw_allowed); - -/* GetAttr3.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = __PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* GetModuleGlobalName.proto */ -#if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) do {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ - (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ - __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} while(0) -#define __Pyx_GetModuleGlobalNameUncached(var, name) do {\ - PY_UINT64_T __pyx_dict_version;\ - PyObject *__pyx_dict_cached_value;\ - (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} while(0) -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); -#else -#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) -#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); -#endif - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues, - PyObject **argnames[], - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, - const char* function_name); - -/* PyDictContains.proto */ -static CYTHON_INLINE int __Pyx_PyDict_ContainsTF(PyObject* item, PyObject* dict, int eq) { - int result = PyDict_Contains(dict, item); - return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); -} - -/* DictGetItem.proto */ -#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY -static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); -#define __Pyx_PyObject_Dict_GetItem(obj, name)\ - (likely(PyDict_CheckExact(obj)) ?\ - __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) -#else -#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) -#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) -#endif - -/* RaiseException.proto */ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); - -/* PySequenceContains.proto */ -static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { - int result = PySequence_Contains(seq, item); - return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); -} - -/* Import.proto */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); - -/* ImportFrom.proto */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#if !CYTHON_VECTORCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); -#endif -#define __Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif -#if !CYTHON_VECTORCALL -#if PY_VERSION_HEX >= 0x03080000 - #include "frameobject.h" -#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API - #ifndef Py_BUILD_CORE - #define Py_BUILD_CORE 1 - #endif - #include "internal/pycore_frame.h" -#endif - #define __Pxy_PyFrame_Initialize_Offsets() - #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) -#else - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif -#endif -#endif - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectFastCall.proto */ -#define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) -static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); - -/* GetAttr.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); - -/* HasAttr.proto */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); - -/* IncludeStructmemberH.proto */ -#include - -/* FixUpExtensionType.proto */ -#if CYTHON_USE_TYPE_SPECS -static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type); -#endif - -/* FormatTypeName.proto */ -#if CYTHON_COMPILING_IN_LIMITED_API -typedef PyObject *__Pyx_TypeName; -#define __Pyx_FMT_TYPENAME "%U" -static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp); -#define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) -#else -typedef const char *__Pyx_TypeName; -#define __Pyx_FMT_TYPENAME "%.200s" -#define __Pyx_PyType_GetName(tp) ((tp)->tp_name) -#define __Pyx_DECREF_TypeName(obj) -#endif - -/* ValidateExternBase.proto */ -static int __Pyx_validate_extern_base(PyTypeObject *base); - -/* PyObjectCallNoArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* PyObjectGetMethod.proto */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); - -/* PyObjectCallMethod0.proto */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); - -/* ValidateBasesTuple.proto */ -#if 
CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS -static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases); -#endif - -/* PyType_Ready.proto */ -CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t); - -/* PyObject_GenericGetAttrNoDict.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr -#endif - -/* PyObject_GenericGetAttr.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr -#endif - -/* SetupReduce.proto */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_setup_reduce(PyObject* type_obj); -#endif - -/* SetVTable.proto */ -static int __Pyx_SetVtable(PyTypeObject* typeptr , void* vtable); - -/* GetVTable.proto */ -static void* __Pyx_GetVtable(PyTypeObject *type); - -/* MergeVTables.proto */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_MergeVtables(PyTypeObject *type); -#endif - -/* FetchSharedCythonModule.proto */ -static PyObject *__Pyx_FetchSharedCythonABIModule(void); - -/* FetchCommonType.proto */ -#if !CYTHON_USE_TYPE_SPECS -static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); -#else -static PyTypeObject* __Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases); -#endif - -/* PyMethodNew.proto */ -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { - PyObject *typesModule=NULL, *methodType=NULL, *result=NULL; - CYTHON_UNUSED_VAR(typ); - if (!self) - return __Pyx_NewRef(func); - typesModule = PyImport_ImportModule("types"); - if (!typesModule) return NULL; - methodType = PyObject_GetAttrString(typesModule, "MethodType"); - Py_DECREF(typesModule); - if (!methodType) return NULL; - result = PyObject_CallFunctionObjArgs(methodType, func, self, NULL); - Py_DECREF(methodType); - return result; -} -#elif PY_MAJOR_VERSION >= 3 -static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { - CYTHON_UNUSED_VAR(typ); - if (!self) - return __Pyx_NewRef(func); - return PyMethod_New(func, self); -} -#else - #define __Pyx_PyMethod_New PyMethod_New -#endif - -/* PyVectorcallFastCallDict.proto */ -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw); -#endif - -/* CythonFunctionShared.proto */ -#define __Pyx_CyFunction_USED -#define __Pyx_CYFUNCTION_STATICMETHOD 0x01 -#define __Pyx_CYFUNCTION_CLASSMETHOD 0x02 -#define __Pyx_CYFUNCTION_CCLASS 0x04 -#define __Pyx_CYFUNCTION_COROUTINE 0x08 -#define __Pyx_CyFunction_GetClosure(f)\ - (((__pyx_CyFunctionObject *) (f))->func_closure) -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_CyFunction_GetClassObj(f)\ - (((__pyx_CyFunctionObject *) (f))->func_classobj) -#else - #define __Pyx_CyFunction_GetClassObj(f)\ - ((PyObject*) ((PyCMethodObject *) (f))->mm_class) -#endif -#define __Pyx_CyFunction_SetClassObj(f, classobj)\ - __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj)) -#define __Pyx_CyFunction_Defaults(type, f)\ - ((type 
*)(((__pyx_CyFunctionObject *) (f))->defaults)) -#define __Pyx_CyFunction_SetDefaultsGetter(f, g)\ - ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g) -typedef struct { -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject_HEAD - PyObject *func; -#elif PY_VERSION_HEX < 0x030900B1 - PyCFunctionObject func; -#else - PyCMethodObject func; -#endif -#if CYTHON_BACKPORT_VECTORCALL - __pyx_vectorcallfunc func_vectorcall; -#endif -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API - PyObject *func_weakreflist; -#endif - PyObject *func_dict; - PyObject *func_name; - PyObject *func_qualname; - PyObject *func_doc; - PyObject *func_globals; - PyObject *func_code; - PyObject *func_closure; -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - PyObject *func_classobj; -#endif - void *defaults; - int defaults_pyobjects; - size_t defaults_size; - int flags; - PyObject *defaults_tuple; - PyObject *defaults_kwdict; - PyObject *(*defaults_getter)(PyObject *); - PyObject *func_annotations; - PyObject *func_is_coroutine; -} __pyx_CyFunctionObject; -#undef __Pyx_CyOrPyCFunction_Check -#define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType) -#define __Pyx_CyOrPyCFunction_Check(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type) -#define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType) -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc); -#undef __Pyx_IsSameCFunction -#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCyOrCFunction(func, cfunc) -static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, - int flags, PyObject* qualname, - PyObject *closure, - PyObject *module, PyObject *globals, - PyObject* code); -static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj); -static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, - size_t size, - int pyobjects); -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, - PyObject *tuple); -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, - PyObject *dict); -static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, - PyObject *dict); -static int __pyx_CyFunction_init(PyObject *module); -#if CYTHON_METH_FASTCALL -static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -#if CYTHON_BACKPORT_VECTORCALL -#define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall) -#else -#define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall) -#endif -#endif - -/* CythonFunction.proto */ -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, - int flags, PyObject* qualname, - PyObject *closure, - PyObject *module, PyObject *globals, - PyObject* code); - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -#if !CYTHON_COMPILING_IN_LIMITED_API -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); -#endif - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* GCCDiagnostics.proto */ -#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) -#define __Pyx_HAS_GCC_DIAGNOSTIC -#endif - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_PY_LONG_LONG(unsigned PY_LONG_LONG value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -#define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* CheckBinaryVersion.proto */ -static unsigned long __Pyx_get_runtime_version(void); -static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer); - -/* FunctionImport.proto */ -static int __Pyx_ImportFunction_3_0_11(PyObject *module, const char *funcname, void (**f)(void), const char *sig); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - -/* #### Code section: module_declarations ### */ -static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key); /* proto*/ - -/* Module declarations from 
"cython" */ - -/* Module declarations from "sqlalchemy.util._collections_cy" */ -static unsigned PY_LONG_LONG (*__pyx_f_10sqlalchemy_4util_15_collections_cy__get_id)(PyObject *); /*proto*/ - -/* Module declarations from "sqlalchemy.sql._util_cy" */ -static PyObject *__pyx_f_10sqlalchemy_3sql_8_util_cy___pyx_unpickle_prefix_anon_map__set_state(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *, PyObject *); /*proto*/ -/* #### Code section: typeinfo ### */ -/* #### Code section: before_global_var ### */ -#define __Pyx_MODULE_NAME "sqlalchemy.sql._util_cy" -extern int __pyx_module_is_main_sqlalchemy__sql___util_cy; -int __pyx_module_is_main_sqlalchemy__sql___util_cy = 0; - -/* Implementation of "sqlalchemy.sql._util_cy" */ -/* #### Code section: global_var ### */ -static PyObject *__pyx_builtin_TypeError; -/* #### Code section: string_decls ### */ -static const char __pyx_k_[] = " "; -static const char __pyx_k__2[] = "_"; -static const char __pyx_k__4[] = "."; -static const char __pyx_k__5[] = "?"; -static const char __pyx_k_gc[] = "gc"; -static const char __pyx_k_get[] = "get"; -static const char __pyx_k_int[] = "int"; -static const char __pyx_k_key[] = "key"; -static const char __pyx_k_new[] = "__new__"; -static const char __pyx_k_obj[] = "obj"; -static const char __pyx_k_str[] = "str"; -static const char __pyx_k_Dict[] = "Dict"; -static const char __pyx_k_None[] = "None"; -static const char __pyx_k_bool[] = "bool"; -static const char __pyx_k_dict[] = "__dict__"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_self[] = "self"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_Tuple[] = "Tuple"; -static const char __pyx_k_Union[] = "Union"; -static const char __pyx_k_state[] = "state"; -static const char __pyx_k_value[] = "value"; -static const char __pyx_k_dict_2[] = "_dict"; -static const char __pyx_k_enable[] = "enable"; -static const char __pyx_k_idself[] = "idself"; -static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_object[] = "object"; -static const char __pyx_k_pickle[] = "pickle"; -static const char __pyx_k_reduce[] = "__reduce__"; -static const char __pyx_k_return[] = "return"; -static const char __pyx_k_typing[] = "typing"; -static const char __pyx_k_update[] = "update"; -static const char __pyx_k_Literal[] = "Literal"; -static const char __pyx_k_derived[] = "derived"; -static const char __pyx_k_disable[] = "disable"; -static const char __pyx_k_missing[] = "__missing__"; -static const char __pyx_k_anon_map[] = "anon_map"; -static const char __pyx_k_get_anon[] = "get_anon"; -static const char __pyx_k_getstate[] = "__getstate__"; -static const char __pyx_k_pyx_type[] = "__pyx_type"; -static const char __pyx_k_setstate[] = "__setstate__"; -static const char __pyx_k_TypeError[] = "TypeError"; -static const char __pyx_k_cache_key[] = "cache_key"; -static const char __pyx_k_isenabled[] = "isenabled"; -static const char __pyx_k_pyx_state[] = "__pyx_state"; -static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; -static const char __pyx_k_self_dict[] = "self_dict"; -static const char __pyx_k_CacheConst[] = "CacheConst"; -static const char __pyx_k_pyx_result[] = "__pyx_result"; -static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; -static const char __pyx_k_PickleError[] = "PickleError"; -static const char __pyx_k_is_compiled[] = "_is_compiled"; -static const char __pyx_k_util_typing[] = "util.typing"; -static const char __pyx_k_is_coroutine[] = 
"_is_coroutine"; -static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; -static const char __pyx_k_stringsource[] = ""; -static const char __pyx_k_use_setstate[] = "use_setstate"; -static const char __pyx_k_TYPE_CHECKING[] = "TYPE_CHECKING"; -static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; -static const char __pyx_k_Tuple_int_bool[] = "Tuple[int, bool]"; -static const char __pyx_k_prefix_anon_map[] = "prefix_anon_map"; -static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; -static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; -static const char __pyx_k_anon_map_get_anon[] = "anon_map.get_anon"; -static const char __pyx_k_anonymous_counter[] = "anonymous_counter"; -static const char __pyx_k_anon_map___missing[] = "anon_map.__missing__"; -static const char __pyx_k_asyncio_coroutines[] = "asyncio.coroutines"; -static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; -static const char __pyx_k_sqlalchemy_sql__util_cy[] = "sqlalchemy.sql._util_cy"; -static const char __pyx_k_anon_map___reduce_cython[] = "anon_map.__reduce_cython__"; -static const char __pyx_k_prefix_anon_map___missing[] = "prefix_anon_map.__missing__"; -static const char __pyx_k_anon_map___setstate_cython[] = "anon_map.__setstate_cython__"; -static const char __pyx_k_pyx_unpickle_prefix_anon_map[] = "__pyx_unpickle_prefix_anon_map"; -static const char __pyx_k_lib_sqlalchemy_sql__util_cy_py[] = "lib/sqlalchemy/sql/_util_cy.py"; -static const char __pyx_k_prefix_anon_map___reduce_cython[] = "prefix_anon_map.__reduce_cython__"; -static const char __pyx_k_Incompatible_checksums_0x_x_vs_0[] = "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())"; -static const char __pyx_k_Union_int_str_Literal_CacheConst[] = "Union[int, str, 'Literal[CacheConst.NO_CACHE]']"; -static const char __pyx_k_no_default___reduce___due_to_non[] = "no default __reduce__ due to non-trivial __cinit__"; -static const char __pyx_k_prefix_anon_map___setstate_cytho[] = "prefix_anon_map.__setstate_cython__"; -/* #### Code section: decls ### */ -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map___missing__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self, PyObject *__pyx_v_key); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_2__reduce_cython__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_4__setstate_cython__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ -static int __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map___cinit__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_2get_anon(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_obj); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_4__getitem__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_6__missing__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key); /* proto */ -static PyObject 
*__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_8__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_10__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_2__pyx_unpickle_prefix_anon_map(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_tp_new_10sqlalchemy_3sql_8_util_cy_anon_map(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ -static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_get = {0, 0, 0, 0, 0}; -/* #### Code section: late_includes ### */ -/* #### Code section: module_state ### */ -typedef struct { - PyObject *__pyx_d; - PyObject *__pyx_b; - PyObject *__pyx_cython_runtime; - PyObject *__pyx_empty_tuple; - PyObject *__pyx_empty_bytes; - PyObject *__pyx_empty_unicode; - #ifdef __Pyx_CyFunction_USED - PyTypeObject *__pyx_CyFunctionType; - #endif - #ifdef __Pyx_FusedFunction_USED - PyTypeObject *__pyx_FusedFunctionType; - #endif - #ifdef __Pyx_Generator_USED - PyTypeObject *__pyx_GeneratorType; - #endif - #ifdef __Pyx_IterableCoroutine_USED - PyTypeObject *__pyx_IterableCoroutineType; - #endif - #ifdef __Pyx_Coroutine_USED - PyTypeObject *__pyx_CoroutineAwaitType; - #endif - #ifdef __Pyx_Coroutine_USED - PyTypeObject *__pyx_CoroutineType; - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - PyObject *__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map; - PyObject *__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map; - #endif - PyTypeObject *__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map; - PyTypeObject *__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map; - PyObject *__pyx_kp_u_; - PyObject *__pyx_n_s_CacheConst; - PyObject *__pyx_n_s_Dict; - PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0; - PyObject *__pyx_n_s_Literal; - PyObject *__pyx_kp_u_None; - PyObject *__pyx_n_s_PickleError; - PyObject *__pyx_n_s_TYPE_CHECKING; - PyObject *__pyx_n_s_Tuple; - PyObject *__pyx_kp_s_Tuple_int_bool; - PyObject *__pyx_n_s_TypeError; - PyObject *__pyx_n_s_Union; - PyObject *__pyx_kp_s_Union_int_str_Literal_CacheConst; - PyObject *__pyx_n_u__2; - PyObject *__pyx_kp_u__4; - PyObject *__pyx_n_s__5; - PyObject *__pyx_n_s_anon_map; - PyObject *__pyx_n_s_anon_map___missing; - PyObject *__pyx_n_s_anon_map___reduce_cython; - PyObject *__pyx_n_s_anon_map___setstate_cython; - PyObject *__pyx_n_s_anon_map_get_anon; - PyObject *__pyx_n_s_anonymous_counter; - PyObject *__pyx_n_s_asyncio_coroutines; - PyObject *__pyx_n_s_bool; - PyObject *__pyx_n_s_cache_key; - PyObject *__pyx_n_s_cline_in_traceback; - PyObject *__pyx_n_s_derived; - PyObject *__pyx_n_s_dict; - PyObject *__pyx_n_s_dict_2; - PyObject *__pyx_kp_u_disable; - PyObject *__pyx_kp_u_enable; - PyObject *__pyx_kp_u_gc; - PyObject *__pyx_n_s_get; - PyObject *__pyx_n_s_get_anon; - PyObject *__pyx_n_s_getstate; - PyObject *__pyx_n_s_idself; - PyObject *__pyx_n_s_import; - PyObject *__pyx_n_s_int; - PyObject *__pyx_n_s_is_compiled; - PyObject *__pyx_n_s_is_coroutine; - PyObject *__pyx_kp_u_isenabled; - PyObject *__pyx_n_s_key; - PyObject *__pyx_kp_s_lib_sqlalchemy_sql__util_cy_py; - PyObject *__pyx_n_s_main; - PyObject *__pyx_n_s_missing; - PyObject *__pyx_n_s_name; - PyObject 
*__pyx_n_s_new; - PyObject *__pyx_kp_s_no_default___reduce___due_to_non; - PyObject *__pyx_n_s_obj; - PyObject *__pyx_n_s_object; - PyObject *__pyx_n_s_pickle; - PyObject *__pyx_n_s_prefix_anon_map; - PyObject *__pyx_n_s_prefix_anon_map___missing; - PyObject *__pyx_n_s_prefix_anon_map___reduce_cython; - PyObject *__pyx_n_s_prefix_anon_map___setstate_cytho; - PyObject *__pyx_n_s_pyx_PickleError; - PyObject *__pyx_n_s_pyx_checksum; - PyObject *__pyx_n_s_pyx_result; - PyObject *__pyx_n_s_pyx_state; - PyObject *__pyx_n_s_pyx_type; - PyObject *__pyx_n_s_pyx_unpickle_prefix_anon_map; - PyObject *__pyx_n_s_pyx_vtable; - PyObject *__pyx_n_s_reduce; - PyObject *__pyx_n_s_reduce_cython; - PyObject *__pyx_n_s_reduce_ex; - PyObject *__pyx_n_s_return; - PyObject *__pyx_n_s_self; - PyObject *__pyx_n_s_self_dict; - PyObject *__pyx_n_s_setstate; - PyObject *__pyx_n_s_setstate_cython; - PyObject *__pyx_n_s_sqlalchemy_sql__util_cy; - PyObject *__pyx_n_s_state; - PyObject *__pyx_n_s_str; - PyObject *__pyx_kp_s_stringsource; - PyObject *__pyx_n_s_test; - PyObject *__pyx_n_s_typing; - PyObject *__pyx_n_s_update; - PyObject *__pyx_n_s_use_setstate; - PyObject *__pyx_n_s_util_typing; - PyObject *__pyx_n_s_value; - PyObject *__pyx_int_1; - PyObject *__pyx_int_222419149; - PyObject *__pyx_int_228825662; - PyObject *__pyx_int_238750788; - PyObject *__pyx_tuple__3; - PyObject *__pyx_tuple__7; - PyObject *__pyx_tuple__9; - PyObject *__pyx_tuple__11; - PyObject *__pyx_tuple__13; - PyObject *__pyx_tuple__15; - PyObject *__pyx_tuple__17; - PyObject *__pyx_tuple__20; - PyObject *__pyx_codeobj__6; - PyObject *__pyx_codeobj__8; - PyObject *__pyx_codeobj__10; - PyObject *__pyx_codeobj__12; - PyObject *__pyx_codeobj__14; - PyObject *__pyx_codeobj__16; - PyObject *__pyx_codeobj__18; - PyObject *__pyx_codeobj__19; - PyObject *__pyx_codeobj__21; -} __pyx_mstate; - -#if CYTHON_USE_MODULE_STATE -#ifdef __cplusplus -namespace { - extern struct PyModuleDef __pyx_moduledef; -} /* anonymous namespace */ -#else -static struct PyModuleDef __pyx_moduledef; -#endif - -#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) - -#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) - -#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) -#else -static __pyx_mstate __pyx_mstate_global_static = -#ifdef __cplusplus - {}; -#else - {0}; -#endif -static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; -#endif -/* #### Code section: module_state_clear ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_clear(PyObject *m) { - __pyx_mstate *clear_module_state = __pyx_mstate(m); - if (!clear_module_state) return 0; - Py_CLEAR(clear_module_state->__pyx_d); - Py_CLEAR(clear_module_state->__pyx_b); - Py_CLEAR(clear_module_state->__pyx_cython_runtime); - Py_CLEAR(clear_module_state->__pyx_empty_tuple); - Py_CLEAR(clear_module_state->__pyx_empty_bytes); - Py_CLEAR(clear_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_CLEAR(clear_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); - #endif - Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map); - Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map); - Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map); - Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map); - Py_CLEAR(clear_module_state->__pyx_kp_u_); - 
Py_CLEAR(clear_module_state->__pyx_n_s_CacheConst); - Py_CLEAR(clear_module_state->__pyx_n_s_Dict); - Py_CLEAR(clear_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); - Py_CLEAR(clear_module_state->__pyx_n_s_Literal); - Py_CLEAR(clear_module_state->__pyx_kp_u_None); - Py_CLEAR(clear_module_state->__pyx_n_s_PickleError); - Py_CLEAR(clear_module_state->__pyx_n_s_TYPE_CHECKING); - Py_CLEAR(clear_module_state->__pyx_n_s_Tuple); - Py_CLEAR(clear_module_state->__pyx_kp_s_Tuple_int_bool); - Py_CLEAR(clear_module_state->__pyx_n_s_TypeError); - Py_CLEAR(clear_module_state->__pyx_n_s_Union); - Py_CLEAR(clear_module_state->__pyx_kp_s_Union_int_str_Literal_CacheConst); - Py_CLEAR(clear_module_state->__pyx_n_u__2); - Py_CLEAR(clear_module_state->__pyx_kp_u__4); - Py_CLEAR(clear_module_state->__pyx_n_s__5); - Py_CLEAR(clear_module_state->__pyx_n_s_anon_map); - Py_CLEAR(clear_module_state->__pyx_n_s_anon_map___missing); - Py_CLEAR(clear_module_state->__pyx_n_s_anon_map___reduce_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_anon_map___setstate_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_anon_map_get_anon); - Py_CLEAR(clear_module_state->__pyx_n_s_anonymous_counter); - Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); - Py_CLEAR(clear_module_state->__pyx_n_s_bool); - Py_CLEAR(clear_module_state->__pyx_n_s_cache_key); - Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); - Py_CLEAR(clear_module_state->__pyx_n_s_derived); - Py_CLEAR(clear_module_state->__pyx_n_s_dict); - Py_CLEAR(clear_module_state->__pyx_n_s_dict_2); - Py_CLEAR(clear_module_state->__pyx_kp_u_disable); - Py_CLEAR(clear_module_state->__pyx_kp_u_enable); - Py_CLEAR(clear_module_state->__pyx_kp_u_gc); - Py_CLEAR(clear_module_state->__pyx_n_s_get); - Py_CLEAR(clear_module_state->__pyx_n_s_get_anon); - Py_CLEAR(clear_module_state->__pyx_n_s_getstate); - Py_CLEAR(clear_module_state->__pyx_n_s_idself); - Py_CLEAR(clear_module_state->__pyx_n_s_import); - Py_CLEAR(clear_module_state->__pyx_n_s_int); - Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); - Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); - Py_CLEAR(clear_module_state->__pyx_kp_u_isenabled); - Py_CLEAR(clear_module_state->__pyx_n_s_key); - Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_sql__util_cy_py); - Py_CLEAR(clear_module_state->__pyx_n_s_main); - Py_CLEAR(clear_module_state->__pyx_n_s_missing); - Py_CLEAR(clear_module_state->__pyx_n_s_name); - Py_CLEAR(clear_module_state->__pyx_n_s_new); - Py_CLEAR(clear_module_state->__pyx_kp_s_no_default___reduce___due_to_non); - Py_CLEAR(clear_module_state->__pyx_n_s_obj); - Py_CLEAR(clear_module_state->__pyx_n_s_object); - Py_CLEAR(clear_module_state->__pyx_n_s_pickle); - Py_CLEAR(clear_module_state->__pyx_n_s_prefix_anon_map); - Py_CLEAR(clear_module_state->__pyx_n_s_prefix_anon_map___missing); - Py_CLEAR(clear_module_state->__pyx_n_s_prefix_anon_map___reduce_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_prefix_anon_map___setstate_cytho); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_PickleError); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_checksum); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_result); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_state); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_type); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_unpickle_prefix_anon_map); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_vtable); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce_ex); 
- Py_CLEAR(clear_module_state->__pyx_n_s_return); - Py_CLEAR(clear_module_state->__pyx_n_s_self); - Py_CLEAR(clear_module_state->__pyx_n_s_self_dict); - Py_CLEAR(clear_module_state->__pyx_n_s_setstate); - Py_CLEAR(clear_module_state->__pyx_n_s_setstate_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_sql__util_cy); - Py_CLEAR(clear_module_state->__pyx_n_s_state); - Py_CLEAR(clear_module_state->__pyx_n_s_str); - Py_CLEAR(clear_module_state->__pyx_kp_s_stringsource); - Py_CLEAR(clear_module_state->__pyx_n_s_test); - Py_CLEAR(clear_module_state->__pyx_n_s_typing); - Py_CLEAR(clear_module_state->__pyx_n_s_update); - Py_CLEAR(clear_module_state->__pyx_n_s_use_setstate); - Py_CLEAR(clear_module_state->__pyx_n_s_util_typing); - Py_CLEAR(clear_module_state->__pyx_n_s_value); - Py_CLEAR(clear_module_state->__pyx_int_1); - Py_CLEAR(clear_module_state->__pyx_int_222419149); - Py_CLEAR(clear_module_state->__pyx_int_228825662); - Py_CLEAR(clear_module_state->__pyx_int_238750788); - Py_CLEAR(clear_module_state->__pyx_tuple__3); - Py_CLEAR(clear_module_state->__pyx_tuple__7); - Py_CLEAR(clear_module_state->__pyx_tuple__9); - Py_CLEAR(clear_module_state->__pyx_tuple__11); - Py_CLEAR(clear_module_state->__pyx_tuple__13); - Py_CLEAR(clear_module_state->__pyx_tuple__15); - Py_CLEAR(clear_module_state->__pyx_tuple__17); - Py_CLEAR(clear_module_state->__pyx_tuple__20); - Py_CLEAR(clear_module_state->__pyx_codeobj__6); - Py_CLEAR(clear_module_state->__pyx_codeobj__8); - Py_CLEAR(clear_module_state->__pyx_codeobj__10); - Py_CLEAR(clear_module_state->__pyx_codeobj__12); - Py_CLEAR(clear_module_state->__pyx_codeobj__14); - Py_CLEAR(clear_module_state->__pyx_codeobj__16); - Py_CLEAR(clear_module_state->__pyx_codeobj__18); - Py_CLEAR(clear_module_state->__pyx_codeobj__19); - Py_CLEAR(clear_module_state->__pyx_codeobj__21); - return 0; -} -#endif -/* #### Code section: module_state_traverse ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { - __pyx_mstate *traverse_module_state = __pyx_mstate(m); - if (!traverse_module_state) return 0; - Py_VISIT(traverse_module_state->__pyx_d); - Py_VISIT(traverse_module_state->__pyx_b); - Py_VISIT(traverse_module_state->__pyx_cython_runtime); - Py_VISIT(traverse_module_state->__pyx_empty_tuple); - Py_VISIT(traverse_module_state->__pyx_empty_bytes); - Py_VISIT(traverse_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_VISIT(traverse_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); - #endif - Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map); - Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map); - Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map); - Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map); - Py_VISIT(traverse_module_state->__pyx_kp_u_); - Py_VISIT(traverse_module_state->__pyx_n_s_CacheConst); - Py_VISIT(traverse_module_state->__pyx_n_s_Dict); - Py_VISIT(traverse_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); - Py_VISIT(traverse_module_state->__pyx_n_s_Literal); - Py_VISIT(traverse_module_state->__pyx_kp_u_None); - Py_VISIT(traverse_module_state->__pyx_n_s_PickleError); - Py_VISIT(traverse_module_state->__pyx_n_s_TYPE_CHECKING); - Py_VISIT(traverse_module_state->__pyx_n_s_Tuple); - Py_VISIT(traverse_module_state->__pyx_kp_s_Tuple_int_bool); - 
Py_VISIT(traverse_module_state->__pyx_n_s_TypeError); - Py_VISIT(traverse_module_state->__pyx_n_s_Union); - Py_VISIT(traverse_module_state->__pyx_kp_s_Union_int_str_Literal_CacheConst); - Py_VISIT(traverse_module_state->__pyx_n_u__2); - Py_VISIT(traverse_module_state->__pyx_kp_u__4); - Py_VISIT(traverse_module_state->__pyx_n_s__5); - Py_VISIT(traverse_module_state->__pyx_n_s_anon_map); - Py_VISIT(traverse_module_state->__pyx_n_s_anon_map___missing); - Py_VISIT(traverse_module_state->__pyx_n_s_anon_map___reduce_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_anon_map___setstate_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_anon_map_get_anon); - Py_VISIT(traverse_module_state->__pyx_n_s_anonymous_counter); - Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); - Py_VISIT(traverse_module_state->__pyx_n_s_bool); - Py_VISIT(traverse_module_state->__pyx_n_s_cache_key); - Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); - Py_VISIT(traverse_module_state->__pyx_n_s_derived); - Py_VISIT(traverse_module_state->__pyx_n_s_dict); - Py_VISIT(traverse_module_state->__pyx_n_s_dict_2); - Py_VISIT(traverse_module_state->__pyx_kp_u_disable); - Py_VISIT(traverse_module_state->__pyx_kp_u_enable); - Py_VISIT(traverse_module_state->__pyx_kp_u_gc); - Py_VISIT(traverse_module_state->__pyx_n_s_get); - Py_VISIT(traverse_module_state->__pyx_n_s_get_anon); - Py_VISIT(traverse_module_state->__pyx_n_s_getstate); - Py_VISIT(traverse_module_state->__pyx_n_s_idself); - Py_VISIT(traverse_module_state->__pyx_n_s_import); - Py_VISIT(traverse_module_state->__pyx_n_s_int); - Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); - Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); - Py_VISIT(traverse_module_state->__pyx_kp_u_isenabled); - Py_VISIT(traverse_module_state->__pyx_n_s_key); - Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_sql__util_cy_py); - Py_VISIT(traverse_module_state->__pyx_n_s_main); - Py_VISIT(traverse_module_state->__pyx_n_s_missing); - Py_VISIT(traverse_module_state->__pyx_n_s_name); - Py_VISIT(traverse_module_state->__pyx_n_s_new); - Py_VISIT(traverse_module_state->__pyx_kp_s_no_default___reduce___due_to_non); - Py_VISIT(traverse_module_state->__pyx_n_s_obj); - Py_VISIT(traverse_module_state->__pyx_n_s_object); - Py_VISIT(traverse_module_state->__pyx_n_s_pickle); - Py_VISIT(traverse_module_state->__pyx_n_s_prefix_anon_map); - Py_VISIT(traverse_module_state->__pyx_n_s_prefix_anon_map___missing); - Py_VISIT(traverse_module_state->__pyx_n_s_prefix_anon_map___reduce_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_prefix_anon_map___setstate_cytho); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_PickleError); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_checksum); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_result); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_state); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_type); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_unpickle_prefix_anon_map); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_vtable); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce_ex); - Py_VISIT(traverse_module_state->__pyx_n_s_return); - Py_VISIT(traverse_module_state->__pyx_n_s_self); - Py_VISIT(traverse_module_state->__pyx_n_s_self_dict); - Py_VISIT(traverse_module_state->__pyx_n_s_setstate); - Py_VISIT(traverse_module_state->__pyx_n_s_setstate_cython); - 
Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_sql__util_cy); - Py_VISIT(traverse_module_state->__pyx_n_s_state); - Py_VISIT(traverse_module_state->__pyx_n_s_str); - Py_VISIT(traverse_module_state->__pyx_kp_s_stringsource); - Py_VISIT(traverse_module_state->__pyx_n_s_test); - Py_VISIT(traverse_module_state->__pyx_n_s_typing); - Py_VISIT(traverse_module_state->__pyx_n_s_update); - Py_VISIT(traverse_module_state->__pyx_n_s_use_setstate); - Py_VISIT(traverse_module_state->__pyx_n_s_util_typing); - Py_VISIT(traverse_module_state->__pyx_n_s_value); - Py_VISIT(traverse_module_state->__pyx_int_1); - Py_VISIT(traverse_module_state->__pyx_int_222419149); - Py_VISIT(traverse_module_state->__pyx_int_228825662); - Py_VISIT(traverse_module_state->__pyx_int_238750788); - Py_VISIT(traverse_module_state->__pyx_tuple__3); - Py_VISIT(traverse_module_state->__pyx_tuple__7); - Py_VISIT(traverse_module_state->__pyx_tuple__9); - Py_VISIT(traverse_module_state->__pyx_tuple__11); - Py_VISIT(traverse_module_state->__pyx_tuple__13); - Py_VISIT(traverse_module_state->__pyx_tuple__15); - Py_VISIT(traverse_module_state->__pyx_tuple__17); - Py_VISIT(traverse_module_state->__pyx_tuple__20); - Py_VISIT(traverse_module_state->__pyx_codeobj__6); - Py_VISIT(traverse_module_state->__pyx_codeobj__8); - Py_VISIT(traverse_module_state->__pyx_codeobj__10); - Py_VISIT(traverse_module_state->__pyx_codeobj__12); - Py_VISIT(traverse_module_state->__pyx_codeobj__14); - Py_VISIT(traverse_module_state->__pyx_codeobj__16); - Py_VISIT(traverse_module_state->__pyx_codeobj__18); - Py_VISIT(traverse_module_state->__pyx_codeobj__19); - Py_VISIT(traverse_module_state->__pyx_codeobj__21); - return 0; -} -#endif -/* #### Code section: module_state_defines ### */ -#define __pyx_d __pyx_mstate_global->__pyx_d -#define __pyx_b __pyx_mstate_global->__pyx_b -#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime -#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple -#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes -#define __pyx_empty_unicode __pyx_mstate_global->__pyx_empty_unicode -#ifdef __Pyx_CyFunction_USED -#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType -#endif -#ifdef __Pyx_FusedFunction_USED -#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType -#endif -#ifdef __Pyx_Generator_USED -#define __pyx_GeneratorType __pyx_mstate_global->__pyx_GeneratorType -#endif -#ifdef __Pyx_IterableCoroutine_USED -#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#define __pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map __pyx_mstate_global->__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map -#define __pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map __pyx_mstate_global->__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map -#endif -#define __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map __pyx_mstate_global->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map -#define __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map __pyx_mstate_global->__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map -#define __pyx_kp_u_ __pyx_mstate_global->__pyx_kp_u_ -#define __pyx_n_s_CacheConst 
__pyx_mstate_global->__pyx_n_s_CacheConst -#define __pyx_n_s_Dict __pyx_mstate_global->__pyx_n_s_Dict -#define __pyx_kp_s_Incompatible_checksums_0x_x_vs_0 __pyx_mstate_global->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0 -#define __pyx_n_s_Literal __pyx_mstate_global->__pyx_n_s_Literal -#define __pyx_kp_u_None __pyx_mstate_global->__pyx_kp_u_None -#define __pyx_n_s_PickleError __pyx_mstate_global->__pyx_n_s_PickleError -#define __pyx_n_s_TYPE_CHECKING __pyx_mstate_global->__pyx_n_s_TYPE_CHECKING -#define __pyx_n_s_Tuple __pyx_mstate_global->__pyx_n_s_Tuple -#define __pyx_kp_s_Tuple_int_bool __pyx_mstate_global->__pyx_kp_s_Tuple_int_bool -#define __pyx_n_s_TypeError __pyx_mstate_global->__pyx_n_s_TypeError -#define __pyx_n_s_Union __pyx_mstate_global->__pyx_n_s_Union -#define __pyx_kp_s_Union_int_str_Literal_CacheConst __pyx_mstate_global->__pyx_kp_s_Union_int_str_Literal_CacheConst -#define __pyx_n_u__2 __pyx_mstate_global->__pyx_n_u__2 -#define __pyx_kp_u__4 __pyx_mstate_global->__pyx_kp_u__4 -#define __pyx_n_s__5 __pyx_mstate_global->__pyx_n_s__5 -#define __pyx_n_s_anon_map __pyx_mstate_global->__pyx_n_s_anon_map -#define __pyx_n_s_anon_map___missing __pyx_mstate_global->__pyx_n_s_anon_map___missing -#define __pyx_n_s_anon_map___reduce_cython __pyx_mstate_global->__pyx_n_s_anon_map___reduce_cython -#define __pyx_n_s_anon_map___setstate_cython __pyx_mstate_global->__pyx_n_s_anon_map___setstate_cython -#define __pyx_n_s_anon_map_get_anon __pyx_mstate_global->__pyx_n_s_anon_map_get_anon -#define __pyx_n_s_anonymous_counter __pyx_mstate_global->__pyx_n_s_anonymous_counter -#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines -#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool -#define __pyx_n_s_cache_key __pyx_mstate_global->__pyx_n_s_cache_key -#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback -#define __pyx_n_s_derived __pyx_mstate_global->__pyx_n_s_derived -#define __pyx_n_s_dict __pyx_mstate_global->__pyx_n_s_dict -#define __pyx_n_s_dict_2 __pyx_mstate_global->__pyx_n_s_dict_2 -#define __pyx_kp_u_disable __pyx_mstate_global->__pyx_kp_u_disable -#define __pyx_kp_u_enable __pyx_mstate_global->__pyx_kp_u_enable -#define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc -#define __pyx_n_s_get __pyx_mstate_global->__pyx_n_s_get -#define __pyx_n_s_get_anon __pyx_mstate_global->__pyx_n_s_get_anon -#define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate -#define __pyx_n_s_idself __pyx_mstate_global->__pyx_n_s_idself -#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import -#define __pyx_n_s_int __pyx_mstate_global->__pyx_n_s_int -#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled -#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine -#define __pyx_kp_u_isenabled __pyx_mstate_global->__pyx_kp_u_isenabled -#define __pyx_n_s_key __pyx_mstate_global->__pyx_n_s_key -#define __pyx_kp_s_lib_sqlalchemy_sql__util_cy_py __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_sql__util_cy_py -#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main -#define __pyx_n_s_missing __pyx_mstate_global->__pyx_n_s_missing -#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name -#define __pyx_n_s_new __pyx_mstate_global->__pyx_n_s_new -#define __pyx_kp_s_no_default___reduce___due_to_non __pyx_mstate_global->__pyx_kp_s_no_default___reduce___due_to_non -#define __pyx_n_s_obj __pyx_mstate_global->__pyx_n_s_obj -#define __pyx_n_s_object __pyx_mstate_global->__pyx_n_s_object -#define 
__pyx_n_s_pickle __pyx_mstate_global->__pyx_n_s_pickle -#define __pyx_n_s_prefix_anon_map __pyx_mstate_global->__pyx_n_s_prefix_anon_map -#define __pyx_n_s_prefix_anon_map___missing __pyx_mstate_global->__pyx_n_s_prefix_anon_map___missing -#define __pyx_n_s_prefix_anon_map___reduce_cython __pyx_mstate_global->__pyx_n_s_prefix_anon_map___reduce_cython -#define __pyx_n_s_prefix_anon_map___setstate_cytho __pyx_mstate_global->__pyx_n_s_prefix_anon_map___setstate_cytho -#define __pyx_n_s_pyx_PickleError __pyx_mstate_global->__pyx_n_s_pyx_PickleError -#define __pyx_n_s_pyx_checksum __pyx_mstate_global->__pyx_n_s_pyx_checksum -#define __pyx_n_s_pyx_result __pyx_mstate_global->__pyx_n_s_pyx_result -#define __pyx_n_s_pyx_state __pyx_mstate_global->__pyx_n_s_pyx_state -#define __pyx_n_s_pyx_type __pyx_mstate_global->__pyx_n_s_pyx_type -#define __pyx_n_s_pyx_unpickle_prefix_anon_map __pyx_mstate_global->__pyx_n_s_pyx_unpickle_prefix_anon_map -#define __pyx_n_s_pyx_vtable __pyx_mstate_global->__pyx_n_s_pyx_vtable -#define __pyx_n_s_reduce __pyx_mstate_global->__pyx_n_s_reduce -#define __pyx_n_s_reduce_cython __pyx_mstate_global->__pyx_n_s_reduce_cython -#define __pyx_n_s_reduce_ex __pyx_mstate_global->__pyx_n_s_reduce_ex -#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return -#define __pyx_n_s_self __pyx_mstate_global->__pyx_n_s_self -#define __pyx_n_s_self_dict __pyx_mstate_global->__pyx_n_s_self_dict -#define __pyx_n_s_setstate __pyx_mstate_global->__pyx_n_s_setstate -#define __pyx_n_s_setstate_cython __pyx_mstate_global->__pyx_n_s_setstate_cython -#define __pyx_n_s_sqlalchemy_sql__util_cy __pyx_mstate_global->__pyx_n_s_sqlalchemy_sql__util_cy -#define __pyx_n_s_state __pyx_mstate_global->__pyx_n_s_state -#define __pyx_n_s_str __pyx_mstate_global->__pyx_n_s_str -#define __pyx_kp_s_stringsource __pyx_mstate_global->__pyx_kp_s_stringsource -#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test -#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing -#define __pyx_n_s_update __pyx_mstate_global->__pyx_n_s_update -#define __pyx_n_s_use_setstate __pyx_mstate_global->__pyx_n_s_use_setstate -#define __pyx_n_s_util_typing __pyx_mstate_global->__pyx_n_s_util_typing -#define __pyx_n_s_value __pyx_mstate_global->__pyx_n_s_value -#define __pyx_int_1 __pyx_mstate_global->__pyx_int_1 -#define __pyx_int_222419149 __pyx_mstate_global->__pyx_int_222419149 -#define __pyx_int_228825662 __pyx_mstate_global->__pyx_int_228825662 -#define __pyx_int_238750788 __pyx_mstate_global->__pyx_int_238750788 -#define __pyx_tuple__3 __pyx_mstate_global->__pyx_tuple__3 -#define __pyx_tuple__7 __pyx_mstate_global->__pyx_tuple__7 -#define __pyx_tuple__9 __pyx_mstate_global->__pyx_tuple__9 -#define __pyx_tuple__11 __pyx_mstate_global->__pyx_tuple__11 -#define __pyx_tuple__13 __pyx_mstate_global->__pyx_tuple__13 -#define __pyx_tuple__15 __pyx_mstate_global->__pyx_tuple__15 -#define __pyx_tuple__17 __pyx_mstate_global->__pyx_tuple__17 -#define __pyx_tuple__20 __pyx_mstate_global->__pyx_tuple__20 -#define __pyx_codeobj__6 __pyx_mstate_global->__pyx_codeobj__6 -#define __pyx_codeobj__8 __pyx_mstate_global->__pyx_codeobj__8 -#define __pyx_codeobj__10 __pyx_mstate_global->__pyx_codeobj__10 -#define __pyx_codeobj__12 __pyx_mstate_global->__pyx_codeobj__12 -#define __pyx_codeobj__14 __pyx_mstate_global->__pyx_codeobj__14 -#define __pyx_codeobj__16 __pyx_mstate_global->__pyx_codeobj__16 -#define __pyx_codeobj__18 __pyx_mstate_global->__pyx_codeobj__18 -#define __pyx_codeobj__19 __pyx_mstate_global->__pyx_codeobj__19 
-#define __pyx_codeobj__21 __pyx_mstate_global->__pyx_codeobj__21 -/* #### Code section: module_code ### */ - -/* "sqlalchemy/sql/_util_cy.py":31 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -PyDoc_STRVAR(__pyx_doc_10sqlalchemy_3sql_8_util_cy__is_compiled, "Utility function to indicate if this module is compiled or not."); -static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_3sql_8_util_cy__is_compiled}; -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy__is_compiled(__pyx_self); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled", 1); - - /* "sqlalchemy/sql/_util_cy.py":33 - * def _is_compiled() -> bool: - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(Py_True); - __pyx_r = Py_True; - goto __pyx_L0; - - /* "sqlalchemy/sql/_util_cy.py":31 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/sql/_util_cy.py":57 - * """ - * - * def __missing__(self, key: str, /) -> str: # <<<<<<<<<<<<<< - * derived: str - * value: str - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_1__missing__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_1__missing__ = {"__missing__", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_1__missing__, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_1__missing__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__missing__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_key), (&PyUnicode_Type), 0, "key", 1))) __PYX_ERR(1, 57, __pyx_L1_error) - __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map___missing__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *)__pyx_v_self), ((PyObject*)__pyx_v_key)); - - /* 
function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = NULL; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map___missing__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self, PyObject *__pyx_v_key) { - PyObject *__pyx_v_derived = 0; - PyObject *__pyx_v_value = 0; - PyObject *__pyx_v_self_dict = 0; - PyObject *__pyx_v_anonymous_counter = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - Py_ssize_t __pyx_t_3; - Py_UCS4 __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__missing__", 1); - - /* "sqlalchemy/sql/_util_cy.py":60 - * derived: str - * value: str - * self_dict: dict = self # type: ignore[type-arg] # <<<<<<<<<<<<<< - * - * derived = key.split(" ", 1)[1] - */ - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_v_self_dict = ((PyObject*)__pyx_v_self); - - /* "sqlalchemy/sql/_util_cy.py":62 - * self_dict: dict = self # type: ignore[type-arg] - * - * derived = key.split(" ", 1)[1] # <<<<<<<<<<<<<< - * - * anonymous_counter: int = self_dict.get(derived, 1) - */ - __pyx_t_1 = PyUnicode_Split(__pyx_v_key, __Pyx_NoneAsNull(__pyx_kp_u_), 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 62, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_GetItemInt_List(__pyx_t_1, 1, long, 1, __Pyx_PyInt_From_long, 1, 0, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 62, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(PyUnicode_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None) || __Pyx_RaiseUnexpectedTypeError("unicode", __pyx_t_2))) __PYX_ERR(1, 62, __pyx_L1_error) - __pyx_v_derived = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/sql/_util_cy.py":64 - * derived = key.split(" ", 1)[1] - * - * anonymous_counter: int = self_dict.get(derived, 1) # <<<<<<<<<<<<<< - * self_dict[derived] = anonymous_counter + 1 - * value = f"{derived}_{anonymous_counter}" - */ - if (unlikely(__pyx_v_self_dict == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "get"); - __PYX_ERR(1, 64, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_PyDict_GetItemDefault(__pyx_v_self_dict, __pyx_v_derived, __pyx_int_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 64, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (!(likely(__Pyx_Py3Int_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None) || __Pyx_RaiseUnexpectedTypeError("int", __pyx_t_2))) __PYX_ERR(1, 64, __pyx_L1_error) - __pyx_v_anonymous_counter = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/sql/_util_cy.py":65 - * - * anonymous_counter: int = self_dict.get(derived, 1) - * self_dict[derived] = anonymous_counter + 1 # <<<<<<<<<<<<<< - * value = f"{derived}_{anonymous_counter}" - * self_dict[key] = value - */ - __pyx_t_2 = PyNumber_Add(__pyx_v_anonymous_counter, __pyx_int_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 65, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (unlikely(__pyx_v_self_dict == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 65, __pyx_L1_error) - } - if (unlikely((PyDict_SetItem(__pyx_v_self_dict, __pyx_v_derived, __pyx_t_2) < 0))) __PYX_ERR(1, 65, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/sql/_util_cy.py":66 - * anonymous_counter: int = self_dict.get(derived, 1) - * self_dict[derived] = 
anonymous_counter + 1 - * value = f"{derived}_{anonymous_counter}" # <<<<<<<<<<<<<< - * self_dict[key] = value - * return value - */ - __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = 0; - __pyx_t_4 = 127; - __pyx_t_1 = __Pyx_PyUnicode_Unicode(__pyx_v_derived); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_4 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) > __pyx_t_4) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) : __pyx_t_4; - __pyx_t_3 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_1); - __pyx_t_1 = 0; - __Pyx_INCREF(__pyx_n_u__2); - __pyx_t_3 += 1; - __Pyx_GIVEREF(__pyx_n_u__2); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_n_u__2); - __pyx_t_1 = __Pyx_PyObject_FormatSimple(__pyx_v_anonymous_counter, __pyx_empty_unicode); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_4 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) > __pyx_t_4) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) : __pyx_t_4; - __pyx_t_3 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_t_1); - __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyUnicode_Join(__pyx_t_2, 3, __pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_v_value = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/sql/_util_cy.py":67 - * self_dict[derived] = anonymous_counter + 1 - * value = f"{derived}_{anonymous_counter}" - * self_dict[key] = value # <<<<<<<<<<<<<< - * return value - * - */ - if (unlikely(__pyx_v_self_dict == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 67, __pyx_L1_error) - } - if (unlikely((PyDict_SetItem(__pyx_v_self_dict, __pyx_v_key, __pyx_v_value) < 0))) __PYX_ERR(1, 67, __pyx_L1_error) - - /* "sqlalchemy/sql/_util_cy.py":68 - * value = f"{derived}_{anonymous_counter}" - * self_dict[key] = value - * return value # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_value); - __pyx_r = __pyx_v_value; - goto __pyx_L0; - - /* "sqlalchemy/sql/_util_cy.py":57 - * """ - * - * def __missing__(self, key: str, /) -> str: # <<<<<<<<<<<<<< - * derived: str - * value: str - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.prefix_anon_map.__missing__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_derived); - __Pyx_XDECREF(__pyx_v_value); - __Pyx_XDECREF(__pyx_v_self_dict); - __Pyx_XDECREF(__pyx_v_anonymous_counter); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_3__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_3__reduce_cython__ = {"__reduce_cython__", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_3__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_3__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_2__reduce_cython__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_2__reduce_cython__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self) { - PyObject *__pyx_v_state = 0; - PyObject *__pyx_v__dict = 0; - int __pyx_v_use_setstate; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 1); - - /* "(tree fragment)":5 - * cdef object _dict - * cdef bint use_setstate - * state = () # <<<<<<<<<<<<<< - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - */ - __Pyx_INCREF(__pyx_empty_tuple); - __pyx_v_state = __pyx_empty_tuple; - - /* "(tree fragment)":6 - * cdef bint use_setstate - * state = () - * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< - * if _dict is not None: - * state += (_dict,) - */ - __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v__dict = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":7 - * state = () - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - __pyx_t_2 = (__pyx_v__dict != Py_None); - if (__pyx_t_2) { - - /* "(tree fragment)":8 - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - * state += (_dict,) # <<<<<<<<<<<<<< - * use_setstate = True - * else: - */ - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v__dict); - __Pyx_GIVEREF(__pyx_v__dict); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict)) __PYX_ERR(0, 8, __pyx_L1_error); - __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 8, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3)); - __pyx_t_3 = 0; - - /* "(tree fragment)":9 - * if _dict is not None: - * state += (_dict,) - * use_setstate = True # <<<<<<<<<<<<<< - * else: - * use_setstate = False - */ - __pyx_v_use_setstate = 1; - - /* "(tree fragment)":7 - * state = () - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - goto __pyx_L3; - } - - /* "(tree fragment)":11 - * use_setstate = True - * else: - * use_setstate = False # <<<<<<<<<<<<<< - * if use_setstate: - * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, None), state - */ - /*else*/ { - __pyx_v_use_setstate = 0; - } - __pyx_L3:; - - /* "(tree fragment)":12 - * else: - * use_setstate = False - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, None), state - * else: - */ - if (__pyx_v_use_setstate) { - - /* "(tree fragment)":13 - * use_setstate = False - * if use_setstate: - * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, None), state # <<<<<<<<<<<<<< - * else: - * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_prefix_anon_map); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(0, 13, __pyx_L1_error); - __Pyx_INCREF(__pyx_int_238750788); - __Pyx_GIVEREF(__pyx_int_238750788); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_238750788)) __PYX_ERR(0, 13, __pyx_L1_error); - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None)) __PYX_ERR(0, 13, __pyx_L1_error); - __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_3); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1)) __PYX_ERR(0, 13, __pyx_L1_error); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_state)) __PYX_ERR(0, 13, __pyx_L1_error); - __pyx_t_3 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "(tree fragment)":12 - * else: - * use_setstate = False - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, None), state - * else: - */ - } - - /* "(tree fragment)":15 - * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, None), state - * else: - * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_prefix_anon_map__set_state(self, __pyx_state) - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_prefix_anon_map); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) 
__PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(0, 15, __pyx_L1_error); - __Pyx_INCREF(__pyx_int_238750788); - __Pyx_GIVEREF(__pyx_int_238750788); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_238750788)) __PYX_ERR(0, 15, __pyx_L1_error); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state)) __PYX_ERR(0, 15, __pyx_L1_error); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_4); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4)) __PYX_ERR(0, 15, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(0, 15, __pyx_L1_error); - __pyx_t_4 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - } - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.prefix_anon_map.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_state); - __Pyx_XDECREF(__pyx_v__dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":16 - * else: - * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_prefix_anon_map__set_state(self, __pyx_state) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_5__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_5__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_5__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_5__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v___pyx_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; 
- if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 16, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(0, 16, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v___pyx_state = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 16, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.prefix_anon_map.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_4__setstate_cython__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *)__pyx_v_self), __pyx_v___pyx_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_4__setstate_cython__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 1); - - /* "(tree fragment)":17 - * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_prefix_anon_map__set_state(self, __pyx_state) # <<<<<<<<<<<<<< - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(0, 17, __pyx_L1_error) - __pyx_t_1 = __pyx_f_10sqlalchemy_3sql_8_util_cy___pyx_unpickle_prefix_anon_map__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_prefix_anon_map__set_state(self, __pyx_state) - */ - - /* function exit code */ - __pyx_r = Py_None; 
__Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.prefix_anon_map.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/sql/_util_cy.py":92 - * _index: cython.uint - * - * def __cinit__(self): # type: ignore[no-untyped-def] # <<<<<<<<<<<<<< - * self._index = 0 - * - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return -1; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, __pyx_nargs); return -1;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1; - __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map___cinit__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map___cinit__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self) { - int __pyx_r; - - /* "sqlalchemy/sql/_util_cy.py":93 - * - * def __cinit__(self): # type: ignore[no-untyped-def] - * self._index = 0 # <<<<<<<<<<<<<< - * - * else: - */ - __pyx_v_self->_index = 0; - - /* "sqlalchemy/sql/_util_cy.py":92 - * _index: cython.uint - * - * def __cinit__(self): # type: ignore[no-untyped-def] # <<<<<<<<<<<<<< - * self._index = 0 - * - */ - - /* function exit code */ - __pyx_r = 0; - return __pyx_r; -} - -/* "sqlalchemy/sql/_util_cy.py":98 - * _index: int = 0 # type: ignore[no-redef] - * - * @cython.cfunc # type:ignore[misc] # <<<<<<<<<<<<<< - * @cython.inline # type:ignore[misc] - * def _add_missing( - */ - -static CYTHON_INLINE PyObject *__pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key) { - PyObject *__pyx_v_val = 0; - PyObject *__pyx_v_self_dict = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_add_missing", 1); - - /* "sqlalchemy/sql/_util_cy.py":103 - * self: anon_map, key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], / - * ) -> int: - * val: int = self._index # <<<<<<<<<<<<<< - * self._index += 1 - * self_dict: dict = self # type: ignore[type-arg] - */ - __pyx_t_1 = __Pyx_PyInt_From_unsigned_int(__pyx_v_self->_index); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 103, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(__Pyx_Py3Int_CheckExact(__pyx_t_1)) || __Pyx_RaiseUnexpectedTypeError("int", __pyx_t_1))) __PYX_ERR(1, 103, __pyx_L1_error) - __pyx_v_val = 
((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/sql/_util_cy.py":104 - * ) -> int: - * val: int = self._index - * self._index += 1 # <<<<<<<<<<<<<< - * self_dict: dict = self # type: ignore[type-arg] - * self_dict[key] = val - */ - __pyx_v_self->_index = (__pyx_v_self->_index + 1); - - /* "sqlalchemy/sql/_util_cy.py":105 - * val: int = self._index - * self._index += 1 - * self_dict: dict = self # type: ignore[type-arg] # <<<<<<<<<<<<<< - * self_dict[key] = val - * return val - */ - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_v_self_dict = ((PyObject*)__pyx_v_self); - - /* "sqlalchemy/sql/_util_cy.py":106 - * self._index += 1 - * self_dict: dict = self # type: ignore[type-arg] - * self_dict[key] = val # <<<<<<<<<<<<<< - * return val - * - */ - if (unlikely((PyDict_SetItem(__pyx_v_self_dict, __pyx_v_key, __pyx_v_val) < 0))) __PYX_ERR(1, 106, __pyx_L1_error) - - /* "sqlalchemy/sql/_util_cy.py":107 - * self_dict: dict = self # type: ignore[type-arg] - * self_dict[key] = val - * return val # <<<<<<<<<<<<<< - * - * def get_anon(self: anon_map, obj: object, /) -> Tuple[int, bool]: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_val); - __pyx_r = __pyx_v_val; - goto __pyx_L0; - - /* "sqlalchemy/sql/_util_cy.py":98 - * _index: int = 0 # type: ignore[no-redef] - * - * @cython.cfunc # type:ignore[misc] # <<<<<<<<<<<<<< - * @cython.inline # type:ignore[misc] - * def _add_missing( - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map._add_missing", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_val); - __Pyx_XDECREF(__pyx_v_self_dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/sql/_util_cy.py":109 - * return val - * - * def get_anon(self: anon_map, obj: object, /) -> Tuple[int, bool]: # <<<<<<<<<<<<<< - * self_dict: dict = self # type: ignore[type-arg] - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_3get_anon(PyObject *__pyx_v_self, PyObject *__pyx_v_obj); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_3get_anon = {"get_anon", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_3get_anon, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_3get_anon(PyObject *__pyx_v_self, PyObject *__pyx_v_obj) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("get_anon (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { - PyErr_Format(PyExc_TypeError, "Argument '%.200s' must not be None", "self"); __PYX_ERR(1, 109, __pyx_L1_error) - } - __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_2get_anon(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self), ((PyObject *)__pyx_v_obj)); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = NULL; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_2get_anon(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_obj) { - PyObject *__pyx_v_self_dict = 0; - PyObject *__pyx_v_idself = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - 
unsigned PY_LONG_LONG __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("get_anon", 1); - - /* "sqlalchemy/sql/_util_cy.py":110 - * - * def get_anon(self: anon_map, obj: object, /) -> Tuple[int, bool]: - * self_dict: dict = self # type: ignore[type-arg] # <<<<<<<<<<<<<< - * - * idself: int = _get_id(obj) - */ - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_v_self_dict = ((PyObject*)__pyx_v_self); - - /* "sqlalchemy/sql/_util_cy.py":112 - * self_dict: dict = self # type: ignore[type-arg] - * - * idself: int = _get_id(obj) # <<<<<<<<<<<<<< - * if idself in self_dict: - * return self_dict[idself], True - */ - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_v_obj); if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 112, __pyx_L1_error) - __pyx_t_2 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 112, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (!(likely(__Pyx_Py3Int_CheckExact(__pyx_t_2)) || __Pyx_RaiseUnexpectedTypeError("int", __pyx_t_2))) __PYX_ERR(1, 112, __pyx_L1_error) - __pyx_v_idself = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/sql/_util_cy.py":113 - * - * idself: int = _get_id(obj) - * if idself in self_dict: # <<<<<<<<<<<<<< - * return self_dict[idself], True - * else: - */ - __pyx_t_3 = (__Pyx_PyDict_ContainsTF(__pyx_v_idself, __pyx_v_self_dict, Py_EQ)); if (unlikely((__pyx_t_3 < 0))) __PYX_ERR(1, 113, __pyx_L1_error) - if (__pyx_t_3) { - - /* "sqlalchemy/sql/_util_cy.py":114 - * idself: int = _get_id(obj) - * if idself in self_dict: - * return self_dict[idself], True # <<<<<<<<<<<<<< - * else: - * return self._add_missing(idself), False - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyDict_GetItem(__pyx_v_self_dict, __pyx_v_idself); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 114, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 114, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_2); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2)) __PYX_ERR(1, 114, __pyx_L1_error); - __Pyx_INCREF(Py_True); - __Pyx_GIVEREF(Py_True); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, Py_True)) __PYX_ERR(1, 114, __pyx_L1_error); - __pyx_t_2 = 0; - __pyx_r = ((PyObject*)__pyx_t_4); - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "sqlalchemy/sql/_util_cy.py":113 - * - * idself: int = _get_id(obj) - * if idself in self_dict: # <<<<<<<<<<<<<< - * return self_dict[idself], True - * else: - */ - } - - /* "sqlalchemy/sql/_util_cy.py":116 - * return self_dict[idself], True - * else: - * return self._add_missing(idself), False # <<<<<<<<<<<<<< - * - * if cython.compiled: - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(__pyx_v_self, __pyx_v_idself); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 116, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 116, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_4); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_4)) __PYX_ERR(1, 116, __pyx_L1_error); - __Pyx_INCREF(Py_False); - __Pyx_GIVEREF(Py_False); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 1, Py_False)) __PYX_ERR(1, 116, __pyx_L1_error); - __pyx_t_4 = 0; - __pyx_r = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - goto __pyx_L0; - } - - /* "sqlalchemy/sql/_util_cy.py":109 - * 
return val - * - * def get_anon(self: anon_map, obj: object, /) -> Tuple[int, bool]: # <<<<<<<<<<<<<< - * self_dict: dict = self # type: ignore[type-arg] - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.get_anon", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_self_dict); - __Pyx_XDECREF(__pyx_v_idself); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/sql/_util_cy.py":120 - * if cython.compiled: - * - * def __getitem__( # <<<<<<<<<<<<<< - * self: anon_map, - * key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { - PyErr_Format(PyExc_TypeError, "Argument '%.200s' must not be None", "self"); __PYX_ERR(1, 121, __pyx_L1_error) - } - __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_4__getitem__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self), ((PyObject *)__pyx_v_key)); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = NULL; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_4__getitem__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key) { - PyObject *__pyx_v_self_dict = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__getitem__", 1); - - /* "sqlalchemy/sql/_util_cy.py":125 - * /, - * ) -> Union[int, Literal[True]]: - * self_dict: dict = self # type: ignore[type-arg] # <<<<<<<<<<<<<< - * - * if key in self_dict: - */ - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_v_self_dict = ((PyObject*)__pyx_v_self); - - /* "sqlalchemy/sql/_util_cy.py":127 - * self_dict: dict = self # type: ignore[type-arg] - * - * if key in self_dict: # <<<<<<<<<<<<<< - * return self_dict[key] # type:ignore[no-any-return] - * else: - */ - __pyx_t_1 = (__Pyx_PyDict_ContainsTF(__pyx_v_key, __pyx_v_self_dict, Py_EQ)); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(1, 127, __pyx_L1_error) - if (__pyx_t_1) { - - /* "sqlalchemy/sql/_util_cy.py":128 - * - * if key in self_dict: - * return self_dict[key] # type:ignore[no-any-return] # <<<<<<<<<<<<<< - * else: - * return self._add_missing(key) # type:ignore[no-any-return] - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyDict_GetItem(__pyx_v_self_dict, __pyx_v_key); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 128, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/sql/_util_cy.py":127 - * self_dict: dict = self # type: ignore[type-arg] - * - * if key in self_dict: # <<<<<<<<<<<<<< - * return 
self_dict[key] # type:ignore[no-any-return] - * else: - */ - } - - /* "sqlalchemy/sql/_util_cy.py":130 - * return self_dict[key] # type:ignore[no-any-return] - * else: - * return self._add_missing(key) # type:ignore[no-any-return] # <<<<<<<<<<<<<< - * - * def __missing__( - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(__pyx_v_self, __pyx_v_key); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 130, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - } - - /* "sqlalchemy/sql/_util_cy.py":120 - * if cython.compiled: - * - * def __getitem__( # <<<<<<<<<<<<<< - * self: anon_map, - * key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_self_dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/sql/_util_cy.py":132 - * return self._add_missing(key) # type:ignore[no-any-return] - * - * def __missing__( # <<<<<<<<<<<<<< - * self: anon_map, key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], / - * ) -> int: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_7__missing__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_7__missing__ = {"__missing__", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_7__missing__, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_7__missing__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__missing__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { - PyErr_Format(PyExc_TypeError, "Argument '%.200s' must not be None", "self"); __PYX_ERR(1, 133, __pyx_L1_error) - } - __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_6__missing__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self), ((PyObject *)__pyx_v_key)); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = NULL; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_6__missing__(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, PyObject *__pyx_v_key) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__missing__", 1); - - /* "sqlalchemy/sql/_util_cy.py":135 - * self: anon_map, key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], / - * ) -> int: - * return self._add_missing(key) # type:ignore[no-any-return] # <<<<<<<<<<<<<< - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing(__pyx_v_self, __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 135, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/sql/_util_cy.py":132 - 
* return self._add_missing(key) # type:ignore[no-any-return] - * - * def __missing__( # <<<<<<<<<<<<<< - * self: anon_map, key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], / - * ) -> int: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.__missing__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" - * def __setstate_cython__(self, __pyx_state): - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_9__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_9__reduce_cython__ = {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_9__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_9__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_8__reduce_cython__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_8__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 1); - - /* "(tree fragment)":2 - * def __reduce_cython__(self): - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" - */ - __Pyx_Raise(__pyx_builtin_TypeError, __pyx_kp_s_no_default___reduce___due_to_non, 0, 0); - __PYX_ERR(0, 2, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" - * def 
__setstate_cython__(self, __pyx_state): - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_11__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_8anon_map_11__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_11__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_11__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - CYTHON_UNUSED PyObject *__pyx_v___pyx_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 3, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(0, 3, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v___pyx_state = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 3, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < 
(Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_10__setstate_cython__(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)__pyx_v_self), __pyx_v___pyx_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_8anon_map_10__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 1); - - /* "(tree fragment)":4 - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" - * def __setstate_cython__(self, __pyx_state): - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" # <<<<<<<<<<<<<< - */ - __Pyx_Raise(__pyx_builtin_TypeError, __pyx_kp_s_no_default___reduce___due_to_non, 0, 0); - __PYX_ERR(0, 4, __pyx_L1_error) - - /* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.anon_map.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __pyx_unpickle_prefix_anon_map(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_3__pyx_unpickle_prefix_anon_map(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_3sql_8_util_cy_3__pyx_unpickle_prefix_anon_map = {"__pyx_unpickle_prefix_anon_map", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_3__pyx_unpickle_prefix_anon_map, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_3sql_8_util_cy_3__pyx_unpickle_prefix_anon_map(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v___pyx_type = 0; - long __pyx_v___pyx_checksum; - PyObject *__pyx_v___pyx_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[3] = {0,0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r 
= 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__pyx_unpickle_prefix_anon_map (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_type)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_checksum)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_prefix_anon_map", 1, 3, 3, 1); __PYX_ERR(0, 1, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_prefix_anon_map", 1, 3, 3, 2); __PYX_ERR(0, 1, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__pyx_unpickle_prefix_anon_map") < 0)) __PYX_ERR(0, 1, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 3)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - } - __pyx_v___pyx_type = values[0]; - __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L3_error) - __pyx_v___pyx_state = values[2]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_prefix_anon_map", 1, 3, 3, __pyx_nargs); __PYX_ERR(0, 1, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.__pyx_unpickle_prefix_anon_map", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_3sql_8_util_cy_2__pyx_unpickle_prefix_anon_map(__pyx_self, __pyx_v___pyx_type, 
__pyx_v___pyx_checksum, __pyx_v___pyx_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_3sql_8_util_cy_2__pyx_unpickle_prefix_anon_map(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_v___pyx_PickleError = 0; - PyObject *__pyx_v___pyx_result = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_prefix_anon_map", 1); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - */ - __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__3, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(0, 4, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_2) { - - /* "(tree fragment)":5 - * cdef object __pyx_result - * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): - * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - * __pyx_result = prefix_anon_map.__new__(__pyx_type) - */ - __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_s_PickleError); - __Pyx_GIVEREF(__pyx_n_s_PickleError); - if (__Pyx_PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError)) __PYX_ERR(0, 5, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_t_1); - __pyx_v___pyx_PickleError = __pyx_t_1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":6 - * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum # <<<<<<<<<<<<<< - * __pyx_result = prefix_anon_map.__new__(__pyx_type) - * if __pyx_state is not None: - */ - __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - 
__Pyx_Raise(__pyx_v___pyx_PickleError, __pyx_t_1, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 6, __pyx_L1_error) - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - */ - } - - /* "(tree fragment)":7 - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - * __pyx_result = prefix_anon_map.__new__(__pyx_type) # <<<<<<<<<<<<<< - * if __pyx_state is not None: - * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map), __pyx_n_s_new); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v___pyx_type}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_v___pyx_result = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - * __pyx_result = prefix_anon_map.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - __pyx_t_2 = (__pyx_v___pyx_state != Py_None); - if (__pyx_t_2) { - - /* "(tree fragment)":9 - * __pyx_result = prefix_anon_map.__new__(__pyx_type) - * if __pyx_state is not None: - * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< - * return __pyx_result - * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(0, 9, __pyx_L1_error) - __pyx_t_1 = __pyx_f_10sqlalchemy_3sql_8_util_cy___pyx_unpickle_prefix_anon_map__set_state(((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - * __pyx_result = prefix_anon_map.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - } - - /* "(tree fragment)":10 - * if __pyx_state is not None: - * 
__pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) - * return __pyx_result # <<<<<<<<<<<<<< - * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): - * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v___pyx_result); - __pyx_r = __pyx_v___pyx_result; - goto __pyx_L0; - - /* "(tree fragment)":1 - * def __pyx_unpickle_prefix_anon_map(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.__pyx_unpickle_prefix_anon_map", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v___pyx_PickleError); - __Pyx_XDECREF(__pyx_v___pyx_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":11 - * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[0]) - */ - -static PyObject *__pyx_f_10sqlalchemy_3sql_8_util_cy___pyx_unpickle_prefix_anon_map__set_state(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - Py_ssize_t __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - unsigned int __pyx_t_8; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_prefix_anon_map__set_state", 1); - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): - * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[0]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(0, 12, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 12, __pyx_L1_error) - __pyx_t_3 = (__pyx_t_2 > 0); - if (__pyx_t_3) { - } else { - __pyx_t_1 = __pyx_t_3; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_3 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 12, __pyx_L1_error) - __pyx_t_1 = __pyx_t_3; - __pyx_L4_bool_binop_done:; - if (__pyx_t_1) { - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): - * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[0]) # <<<<<<<<<<<<<< - */ - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 13, 
__pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 13, __pyx_L1_error) - } - __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_7 = NULL; - __pyx_t_8 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - __pyx_t_8 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_5}; - __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): - * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[0]) - */ - } - - /* "(tree fragment)":11 - * __pyx_unpickle_prefix_anon_map__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_prefix_anon_map__set_state(prefix_anon_map __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[0]) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("sqlalchemy.sql._util_cy.__pyx_unpickle_prefix_anon_map__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_prefix_anon_map(PyObject *o, visitproc v, void *a) { - int e; - if (!(&PyDict_Type)->tp_traverse); else { e = (&PyDict_Type)->tp_traverse(o,v,a); if (e) return e; } - return 0; -} - -static int __pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_prefix_anon_map(PyObject *o) { - if (!(&PyDict_Type)->tp_clear); else (&PyDict_Type)->tp_clear(o); - return 0; -} - -static PyMethodDef __pyx_methods_10sqlalchemy_3sql_8_util_cy_prefix_anon_map[] = { - {"__missing__", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_1__missing__, METH_O, 0}, - {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_3__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_15prefix_anon_map_5__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {0, 0, 0, 0} -}; -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map_slots[] = { - {Py_tp_doc, (void *)PyDoc_STR("A map that creates new keys for 
missing key access.\n\n Considers keys of the form \" \" to produce\n new symbols \"_\", where \"index\" is an incrementing integer\n corresponding to .\n\n Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which\n is otherwise usually used for this type of operation.\n\n ")}, - {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_prefix_anon_map}, - {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_prefix_anon_map}, - {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_3sql_8_util_cy_prefix_anon_map}, - {0, 0}, -}; -static PyType_Spec __pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map_spec = { - "sqlalchemy.sql._util_cy.prefix_anon_map", - sizeof(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map), - 0, - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, - __pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map_slots, -}; -#else - -static PyTypeObject __pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map = { - PyVarObject_HEAD_INIT(0, 0) - "sqlalchemy.sql._util_cy.""prefix_anon_map", /*tp_name*/ - sizeof(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - 0, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ - PyDoc_STR("A map that creates new keys for missing key access.\n\n Considers keys of the form \" \" to produce\n new symbols \"_\", where \"index\" is an incrementing integer\n corresponding to .\n\n Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which\n is otherwise usually used for this type of operation.\n\n "), /*tp_doc*/ - __pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_prefix_anon_map, /*tp_traverse*/ - __pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_prefix_anon_map, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_10sqlalchemy_3sql_8_util_cy_prefix_anon_map, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - #if !CYTHON_USE_TYPE_SPECS - 0, /*tp_dictoffset*/ - #endif - 0, /*tp_init*/ - 0, /*tp_alloc*/ - 0, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - #if CYTHON_USE_TP_FINALIZE - 0, /*tp_finalize*/ - #else - NULL, /*tp_finalize*/ - #endif - #endif - #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, /*tp_vectorcall*/ - #endif - #if __PYX_NEED_TP_PRINT_SLOT == 1 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030C0000 - 0, /*tp_watched*/ - #endif - #if PY_VERSION_HEX >= 0x030d00A4 - 0, /*tp_versions_used*/ - #endif - #if CYTHON_COMPILING_IN_PYPY && 
PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, /*tp_pypy_flags*/ - #endif -}; -#endif -static struct __pyx_vtabstruct_10sqlalchemy_3sql_8_util_cy_anon_map __pyx_vtable_10sqlalchemy_3sql_8_util_cy_anon_map; - -static PyObject *__pyx_tp_new_10sqlalchemy_3sql_8_util_cy_anon_map(PyTypeObject *t, PyObject *a, PyObject *k) { - struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *p; - PyObject *o = __Pyx_PyType_GetSlot((&PyDict_Type), tp_new, newfunc)(t, a, k); - if (unlikely(!o)) return 0; - p = ((struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *)o); - p->__pyx_vtab = __pyx_vtabptr_10sqlalchemy_3sql_8_util_cy_anon_map; - if (unlikely(__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_1__cinit__(o, __pyx_empty_tuple, NULL) < 0)) goto bad; - return o; - bad: - Py_DECREF(o); o = 0; - return NULL; -} - -static int __pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_anon_map(PyObject *o, visitproc v, void *a) { - int e; - if (!(&PyDict_Type)->tp_traverse); else { e = (&PyDict_Type)->tp_traverse(o,v,a); if (e) return e; } - return 0; -} - -static int __pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_anon_map(PyObject *o) { - if (!(&PyDict_Type)->tp_clear); else (&PyDict_Type)->tp_clear(o); - return 0; -} -static PyObject *__pyx_sq_item_10sqlalchemy_3sql_8_util_cy_anon_map(PyObject *o, Py_ssize_t i) { - PyObject *r; - PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0; - r = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, x); - Py_DECREF(x); - return r; -} - -static PyMethodDef __pyx_methods_10sqlalchemy_3sql_8_util_cy_anon_map[] = { - {"get_anon", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_3get_anon, METH_O, 0}, - {"__missing__", (PyCFunction)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_7__missing__, METH_O, 0}, - {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_9__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_11__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {0, 0, 0, 0} -}; -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map_slots[] = { - {Py_sq_item, (void *)__pyx_sq_item_10sqlalchemy_3sql_8_util_cy_anon_map}, - {Py_mp_subscript, (void *)__pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_5__getitem__}, - {Py_tp_doc, (void *)PyDoc_STR("A map that creates new keys for missing key access.\n\n Produces an incrementing sequence given a series of unique keys.\n\n This is similar to the compiler prefix_anon_map class although simpler.\n\n Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which\n is otherwise usually used for this type of operation.\n\n ")}, - {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_anon_map}, - {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_anon_map}, - {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_3sql_8_util_cy_anon_map}, - {Py_tp_new, (void *)__pyx_tp_new_10sqlalchemy_3sql_8_util_cy_anon_map}, - {0, 0}, -}; -static PyType_Spec __pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map_spec = { - "sqlalchemy.sql._util_cy.anon_map", - sizeof(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map), - 0, - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, - __pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map_slots, -}; -#else - 
-static PySequenceMethods __pyx_tp_as_sequence_anon_map = { - 0, /*sq_length*/ - 0, /*sq_concat*/ - 0, /*sq_repeat*/ - __pyx_sq_item_10sqlalchemy_3sql_8_util_cy_anon_map, /*sq_item*/ - 0, /*sq_slice*/ - 0, /*sq_ass_item*/ - 0, /*sq_ass_slice*/ - 0, /*sq_contains*/ - 0, /*sq_inplace_concat*/ - 0, /*sq_inplace_repeat*/ -}; - -static PyMappingMethods __pyx_tp_as_mapping_anon_map = { - 0, /*mp_length*/ - __pyx_pw_10sqlalchemy_3sql_8_util_cy_8anon_map_5__getitem__, /*mp_subscript*/ - 0, /*mp_ass_subscript*/ -}; - -static PyTypeObject __pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map = { - PyVarObject_HEAD_INIT(0, 0) - "sqlalchemy.sql._util_cy.""anon_map", /*tp_name*/ - sizeof(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - 0, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - &__pyx_tp_as_sequence_anon_map, /*tp_as_sequence*/ - &__pyx_tp_as_mapping_anon_map, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ - PyDoc_STR("A map that creates new keys for missing key access.\n\n Produces an incrementing sequence given a series of unique keys.\n\n This is similar to the compiler prefix_anon_map class although simpler.\n\n Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which\n is otherwise usually used for this type of operation.\n\n "), /*tp_doc*/ - __pyx_tp_traverse_10sqlalchemy_3sql_8_util_cy_anon_map, /*tp_traverse*/ - __pyx_tp_clear_10sqlalchemy_3sql_8_util_cy_anon_map, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_10sqlalchemy_3sql_8_util_cy_anon_map, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - #if !CYTHON_USE_TYPE_SPECS - 0, /*tp_dictoffset*/ - #endif - 0, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_10sqlalchemy_3sql_8_util_cy_anon_map, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - #if CYTHON_USE_TP_FINALIZE - 0, /*tp_finalize*/ - #else - NULL, /*tp_finalize*/ - #endif - #endif - #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, /*tp_vectorcall*/ - #endif - #if __PYX_NEED_TP_PRINT_SLOT == 1 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030C0000 - 0, /*tp_watched*/ - #endif - #if PY_VERSION_HEX >= 0x030d00A4 - 0, /*tp_versions_used*/ - #endif - #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, /*tp_pypy_flags*/ - #endif -}; -#endif - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif 
-/* #### Code section: pystring_table ### */ - -static int __Pyx_CreateStringTabAndInitStrings(void) { - __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, - {&__pyx_n_s_CacheConst, __pyx_k_CacheConst, sizeof(__pyx_k_CacheConst), 0, 0, 1, 1}, - {&__pyx_n_s_Dict, __pyx_k_Dict, sizeof(__pyx_k_Dict), 0, 0, 1, 1}, - {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0}, - {&__pyx_n_s_Literal, __pyx_k_Literal, sizeof(__pyx_k_Literal), 0, 0, 1, 1}, - {&__pyx_kp_u_None, __pyx_k_None, sizeof(__pyx_k_None), 0, 1, 0, 0}, - {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_TYPE_CHECKING, __pyx_k_TYPE_CHECKING, sizeof(__pyx_k_TYPE_CHECKING), 0, 0, 1, 1}, - {&__pyx_n_s_Tuple, __pyx_k_Tuple, sizeof(__pyx_k_Tuple), 0, 0, 1, 1}, - {&__pyx_kp_s_Tuple_int_bool, __pyx_k_Tuple_int_bool, sizeof(__pyx_k_Tuple_int_bool), 0, 0, 1, 0}, - {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, - {&__pyx_n_s_Union, __pyx_k_Union, sizeof(__pyx_k_Union), 0, 0, 1, 1}, - {&__pyx_kp_s_Union_int_str_Literal_CacheConst, __pyx_k_Union_int_str_Literal_CacheConst, sizeof(__pyx_k_Union_int_str_Literal_CacheConst), 0, 0, 1, 0}, - {&__pyx_n_u__2, __pyx_k__2, sizeof(__pyx_k__2), 0, 1, 0, 1}, - {&__pyx_kp_u__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 1, 0, 0}, - {&__pyx_n_s__5, __pyx_k__5, sizeof(__pyx_k__5), 0, 0, 1, 1}, - {&__pyx_n_s_anon_map, __pyx_k_anon_map, sizeof(__pyx_k_anon_map), 0, 0, 1, 1}, - {&__pyx_n_s_anon_map___missing, __pyx_k_anon_map___missing, sizeof(__pyx_k_anon_map___missing), 0, 0, 1, 1}, - {&__pyx_n_s_anon_map___reduce_cython, __pyx_k_anon_map___reduce_cython, sizeof(__pyx_k_anon_map___reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_anon_map___setstate_cython, __pyx_k_anon_map___setstate_cython, sizeof(__pyx_k_anon_map___setstate_cython), 0, 0, 1, 1}, - {&__pyx_n_s_anon_map_get_anon, __pyx_k_anon_map_get_anon, sizeof(__pyx_k_anon_map_get_anon), 0, 0, 1, 1}, - {&__pyx_n_s_anonymous_counter, __pyx_k_anonymous_counter, sizeof(__pyx_k_anonymous_counter), 0, 0, 1, 1}, - {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, - {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, - {&__pyx_n_s_cache_key, __pyx_k_cache_key, sizeof(__pyx_k_cache_key), 0, 0, 1, 1}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_derived, __pyx_k_derived, sizeof(__pyx_k_derived), 0, 0, 1, 1}, - {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, - {&__pyx_n_s_dict_2, __pyx_k_dict_2, sizeof(__pyx_k_dict_2), 0, 0, 1, 1}, - {&__pyx_kp_u_disable, __pyx_k_disable, sizeof(__pyx_k_disable), 0, 1, 0, 0}, - {&__pyx_kp_u_enable, __pyx_k_enable, sizeof(__pyx_k_enable), 0, 1, 0, 0}, - {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, - {&__pyx_n_s_get, __pyx_k_get, sizeof(__pyx_k_get), 0, 0, 1, 1}, - {&__pyx_n_s_get_anon, __pyx_k_get_anon, sizeof(__pyx_k_get_anon), 0, 0, 1, 1}, - {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, - {&__pyx_n_s_idself, __pyx_k_idself, sizeof(__pyx_k_idself), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_n_s_int, __pyx_k_int, sizeof(__pyx_k_int), 0, 0, 1, 1}, - {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, - 
{&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, - {&__pyx_kp_u_isenabled, __pyx_k_isenabled, sizeof(__pyx_k_isenabled), 0, 1, 0, 0}, - {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, - {&__pyx_kp_s_lib_sqlalchemy_sql__util_cy_py, __pyx_k_lib_sqlalchemy_sql__util_cy_py, sizeof(__pyx_k_lib_sqlalchemy_sql__util_cy_py), 0, 0, 1, 0}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_s_missing, __pyx_k_missing, sizeof(__pyx_k_missing), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, - {&__pyx_kp_s_no_default___reduce___due_to_non, __pyx_k_no_default___reduce___due_to_non, sizeof(__pyx_k_no_default___reduce___due_to_non), 0, 0, 1, 0}, - {&__pyx_n_s_obj, __pyx_k_obj, sizeof(__pyx_k_obj), 0, 0, 1, 1}, - {&__pyx_n_s_object, __pyx_k_object, sizeof(__pyx_k_object), 0, 0, 1, 1}, - {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, - {&__pyx_n_s_prefix_anon_map, __pyx_k_prefix_anon_map, sizeof(__pyx_k_prefix_anon_map), 0, 0, 1, 1}, - {&__pyx_n_s_prefix_anon_map___missing, __pyx_k_prefix_anon_map___missing, sizeof(__pyx_k_prefix_anon_map___missing), 0, 0, 1, 1}, - {&__pyx_n_s_prefix_anon_map___reduce_cython, __pyx_k_prefix_anon_map___reduce_cython, sizeof(__pyx_k_prefix_anon_map___reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_prefix_anon_map___setstate_cytho, __pyx_k_prefix_anon_map___setstate_cytho, sizeof(__pyx_k_prefix_anon_map___setstate_cytho), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_unpickle_prefix_anon_map, __pyx_k_pyx_unpickle_prefix_anon_map, sizeof(__pyx_k_pyx_unpickle_prefix_anon_map), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, - {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, - {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, - {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, - {&__pyx_n_s_self_dict, __pyx_k_self_dict, sizeof(__pyx_k_self_dict), 0, 0, 1, 1}, - {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, - {&__pyx_n_s_sqlalchemy_sql__util_cy, __pyx_k_sqlalchemy_sql__util_cy, sizeof(__pyx_k_sqlalchemy_sql__util_cy), 0, 0, 1, 1}, - {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, - {&__pyx_n_s_str, __pyx_k_str, sizeof(__pyx_k_str), 0, 0, 1, 1}, - {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, - {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, - {&__pyx_n_s_use_setstate, __pyx_k_use_setstate, 
sizeof(__pyx_k_use_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_util_typing, __pyx_k_util_typing, sizeof(__pyx_k_util_typing), 0, 0, 1, 1}, - {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} - }; - return __Pyx_InitStrings(__pyx_string_tab); -} -/* #### Code section: cached_builtins ### */ -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 2, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: cached_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - */ - __pyx_tuple__3 = PyTuple_Pack(3, __pyx_int_238750788, __pyx_int_228825662, __pyx_int_222419149); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__3); - __Pyx_GIVEREF(__pyx_tuple__3); - - /* "sqlalchemy/sql/_util_cy.py":31 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - __pyx_codeobj__6 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_sql__util_cy_py, __pyx_n_s_is_compiled, 31, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__6)) __PYX_ERR(1, 31, __pyx_L1_error) - - /* "sqlalchemy/sql/_util_cy.py":57 - * """ - * - * def __missing__(self, key: str, /) -> str: # <<<<<<<<<<<<<< - * derived: str - * value: str - */ - __pyx_tuple__7 = PyTuple_Pack(6, __pyx_n_s_self, __pyx_n_s_key, __pyx_n_s_derived, __pyx_n_s_value, __pyx_n_s_self_dict, __pyx_n_s_anonymous_counter); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 57, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__7); - __Pyx_GIVEREF(__pyx_tuple__7); - __pyx_codeobj__8 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__7, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_sql__util_cy_py, __pyx_n_s_missing, 57, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__8)) __PYX_ERR(1, 57, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - __pyx_tuple__9 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_dict_2, __pyx_n_s_use_setstate); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__9); - __Pyx_GIVEREF(__pyx_tuple__9); - __pyx_codeobj__10 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__9, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__10)) __PYX_ERR(0, 1, __pyx_L1_error) - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_prefix_anon_map, (type(self), 0xe3b0c44, state) 
- * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_prefix_anon_map__set_state(self, __pyx_state) - */ - __pyx_tuple__11 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pyx_state); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__11); - __Pyx_GIVEREF(__pyx_tuple__11); - __pyx_codeobj__12 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__11, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__12)) __PYX_ERR(0, 16, __pyx_L1_error) - - /* "sqlalchemy/sql/_util_cy.py":109 - * return val - * - * def get_anon(self: anon_map, obj: object, /) -> Tuple[int, bool]: # <<<<<<<<<<<<<< - * self_dict: dict = self # type: ignore[type-arg] - * - */ - __pyx_tuple__13 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_obj, __pyx_n_s_self_dict, __pyx_n_s_idself); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(1, 109, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__13); - __Pyx_GIVEREF(__pyx_tuple__13); - __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__13, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_sql__util_cy_py, __pyx_n_s_get_anon, 109, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(1, 109, __pyx_L1_error) - - /* "sqlalchemy/sql/_util_cy.py":132 - * return self._add_missing(key) # type:ignore[no-any-return] - * - * def __missing__( # <<<<<<<<<<<<<< - * self: anon_map, key: Union[int, str, "Literal[CacheConst.NO_CACHE]"], / - * ) -> int: - */ - __pyx_tuple__15 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_key); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(1, 132, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__15); - __Pyx_GIVEREF(__pyx_tuple__15); - __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_sql__util_cy_py, __pyx_n_s_missing, 132, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(1, 132, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" - * def __setstate_cython__(self, __pyx_state): - */ - __pyx_tuple__17 = PyTuple_Pack(1, __pyx_n_s_self); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__17); - __Pyx_GIVEREF(__pyx_tuple__17); - __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(0, 1, __pyx_L1_error) - - /* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError, "no default __reduce__ due to non-trivial __cinit__" - */ - __pyx_codeobj__19 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__11, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, 
__pyx_n_s_setstate_cython, 3, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__19)) __PYX_ERR(0, 3, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __pyx_unpickle_prefix_anon_map(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_tuple__20 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__20)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__20); - __Pyx_GIVEREF(__pyx_tuple__20); - __pyx_codeobj__21 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_prefix_anon_map, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__21)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} -/* #### Code section: init_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { - __pyx_umethod_PyDict_Type_get.type = (PyObject*)&PyDict_Type; - __pyx_umethod_PyDict_Type_get.method_name = &__pyx_n_s_get; - if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(1, 1, __pyx_L1_error); - __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(1, 1, __pyx_L1_error) - __pyx_int_222419149 = PyInt_FromLong(222419149L); if (unlikely(!__pyx_int_222419149)) __PYX_ERR(1, 1, __pyx_L1_error) - __pyx_int_228825662 = PyInt_FromLong(228825662L); if (unlikely(!__pyx_int_228825662)) __PYX_ERR(1, 1, __pyx_L1_error) - __pyx_int_238750788 = PyInt_FromLong(238750788L); if (unlikely(!__pyx_int_238750788)) __PYX_ERR(1, 1, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: init_globals ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - return 0; -} -/* #### Code section: init_module ### */ - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - 
__Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - #if CYTHON_USE_TYPE_SPECS - __pyx_t_1 = PyTuple_Pack(1, (PyObject *)(&PyDict_Type)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 45, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map_spec, __pyx_t_1); - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - if (unlikely(!__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map)) __PYX_ERR(1, 45, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map_spec, __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map) < 0) __PYX_ERR(1, 45, __pyx_L1_error) - #else - __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map = &__pyx_type_10sqlalchemy_3sql_8_util_cy_prefix_anon_map; - #endif - if (sizeof(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_prefix_anon_map) != sizeof(PyDictObject)) { - if (__Pyx_validate_extern_base((&PyDict_Type)) < 0) __PYX_ERR(1, 45, __pyx_L1_error) - } - #if !CYTHON_COMPILING_IN_LIMITED_API - __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_dealloc = (&PyDict_Type)->tp_dealloc; - __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_base = (&PyDict_Type); - __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_new = (&PyDict_Type)->tp_new; - #endif - #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map) < 0) __PYX_ERR(1, 45, __pyx_L1_error) - #endif - #if PY_MAJOR_VERSION < 3 - __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_print = 0; - #endif - #if !CYTHON_COMPILING_IN_LIMITED_API - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_dictoffset && __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_getattro == PyObject_GenericGetAttr)) { - __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map->tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_prefix_anon_map, (PyObject *) __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map) < 0) __PYX_ERR(1, 45, __pyx_L1_error) - #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_10sqlalchemy_3sql_8_util_cy_prefix_anon_map) < 0) __PYX_ERR(1, 45, __pyx_L1_error) - #endif - __pyx_vtabptr_10sqlalchemy_3sql_8_util_cy_anon_map = &__pyx_vtable_10sqlalchemy_3sql_8_util_cy_anon_map; - __pyx_vtable_10sqlalchemy_3sql_8_util_cy_anon_map._add_missing = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_3sql_8_util_cy_anon_map *, PyObject *))__pyx_f_10sqlalchemy_3sql_8_util_cy_8anon_map__add_missing; - #if CYTHON_USE_TYPE_SPECS - __pyx_t_1 = PyTuple_Pack(1, (PyObject *)(&PyDict_Type)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 72, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map_spec, __pyx_t_1); - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - if (unlikely(!__pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map)) __PYX_ERR(1, 72, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_3sql_8_util_cy_anon_map_spec, __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map) < 0) __PYX_ERR(1, 72, __pyx_L1_error) - #else - __pyx_ptype_10sqlalchemy_3sql_8_util_cy_anon_map = 
[Cython-generated C source for the sqlalchemy.sql._util_cy extension module (diff lines, machine-generated): the PyModuleDef definition and PyInit__util_cy initialization code that imports typing names, registers _is_compiled, prefix_anon_map.__missing__, anon_map.get_anon and anon_map.__missing__ on the generated types, plus the standard Cython runtime support functions (refnanny, error handling, argument parsing, import helpers).]
- */ - assert(tstate != NULL); - f = PyFrame_New(tstate, co, globals, NULL); - if (f == NULL) { - return NULL; - } - fastlocals = __Pyx_PyFrame_GetLocalsplus(f); - for (i = 0; i < na; i++) { - Py_INCREF(*args); - fastlocals[i] = *args++; - } - result = PyEval_EvalFrameEx(f,0); - ++tstate->recursion_depth; - Py_DECREF(f); - --tstate->recursion_depth; - return result; -} -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { - PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); - PyObject *globals = PyFunction_GET_GLOBALS(func); - PyObject *argdefs = PyFunction_GET_DEFAULTS(func); - PyObject *closure; -#if PY_MAJOR_VERSION >= 3 - PyObject *kwdefs; -#endif - PyObject *kwtuple, **k; - PyObject **d; - Py_ssize_t nd; - Py_ssize_t nk; - PyObject *result; - assert(kwargs == NULL || PyDict_Check(kwargs)); - nk = kwargs ? PyDict_Size(kwargs) : 0; - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) { - return NULL; - } - #else - if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) { - return NULL; - } - #endif - if ( -#if PY_MAJOR_VERSION >= 3 - co->co_kwonlyargcount == 0 && -#endif - likely(kwargs == NULL || nk == 0) && - co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { - if (argdefs == NULL && co->co_argcount == nargs) { - result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); - goto done; - } - else if (nargs == 0 && argdefs != NULL - && co->co_argcount == Py_SIZE(argdefs)) { - /* function called with no arguments, but all parameters have - a default value: use default values as arguments .*/ - args = &PyTuple_GET_ITEM(argdefs, 0); - result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); - goto done; - } - } - if (kwargs != NULL) { - Py_ssize_t pos, i; - kwtuple = PyTuple_New(2 * nk); - if (kwtuple == NULL) { - result = NULL; - goto done; - } - k = &PyTuple_GET_ITEM(kwtuple, 0); - pos = i = 0; - while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { - Py_INCREF(k[i]); - Py_INCREF(k[i+1]); - i += 2; - } - nk = i / 2; - } - else { - kwtuple = NULL; - k = NULL; - } - closure = PyFunction_GET_CLOSURE(func); -#if PY_MAJOR_VERSION >= 3 - kwdefs = PyFunction_GET_KW_DEFAULTS(func); -#endif - if (argdefs != NULL) { - d = &PyTuple_GET_ITEM(argdefs, 0); - nd = Py_SIZE(argdefs); - } - else { - d = NULL; - nd = 0; - } -#if PY_MAJOR_VERSION >= 3 - result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, kwdefs, closure); -#else - result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, closure); -#endif - Py_XDECREF(kwtuple); -done: - Py_LeaveRecursiveCall(); - return result; -} -#endif - -/* PyObjectCallMethO */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { - PyObject *self, *result; - PyCFunction cfunc; - cfunc = __Pyx_CyOrPyCFunction_GET_FUNCTION(func); - self = __Pyx_CyOrPyCFunction_GET_SELF(func); - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - #else - if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) - return NULL; - #endif - result = cfunc(self, arg); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; 
-} -#endif - -/* PyObjectFastCall */ -#if PY_VERSION_HEX < 0x03090000 || CYTHON_COMPILING_IN_LIMITED_API -static PyObject* __Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs) { - PyObject *argstuple; - PyObject *result = 0; - size_t i; - argstuple = PyTuple_New((Py_ssize_t)nargs); - if (unlikely(!argstuple)) return NULL; - for (i = 0; i < nargs; i++) { - Py_INCREF(args[i]); - if (__Pyx_PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]) < 0) goto bad; - } - result = __Pyx_PyObject_Call(func, argstuple, kwargs); - bad: - Py_DECREF(argstuple); - return result; -} -#endif -static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t _nargs, PyObject *kwargs) { - Py_ssize_t nargs = __Pyx_PyVectorcall_NARGS(_nargs); -#if CYTHON_COMPILING_IN_CPYTHON - if (nargs == 0 && kwargs == NULL) { - if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_NOARGS)) - return __Pyx_PyObject_CallMethO(func, NULL); - } - else if (nargs == 1 && kwargs == NULL) { - if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_O)) - return __Pyx_PyObject_CallMethO(func, args[0]); - } -#endif - #if PY_VERSION_HEX < 0x030800B1 - #if CYTHON_FAST_PYCCALL - if (PyCFunction_Check(func)) { - if (kwargs) { - return _PyCFunction_FastCallDict(func, args, nargs, kwargs); - } else { - return _PyCFunction_FastCallKeywords(func, args, nargs, NULL); - } - } - #if PY_VERSION_HEX >= 0x030700A1 - if (!kwargs && __Pyx_IS_TYPE(func, &PyMethodDescr_Type)) { - return _PyMethodDescr_FastCallKeywords(func, args, nargs, NULL); - } - #endif - #endif - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs); - } - #endif - #endif - if (kwargs == NULL) { - #if CYTHON_VECTORCALL - #if PY_VERSION_HEX < 0x03090000 - vectorcallfunc f = _PyVectorcall_Function(func); - #else - vectorcallfunc f = PyVectorcall_Function(func); - #endif - if (f) { - return f(func, args, (size_t)nargs, NULL); - } - #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL - if (__Pyx_CyFunction_CheckExact(func)) { - __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func); - if (f) return f(func, args, (size_t)nargs, NULL); - } - #endif - } - if (nargs == 0) { - return __Pyx_PyObject_Call(func, __pyx_empty_tuple, kwargs); - } - #if PY_VERSION_HEX >= 0x03090000 && !CYTHON_COMPILING_IN_LIMITED_API - return PyObject_VectorcallDict(func, args, (size_t)nargs, kwargs); - #else - return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs); - #endif -} - -/* GetAttr */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { -#if CYTHON_USE_TYPE_SLOTS -#if PY_MAJOR_VERSION >= 3 - if (likely(PyUnicode_Check(n))) -#else - if (likely(PyString_Check(n))) -#endif - return __Pyx_PyObject_GetAttrStr(o, n); -#endif - return PyObject_GetAttr(o, n); -} - -/* HasAttr */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { - PyObject *r; - if (unlikely(!__Pyx_PyBaseString_Check(n))) { - PyErr_SetString(PyExc_TypeError, - "hasattr(): attribute name must be string"); - return -1; - } - r = __Pyx_GetAttr(o, n); - if (!r) { - PyErr_Clear(); - return 0; - } else { - Py_DECREF(r); - return 1; - } -} - -/* FixUpExtensionType */ -#if CYTHON_USE_TYPE_SPECS -static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { -#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - 
CYTHON_UNUSED_VAR(spec); - CYTHON_UNUSED_VAR(type); -#else - const PyType_Slot *slot = spec->slots; - while (slot && slot->slot && slot->slot != Py_tp_members) - slot++; - if (slot && slot->slot == Py_tp_members) { - int changed = 0; -#if !(PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON) - const -#endif - PyMemberDef *memb = (PyMemberDef*) slot->pfunc; - while (memb && memb->name) { - if (memb->name[0] == '_' && memb->name[1] == '_') { -#if PY_VERSION_HEX < 0x030900b1 - if (strcmp(memb->name, "__weaklistoffset__") == 0) { - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); - type->tp_weaklistoffset = memb->offset; - changed = 1; - } - else if (strcmp(memb->name, "__dictoffset__") == 0) { - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); - type->tp_dictoffset = memb->offset; - changed = 1; - } -#if CYTHON_METH_FASTCALL - else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); -#if PY_VERSION_HEX >= 0x030800b4 - type->tp_vectorcall_offset = memb->offset; -#else - type->tp_print = (printfunc) memb->offset; -#endif - changed = 1; - } -#endif -#else - if ((0)); -#endif -#if PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON - else if (strcmp(memb->name, "__module__") == 0) { - PyObject *descr; - assert(memb->type == T_OBJECT); - assert(memb->flags == 0 || memb->flags == READONLY); - descr = PyDescr_NewMember(type, memb); - if (unlikely(!descr)) - return -1; - if (unlikely(PyDict_SetItem(type->tp_dict, PyDescr_NAME(descr), descr) < 0)) { - Py_DECREF(descr); - return -1; - } - Py_DECREF(descr); - changed = 1; - } -#endif - } - memb++; - } - if (changed) - PyType_Modified(type); - } -#endif - return 0; -} -#endif - -/* FormatTypeName */ -#if CYTHON_COMPILING_IN_LIMITED_API -static __Pyx_TypeName -__Pyx_PyType_GetName(PyTypeObject* tp) -{ - PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, - __pyx_n_s_name); - if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { - PyErr_Clear(); - Py_XDECREF(name); - name = __Pyx_NewRef(__pyx_n_s__5); - } - return name; -} -#endif - -/* ValidateExternBase */ -static int __Pyx_validate_extern_base(PyTypeObject *base) { - Py_ssize_t itemsize; -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *py_itemsize; -#endif -#if !CYTHON_COMPILING_IN_LIMITED_API - itemsize = ((PyTypeObject *)base)->tp_itemsize; -#else - py_itemsize = PyObject_GetAttrString((PyObject*)base, "__itemsize__"); - if (!py_itemsize) - return -1; - itemsize = PyLong_AsSsize_t(py_itemsize); - Py_DECREF(py_itemsize); - py_itemsize = 0; - if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) - return -1; -#endif - if (itemsize) { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(base); - PyErr_Format(PyExc_TypeError, - "inheritance from PyVarObject types like '" __Pyx_FMT_TYPENAME "' not currently supported", b_name); - __Pyx_DECREF_TypeName(b_name); - return -1; - } - return 0; -} - -/* PyObjectCallNoArg */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { - PyObject *arg[2] = {NULL, NULL}; - return __Pyx_PyObject_FastCall(func, arg + 1, 0 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); -} - -/* PyObjectCallOneArg */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *args[2] = {NULL, arg}; - return __Pyx_PyObject_FastCall(func, args+1, 1 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); -} - -/* PyObjectGetMethod */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject 
**method) { - PyObject *attr; -#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP - __Pyx_TypeName type_name; - PyTypeObject *tp = Py_TYPE(obj); - PyObject *descr; - descrgetfunc f = NULL; - PyObject **dictptr, *dict; - int meth_found = 0; - assert (*method == NULL); - if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; - } - if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { - return 0; - } - descr = _PyType_Lookup(tp, name); - if (likely(descr != NULL)) { - Py_INCREF(descr); -#if defined(Py_TPFLAGS_METHOD_DESCRIPTOR) && Py_TPFLAGS_METHOD_DESCRIPTOR - if (__Pyx_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)) -#elif PY_MAJOR_VERSION >= 3 - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type))) - #endif -#else - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr))) - #endif -#endif - { - meth_found = 1; - } else { - f = Py_TYPE(descr)->tp_descr_get; - if (f != NULL && PyDescr_IsData(descr)) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - } - } - dictptr = _PyObject_GetDictPtr(obj); - if (dictptr != NULL && (dict = *dictptr) != NULL) { - Py_INCREF(dict); - attr = __Pyx_PyDict_GetItemStr(dict, name); - if (attr != NULL) { - Py_INCREF(attr); - Py_DECREF(dict); - Py_XDECREF(descr); - goto try_unpack; - } - Py_DECREF(dict); - } - if (meth_found) { - *method = descr; - return 1; - } - if (f != NULL) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - if (likely(descr != NULL)) { - *method = descr; - return 0; - } - type_name = __Pyx_PyType_GetName(tp); - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", - type_name, name); -#else - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", - type_name, PyString_AS_STRING(name)); -#endif - __Pyx_DECREF_TypeName(type_name); - return 0; -#else - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; -#endif -try_unpack: -#if CYTHON_UNPACK_METHODS - if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { - PyObject *function = PyMethod_GET_FUNCTION(attr); - Py_INCREF(function); - Py_DECREF(attr); - *method = function; - return 1; - } -#endif - *method = attr; - return 0; -} - -/* PyObjectCallMethod0 */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { - PyObject *method = NULL, *result = NULL; - int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); - if (likely(is_method)) { - result = __Pyx_PyObject_CallOneArg(method, obj); - Py_DECREF(method); - return result; - } - if (unlikely(!method)) goto bad; - result = __Pyx_PyObject_CallNoArg(method); - Py_DECREF(method); -bad: - return result; -} - -/* ValidateBasesTuple */ -#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS -static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases) { - Py_ssize_t i, n; -#if CYTHON_ASSUME_SAFE_MACROS - n = PyTuple_GET_SIZE(bases); -#else - n = PyTuple_Size(bases); - if (n < 0) return -1; -#endif - for (i = 1; i < n; i++) - { 
-#if CYTHON_AVOID_BORROWED_REFS - PyObject *b0 = PySequence_GetItem(bases, i); - if (!b0) return -1; -#elif CYTHON_ASSUME_SAFE_MACROS - PyObject *b0 = PyTuple_GET_ITEM(bases, i); -#else - PyObject *b0 = PyTuple_GetItem(bases, i); - if (!b0) return -1; -#endif - PyTypeObject *b; -#if PY_MAJOR_VERSION < 3 - if (PyClass_Check(b0)) - { - PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class", - PyString_AS_STRING(((PyClassObject*)b0)->cl_name)); -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } -#endif - b = (PyTypeObject*) b0; - if (!__Pyx_PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE)) - { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); - PyErr_Format(PyExc_TypeError, - "base class '" __Pyx_FMT_TYPENAME "' is not a heap type", b_name); - __Pyx_DECREF_TypeName(b_name); -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } - if (dictoffset == 0) - { - Py_ssize_t b_dictoffset = 0; -#if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY - b_dictoffset = b->tp_dictoffset; -#else - PyObject *py_b_dictoffset = PyObject_GetAttrString((PyObject*)b, "__dictoffset__"); - if (!py_b_dictoffset) goto dictoffset_return; - b_dictoffset = PyLong_AsSsize_t(py_b_dictoffset); - Py_DECREF(py_b_dictoffset); - if (b_dictoffset == -1 && PyErr_Occurred()) goto dictoffset_return; -#endif - if (b_dictoffset) { - { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); - PyErr_Format(PyExc_TypeError, - "extension type '%.200s' has no __dict__ slot, " - "but base type '" __Pyx_FMT_TYPENAME "' has: " - "either add 'cdef dict __dict__' to the extension type " - "or add '__slots__ = [...]' to the base type", - type_name, b_name); - __Pyx_DECREF_TypeName(b_name); - } -#if !(CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY) - dictoffset_return: -#endif -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } - } -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - } - return 0; -} -#endif - -/* PyType_Ready */ -static int __Pyx_PyType_Ready(PyTypeObject *t) { -#if CYTHON_USE_TYPE_SPECS || !(CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API) || defined(PYSTON_MAJOR_VERSION) - (void)__Pyx_PyObject_CallMethod0; -#if CYTHON_USE_TYPE_SPECS - (void)__Pyx_validate_bases_tuple; -#endif - return PyType_Ready(t); -#else - int r; - PyObject *bases = __Pyx_PyType_GetSlot(t, tp_bases, PyObject*); - if (bases && unlikely(__Pyx_validate_bases_tuple(t->tp_name, t->tp_dictoffset, bases) == -1)) - return -1; -#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) - { - int gc_was_enabled; - #if PY_VERSION_HEX >= 0x030A00b1 - gc_was_enabled = PyGC_Disable(); - (void)__Pyx_PyObject_CallMethod0; - #else - PyObject *ret, *py_status; - PyObject *gc = NULL; - #if PY_VERSION_HEX >= 0x030700a1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM+0 >= 0x07030400) - gc = PyImport_GetModule(__pyx_kp_u_gc); - #endif - if (unlikely(!gc)) gc = PyImport_Import(__pyx_kp_u_gc); - if (unlikely(!gc)) return -1; - py_status = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_isenabled); - if (unlikely(!py_status)) { - Py_DECREF(gc); - return -1; - } - gc_was_enabled = __Pyx_PyObject_IsTrue(py_status); - Py_DECREF(py_status); - if (gc_was_enabled > 0) { - ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_disable); - if (unlikely(!ret)) { - Py_DECREF(gc); - return -1; - } - Py_DECREF(ret); - } else if (unlikely(gc_was_enabled == -1)) { - Py_DECREF(gc); - return -1; - } - #endif - t->tp_flags |= Py_TPFLAGS_HEAPTYPE; -#if PY_VERSION_HEX >= 0x030A0000 - t->tp_flags |= 
Py_TPFLAGS_IMMUTABLETYPE; -#endif -#else - (void)__Pyx_PyObject_CallMethod0; -#endif - r = PyType_Ready(t); -#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) - t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; - #if PY_VERSION_HEX >= 0x030A00b1 - if (gc_was_enabled) - PyGC_Enable(); - #else - if (gc_was_enabled) { - PyObject *tp, *v, *tb; - PyErr_Fetch(&tp, &v, &tb); - ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_enable); - if (likely(ret || r == -1)) { - Py_XDECREF(ret); - PyErr_Restore(tp, v, tb); - } else { - Py_XDECREF(tp); - Py_XDECREF(v); - Py_XDECREF(tb); - r = -1; - } - } - Py_DECREF(gc); - #endif - } -#endif - return r; -#endif -} - -/* PyObject_GenericGetAttrNoDict */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { - __Pyx_TypeName type_name = __Pyx_PyType_GetName(tp); - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", - type_name, attr_name); -#else - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", - type_name, PyString_AS_STRING(attr_name)); -#endif - __Pyx_DECREF_TypeName(type_name); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { - PyObject *descr; - PyTypeObject *tp = Py_TYPE(obj); - if (unlikely(!PyString_Check(attr_name))) { - return PyObject_GenericGetAttr(obj, attr_name); - } - assert(!tp->tp_dictoffset); - descr = _PyType_Lookup(tp, attr_name); - if (unlikely(!descr)) { - return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); - } - Py_INCREF(descr); - #if PY_MAJOR_VERSION < 3 - if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) - #endif - { - descrgetfunc f = Py_TYPE(descr)->tp_descr_get; - if (unlikely(f)) { - PyObject *res = f(descr, obj, (PyObject *)tp); - Py_DECREF(descr); - return res; - } - } - return descr; -} -#endif - -/* PyObject_GenericGetAttr */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { - if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { - return PyObject_GenericGetAttr(obj, attr_name); - } - return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); -} -#endif - -/* SetupReduce */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { - int ret; - PyObject *name_attr; - name_attr = __Pyx_PyObject_GetAttrStrNoError(meth, __pyx_n_s_name); - if (likely(name_attr)) { - ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); - } else { - ret = -1; - } - if (unlikely(ret < 0)) { - PyErr_Clear(); - ret = 0; - } - Py_XDECREF(name_attr); - return ret; -} -static int __Pyx_setup_reduce(PyObject* type_obj) { - int ret = 0; - PyObject *object_reduce = NULL; - PyObject *object_getstate = NULL; - PyObject *object_reduce_ex = NULL; - PyObject *reduce = NULL; - PyObject *reduce_ex = NULL; - PyObject *reduce_cython = NULL; - PyObject *setstate = NULL; - PyObject *setstate_cython = NULL; - PyObject *getstate = NULL; -#if CYTHON_USE_PYTYPE_LOOKUP - getstate = _PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate); -#else - getstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_getstate); - if (!getstate && PyErr_Occurred()) { - goto __PYX_BAD; - } -#endif - if (getstate) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_getstate = _PyType_Lookup(&PyBaseObject_Type, 
__pyx_n_s_getstate); -#else - object_getstate = __Pyx_PyObject_GetAttrStrNoError((PyObject*)&PyBaseObject_Type, __pyx_n_s_getstate); - if (!object_getstate && PyErr_Occurred()) { - goto __PYX_BAD; - } -#endif - if (object_getstate != getstate) { - goto __PYX_GOOD; - } - } -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#else - object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#endif - reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; - if (reduce_ex == object_reduce_ex) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#else - object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#endif - reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; - if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { - reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); - if (likely(reduce_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (reduce == object_reduce || PyErr_Occurred()) { - goto __PYX_BAD; - } - setstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate); - if (!setstate) PyErr_Clear(); - if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { - setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); - if (likely(setstate_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (!setstate || PyErr_Occurred()) { - goto __PYX_BAD; - } - } - PyType_Modified((PyTypeObject*)type_obj); - } - } - goto __PYX_GOOD; -__PYX_BAD: - if (!PyErr_Occurred()) { - __Pyx_TypeName type_obj_name = - __Pyx_PyType_GetName((PyTypeObject*)type_obj); - PyErr_Format(PyExc_RuntimeError, - "Unable to initialize pickling for " __Pyx_FMT_TYPENAME, type_obj_name); - __Pyx_DECREF_TypeName(type_obj_name); - } - ret = -1; -__PYX_GOOD: -#if !CYTHON_USE_PYTYPE_LOOKUP - Py_XDECREF(object_reduce); - Py_XDECREF(object_reduce_ex); - Py_XDECREF(object_getstate); - Py_XDECREF(getstate); -#endif - Py_XDECREF(reduce); - Py_XDECREF(reduce_ex); - Py_XDECREF(reduce_cython); - Py_XDECREF(setstate); - Py_XDECREF(setstate_cython); - return ret; -} -#endif - -/* SetVTable */ -static int __Pyx_SetVtable(PyTypeObject *type, void *vtable) { - PyObject *ob = PyCapsule_New(vtable, 0, 0); - if (unlikely(!ob)) - goto bad; -#if CYTHON_COMPILING_IN_LIMITED_API - if (unlikely(PyObject_SetAttr((PyObject *) type, __pyx_n_s_pyx_vtable, ob) < 0)) -#else - if (unlikely(PyDict_SetItem(type->tp_dict, __pyx_n_s_pyx_vtable, ob) < 0)) -#endif - goto bad; - Py_DECREF(ob); - return 0; -bad: - Py_XDECREF(ob); - return -1; -} - -/* GetVTable */ -static void* __Pyx_GetVtable(PyTypeObject *type) { - void* ptr; -#if 
CYTHON_COMPILING_IN_LIMITED_API - PyObject *ob = PyObject_GetAttr((PyObject *)type, __pyx_n_s_pyx_vtable); -#else - PyObject *ob = PyObject_GetItem(type->tp_dict, __pyx_n_s_pyx_vtable); -#endif - if (!ob) - goto bad; - ptr = PyCapsule_GetPointer(ob, 0); - if (!ptr && !PyErr_Occurred()) - PyErr_SetString(PyExc_RuntimeError, "invalid vtable found for imported type"); - Py_DECREF(ob); - return ptr; -bad: - Py_XDECREF(ob); - return NULL; -} - -/* MergeVTables */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_MergeVtables(PyTypeObject *type) { - int i; - void** base_vtables; - __Pyx_TypeName tp_base_name; - __Pyx_TypeName base_name; - void* unknown = (void*)-1; - PyObject* bases = type->tp_bases; - int base_depth = 0; - { - PyTypeObject* base = type->tp_base; - while (base) { - base_depth += 1; - base = base->tp_base; - } - } - base_vtables = (void**) malloc(sizeof(void*) * (size_t)(base_depth + 1)); - base_vtables[0] = unknown; - for (i = 1; i < PyTuple_GET_SIZE(bases); i++) { - void* base_vtable = __Pyx_GetVtable(((PyTypeObject*)PyTuple_GET_ITEM(bases, i))); - if (base_vtable != NULL) { - int j; - PyTypeObject* base = type->tp_base; - for (j = 0; j < base_depth; j++) { - if (base_vtables[j] == unknown) { - base_vtables[j] = __Pyx_GetVtable(base); - base_vtables[j + 1] = unknown; - } - if (base_vtables[j] == base_vtable) { - break; - } else if (base_vtables[j] == NULL) { - goto bad; - } - base = base->tp_base; - } - } - } - PyErr_Clear(); - free(base_vtables); - return 0; -bad: - tp_base_name = __Pyx_PyType_GetName(type->tp_base); - base_name = __Pyx_PyType_GetName((PyTypeObject*)PyTuple_GET_ITEM(bases, i)); - PyErr_Format(PyExc_TypeError, - "multiple bases have vtable conflict: '" __Pyx_FMT_TYPENAME "' and '" __Pyx_FMT_TYPENAME "'", tp_base_name, base_name); - __Pyx_DECREF_TypeName(tp_base_name); - __Pyx_DECREF_TypeName(base_name); - free(base_vtables); - return -1; -} -#endif - -/* FetchSharedCythonModule */ -static PyObject *__Pyx_FetchSharedCythonABIModule(void) { - return __Pyx_PyImport_AddModuleRef((char*) __PYX_ABI_MODULE_NAME); -} - -/* FetchCommonType */ -static int __Pyx_VerifyCachedType(PyObject *cached_type, - const char *name, - Py_ssize_t basicsize, - Py_ssize_t expected_basicsize) { - if (!PyType_Check(cached_type)) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s is not a type object", name); - return -1; - } - if (basicsize != expected_basicsize) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s has the wrong size, try recompiling", - name); - return -1; - } - return 0; -} -#if !CYTHON_USE_TYPE_SPECS -static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { - PyObject* abi_module; - const char* object_name; - PyTypeObject *cached_type = NULL; - abi_module = __Pyx_FetchSharedCythonABIModule(); - if (!abi_module) return NULL; - object_name = strrchr(type->tp_name, '.'); - object_name = object_name ? 
object_name+1 : type->tp_name; - cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); - if (cached_type) { - if (__Pyx_VerifyCachedType( - (PyObject *)cached_type, - object_name, - cached_type->tp_basicsize, - type->tp_basicsize) < 0) { - goto bad; - } - goto done; - } - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; - PyErr_Clear(); - if (PyType_Ready(type) < 0) goto bad; - if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) - goto bad; - Py_INCREF(type); - cached_type = type; -done: - Py_DECREF(abi_module); - return cached_type; -bad: - Py_XDECREF(cached_type); - cached_type = NULL; - goto done; -} -#else -static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { - PyObject *abi_module, *cached_type = NULL; - const char* object_name = strrchr(spec->name, '.'); - object_name = object_name ? object_name+1 : spec->name; - abi_module = __Pyx_FetchSharedCythonABIModule(); - if (!abi_module) return NULL; - cached_type = PyObject_GetAttrString(abi_module, object_name); - if (cached_type) { - Py_ssize_t basicsize; -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *py_basicsize; - py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); - if (unlikely(!py_basicsize)) goto bad; - basicsize = PyLong_AsSsize_t(py_basicsize); - Py_DECREF(py_basicsize); - py_basicsize = 0; - if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; -#else - basicsize = likely(PyType_Check(cached_type)) ? ((PyTypeObject*) cached_type)->tp_basicsize : -1; -#endif - if (__Pyx_VerifyCachedType( - cached_type, - object_name, - basicsize, - spec->basicsize) < 0) { - goto bad; - } - goto done; - } - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; - PyErr_Clear(); - CYTHON_UNUSED_VAR(module); - cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); - if (unlikely(!cached_type)) goto bad; - if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; - if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; -done: - Py_DECREF(abi_module); - assert(cached_type == NULL || PyType_Check(cached_type)); - return (PyTypeObject *) cached_type; -bad: - Py_XDECREF(cached_type); - cached_type = NULL; - goto done; -} -#endif - -/* PyVectorcallFastCallDict */ -#if CYTHON_METH_FASTCALL -static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) -{ - PyObject *res = NULL; - PyObject *kwnames; - PyObject **newargs; - PyObject **kwvalues; - Py_ssize_t i, pos; - size_t j; - PyObject *key, *value; - unsigned long keys_are_strings; - Py_ssize_t nkw = PyDict_GET_SIZE(kw); - newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); - if (unlikely(newargs == NULL)) { - PyErr_NoMemory(); - return NULL; - } - for (j = 0; j < nargs; j++) newargs[j] = args[j]; - kwnames = PyTuple_New(nkw); - if (unlikely(kwnames == NULL)) { - PyMem_Free(newargs); - return NULL; - } - kwvalues = newargs + nargs; - pos = i = 0; - keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; - while (PyDict_Next(kw, &pos, &key, &value)) { - keys_are_strings &= Py_TYPE(key)->tp_flags; - Py_INCREF(key); - Py_INCREF(value); - PyTuple_SET_ITEM(kwnames, i, key); - kwvalues[i] = value; - i++; - } - if (unlikely(!keys_are_strings)) { - PyErr_SetString(PyExc_TypeError, "keywords must be strings"); - goto cleanup; - } - res = vc(func, newargs, nargs, kwnames); 
-cleanup: - Py_DECREF(kwnames); - for (i = 0; i < nkw; i++) - Py_DECREF(kwvalues[i]); - PyMem_Free(newargs); - return res; -} -static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) -{ - if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { - return vc(func, args, nargs, NULL); - } - return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); -} -#endif - -/* CythonFunctionShared */ -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { - if (__Pyx_CyFunction_Check(func)) { - return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc; - } else if (PyCFunction_Check(func)) { - return PyCFunction_GetFunction(func) == (PyCFunction) cfunc; - } - return 0; -} -#else -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { - return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; -} -#endif -static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - __Pyx_Py_XDECREF_SET( - __Pyx_CyFunction_GetClassObj(f), - ((classobj) ? __Pyx_NewRef(classobj) : NULL)); -#else - __Pyx_Py_XDECREF_SET( - ((PyCMethodObject *) (f))->mm_class, - (PyTypeObject*)((classobj) ? __Pyx_NewRef(classobj) : NULL)); -#endif -} -static PyObject * -__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) -{ - CYTHON_UNUSED_VAR(closure); - if (unlikely(op->func_doc == NULL)) { -#if CYTHON_COMPILING_IN_LIMITED_API - op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); - if (unlikely(!op->func_doc)) return NULL; -#else - if (((PyCFunctionObject*)op)->m_ml->ml_doc) { -#if PY_MAJOR_VERSION >= 3 - op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); -#else - op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); -#endif - if (unlikely(op->func_doc == NULL)) - return NULL; - } else { - Py_INCREF(Py_None); - return Py_None; - } -#endif - } - Py_INCREF(op->func_doc); - return op->func_doc; -} -static int -__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (value == NULL) { - value = Py_None; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_doc, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(op->func_name == NULL)) { -#if CYTHON_COMPILING_IN_LIMITED_API - op->func_name = PyObject_GetAttrString(op->func, "__name__"); -#elif PY_MAJOR_VERSION >= 3 - op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); -#else - op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); -#endif - if (unlikely(op->func_name == NULL)) - return NULL; - } - Py_INCREF(op->func_name); - return op->func_name; -} -static int -__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__name__ must be set to a string object"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_name, value); - 
return 0; -} -static PyObject * -__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - Py_INCREF(op->func_qualname); - return op->func_qualname; -} -static int -__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__qualname__ must be set to a string object"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_qualname, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(op->func_dict == NULL)) { - op->func_dict = PyDict_New(); - if (unlikely(op->func_dict == NULL)) - return NULL; - } - Py_INCREF(op->func_dict); - return op->func_dict; -} -static int -__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(value == NULL)) { - PyErr_SetString(PyExc_TypeError, - "function's dictionary may not be deleted"); - return -1; - } - if (unlikely(!PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "setting function's dictionary to a non-dict"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_dict, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - Py_INCREF(op->func_globals); - return op->func_globals; -} -static PyObject * -__Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(op); - CYTHON_UNUSED_VAR(context); - Py_INCREF(Py_None); - return Py_None; -} -static PyObject * -__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) -{ - PyObject* result = (op->func_code) ? 
op->func_code : Py_None; - CYTHON_UNUSED_VAR(context); - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { - int result = 0; - PyObject *res = op->defaults_getter((PyObject *) op); - if (unlikely(!res)) - return -1; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - op->defaults_tuple = PyTuple_GET_ITEM(res, 0); - Py_INCREF(op->defaults_tuple); - op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); - Py_INCREF(op->defaults_kwdict); - #else - op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); - if (unlikely(!op->defaults_tuple)) result = -1; - else { - op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); - if (unlikely(!op->defaults_kwdict)) result = -1; - } - #endif - Py_DECREF(res); - return result; -} -static int -__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value) { - value = Py_None; - } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__defaults__ must be set to a tuple object"); - return -1; - } - PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " - "currently affect the values used in function calls", 1); - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->defaults_tuple; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - if (op->defaults_getter) { - if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; - result = op->defaults_tuple; - } else { - result = Py_None; - } - } - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value) { - value = Py_None; - } else if (unlikely(value != Py_None && !PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__kwdefaults__ must be set to a dict object"); - return -1; - } - PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " - "currently affect the values used in function calls", 1); - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->defaults_kwdict; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - if (op->defaults_getter) { - if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; - result = op->defaults_kwdict; - } else { - result = Py_None; - } - } - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value || value == Py_None) { - value = NULL; - } else if (unlikely(!PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__annotations__ must be set to a dict object"); - return -1; - } - Py_XINCREF(value); - __Pyx_Py_XDECREF_SET(op->func_annotations, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->func_annotations; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - result = PyDict_New(); - if (unlikely(!result)) return NULL; - op->func_annotations = result; - } - Py_INCREF(result); - return result; -} -static PyObject * 
-__Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { - int is_coroutine; - CYTHON_UNUSED_VAR(context); - if (op->func_is_coroutine) { - return __Pyx_NewRef(op->func_is_coroutine); - } - is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; -#if PY_VERSION_HEX >= 0x03050000 - if (is_coroutine) { - PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; - fromlist = PyList_New(1); - if (unlikely(!fromlist)) return NULL; - Py_INCREF(marker); -#if CYTHON_ASSUME_SAFE_MACROS - PyList_SET_ITEM(fromlist, 0, marker); -#else - if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { - Py_DECREF(marker); - Py_DECREF(fromlist); - return NULL; - } -#endif - module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); - Py_DECREF(fromlist); - if (unlikely(!module)) goto ignore; - op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); - Py_DECREF(module); - if (likely(op->func_is_coroutine)) { - return __Pyx_NewRef(op->func_is_coroutine); - } -ignore: - PyErr_Clear(); - } -#endif - op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); - return __Pyx_NewRef(op->func_is_coroutine); -} -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject * -__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { - CYTHON_UNUSED_VAR(context); - return PyObject_GetAttrString(op->func, "__module__"); -} -static int -__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - return PyObject_SetAttrString(op->func, "__module__", value); -} -#endif -static PyGetSetDef __pyx_CyFunction_getsets[] = { - {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, - {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, - {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, - {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, - {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, - {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, - {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, - {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, - {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, - {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, - {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, - {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, - {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, - {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, - {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, - {(char *) "__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, - {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, - {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, -#if CYTHON_COMPILING_IN_LIMITED_API - {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, -#endif - {0, 0, 0, 0, 0} -}; -static PyMemberDef 
__pyx_CyFunction_members[] = { -#if !CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, -#endif -#if CYTHON_USE_TYPE_SPECS - {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, -#if CYTHON_METH_FASTCALL -#if CYTHON_BACKPORT_VECTORCALL - {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, -#else -#if !CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, -#endif -#endif -#endif -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, -#else - {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, -#endif -#endif - {0, 0, 0, 0, 0} -}; -static PyObject * -__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) -{ - CYTHON_UNUSED_VAR(args); -#if PY_MAJOR_VERSION >= 3 - Py_INCREF(m->func_qualname); - return m->func_qualname; -#else - return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); -#endif -} -static PyMethodDef __pyx_CyFunction_methods[] = { - {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, - {0, 0, 0, 0} -}; -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API -#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) -#else -#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) -#endif -static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { -#if !CYTHON_COMPILING_IN_LIMITED_API - PyCFunctionObject *cf = (PyCFunctionObject*) op; -#endif - if (unlikely(op == NULL)) - return NULL; -#if CYTHON_COMPILING_IN_LIMITED_API - op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); - if (unlikely(!op->func)) return NULL; -#endif - op->flags = flags; - __Pyx_CyFunction_weakreflist(op) = NULL; -#if !CYTHON_COMPILING_IN_LIMITED_API - cf->m_ml = ml; - cf->m_self = (PyObject *) op; -#endif - Py_XINCREF(closure); - op->func_closure = closure; -#if !CYTHON_COMPILING_IN_LIMITED_API - Py_XINCREF(module); - cf->m_module = module; -#endif - op->func_dict = NULL; - op->func_name = NULL; - Py_INCREF(qualname); - op->func_qualname = qualname; - op->func_doc = NULL; -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - op->func_classobj = NULL; -#else - ((PyCMethodObject*)op)->mm_class = NULL; -#endif - op->func_globals = globals; - Py_INCREF(op->func_globals); - Py_XINCREF(code); - op->func_code = code; - op->defaults_pyobjects = 0; - op->defaults_size = 0; - op->defaults = NULL; - op->defaults_tuple = NULL; - op->defaults_kwdict = NULL; - op->defaults_getter = NULL; - op->func_annotations = NULL; - op->func_is_coroutine = NULL; -#if CYTHON_METH_FASTCALL - switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { - case METH_NOARGS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; - break; - case METH_O: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; - break; - case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; - break; - case METH_FASTCALL 
| METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS; - break; - case METH_VARARGS | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = NULL; - break; - default: - PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); - Py_DECREF(op); - return NULL; - } -#endif - return (PyObject *) op; -} -static int -__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m) -{ - Py_CLEAR(m->func_closure); -#if CYTHON_COMPILING_IN_LIMITED_API - Py_CLEAR(m->func); -#else - Py_CLEAR(((PyCFunctionObject*)m)->m_module); -#endif - Py_CLEAR(m->func_dict); - Py_CLEAR(m->func_name); - Py_CLEAR(m->func_qualname); - Py_CLEAR(m->func_doc); - Py_CLEAR(m->func_globals); - Py_CLEAR(m->func_code); -#if !CYTHON_COMPILING_IN_LIMITED_API -#if PY_VERSION_HEX < 0x030900B1 - Py_CLEAR(__Pyx_CyFunction_GetClassObj(m)); -#else - { - PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class; - ((PyCMethodObject *) (m))->mm_class = NULL; - Py_XDECREF(cls); - } -#endif -#endif - Py_CLEAR(m->defaults_tuple); - Py_CLEAR(m->defaults_kwdict); - Py_CLEAR(m->func_annotations); - Py_CLEAR(m->func_is_coroutine); - if (m->defaults) { - PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); - int i; - for (i = 0; i < m->defaults_pyobjects; i++) - Py_XDECREF(pydefaults[i]); - PyObject_Free(m->defaults); - m->defaults = NULL; - } - return 0; -} -static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m) -{ - if (__Pyx_CyFunction_weakreflist(m) != NULL) - PyObject_ClearWeakRefs((PyObject *) m); - __Pyx_CyFunction_clear(m); - __Pyx_PyHeapTypeObject_GC_Del(m); -} -static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m) -{ - PyObject_GC_UnTrack(m); - __Pyx__CyFunction_dealloc(m); -} -static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg) -{ - Py_VISIT(m->func_closure); -#if CYTHON_COMPILING_IN_LIMITED_API - Py_VISIT(m->func); -#else - Py_VISIT(((PyCFunctionObject*)m)->m_module); -#endif - Py_VISIT(m->func_dict); - Py_VISIT(m->func_name); - Py_VISIT(m->func_qualname); - Py_VISIT(m->func_doc); - Py_VISIT(m->func_globals); - Py_VISIT(m->func_code); -#if !CYTHON_COMPILING_IN_LIMITED_API - Py_VISIT(__Pyx_CyFunction_GetClassObj(m)); -#endif - Py_VISIT(m->defaults_tuple); - Py_VISIT(m->defaults_kwdict); - Py_VISIT(m->func_is_coroutine); - if (m->defaults) { - PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); - int i; - for (i = 0; i < m->defaults_pyobjects; i++) - Py_VISIT(pydefaults[i]); - } - return 0; -} -static PyObject* -__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op) -{ -#if PY_MAJOR_VERSION >= 3 - return PyUnicode_FromFormat("", - op->func_qualname, (void *)op); -#else - return PyString_FromFormat("", - PyString_AsString(op->func_qualname), (void *)op); -#endif -} -static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) { -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *f = ((__pyx_CyFunctionObject*)func)->func; - PyObject *py_name = NULL; - PyCFunction meth; - int flags; - meth = PyCFunction_GetFunction(f); - if (unlikely(!meth)) return NULL; - flags = PyCFunction_GetFlags(f); - if (unlikely(flags < 0)) return NULL; -#else - PyCFunctionObject* f = (PyCFunctionObject*)func; - PyCFunction meth = f->m_ml->ml_meth; - int flags = f->m_ml->ml_flags; -#endif - Py_ssize_t size; - switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { - case METH_VARARGS: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) - return (*meth)(self, arg); - break; 
- case METH_VARARGS | METH_KEYWORDS: - return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); - case METH_NOARGS: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) { -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(arg); -#else - size = PyTuple_Size(arg); - if (unlikely(size < 0)) return NULL; -#endif - if (likely(size == 0)) - return (*meth)(self, NULL); -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, - "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - py_name, size); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, - "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - f->m_ml->ml_name, size); -#endif - return NULL; - } - break; - case METH_O: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) { -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(arg); -#else - size = PyTuple_Size(arg); - if (unlikely(size < 0)) return NULL; -#endif - if (likely(size == 1)) { - PyObject *result, *arg0; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - arg0 = PyTuple_GET_ITEM(arg, 0); - #else - arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; - #endif - result = (*meth)(self, arg0); - #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) - Py_DECREF(arg0); - #endif - return result; - } -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, - "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - py_name, size); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, - "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - f->m_ml->ml_name, size); -#endif - return NULL; - } - break; - default: - PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); - return NULL; - } -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", - py_name); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", - f->m_ml->ml_name); -#endif - return NULL; -} -static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *self, *result; -#if CYTHON_COMPILING_IN_LIMITED_API - self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); - if (unlikely(!self) && PyErr_Occurred()) return NULL; -#else - self = ((PyCFunctionObject*)func)->m_self; -#endif - result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); - return result; -} -static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { - PyObject *result; - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; -#if CYTHON_METH_FASTCALL - __pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); - if (vc) { -#if CYTHON_ASSUME_SAFE_MACROS - return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); -#else - (void) &__Pyx_PyVectorcall_FastCallDict; - return PyVectorcall_Call(func, args, kw); -#endif - } -#endif - if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { - Py_ssize_t argc; - PyObject *new_args; - PyObject *self; -#if CYTHON_ASSUME_SAFE_MACROS - 
argc = PyTuple_GET_SIZE(args); -#else - argc = PyTuple_Size(args); - if (unlikely(!argc) < 0) return NULL; -#endif - new_args = PyTuple_GetSlice(args, 1, argc); - if (unlikely(!new_args)) - return NULL; - self = PyTuple_GetItem(args, 0); - if (unlikely(!self)) { - Py_DECREF(new_args); -#if PY_MAJOR_VERSION > 2 - PyErr_Format(PyExc_TypeError, - "unbound method %.200S() needs an argument", - cyfunc->func_qualname); -#else - PyErr_SetString(PyExc_TypeError, - "unbound method needs an argument"); -#endif - return NULL; - } - result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); - Py_DECREF(new_args); - } else { - result = __Pyx_CyFunction_Call(func, args, kw); - } - return result; -} -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames) -{ - int ret = 0; - if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { - if (unlikely(nargs < 1)) { - PyErr_Format(PyExc_TypeError, "%.200s() needs an argument", - ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); - return -1; - } - ret = 1; - } - if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); - return -1; - } - return ret; -} -static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - if (unlikely(nargs != 0)) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - def->ml_name, nargs); - return NULL; - } - return def->ml_meth(self, NULL); -} -static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - if (unlikely(nargs != 1)) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - def->ml_name, nargs); - return NULL; - } - return def->ml_meth(self, args[0]); -} -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch 
(__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); -} -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; - PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); -} -#endif -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_CyFunctionType_slots[] = { - {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, - {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, - {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, - {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, - {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, - {Py_tp_methods, (void *)__pyx_CyFunction_methods}, - {Py_tp_members, (void *)__pyx_CyFunction_members}, - {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, - {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, - {0, 0}, -}; -static PyType_Spec __pyx_CyFunctionType_spec = { - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - __pyx_CyFunctionType_slots -}; -#else -static PyTypeObject __pyx_CyFunctionType_type = { - PyVarObject_HEAD_INIT(0, 0) - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, - (destructor) __Pyx_CyFunction_dealloc, -#if !CYTHON_METH_FASTCALL - 0, -#elif CYTHON_BACKPORT_VECTORCALL - (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), -#else - offsetof(PyCFunctionObject, vectorcall), -#endif - 0, - 0, -#if PY_MAJOR_VERSION < 3 - 0, -#else - 0, -#endif - (reprfunc) __Pyx_CyFunction_repr, - 0, - 0, - 0, - 0, - __Pyx_CyFunction_CallAsMethod, - 0, - 0, - 0, - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - 0, - (traverseproc) __Pyx_CyFunction_traverse, - (inquiry) __Pyx_CyFunction_clear, - 0, -#if PY_VERSION_HEX < 0x030500A0 - offsetof(__pyx_CyFunctionObject, func_weakreflist), -#else - offsetof(PyCFunctionObject, m_weakreflist), -#endif - 0, - 0, - __pyx_CyFunction_methods, - __pyx_CyFunction_members, - __pyx_CyFunction_getsets, - 0, - 0, - __Pyx_PyMethod_New, - 0, - offsetof(__pyx_CyFunctionObject, func_dict), - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, -#if PY_VERSION_HEX >= 0x030400a1 - 0, -#endif -#if PY_VERSION_HEX >= 0x030800b1 && 
(!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, -#endif -#if __PYX_NEED_TP_PRINT_SLOT - 0, -#endif -#if PY_VERSION_HEX >= 0x030C0000 - 0, -#endif -#if PY_VERSION_HEX >= 0x030d00A4 - 0, -#endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, -#endif -}; -#endif -static int __pyx_CyFunction_init(PyObject *module) { -#if CYTHON_USE_TYPE_SPECS - __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); -#else - CYTHON_UNUSED_VAR(module); - __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); -#endif - if (unlikely(__pyx_CyFunctionType == NULL)) { - return -1; - } - return 0; -} -static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults = PyObject_Malloc(size); - if (unlikely(!m->defaults)) - return PyErr_NoMemory(); - memset(m->defaults, 0, size); - m->defaults_pyobjects = pyobjects; - m->defaults_size = size; - return m->defaults; -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_tuple = tuple; - Py_INCREF(tuple); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_kwdict = dict; - Py_INCREF(dict); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->func_annotations = dict; - Py_INCREF(dict); -} - -/* CythonFunction */ -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { - PyObject *op = __Pyx_CyFunction_Init( - PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), - ml, flags, qualname, closure, module, globals, code - ); - if (likely(op)) { - PyObject_GC_Track(op); - } - return op; -} - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - CYTHON_MAYBE_UNUSED_VAR(tstate); - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} -#endif - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API - #ifndef Py_BUILD_CORE - #define Py_BUILD_CORE 1 - #endif - #include "internal/pycore_frame.h" -#endif -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, - PyObject *firstlineno, PyObject *name) { - PyObject *replace = NULL; - if 
(unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; - replace = PyObject_GetAttrString(code, "replace"); - if (likely(replace)) { - PyObject *result; - result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); - Py_DECREF(replace); - return result; - } - PyErr_Clear(); - #if __PYX_LIMITED_VERSION_HEX < 0x030780000 - { - PyObject *compiled = NULL, *result = NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; - compiled = Py_CompileString( - "out = type(code)(\n" - " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" - " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" - " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" - " code.co_lnotab)\n", "", Py_file_input); - if (!compiled) return NULL; - result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); - Py_DECREF(compiled); - if (!result) PyErr_Print(); - Py_DECREF(result); - result = PyDict_GetItemString(scratch_dict, "out"); - if (result) Py_INCREF(result); - return result; - } - #else - return NULL; - #endif -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; - PyObject *replace = NULL, *getframe = NULL, *frame = NULL; - PyObject *exc_type, *exc_value, *exc_traceback; - int success = 0; - if (c_line) { - (void) __pyx_cfilenm; - (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); - } - PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); - code_object = Py_CompileString("_getframe()", filename, Py_eval_input); - if (unlikely(!code_object)) goto bad; - py_py_line = PyLong_FromLong(py_line); - if (unlikely(!py_py_line)) goto bad; - py_funcname = PyUnicode_FromString(funcname); - if (unlikely(!py_funcname)) goto bad; - dict = PyDict_New(); - if (unlikely(!dict)) goto bad; - { - PyObject *old_code_object = code_object; - code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); - Py_DECREF(old_code_object); - } - if (unlikely(!code_object)) goto bad; - getframe = PySys_GetObject("_getframe"); - if (unlikely(!getframe)) goto bad; - if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad; - frame = PyEval_EvalCode(code_object, dict, dict); - if (unlikely(!frame) || frame == Py_None) goto bad; - success = 1; - bad: - PyErr_Restore(exc_type, exc_value, exc_traceback); - Py_XDECREF(code_object); - Py_XDECREF(py_py_line); - Py_XDECREF(py_funcname); - Py_XDECREF(dict); - Py_XDECREF(replace); - if (success) { - PyTraceBack_Here( - (struct _frame*)frame); - } - Py_XDECREF(frame); -} -#else -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = NULL; - PyObject *py_funcname = NULL; - #if PY_MAJOR_VERSION < 3 - PyObject *py_srcfile = NULL; - py_srcfile = PyString_FromString(filename); - if (!py_srcfile) goto bad; - #endif - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto bad; - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto bad; - funcname = 
PyUnicode_AsUTF8(py_funcname); - if (!funcname) goto bad; - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - if (!py_funcname) goto bad; - #endif - } - #if PY_MAJOR_VERSION < 3 - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - #else - py_code = PyCode_NewEmpty(filename, funcname, py_line); - #endif - Py_XDECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_funcname); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_srcfile); - #endif - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject *ptype, *pvalue, *ptraceback; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) { - /* If the code object creation fails, then we should clear the - fetched exception references and propagate the new exception */ - Py_XDECREF(ptype); - Py_XDECREF(pvalue); - Py_XDECREF(ptraceback); - goto bad; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} -#endif - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(long) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned 
long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(long) <= sizeof(unsigned long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | 
(long)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } - } -#endif - if ((sizeof(long) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - long val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (long) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (long) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (long) -1; - } else { - stepval = v; - } - v = NULL; - val = (long) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((long) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 
0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((long) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (long) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - unsigned char *bytes = (unsigned char *)&value; -#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 - if (is_unsigned) { - return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); - } else { - return PyLong_FromNativeBytes(bytes, sizeof(value), -1); - } -#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 - int one = 1; int little = (int)*(unsigned char *)&one; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); -#else - int one = 1; int little = (int)*(unsigned char *)&one; - PyObject *from_bytes, *result = NULL; - PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; - from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); - if (!from_bytes) return NULL; - py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); - if (!py_bytes) goto limited_bad; - order_str = PyUnicode_FromString(little ? 
"little" : "big"); - if (!order_str) goto limited_bad; - arg_tuple = PyTuple_Pack(2, py_bytes, order_str); - if (!arg_tuple) goto limited_bad; - if (!is_unsigned) { - kwds = PyDict_New(); - if (!kwds) goto limited_bad; - if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; - } - result = PyObject_Call(from_bytes, arg_tuple, kwds); - limited_bad: - Py_XDECREF(kwds); - Py_XDECREF(arg_tuple); - Py_XDECREF(order_str); - Py_XDECREF(py_bytes); - Py_XDECREF(from_bytes); - return result; -#endif - } -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const unsigned int neg_one = (unsigned int) -1, const_zero = (unsigned int) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(unsigned int) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(unsigned int) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned int) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(unsigned int) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned int) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - unsigned char *bytes = (unsigned char *)&value; -#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 - if (is_unsigned) { - return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); - } else { - return PyLong_FromNativeBytes(bytes, sizeof(value), -1); - } -#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 - int one = 1; int little = (int)*(unsigned char *)&one; - return _PyLong_FromByteArray(bytes, sizeof(unsigned int), - little, !is_unsigned); -#else - int one = 1; int little = (int)*(unsigned char *)&one; - PyObject *from_bytes, *result = NULL; - PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; - from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); - if (!from_bytes) return NULL; - py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(unsigned int)); - if (!py_bytes) goto limited_bad; - order_str = PyUnicode_FromString(little ? 
"little" : "big"); - if (!order_str) goto limited_bad; - arg_tuple = PyTuple_Pack(2, py_bytes, order_str); - if (!arg_tuple) goto limited_bad; - if (!is_unsigned) { - kwds = PyDict_New(); - if (!kwds) goto limited_bad; - if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; - } - result = PyObject_Call(from_bytes, arg_tuple, kwds); - limited_bad: - Py_XDECREF(kwds); - Py_XDECREF(arg_tuple); - Py_XDECREF(order_str); - Py_XDECREF(py_bytes); - Py_XDECREF(from_bytes); - return result; -#endif - } -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_PY_LONG_LONG(unsigned PY_LONG_LONG value) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG) -1, const_zero = (unsigned PY_LONG_LONG) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(unsigned PY_LONG_LONG) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(unsigned PY_LONG_LONG) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - unsigned char *bytes = (unsigned char *)&value; -#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 - if (is_unsigned) { - return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); - } else { - return PyLong_FromNativeBytes(bytes, sizeof(value), -1); - } -#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 - int one = 1; int little = (int)*(unsigned char *)&one; - return _PyLong_FromByteArray(bytes, sizeof(unsigned PY_LONG_LONG), - little, !is_unsigned); -#else - int one = 1; int little = (int)*(unsigned char *)&one; - PyObject *from_bytes, *result = NULL; - PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; - from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); - if (!from_bytes) return NULL; - py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(unsigned PY_LONG_LONG)); - if (!py_bytes) goto limited_bad; - order_str = PyUnicode_FromString(little ? 
"little" : "big"); - if (!order_str) goto limited_bad; - arg_tuple = PyTuple_Pack(2, py_bytes, order_str); - if (!arg_tuple) goto limited_bad; - if (!is_unsigned) { - kwds = PyDict_New(); - if (!kwds) goto limited_bad; - if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; - } - result = PyObject_Call(from_bytes, arg_tuple, kwds); - limited_bad: - Py_XDECREF(kwds); - Py_XDECREF(arg_tuple); - Py_XDECREF(order_str); - Py_XDECREF(py_bytes); - Py_XDECREF(from_bytes); - return result; -#endif - } -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const int neg_one = (int) -1, const_zero = (int) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(int) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(int) <= sizeof(unsigned long))) { 
- __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } - } -#endif - if ((sizeof(int) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - 
} else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - int val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (int) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (int) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (int) -1; - } else { - stepval = v; - } - v = NULL; - val = (int) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((int) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 
0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((int) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (int) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (cls == a || cls == b) return 1; - mro = cls->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - PyObject *base = PyTuple_GET_ITEM(mro, i); - if (base == (PyObject *)a || base == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - if (exc_type1) { - return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); - } else { - return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i= 0x030B00A4 - return Py_Version & ~0xFFUL; -#else - const char* rt_version = Py_GetVersion(); - unsigned long version = 0; - unsigned long factor = 0x01000000UL; - unsigned int digit = 0; - int i = 0; - while (factor) { - while ('0' <= rt_version[i] && rt_version[i] <= '9') { - digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); - ++i; - } - version += factor * digit; - if (rt_version[i] != '.') - break; - digit = 0; - factor >>= 8; - ++i; - } - return version; -#endif -} -static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { - const unsigned long MAJOR_MINOR = 0xFFFF0000UL; - if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) - return 0; - if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) - return 1; - { - char message[200]; - PyOS_snprintf(message, sizeof(message), - "compile time Python version %d.%d " - "of module '%.100s' " - "%s " - "runtime version %d.%d", - (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), - __Pyx_MODULE_NAME, - (allow_newer) ? 
"was newer than" : "does not match", - (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) - ); - return PyErr_WarnEx(NULL, message, 1); - } -} - -/* FunctionImport */ -#ifndef __PYX_HAVE_RT_ImportFunction_3_0_11 -#define __PYX_HAVE_RT_ImportFunction_3_0_11 -static int __Pyx_ImportFunction_3_0_11(PyObject *module, const char *funcname, void (**f)(void), const char *sig) { - PyObject *d = 0; - PyObject *cobj = 0; - union { - void (*fp)(void); - void *p; - } tmp; - d = PyObject_GetAttrString(module, (char *)"__pyx_capi__"); - if (!d) - goto bad; - cobj = PyDict_GetItemString(d, funcname); - if (!cobj) { - PyErr_Format(PyExc_ImportError, - "%.200s does not export expected C function %.200s", - PyModule_GetName(module), funcname); - goto bad; - } - if (!PyCapsule_IsValid(cobj, sig)) { - PyErr_Format(PyExc_TypeError, - "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", - PyModule_GetName(module), funcname, sig, PyCapsule_GetName(cobj)); - goto bad; - } - tmp.p = PyCapsule_GetPointer(cobj, sig); - *f = tmp.fp; - if (!(*f)) - goto bad; - Py_DECREF(d); - return 0; -bad: - Py_XDECREF(d); - return -1; -} -#endif - -/* InitStrings */ -#if PY_MAJOR_VERSION >= 3 -static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { - if (t.is_unicode | t.is_str) { - if (t.intern) { - *str = PyUnicode_InternFromString(t.s); - } else if (t.encoding) { - *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); - } else { - *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); - } - } else { - *str = PyBytes_FromStringAndSize(t.s, t.n - 1); - } - if (!*str) - return -1; - if (PyObject_Hash(*str) == -1) - return -1; - return 0; -} -#endif -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { - while (t->p) { - #if PY_MAJOR_VERSION >= 3 - __Pyx_InitString(*t, t->p); - #else - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - #endif - ++t; - } - return 0; -} - -#include -static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { - size_t len = strlen(s); - if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { - PyErr_SetString(PyExc_OverflowError, "byte string is too long"); - return -1; - } - return (Py_ssize_t) len; -} -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return __Pyx_PyUnicode_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return PyByteArray_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - 
PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if (!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { - __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). 
" - "The ability to return an instance of a strict subclass of int is deprecated, " - "and may be removed in a future version of Python.", - result_type_name)) { - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; - } - __Pyx_DECREF_TypeName(result_type_name); - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", - type_name, type_name, result_type_name); - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - if (likely(__Pyx_PyLong_IsCompact(b))) { - return __Pyx_PyLong_CompactValue(b); - } else { - const digit* digits = __Pyx_PyLong_Digits(b); - const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - 
return ival; -} -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { - if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { - return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); -#if PY_MAJOR_VERSION < 3 - } else if (likely(PyInt_CheckExact(o))) { - return PyInt_AS_LONG(o); -#endif - } else { - Py_ssize_t ival; - PyObject *x; - x = PyNumber_Index(o); - if (!x) return -1; - ival = PyInt_AsLong(x); - Py_DECREF(x); - return ival; - } -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -/* #### Code section: utility_code_pragmas_end ### */ -#ifdef _MSC_VER -#pragma warning( pop ) -#endif - - - -/* #### Code section: end ### */ -#endif /* Py_PYTHON_H */ diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 7ccef84e0d5..6d409a9fb7e 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -68,7 +68,6 @@ from ._orm_types import DMLStrategyArgument from ._orm_types import SynchronizeSessionArgument from ._typing import _CLE - from .compiler import SQLCompiler from .elements import BindParameter from .elements import ClauseList from .elements import ColumnClause # noqa @@ -658,9 +657,7 @@ class CompileState: _ambiguous_table_name_map: Optional[_AmbiguousTableNameMap] @classmethod - def create_for_statement( - cls, statement: Executable, compiler: SQLCompiler, **kw: Any - ) -> CompileState: + def create_for_statement(cls, statement, compiler, **kw): # factory construction. if statement._propagate_attrs: diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index cd1dc34e0a1..fde503aaf9b 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -298,7 +298,8 @@ def compile( if bind: dialect = bind.dialect elif self.stringify_dialect == "default": - dialect = self._default_dialect() + default = util.preloaded.engine_default + dialect = default.StrCompileDialect() else: url = util.preloaded.engine_url dialect = url.URL.create( @@ -307,10 +308,6 @@ def compile( return self._compiler(dialect, **kw) - def _default_dialect(self): - default = util.preloaded.engine_default - return default.StrCompileDialect() - def _compiler(self, dialect: Dialect, **kw: Any) -> Compiled: """Return a compiler appropriate for this ClauseElement, given a Dialect.""" @@ -407,10 +404,6 @@ def _set_propagate_attrs(self, values: Mapping[str, Any]) -> Self: self._propagate_attrs = util.immutabledict(values) return self - def _default_compiler(self) -> SQLCompiler: - dialect = self._default_dialect() - return dialect.statement_compiler(dialect, self) # type: ignore - def _clone(self, **kw: Any) -> Self: """Create a shallow copy of this ClauseElement. 
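The elements.py hunk above inlines the former ``_default_dialect()`` helper: when ``compile()`` is called with no bind and ``stringify_dialect`` left at its "default" value, a ``StrCompileDialect`` obtained from ``util.preloaded.engine_default`` is now constructed directly. A minimal sketch of the code path this touches, assuming a stock SQLAlchemy 2.x install (everything below is pre-existing public API, nothing introduced by this patch):

    from sqlalchemy import column, select, table

    t = table("t", column("a"), column("b"))
    stmt = select(t.c.a).where(t.c.b == 5)

    # No engine is bound here, so ClauseElement.compile() falls back to the
    # default string-compilation dialect (StrCompileDialect) internally.
    print(stmt.compile())
    # SELECT t.a
    # FROM t
    # WHERE t.b = :b_1

The selectable.py hunk that follows is the other half of the same refactor: the compile-state constructor accepts ``Optional[SQLCompiler]``, which is what allows ``get_final_froms()`` to pass ``None`` instead of building a throwaway compiler via the removed ``_default_compiler()``.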
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index d137ab504ea..5db1e729e7a 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -4694,7 +4694,7 @@ def get_plugin_class( def __init__( self, statement: Select[Any], - compiler: SQLCompiler, + compiler: Optional[SQLCompiler], **kw: Any, ): self.statement = statement @@ -5742,9 +5742,8 @@ def get_final_froms(self) -> Sequence[FromClause]: :attr:`_sql.Select.columns_clause_froms` """ - compiler = self._default_compiler() - return self._compile_state_factory(self, compiler)._get_display_froms() + return self._compile_state_factory(self, None)._get_display_froms() @property @util.deprecated( diff --git a/lib/sqlalchemy/util/_collections_cy.c b/lib/sqlalchemy/util/_collections_cy.c deleted file mode 100644 index 3753b20a2fe..00000000000 --- a/lib/sqlalchemy/util/_collections_cy.c +++ /dev/null @@ -1,24882 +0,0 @@ -/* Generated by Cython 3.0.11 */ - -/* BEGIN: Cython Metadata -{ - "distutils": { - "name": "sqlalchemy.util._collections_cy", - "sources": [ - "lib/sqlalchemy/util/_collections_cy.py" - ] - }, - "module_name": "sqlalchemy.util._collections_cy" -} -END: Cython Metadata */ - -#ifndef PY_SSIZE_T_CLEAN -#define PY_SSIZE_T_CLEAN -#endif /* PY_SSIZE_T_CLEAN */ -#if defined(CYTHON_LIMITED_API) && 0 - #ifndef Py_LIMITED_API - #if CYTHON_LIMITED_API+0 > 0x03030000 - #define Py_LIMITED_API CYTHON_LIMITED_API - #else - #define Py_LIMITED_API 0x03030000 - #endif - #endif -#endif - -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.7+ or Python 3.3+. -#else -#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API -#define __PYX_EXTRA_ABI_MODULE_NAME "limited" -#else -#define __PYX_EXTRA_ABI_MODULE_NAME "" -#endif -#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME -#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI -#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." -#define CYTHON_HEX_VERSION 0x03000BF0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #define HAVE_LONG_LONG -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX -#if defined(GRAALVM_PYTHON) - /* For very preliminary testing purposes. Most variables are set the same as PyPy. 
- The existence of this section does not imply that anything works or is even tested */ - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 1 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(PYPY_VERSION) - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - 
#endif - #if PY_VERSION_HEX < 0x03090000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(CYTHON_LIMITED_API) - #ifdef Py_LIMITED_API - #undef __PYX_LIMITED_VERSION_HEX - #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API - #endif - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 1 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_CLINE_IN_TRACEBACK - #define CYTHON_CLINE_IN_TRACEBACK 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 1 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #endif - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 1 - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #ifndef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef 
CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 1 - #endif - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 - #endif -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #ifndef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) - #endif - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #if PY_VERSION_HEX < 0x030400a1 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #elif 
!defined(CYTHON_USE_TP_FINALIZE) - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #if PY_VERSION_HEX < 0x030600B1 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #elif !defined(CYTHON_USE_DICT_VERSIONS) - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) - #endif - #if PY_VERSION_HEX < 0x030700A3 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #elif !defined(CYTHON_USE_EXC_INFO_STACK) - #define CYTHON_USE_EXC_INFO_STACK 1 - #endif - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 1 - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if !defined(CYTHON_VECTORCALL) -#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) -#endif -#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) -#if CYTHON_USE_PYLONG_INTERNALS - #if PY_MAJOR_VERSION < 3 - #include "longintrepr.h" - #endif - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(maybe_unused) - #define CYTHON_UNUSED [[maybe_unused]] - #endif - #endif - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef CYTHON_MAYBE_UNUSED_VAR - #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_USE_CPP_STD_MOVE - #if defined(__cplusplus) && (\ - __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) - #define CYTHON_USE_CPP_STD_MOVE 1 - #else - #define CYTHON_USE_CPP_STD_MOVE 0 - #endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned short uint16_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 
uint32_t; - #endif - #endif - #if _MSC_VER < 1300 - #ifdef _WIN64 - typedef unsigned long long __pyx_uintptr_t; - #else - typedef unsigned int __pyx_uintptr_t; - #endif - #else - #ifdef _WIN64 - typedef unsigned __int64 __pyx_uintptr_t; - #else - typedef unsigned __int32 __pyx_uintptr_t; - #endif - #endif -#else - #include - typedef uintptr_t __pyx_uintptr_t; -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif -#ifdef __cplusplus - template - struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; - #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) -#else - #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) -#endif -#if CYTHON_COMPILING_IN_PYPY == 1 - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) -#else - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) -#endif -#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_DefaultClassType PyClass_Type - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" - #define __Pyx_DefaultClassType PyType_Type -#if CYTHON_COMPILING_IN_LIMITED_API - static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, - PyObject *code, PyObject *c, PyObject* n, PyObject *v, - PyObject *fv, PyObject *cell, PyObject* fn, - PyObject *name, int fline, PyObject *lnos) { - PyObject *exception_table = NULL; - PyObject *types_module=NULL, *code_type=NULL, *result=NULL; - #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 - PyObject *version_info; - PyObject *py_minor_version = NULL; - #endif - long minor_version = 0; - PyObject *type, *value, *traceback; - PyErr_Fetch(&type, &value, &traceback); - #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 - minor_version = 11; - #else - if (!(version_info = PySys_GetObject("version_info"))) goto end; - if 
(!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; - minor_version = PyLong_AsLong(py_minor_version); - Py_DECREF(py_minor_version); - if (minor_version == -1 && PyErr_Occurred()) goto end; - #endif - if (!(types_module = PyImport_ImportModule("types"))) goto end; - if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; - if (minor_version <= 7) { - (void)p; - result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, - c, n, v, fn, name, fline, lnos, fv, cell); - } else if (minor_version <= 10) { - result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, - c, n, v, fn, name, fline, lnos, fv, cell); - } else { - if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; - result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, - c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); - } - end: - Py_XDECREF(code_type); - Py_XDECREF(exception_table); - Py_XDECREF(types_module); - if (type) { - PyErr_Restore(type, value, traceback); - } - return result; - } - #ifndef CO_OPTIMIZED - #define CO_OPTIMIZED 0x0001 - #endif - #ifndef CO_NEWLOCALS - #define CO_NEWLOCALS 0x0002 - #endif - #ifndef CO_VARARGS - #define CO_VARARGS 0x0004 - #endif - #ifndef CO_VARKEYWORDS - #define CO_VARKEYWORDS 0x0008 - #endif - #ifndef CO_ASYNC_GENERATOR - #define CO_ASYNC_GENERATOR 0x0200 - #endif - #ifndef CO_GENERATOR - #define CO_GENERATOR 0x0020 - #endif - #ifndef CO_COROUTINE - #define CO_COROUTINE 0x0080 - #endif -#elif PY_VERSION_HEX >= 0x030B0000 - static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, - PyObject *code, PyObject *c, PyObject* n, PyObject *v, - PyObject *fv, PyObject *cell, PyObject* fn, - PyObject *name, int fline, PyObject *lnos) { - PyCodeObject *result; - PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); - if (!empty_bytes) return NULL; - result = - #if PY_VERSION_HEX >= 0x030C0000 - PyUnstable_Code_NewWithPosOnlyArgs - #else - PyCode_NewWithPosOnlyArgs - #endif - (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); - Py_DECREF(empty_bytes); - return result; - } -#elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#endif -#endif -#if PY_VERSION_HEX >= 0x030900A4 || defined(Py_IS_TYPE) - #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) -#else - #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) - #define __Pyx_Py_Is(x, y) Py_Is(x, y) -#else - #define __Pyx_Py_Is(x, y) ((x) == (y)) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) - #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) -#else - #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) - #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) -#else - #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) - #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) -#else - #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) -#endif -#define 
__Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) -#if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) -#else - #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) -#endif -#ifndef CO_COROUTINE - #define CO_COROUTINE 0x80 -#endif -#ifndef CO_ASYNC_GENERATOR - #define CO_ASYNC_GENERATOR 0x200 -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef Py_TPFLAGS_SEQUENCE - #define Py_TPFLAGS_SEQUENCE 0 -#endif -#ifndef Py_TPFLAGS_MAPPING - #define Py_TPFLAGS_MAPPING 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #if PY_VERSION_HEX >= 0x030d00A4 - # define __Pyx_PyCFunctionFast PyCFunctionFast - # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords - #else - # define __Pyx_PyCFunctionFast _PyCFunctionFast - # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords - #endif -#endif -#if CYTHON_METH_FASTCALL - #define __Pyx_METH_FASTCALL METH_FASTCALL - #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast - #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords -#else - #define __Pyx_METH_FASTCALL METH_VARARGS - #define __Pyx_PyCFunction_FastCall PyCFunction - #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords -#endif -#if CYTHON_VECTORCALL - #define __pyx_vectorcallfunc vectorcallfunc - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET - #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) -#elif CYTHON_BACKPORT_VECTORCALL - typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, - size_t nargsf, PyObject *kwnames); - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) - #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) -#else - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 - #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) -#endif -#if PY_MAJOR_VERSION >= 0x030900B1 -#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) -#else -#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) -#endif -#define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) -#elif !CYTHON_COMPILING_IN_LIMITED_API -#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) -#endif -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) -static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { - return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? 
NULL : ((PyCFunctionObject*)func)->m_self; -} -#endif -static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) { -#if CYTHON_COMPILING_IN_LIMITED_API - return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; -#else - return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; -#endif -} -#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) -#if __PYX_LIMITED_VERSION_HEX < 0x030900B1 - #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) - typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); -#else - #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) - #define __Pyx_PyCMethod PyCMethod -#endif -#ifndef METH_METHOD - #define METH_METHOD 0x200 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyThreadState_Current PyThreadState_Get() -#elif !CYTHON_FAST_THREAD_STATE - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x030d00A1 - #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) -{ - void *result; - result = PyModule_GetState(op); - if (!result) - Py_FatalError("Couldn't find the module state"); - return result; -} -#endif -#define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) -#else - #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} 
-#endif -#if PY_MAJOR_VERSION < 3 - #if CYTHON_COMPILING_IN_PYPY - #if PYPY_VERSION_NUM < 0x07030600 - #if defined(__cplusplus) && __cplusplus >= 201402L - [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] - #elif defined(__GNUC__) || defined(__clang__) - __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) - #elif defined(_MSC_VER) - __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) - #endif - static CYTHON_INLINE int PyGILState_Check(void) { - return 0; - } - #else // PYPY_VERSION_NUM < 0x07030600 - #endif // PYPY_VERSION_NUM < 0x07030600 - #else - static CYTHON_INLINE int PyGILState_Check(void) { - PyThreadState * tstate = _PyThreadState_Current; - return tstate && (tstate == PyGILState_GetThisThreadState()); - } - #endif -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { - PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); - if (res == NULL) PyErr_Clear(); - return res; -} -#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) -#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError -#define __Pyx_PyDict_GetItemStr PyDict_GetItem -#else -static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { -#if CYTHON_COMPILING_IN_PYPY - return PyDict_GetItem(dict, name); -#else - PyDictEntry *ep; - PyDictObject *mp = (PyDictObject*) dict; - long hash = ((PyStringObject *) name)->ob_shash; - assert(hash != -1); - ep = (mp->ma_lookup)(mp, name, hash); - if (ep == NULL) { - return NULL; - } - return ep->me_value; -#endif -} -#define __Pyx_PyDict_GetItemStr PyDict_GetItem -#endif -#if CYTHON_USE_TYPE_SLOTS - #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) - #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) - #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) -#else - #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) - #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) - #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) -#else - #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) -#endif -#if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 -#define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ - PyTypeObject *type = Py_TYPE((PyObject*)obj);\ - assert(__Pyx_PyType_HasFeature(type, 
Py_TPFLAGS_HEAPTYPE));\ - PyObject_GC_Del(obj);\ - Py_DECREF(type);\ -} -#else -#define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) - #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) - #define __Pyx_PyUnicode_DATA(u) ((void*)u) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) -#elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #if PY_VERSION_HEX >= 0x030C0000 - #define __Pyx_PyUnicode_READY(op) (0) - #else - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #endif - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) - #if PY_VERSION_HEX >= 0x030C0000 - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #else - #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #endif - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535U : 1114111U) - #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #if !defined(PyUnicode_DecodeUnicodeEscape) - #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) - #endif - #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) - #undef PyUnicode_Contains - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) - #endif - #if !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) - #endif - #if !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) - #endif -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#if CYTHON_COMPILING_IN_CPYTHON - #define __Pyx_PySequence_ListKeepNew(obj)\ - (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? 
__Pyx_NewRef(obj) : PySequence_List(obj)) -#else - #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) - #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) - #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) - #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) - #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) - #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) - #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) - #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) -#else - #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) - #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) - #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) - #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) - #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) - #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) - #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) - #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) -#endif -#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 - #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) -#else - static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { - PyObject *module = PyImport_AddModule(name); - Py_XINCREF(module); - return module; - } -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define __Pyx_Py3Int_Check(op) PyLong_Check(op) - #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#else - #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) - #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - 
[… diff continues: removal of the Cython-generated C source for lib/sqlalchemy/util/_collections_cy.py. The deleted generated code in this span comprises CPython version-compatibility macros (async method slots, NaN and likely/unlikely helpers, PyLong internals, default-string-encoding setup), string/argument conversion helpers, refnanny, exception-handling, and fastcall utility prototypes, the OrderedSet and IdentitySet extension-type structs with their vtables ("A set implementation that maintains insertion order"; "A set that considers only object id() for uniqueness"), the module's interned-string table, and the C prototypes for the sets' methods (add, remove, pop, insert, discard, clear, union, update, difference, intersection, symmetric_difference, copy, and pickling support) …]
__pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_iterables); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_28__ior__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_30union(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_32__or__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_34intersection(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_36__and__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_38symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_40__xor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_42difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_44__sub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_46intersection_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_48__iand__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_50symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_52__ixor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_54difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_56__isub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_58__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_60__setstate_cython__(struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ -static int __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet___init__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_2add(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ -static int __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_4__contains__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_6remove(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_8discard(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_10pop(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_12clear(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_14__eq__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_16__ne__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_18issubset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_20__le__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_22__lt__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_24issuperset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_26__ge__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_28__gt__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_30union(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_32__or__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject 
*__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_34update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_36__ior__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_38difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_40__sub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_42difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_44__isub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_46intersection(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_48__and__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_50intersection_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_52__iand__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_54symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_56__xor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_58symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_60__ixor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_62copy(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ -static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet 
*__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_64__copy__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ -static Py_ssize_t __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_66__len__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_68__iter__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ -static Py_hash_t __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_70__hash__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_72__repr__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_74__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_76__setstate_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_4__pyx_unpickle_OrderedSet(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_6__pyx_unpickle_IdentitySet(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ -static PyObject *__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ -static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_keys = {0, 0, 0, 0, 0}; -static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_update = {0, 0, 0, 0, 0}; -static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_values = {0, 0, 0, 0, 0}; -static __Pyx_CachedCFunction __pyx_umethod_PyList_Type_pop = {0, 0, 0, 0, 0}; -/* #### Code section: late_includes ### */ -/* #### Code section: module_state ### */ -typedef struct { - PyObject *__pyx_d; - PyObject *__pyx_b; - PyObject *__pyx_cython_runtime; - PyObject *__pyx_empty_tuple; - PyObject *__pyx_empty_bytes; - PyObject *__pyx_empty_unicode; - #ifdef __Pyx_CyFunction_USED - PyTypeObject *__pyx_CyFunctionType; - #endif - #ifdef __Pyx_FusedFunction_USED - PyTypeObject *__pyx_FusedFunctionType; - #endif - #ifdef __Pyx_Generator_USED - PyTypeObject *__pyx_GeneratorType; - #endif - #ifdef __Pyx_IterableCoroutine_USED - PyTypeObject *__pyx_IterableCoroutineType; - #endif - #ifdef __Pyx_Coroutine_USED - PyTypeObject *__pyx_CoroutineAwaitType; - #endif - #ifdef __Pyx_Coroutine_USED - PyTypeObject *__pyx_CoroutineType; - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - PyObject *__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet; - PyObject *__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet; - #endif - PyTypeObject *__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet; - PyTypeObject *__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet; - PyObject 
*__pyx_n_s_AbstractSet; - PyObject *__pyx_n_s_Any; - PyObject *__pyx_n_s_Dict; - PyObject *__pyx_n_s_Hashable; - PyObject *__pyx_n_s_IdentitySet; - PyObject *__pyx_n_s_IdentitySet___copy; - PyObject *__pyx_n_s_IdentitySet___reduce_cython; - PyObject *__pyx_n_s_IdentitySet___setstate_cython; - PyObject *__pyx_n_s_IdentitySet_add; - PyObject *__pyx_n_s_IdentitySet_clear; - PyObject *__pyx_n_s_IdentitySet_copy; - PyObject *__pyx_n_s_IdentitySet_difference; - PyObject *__pyx_n_s_IdentitySet_difference_update; - PyObject *__pyx_n_s_IdentitySet_discard; - PyObject *__pyx_n_s_IdentitySet_intersection; - PyObject *__pyx_n_s_IdentitySet_intersection_update; - PyObject *__pyx_n_s_IdentitySet_issubset; - PyObject *__pyx_n_s_IdentitySet_issuperset; - PyObject *__pyx_n_s_IdentitySet_pop; - PyObject *__pyx_n_s_IdentitySet_remove; - PyObject *__pyx_n_s_IdentitySet_symmetric_difference; - PyObject *__pyx_n_s_IdentitySet_symmetric_difference_2; - PyObject *__pyx_n_s_IdentitySet_union; - PyObject *__pyx_n_s_IdentitySet_update; - PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0; - PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2; - PyObject *__pyx_n_s_IndexError; - PyObject *__pyx_n_s_Iterable; - PyObject *__pyx_kp_s_Iterable_Any; - PyObject *__pyx_kp_s_Iterable_Hashable; - PyObject *__pyx_kp_s_Iterable__S; - PyObject *__pyx_kp_s_Iterable__T; - PyObject *__pyx_n_s_Iterator; - PyObject *__pyx_n_s_KeyError; - PyObject *__pyx_n_s_List; - PyObject *__pyx_n_s_NoReturn; - PyObject *__pyx_n_s_None; - PyObject *__pyx_n_s_NotImplemented; - PyObject *__pyx_n_s_Optional; - PyObject *__pyx_n_s_OrderedSet; - PyObject *__pyx_kp_s_OrderedSet_Union__T__S; - PyObject *__pyx_kp_s_OrderedSet__T; - PyObject *__pyx_n_s_OrderedSet___class_getitem; - PyObject *__pyx_n_s_OrderedSet___reduce_cython; - PyObject *__pyx_n_s_OrderedSet___setstate_cython; - PyObject *__pyx_n_s_OrderedSet_add; - PyObject *__pyx_n_s_OrderedSet_clear; - PyObject *__pyx_n_s_OrderedSet_copy; - PyObject *__pyx_n_s_OrderedSet_difference; - PyObject *__pyx_n_s_OrderedSet_difference_update; - PyObject *__pyx_n_s_OrderedSet_discard; - PyObject *__pyx_n_s_OrderedSet_insert; - PyObject *__pyx_n_s_OrderedSet_intersection; - PyObject *__pyx_n_s_OrderedSet_intersection_update; - PyObject *__pyx_n_s_OrderedSet_pop; - PyObject *__pyx_n_s_OrderedSet_remove; - PyObject *__pyx_n_s_OrderedSet_symmetric_difference; - PyObject *__pyx_n_s_OrderedSet_symmetric_difference_2; - PyObject *__pyx_n_s_OrderedSet_union; - PyObject *__pyx_n_s_OrderedSet_update; - PyObject *__pyx_n_s_PickleError; - PyObject *__pyx_n_s_S; - PyObject *__pyx_n_u_S; - PyObject *__pyx_n_s_Self; - PyObject *__pyx_n_s_Set; - PyObject *__pyx_n_s_T; - PyObject *__pyx_n_u_T; - PyObject *__pyx_n_s_Tuple; - PyObject *__pyx_n_s_TypeError; - PyObject *__pyx_n_s_TypeVar; - PyObject *__pyx_n_s_Union; - PyObject *__pyx_kp_u__2; - PyObject *__pyx_kp_u__3; - PyObject *__pyx_kp_u__6; - PyObject *__pyx_n_s__8; - PyObject *__pyx_n_s_a; - PyObject *__pyx_n_s_add; - PyObject *__pyx_n_s_asyncio_coroutines; - PyObject *__pyx_n_s_bool; - PyObject *__pyx_n_s_class; - PyObject *__pyx_n_s_class_getitem; - PyObject *__pyx_n_s_clear; - PyObject *__pyx_n_s_cline_in_traceback; - PyObject *__pyx_n_s_cls; - PyObject *__pyx_n_s_copy; - PyObject *__pyx_n_s_copy_2; - PyObject *__pyx_kp_s_cython_Py_ssize_t; - PyObject *__pyx_n_s_d; - PyObject *__pyx_n_s_dict; - PyObject *__pyx_n_s_dict_2; - PyObject *__pyx_n_s_difference; - PyObject *__pyx_n_s_difference_update; - PyObject *__pyx_kp_u_disable; - PyObject *__pyx_n_s_discard; - 
PyObject *__pyx_n_s_element; - PyObject *__pyx_kp_u_enable; - PyObject *__pyx_kp_u_gc; - PyObject *__pyx_n_s_getstate; - PyObject *__pyx_n_s_import; - PyObject *__pyx_n_s_init; - PyObject *__pyx_n_s_insert; - PyObject *__pyx_n_s_intersection; - PyObject *__pyx_n_s_intersection_update; - PyObject *__pyx_n_s_is_compiled; - PyObject *__pyx_n_s_is_coroutine; - PyObject *__pyx_kp_u_isenabled; - PyObject *__pyx_n_s_issubset; - PyObject *__pyx_n_s_issuperset; - PyObject *__pyx_n_s_items; - PyObject *__pyx_n_s_iterable; - PyObject *__pyx_n_s_iterables; - PyObject *__pyx_n_s_key; - PyObject *__pyx_n_s_keys; - PyObject *__pyx_n_u_len; - PyObject *__pyx_kp_s_lib_sqlalchemy_util__collections; - PyObject *__pyx_n_u_list; - PyObject *__pyx_n_s_main; - PyObject *__pyx_n_u_members; - PyObject *__pyx_n_s_name; - PyObject *__pyx_n_s_new; - PyObject *__pyx_n_s_other; - PyObject *__pyx_n_s_other_set; - PyObject *__pyx_n_s_pair; - PyObject *__pyx_n_s_pickle; - PyObject *__pyx_n_s_pop; - PyObject *__pyx_kp_u_pop_from_an_empty_set; - PyObject *__pyx_n_s_popitem; - PyObject *__pyx_n_s_pos; - PyObject *__pyx_n_s_pyx_PickleError; - PyObject *__pyx_n_s_pyx_checksum; - PyObject *__pyx_n_s_pyx_result; - PyObject *__pyx_n_s_pyx_state; - PyObject *__pyx_n_s_pyx_type; - PyObject *__pyx_n_s_pyx_unpickle_IdentitySet; - PyObject *__pyx_n_s_pyx_unpickle_OrderedSet; - PyObject *__pyx_n_s_pyx_vtable; - PyObject *__pyx_n_s_reduce; - PyObject *__pyx_n_s_reduce_cython; - PyObject *__pyx_n_s_reduce_ex; - PyObject *__pyx_n_s_remove; - PyObject *__pyx_n_s_repr; - PyObject *__pyx_n_s_result; - PyObject *__pyx_n_s_return; - PyObject *__pyx_n_s_self; - PyObject *__pyx_n_s_seq; - PyObject *__pyx_kp_u_set_objects_are_unhashable; - PyObject *__pyx_n_s_setstate; - PyObject *__pyx_n_s_setstate_cython; - PyObject *__pyx_n_s_slots; - PyObject *__pyx_n_s_sqlalchemy_util__collections_cy; - PyObject *__pyx_n_s_state; - PyObject *__pyx_n_s_str; - PyObject *__pyx_kp_s_stringsource; - PyObject *__pyx_n_s_symmetric_difference; - PyObject *__pyx_n_s_symmetric_difference_update; - PyObject *__pyx_n_s_test; - PyObject *__pyx_kp_s_type_Self; - PyObject *__pyx_n_s_typing; - PyObject *__pyx_n_s_union; - PyObject *__pyx_n_s_unique_list; - PyObject *__pyx_n_s_update; - PyObject *__pyx_n_s_use_setstate; - PyObject *__pyx_n_s_value; - PyObject *__pyx_n_s_values; - PyObject *__pyx_int_61630440; - PyObject *__pyx_int_75814257; - PyObject *__pyx_int_143295406; - PyObject *__pyx_int_183888701; - PyObject *__pyx_int_197243545; - PyObject *__pyx_int_242532825; - PyObject *__pyx_tuple_; - PyObject *__pyx_tuple__4; - PyObject *__pyx_tuple__5; - PyObject *__pyx_tuple__7; - PyObject *__pyx_tuple__10; - PyObject *__pyx_tuple__11; - PyObject *__pyx_tuple__12; - PyObject *__pyx_tuple__14; - PyObject *__pyx_tuple__15; - PyObject *__pyx_tuple__17; - PyObject *__pyx_tuple__19; - PyObject *__pyx_tuple__22; - PyObject *__pyx_tuple__24; - PyObject *__pyx_tuple__28; - PyObject *__pyx_tuple__30; - PyObject *__pyx_tuple__32; - PyObject *__pyx_tuple__34; - PyObject *__pyx_tuple__37; - PyObject *__pyx_tuple__41; - PyObject *__pyx_tuple__43; - PyObject *__pyx_tuple__45; - PyObject *__pyx_tuple__49; - PyObject *__pyx_tuple__52; - PyObject *__pyx_tuple__67; - PyObject *__pyx_codeobj__9; - PyObject *__pyx_codeobj__13; - PyObject *__pyx_codeobj__16; - PyObject *__pyx_codeobj__18; - PyObject *__pyx_codeobj__20; - PyObject *__pyx_codeobj__21; - PyObject *__pyx_codeobj__23; - PyObject *__pyx_codeobj__25; - PyObject *__pyx_codeobj__26; - PyObject *__pyx_codeobj__27; - PyObject 
*__pyx_codeobj__29; - PyObject *__pyx_codeobj__31; - PyObject *__pyx_codeobj__33; - PyObject *__pyx_codeobj__35; - PyObject *__pyx_codeobj__36; - PyObject *__pyx_codeobj__38; - PyObject *__pyx_codeobj__39; - PyObject *__pyx_codeobj__40; - PyObject *__pyx_codeobj__42; - PyObject *__pyx_codeobj__44; - PyObject *__pyx_codeobj__46; - PyObject *__pyx_codeobj__47; - PyObject *__pyx_codeobj__48; - PyObject *__pyx_codeobj__50; - PyObject *__pyx_codeobj__51; - PyObject *__pyx_codeobj__53; - PyObject *__pyx_codeobj__54; - PyObject *__pyx_codeobj__55; - PyObject *__pyx_codeobj__56; - PyObject *__pyx_codeobj__57; - PyObject *__pyx_codeobj__58; - PyObject *__pyx_codeobj__59; - PyObject *__pyx_codeobj__60; - PyObject *__pyx_codeobj__61; - PyObject *__pyx_codeobj__62; - PyObject *__pyx_codeobj__63; - PyObject *__pyx_codeobj__64; - PyObject *__pyx_codeobj__65; - PyObject *__pyx_codeobj__66; - PyObject *__pyx_codeobj__68; - PyObject *__pyx_codeobj__69; -} __pyx_mstate; - -#if CYTHON_USE_MODULE_STATE -#ifdef __cplusplus -namespace { - extern struct PyModuleDef __pyx_moduledef; -} /* anonymous namespace */ -#else -static struct PyModuleDef __pyx_moduledef; -#endif - -#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) - -#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) - -#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) -#else -static __pyx_mstate __pyx_mstate_global_static = -#ifdef __cplusplus - {}; -#else - {0}; -#endif -static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; -#endif -/* #### Code section: module_state_clear ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_clear(PyObject *m) { - __pyx_mstate *clear_module_state = __pyx_mstate(m); - if (!clear_module_state) return 0; - Py_CLEAR(clear_module_state->__pyx_d); - Py_CLEAR(clear_module_state->__pyx_b); - Py_CLEAR(clear_module_state->__pyx_cython_runtime); - Py_CLEAR(clear_module_state->__pyx_empty_tuple); - Py_CLEAR(clear_module_state->__pyx_empty_bytes); - Py_CLEAR(clear_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_CLEAR(clear_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); - #endif - Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet); - Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet); - Py_CLEAR(clear_module_state->__pyx_n_s_AbstractSet); - Py_CLEAR(clear_module_state->__pyx_n_s_Any); - Py_CLEAR(clear_module_state->__pyx_n_s_Dict); - Py_CLEAR(clear_module_state->__pyx_n_s_Hashable); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet___copy); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet___reduce_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet___setstate_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_add); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_clear); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_copy); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_difference); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_difference_update); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_discard); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_intersection); - 
Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_intersection_update); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_issubset); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_issuperset); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_pop); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_remove); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_symmetric_difference); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_symmetric_difference_2); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_union); - Py_CLEAR(clear_module_state->__pyx_n_s_IdentitySet_update); - Py_CLEAR(clear_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); - Py_CLEAR(clear_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2); - Py_CLEAR(clear_module_state->__pyx_n_s_IndexError); - Py_CLEAR(clear_module_state->__pyx_n_s_Iterable); - Py_CLEAR(clear_module_state->__pyx_kp_s_Iterable_Any); - Py_CLEAR(clear_module_state->__pyx_kp_s_Iterable_Hashable); - Py_CLEAR(clear_module_state->__pyx_kp_s_Iterable__S); - Py_CLEAR(clear_module_state->__pyx_kp_s_Iterable__T); - Py_CLEAR(clear_module_state->__pyx_n_s_Iterator); - Py_CLEAR(clear_module_state->__pyx_n_s_KeyError); - Py_CLEAR(clear_module_state->__pyx_n_s_List); - Py_CLEAR(clear_module_state->__pyx_n_s_NoReturn); - Py_CLEAR(clear_module_state->__pyx_n_s_None); - Py_CLEAR(clear_module_state->__pyx_n_s_NotImplemented); - Py_CLEAR(clear_module_state->__pyx_n_s_Optional); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet); - Py_CLEAR(clear_module_state->__pyx_kp_s_OrderedSet_Union__T__S); - Py_CLEAR(clear_module_state->__pyx_kp_s_OrderedSet__T); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet___class_getitem); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet___reduce_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet___setstate_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_add); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_clear); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_copy); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_difference); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_difference_update); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_discard); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_insert); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_intersection); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_intersection_update); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_pop); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_remove); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_symmetric_difference); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_symmetric_difference_2); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_union); - Py_CLEAR(clear_module_state->__pyx_n_s_OrderedSet_update); - Py_CLEAR(clear_module_state->__pyx_n_s_PickleError); - Py_CLEAR(clear_module_state->__pyx_n_s_S); - Py_CLEAR(clear_module_state->__pyx_n_u_S); - Py_CLEAR(clear_module_state->__pyx_n_s_Self); - Py_CLEAR(clear_module_state->__pyx_n_s_Set); - Py_CLEAR(clear_module_state->__pyx_n_s_T); - Py_CLEAR(clear_module_state->__pyx_n_u_T); - Py_CLEAR(clear_module_state->__pyx_n_s_Tuple); - Py_CLEAR(clear_module_state->__pyx_n_s_TypeError); - Py_CLEAR(clear_module_state->__pyx_n_s_TypeVar); - Py_CLEAR(clear_module_state->__pyx_n_s_Union); - Py_CLEAR(clear_module_state->__pyx_kp_u__2); - Py_CLEAR(clear_module_state->__pyx_kp_u__3); - Py_CLEAR(clear_module_state->__pyx_kp_u__6); - Py_CLEAR(clear_module_state->__pyx_n_s__8); - 
Py_CLEAR(clear_module_state->__pyx_n_s_a); - Py_CLEAR(clear_module_state->__pyx_n_s_add); - Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); - Py_CLEAR(clear_module_state->__pyx_n_s_bool); - Py_CLEAR(clear_module_state->__pyx_n_s_class); - Py_CLEAR(clear_module_state->__pyx_n_s_class_getitem); - Py_CLEAR(clear_module_state->__pyx_n_s_clear); - Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); - Py_CLEAR(clear_module_state->__pyx_n_s_cls); - Py_CLEAR(clear_module_state->__pyx_n_s_copy); - Py_CLEAR(clear_module_state->__pyx_n_s_copy_2); - Py_CLEAR(clear_module_state->__pyx_kp_s_cython_Py_ssize_t); - Py_CLEAR(clear_module_state->__pyx_n_s_d); - Py_CLEAR(clear_module_state->__pyx_n_s_dict); - Py_CLEAR(clear_module_state->__pyx_n_s_dict_2); - Py_CLEAR(clear_module_state->__pyx_n_s_difference); - Py_CLEAR(clear_module_state->__pyx_n_s_difference_update); - Py_CLEAR(clear_module_state->__pyx_kp_u_disable); - Py_CLEAR(clear_module_state->__pyx_n_s_discard); - Py_CLEAR(clear_module_state->__pyx_n_s_element); - Py_CLEAR(clear_module_state->__pyx_kp_u_enable); - Py_CLEAR(clear_module_state->__pyx_kp_u_gc); - Py_CLEAR(clear_module_state->__pyx_n_s_getstate); - Py_CLEAR(clear_module_state->__pyx_n_s_import); - Py_CLEAR(clear_module_state->__pyx_n_s_init); - Py_CLEAR(clear_module_state->__pyx_n_s_insert); - Py_CLEAR(clear_module_state->__pyx_n_s_intersection); - Py_CLEAR(clear_module_state->__pyx_n_s_intersection_update); - Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); - Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); - Py_CLEAR(clear_module_state->__pyx_kp_u_isenabled); - Py_CLEAR(clear_module_state->__pyx_n_s_issubset); - Py_CLEAR(clear_module_state->__pyx_n_s_issuperset); - Py_CLEAR(clear_module_state->__pyx_n_s_items); - Py_CLEAR(clear_module_state->__pyx_n_s_iterable); - Py_CLEAR(clear_module_state->__pyx_n_s_iterables); - Py_CLEAR(clear_module_state->__pyx_n_s_key); - Py_CLEAR(clear_module_state->__pyx_n_s_keys); - Py_CLEAR(clear_module_state->__pyx_n_u_len); - Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_util__collections); - Py_CLEAR(clear_module_state->__pyx_n_u_list); - Py_CLEAR(clear_module_state->__pyx_n_s_main); - Py_CLEAR(clear_module_state->__pyx_n_u_members); - Py_CLEAR(clear_module_state->__pyx_n_s_name); - Py_CLEAR(clear_module_state->__pyx_n_s_new); - Py_CLEAR(clear_module_state->__pyx_n_s_other); - Py_CLEAR(clear_module_state->__pyx_n_s_other_set); - Py_CLEAR(clear_module_state->__pyx_n_s_pair); - Py_CLEAR(clear_module_state->__pyx_n_s_pickle); - Py_CLEAR(clear_module_state->__pyx_n_s_pop); - Py_CLEAR(clear_module_state->__pyx_kp_u_pop_from_an_empty_set); - Py_CLEAR(clear_module_state->__pyx_n_s_popitem); - Py_CLEAR(clear_module_state->__pyx_n_s_pos); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_PickleError); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_checksum); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_result); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_state); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_type); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_unpickle_IdentitySet); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_unpickle_OrderedSet); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_vtable); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce_ex); - Py_CLEAR(clear_module_state->__pyx_n_s_remove); - Py_CLEAR(clear_module_state->__pyx_n_s_repr); - Py_CLEAR(clear_module_state->__pyx_n_s_result); - 
Py_CLEAR(clear_module_state->__pyx_n_s_return); - Py_CLEAR(clear_module_state->__pyx_n_s_self); - Py_CLEAR(clear_module_state->__pyx_n_s_seq); - Py_CLEAR(clear_module_state->__pyx_kp_u_set_objects_are_unhashable); - Py_CLEAR(clear_module_state->__pyx_n_s_setstate); - Py_CLEAR(clear_module_state->__pyx_n_s_setstate_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_slots); - Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_util__collections_cy); - Py_CLEAR(clear_module_state->__pyx_n_s_state); - Py_CLEAR(clear_module_state->__pyx_n_s_str); - Py_CLEAR(clear_module_state->__pyx_kp_s_stringsource); - Py_CLEAR(clear_module_state->__pyx_n_s_symmetric_difference); - Py_CLEAR(clear_module_state->__pyx_n_s_symmetric_difference_update); - Py_CLEAR(clear_module_state->__pyx_n_s_test); - Py_CLEAR(clear_module_state->__pyx_kp_s_type_Self); - Py_CLEAR(clear_module_state->__pyx_n_s_typing); - Py_CLEAR(clear_module_state->__pyx_n_s_union); - Py_CLEAR(clear_module_state->__pyx_n_s_unique_list); - Py_CLEAR(clear_module_state->__pyx_n_s_update); - Py_CLEAR(clear_module_state->__pyx_n_s_use_setstate); - Py_CLEAR(clear_module_state->__pyx_n_s_value); - Py_CLEAR(clear_module_state->__pyx_n_s_values); - Py_CLEAR(clear_module_state->__pyx_int_61630440); - Py_CLEAR(clear_module_state->__pyx_int_75814257); - Py_CLEAR(clear_module_state->__pyx_int_143295406); - Py_CLEAR(clear_module_state->__pyx_int_183888701); - Py_CLEAR(clear_module_state->__pyx_int_197243545); - Py_CLEAR(clear_module_state->__pyx_int_242532825); - Py_CLEAR(clear_module_state->__pyx_tuple_); - Py_CLEAR(clear_module_state->__pyx_tuple__4); - Py_CLEAR(clear_module_state->__pyx_tuple__5); - Py_CLEAR(clear_module_state->__pyx_tuple__7); - Py_CLEAR(clear_module_state->__pyx_tuple__10); - Py_CLEAR(clear_module_state->__pyx_tuple__11); - Py_CLEAR(clear_module_state->__pyx_tuple__12); - Py_CLEAR(clear_module_state->__pyx_tuple__14); - Py_CLEAR(clear_module_state->__pyx_tuple__15); - Py_CLEAR(clear_module_state->__pyx_tuple__17); - Py_CLEAR(clear_module_state->__pyx_tuple__19); - Py_CLEAR(clear_module_state->__pyx_tuple__22); - Py_CLEAR(clear_module_state->__pyx_tuple__24); - Py_CLEAR(clear_module_state->__pyx_tuple__28); - Py_CLEAR(clear_module_state->__pyx_tuple__30); - Py_CLEAR(clear_module_state->__pyx_tuple__32); - Py_CLEAR(clear_module_state->__pyx_tuple__34); - Py_CLEAR(clear_module_state->__pyx_tuple__37); - Py_CLEAR(clear_module_state->__pyx_tuple__41); - Py_CLEAR(clear_module_state->__pyx_tuple__43); - Py_CLEAR(clear_module_state->__pyx_tuple__45); - Py_CLEAR(clear_module_state->__pyx_tuple__49); - Py_CLEAR(clear_module_state->__pyx_tuple__52); - Py_CLEAR(clear_module_state->__pyx_tuple__67); - Py_CLEAR(clear_module_state->__pyx_codeobj__9); - Py_CLEAR(clear_module_state->__pyx_codeobj__13); - Py_CLEAR(clear_module_state->__pyx_codeobj__16); - Py_CLEAR(clear_module_state->__pyx_codeobj__18); - Py_CLEAR(clear_module_state->__pyx_codeobj__20); - Py_CLEAR(clear_module_state->__pyx_codeobj__21); - Py_CLEAR(clear_module_state->__pyx_codeobj__23); - Py_CLEAR(clear_module_state->__pyx_codeobj__25); - Py_CLEAR(clear_module_state->__pyx_codeobj__26); - Py_CLEAR(clear_module_state->__pyx_codeobj__27); - Py_CLEAR(clear_module_state->__pyx_codeobj__29); - Py_CLEAR(clear_module_state->__pyx_codeobj__31); - Py_CLEAR(clear_module_state->__pyx_codeobj__33); - Py_CLEAR(clear_module_state->__pyx_codeobj__35); - Py_CLEAR(clear_module_state->__pyx_codeobj__36); - Py_CLEAR(clear_module_state->__pyx_codeobj__38); - Py_CLEAR(clear_module_state->__pyx_codeobj__39); - 
Py_CLEAR(clear_module_state->__pyx_codeobj__40); - Py_CLEAR(clear_module_state->__pyx_codeobj__42); - Py_CLEAR(clear_module_state->__pyx_codeobj__44); - Py_CLEAR(clear_module_state->__pyx_codeobj__46); - Py_CLEAR(clear_module_state->__pyx_codeobj__47); - Py_CLEAR(clear_module_state->__pyx_codeobj__48); - Py_CLEAR(clear_module_state->__pyx_codeobj__50); - Py_CLEAR(clear_module_state->__pyx_codeobj__51); - Py_CLEAR(clear_module_state->__pyx_codeobj__53); - Py_CLEAR(clear_module_state->__pyx_codeobj__54); - Py_CLEAR(clear_module_state->__pyx_codeobj__55); - Py_CLEAR(clear_module_state->__pyx_codeobj__56); - Py_CLEAR(clear_module_state->__pyx_codeobj__57); - Py_CLEAR(clear_module_state->__pyx_codeobj__58); - Py_CLEAR(clear_module_state->__pyx_codeobj__59); - Py_CLEAR(clear_module_state->__pyx_codeobj__60); - Py_CLEAR(clear_module_state->__pyx_codeobj__61); - Py_CLEAR(clear_module_state->__pyx_codeobj__62); - Py_CLEAR(clear_module_state->__pyx_codeobj__63); - Py_CLEAR(clear_module_state->__pyx_codeobj__64); - Py_CLEAR(clear_module_state->__pyx_codeobj__65); - Py_CLEAR(clear_module_state->__pyx_codeobj__66); - Py_CLEAR(clear_module_state->__pyx_codeobj__68); - Py_CLEAR(clear_module_state->__pyx_codeobj__69); - return 0; -} -#endif -/* #### Code section: module_state_traverse ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { - __pyx_mstate *traverse_module_state = __pyx_mstate(m); - if (!traverse_module_state) return 0; - Py_VISIT(traverse_module_state->__pyx_d); - Py_VISIT(traverse_module_state->__pyx_b); - Py_VISIT(traverse_module_state->__pyx_cython_runtime); - Py_VISIT(traverse_module_state->__pyx_empty_tuple); - Py_VISIT(traverse_module_state->__pyx_empty_bytes); - Py_VISIT(traverse_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_VISIT(traverse_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); - #endif - Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet); - Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet); - Py_VISIT(traverse_module_state->__pyx_n_s_AbstractSet); - Py_VISIT(traverse_module_state->__pyx_n_s_Any); - Py_VISIT(traverse_module_state->__pyx_n_s_Dict); - Py_VISIT(traverse_module_state->__pyx_n_s_Hashable); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet___copy); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet___reduce_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet___setstate_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_add); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_clear); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_copy); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_difference); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_difference_update); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_discard); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_intersection); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_intersection_update); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_issubset); - 
Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_issuperset); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_pop); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_remove); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_symmetric_difference); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_symmetric_difference_2); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_union); - Py_VISIT(traverse_module_state->__pyx_n_s_IdentitySet_update); - Py_VISIT(traverse_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); - Py_VISIT(traverse_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2); - Py_VISIT(traverse_module_state->__pyx_n_s_IndexError); - Py_VISIT(traverse_module_state->__pyx_n_s_Iterable); - Py_VISIT(traverse_module_state->__pyx_kp_s_Iterable_Any); - Py_VISIT(traverse_module_state->__pyx_kp_s_Iterable_Hashable); - Py_VISIT(traverse_module_state->__pyx_kp_s_Iterable__S); - Py_VISIT(traverse_module_state->__pyx_kp_s_Iterable__T); - Py_VISIT(traverse_module_state->__pyx_n_s_Iterator); - Py_VISIT(traverse_module_state->__pyx_n_s_KeyError); - Py_VISIT(traverse_module_state->__pyx_n_s_List); - Py_VISIT(traverse_module_state->__pyx_n_s_NoReturn); - Py_VISIT(traverse_module_state->__pyx_n_s_None); - Py_VISIT(traverse_module_state->__pyx_n_s_NotImplemented); - Py_VISIT(traverse_module_state->__pyx_n_s_Optional); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet); - Py_VISIT(traverse_module_state->__pyx_kp_s_OrderedSet_Union__T__S); - Py_VISIT(traverse_module_state->__pyx_kp_s_OrderedSet__T); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet___class_getitem); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet___reduce_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet___setstate_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_add); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_clear); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_copy); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_difference); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_difference_update); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_discard); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_insert); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_intersection); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_intersection_update); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_pop); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_remove); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_symmetric_difference); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_symmetric_difference_2); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_union); - Py_VISIT(traverse_module_state->__pyx_n_s_OrderedSet_update); - Py_VISIT(traverse_module_state->__pyx_n_s_PickleError); - Py_VISIT(traverse_module_state->__pyx_n_s_S); - Py_VISIT(traverse_module_state->__pyx_n_u_S); - Py_VISIT(traverse_module_state->__pyx_n_s_Self); - Py_VISIT(traverse_module_state->__pyx_n_s_Set); - Py_VISIT(traverse_module_state->__pyx_n_s_T); - Py_VISIT(traverse_module_state->__pyx_n_u_T); - Py_VISIT(traverse_module_state->__pyx_n_s_Tuple); - Py_VISIT(traverse_module_state->__pyx_n_s_TypeError); - Py_VISIT(traverse_module_state->__pyx_n_s_TypeVar); - Py_VISIT(traverse_module_state->__pyx_n_s_Union); - Py_VISIT(traverse_module_state->__pyx_kp_u__2); - Py_VISIT(traverse_module_state->__pyx_kp_u__3); - Py_VISIT(traverse_module_state->__pyx_kp_u__6); - 
Py_VISIT(traverse_module_state->__pyx_n_s__8); - Py_VISIT(traverse_module_state->__pyx_n_s_a); - Py_VISIT(traverse_module_state->__pyx_n_s_add); - Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); - Py_VISIT(traverse_module_state->__pyx_n_s_bool); - Py_VISIT(traverse_module_state->__pyx_n_s_class); - Py_VISIT(traverse_module_state->__pyx_n_s_class_getitem); - Py_VISIT(traverse_module_state->__pyx_n_s_clear); - Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); - Py_VISIT(traverse_module_state->__pyx_n_s_cls); - Py_VISIT(traverse_module_state->__pyx_n_s_copy); - Py_VISIT(traverse_module_state->__pyx_n_s_copy_2); - Py_VISIT(traverse_module_state->__pyx_kp_s_cython_Py_ssize_t); - Py_VISIT(traverse_module_state->__pyx_n_s_d); - Py_VISIT(traverse_module_state->__pyx_n_s_dict); - Py_VISIT(traverse_module_state->__pyx_n_s_dict_2); - Py_VISIT(traverse_module_state->__pyx_n_s_difference); - Py_VISIT(traverse_module_state->__pyx_n_s_difference_update); - Py_VISIT(traverse_module_state->__pyx_kp_u_disable); - Py_VISIT(traverse_module_state->__pyx_n_s_discard); - Py_VISIT(traverse_module_state->__pyx_n_s_element); - Py_VISIT(traverse_module_state->__pyx_kp_u_enable); - Py_VISIT(traverse_module_state->__pyx_kp_u_gc); - Py_VISIT(traverse_module_state->__pyx_n_s_getstate); - Py_VISIT(traverse_module_state->__pyx_n_s_import); - Py_VISIT(traverse_module_state->__pyx_n_s_init); - Py_VISIT(traverse_module_state->__pyx_n_s_insert); - Py_VISIT(traverse_module_state->__pyx_n_s_intersection); - Py_VISIT(traverse_module_state->__pyx_n_s_intersection_update); - Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); - Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); - Py_VISIT(traverse_module_state->__pyx_kp_u_isenabled); - Py_VISIT(traverse_module_state->__pyx_n_s_issubset); - Py_VISIT(traverse_module_state->__pyx_n_s_issuperset); - Py_VISIT(traverse_module_state->__pyx_n_s_items); - Py_VISIT(traverse_module_state->__pyx_n_s_iterable); - Py_VISIT(traverse_module_state->__pyx_n_s_iterables); - Py_VISIT(traverse_module_state->__pyx_n_s_key); - Py_VISIT(traverse_module_state->__pyx_n_s_keys); - Py_VISIT(traverse_module_state->__pyx_n_u_len); - Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_util__collections); - Py_VISIT(traverse_module_state->__pyx_n_u_list); - Py_VISIT(traverse_module_state->__pyx_n_s_main); - Py_VISIT(traverse_module_state->__pyx_n_u_members); - Py_VISIT(traverse_module_state->__pyx_n_s_name); - Py_VISIT(traverse_module_state->__pyx_n_s_new); - Py_VISIT(traverse_module_state->__pyx_n_s_other); - Py_VISIT(traverse_module_state->__pyx_n_s_other_set); - Py_VISIT(traverse_module_state->__pyx_n_s_pair); - Py_VISIT(traverse_module_state->__pyx_n_s_pickle); - Py_VISIT(traverse_module_state->__pyx_n_s_pop); - Py_VISIT(traverse_module_state->__pyx_kp_u_pop_from_an_empty_set); - Py_VISIT(traverse_module_state->__pyx_n_s_popitem); - Py_VISIT(traverse_module_state->__pyx_n_s_pos); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_PickleError); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_checksum); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_result); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_state); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_type); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_unpickle_IdentitySet); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_unpickle_OrderedSet); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_vtable); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce); - 
Py_VISIT(traverse_module_state->__pyx_n_s_reduce_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce_ex); - Py_VISIT(traverse_module_state->__pyx_n_s_remove); - Py_VISIT(traverse_module_state->__pyx_n_s_repr); - Py_VISIT(traverse_module_state->__pyx_n_s_result); - Py_VISIT(traverse_module_state->__pyx_n_s_return); - Py_VISIT(traverse_module_state->__pyx_n_s_self); - Py_VISIT(traverse_module_state->__pyx_n_s_seq); - Py_VISIT(traverse_module_state->__pyx_kp_u_set_objects_are_unhashable); - Py_VISIT(traverse_module_state->__pyx_n_s_setstate); - Py_VISIT(traverse_module_state->__pyx_n_s_setstate_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_slots); - Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_util__collections_cy); - Py_VISIT(traverse_module_state->__pyx_n_s_state); - Py_VISIT(traverse_module_state->__pyx_n_s_str); - Py_VISIT(traverse_module_state->__pyx_kp_s_stringsource); - Py_VISIT(traverse_module_state->__pyx_n_s_symmetric_difference); - Py_VISIT(traverse_module_state->__pyx_n_s_symmetric_difference_update); - Py_VISIT(traverse_module_state->__pyx_n_s_test); - Py_VISIT(traverse_module_state->__pyx_kp_s_type_Self); - Py_VISIT(traverse_module_state->__pyx_n_s_typing); - Py_VISIT(traverse_module_state->__pyx_n_s_union); - Py_VISIT(traverse_module_state->__pyx_n_s_unique_list); - Py_VISIT(traverse_module_state->__pyx_n_s_update); - Py_VISIT(traverse_module_state->__pyx_n_s_use_setstate); - Py_VISIT(traverse_module_state->__pyx_n_s_value); - Py_VISIT(traverse_module_state->__pyx_n_s_values); - Py_VISIT(traverse_module_state->__pyx_int_61630440); - Py_VISIT(traverse_module_state->__pyx_int_75814257); - Py_VISIT(traverse_module_state->__pyx_int_143295406); - Py_VISIT(traverse_module_state->__pyx_int_183888701); - Py_VISIT(traverse_module_state->__pyx_int_197243545); - Py_VISIT(traverse_module_state->__pyx_int_242532825); - Py_VISIT(traverse_module_state->__pyx_tuple_); - Py_VISIT(traverse_module_state->__pyx_tuple__4); - Py_VISIT(traverse_module_state->__pyx_tuple__5); - Py_VISIT(traverse_module_state->__pyx_tuple__7); - Py_VISIT(traverse_module_state->__pyx_tuple__10); - Py_VISIT(traverse_module_state->__pyx_tuple__11); - Py_VISIT(traverse_module_state->__pyx_tuple__12); - Py_VISIT(traverse_module_state->__pyx_tuple__14); - Py_VISIT(traverse_module_state->__pyx_tuple__15); - Py_VISIT(traverse_module_state->__pyx_tuple__17); - Py_VISIT(traverse_module_state->__pyx_tuple__19); - Py_VISIT(traverse_module_state->__pyx_tuple__22); - Py_VISIT(traverse_module_state->__pyx_tuple__24); - Py_VISIT(traverse_module_state->__pyx_tuple__28); - Py_VISIT(traverse_module_state->__pyx_tuple__30); - Py_VISIT(traverse_module_state->__pyx_tuple__32); - Py_VISIT(traverse_module_state->__pyx_tuple__34); - Py_VISIT(traverse_module_state->__pyx_tuple__37); - Py_VISIT(traverse_module_state->__pyx_tuple__41); - Py_VISIT(traverse_module_state->__pyx_tuple__43); - Py_VISIT(traverse_module_state->__pyx_tuple__45); - Py_VISIT(traverse_module_state->__pyx_tuple__49); - Py_VISIT(traverse_module_state->__pyx_tuple__52); - Py_VISIT(traverse_module_state->__pyx_tuple__67); - Py_VISIT(traverse_module_state->__pyx_codeobj__9); - Py_VISIT(traverse_module_state->__pyx_codeobj__13); - Py_VISIT(traverse_module_state->__pyx_codeobj__16); - Py_VISIT(traverse_module_state->__pyx_codeobj__18); - Py_VISIT(traverse_module_state->__pyx_codeobj__20); - Py_VISIT(traverse_module_state->__pyx_codeobj__21); - Py_VISIT(traverse_module_state->__pyx_codeobj__23); - Py_VISIT(traverse_module_state->__pyx_codeobj__25); - 
Py_VISIT(traverse_module_state->__pyx_codeobj__26); - Py_VISIT(traverse_module_state->__pyx_codeobj__27); - Py_VISIT(traverse_module_state->__pyx_codeobj__29); - Py_VISIT(traverse_module_state->__pyx_codeobj__31); - Py_VISIT(traverse_module_state->__pyx_codeobj__33); - Py_VISIT(traverse_module_state->__pyx_codeobj__35); - Py_VISIT(traverse_module_state->__pyx_codeobj__36); - Py_VISIT(traverse_module_state->__pyx_codeobj__38); - Py_VISIT(traverse_module_state->__pyx_codeobj__39); - Py_VISIT(traverse_module_state->__pyx_codeobj__40); - Py_VISIT(traverse_module_state->__pyx_codeobj__42); - Py_VISIT(traverse_module_state->__pyx_codeobj__44); - Py_VISIT(traverse_module_state->__pyx_codeobj__46); - Py_VISIT(traverse_module_state->__pyx_codeobj__47); - Py_VISIT(traverse_module_state->__pyx_codeobj__48); - Py_VISIT(traverse_module_state->__pyx_codeobj__50); - Py_VISIT(traverse_module_state->__pyx_codeobj__51); - Py_VISIT(traverse_module_state->__pyx_codeobj__53); - Py_VISIT(traverse_module_state->__pyx_codeobj__54); - Py_VISIT(traverse_module_state->__pyx_codeobj__55); - Py_VISIT(traverse_module_state->__pyx_codeobj__56); - Py_VISIT(traverse_module_state->__pyx_codeobj__57); - Py_VISIT(traverse_module_state->__pyx_codeobj__58); - Py_VISIT(traverse_module_state->__pyx_codeobj__59); - Py_VISIT(traverse_module_state->__pyx_codeobj__60); - Py_VISIT(traverse_module_state->__pyx_codeobj__61); - Py_VISIT(traverse_module_state->__pyx_codeobj__62); - Py_VISIT(traverse_module_state->__pyx_codeobj__63); - Py_VISIT(traverse_module_state->__pyx_codeobj__64); - Py_VISIT(traverse_module_state->__pyx_codeobj__65); - Py_VISIT(traverse_module_state->__pyx_codeobj__66); - Py_VISIT(traverse_module_state->__pyx_codeobj__68); - Py_VISIT(traverse_module_state->__pyx_codeobj__69); - return 0; -} -#endif -/* #### Code section: module_state_defines ### */ -#define __pyx_d __pyx_mstate_global->__pyx_d -#define __pyx_b __pyx_mstate_global->__pyx_b -#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime -#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple -#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes -#define __pyx_empty_unicode __pyx_mstate_global->__pyx_empty_unicode -#ifdef __Pyx_CyFunction_USED -#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType -#endif -#ifdef __Pyx_FusedFunction_USED -#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType -#endif -#ifdef __Pyx_Generator_USED -#define __pyx_GeneratorType __pyx_mstate_global->__pyx_GeneratorType -#endif -#ifdef __Pyx_IterableCoroutine_USED -#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#define __pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet __pyx_mstate_global->__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet -#define __pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet __pyx_mstate_global->__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet -#endif -#define __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet __pyx_mstate_global->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet -#define __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet 
__pyx_mstate_global->__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet -#define __pyx_n_s_AbstractSet __pyx_mstate_global->__pyx_n_s_AbstractSet -#define __pyx_n_s_Any __pyx_mstate_global->__pyx_n_s_Any -#define __pyx_n_s_Dict __pyx_mstate_global->__pyx_n_s_Dict -#define __pyx_n_s_Hashable __pyx_mstate_global->__pyx_n_s_Hashable -#define __pyx_n_s_IdentitySet __pyx_mstate_global->__pyx_n_s_IdentitySet -#define __pyx_n_s_IdentitySet___copy __pyx_mstate_global->__pyx_n_s_IdentitySet___copy -#define __pyx_n_s_IdentitySet___reduce_cython __pyx_mstate_global->__pyx_n_s_IdentitySet___reduce_cython -#define __pyx_n_s_IdentitySet___setstate_cython __pyx_mstate_global->__pyx_n_s_IdentitySet___setstate_cython -#define __pyx_n_s_IdentitySet_add __pyx_mstate_global->__pyx_n_s_IdentitySet_add -#define __pyx_n_s_IdentitySet_clear __pyx_mstate_global->__pyx_n_s_IdentitySet_clear -#define __pyx_n_s_IdentitySet_copy __pyx_mstate_global->__pyx_n_s_IdentitySet_copy -#define __pyx_n_s_IdentitySet_difference __pyx_mstate_global->__pyx_n_s_IdentitySet_difference -#define __pyx_n_s_IdentitySet_difference_update __pyx_mstate_global->__pyx_n_s_IdentitySet_difference_update -#define __pyx_n_s_IdentitySet_discard __pyx_mstate_global->__pyx_n_s_IdentitySet_discard -#define __pyx_n_s_IdentitySet_intersection __pyx_mstate_global->__pyx_n_s_IdentitySet_intersection -#define __pyx_n_s_IdentitySet_intersection_update __pyx_mstate_global->__pyx_n_s_IdentitySet_intersection_update -#define __pyx_n_s_IdentitySet_issubset __pyx_mstate_global->__pyx_n_s_IdentitySet_issubset -#define __pyx_n_s_IdentitySet_issuperset __pyx_mstate_global->__pyx_n_s_IdentitySet_issuperset -#define __pyx_n_s_IdentitySet_pop __pyx_mstate_global->__pyx_n_s_IdentitySet_pop -#define __pyx_n_s_IdentitySet_remove __pyx_mstate_global->__pyx_n_s_IdentitySet_remove -#define __pyx_n_s_IdentitySet_symmetric_difference __pyx_mstate_global->__pyx_n_s_IdentitySet_symmetric_difference -#define __pyx_n_s_IdentitySet_symmetric_difference_2 __pyx_mstate_global->__pyx_n_s_IdentitySet_symmetric_difference_2 -#define __pyx_n_s_IdentitySet_union __pyx_mstate_global->__pyx_n_s_IdentitySet_union -#define __pyx_n_s_IdentitySet_update __pyx_mstate_global->__pyx_n_s_IdentitySet_update -#define __pyx_kp_s_Incompatible_checksums_0x_x_vs_0 __pyx_mstate_global->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0 -#define __pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2 __pyx_mstate_global->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2 -#define __pyx_n_s_IndexError __pyx_mstate_global->__pyx_n_s_IndexError -#define __pyx_n_s_Iterable __pyx_mstate_global->__pyx_n_s_Iterable -#define __pyx_kp_s_Iterable_Any __pyx_mstate_global->__pyx_kp_s_Iterable_Any -#define __pyx_kp_s_Iterable_Hashable __pyx_mstate_global->__pyx_kp_s_Iterable_Hashable -#define __pyx_kp_s_Iterable__S __pyx_mstate_global->__pyx_kp_s_Iterable__S -#define __pyx_kp_s_Iterable__T __pyx_mstate_global->__pyx_kp_s_Iterable__T -#define __pyx_n_s_Iterator __pyx_mstate_global->__pyx_n_s_Iterator -#define __pyx_n_s_KeyError __pyx_mstate_global->__pyx_n_s_KeyError -#define __pyx_n_s_List __pyx_mstate_global->__pyx_n_s_List -#define __pyx_n_s_NoReturn __pyx_mstate_global->__pyx_n_s_NoReturn -#define __pyx_n_s_None __pyx_mstate_global->__pyx_n_s_None -#define __pyx_n_s_NotImplemented __pyx_mstate_global->__pyx_n_s_NotImplemented -#define __pyx_n_s_Optional __pyx_mstate_global->__pyx_n_s_Optional -#define __pyx_n_s_OrderedSet __pyx_mstate_global->__pyx_n_s_OrderedSet -#define __pyx_kp_s_OrderedSet_Union__T__S 
__pyx_mstate_global->__pyx_kp_s_OrderedSet_Union__T__S -#define __pyx_kp_s_OrderedSet__T __pyx_mstate_global->__pyx_kp_s_OrderedSet__T -#define __pyx_n_s_OrderedSet___class_getitem __pyx_mstate_global->__pyx_n_s_OrderedSet___class_getitem -#define __pyx_n_s_OrderedSet___reduce_cython __pyx_mstate_global->__pyx_n_s_OrderedSet___reduce_cython -#define __pyx_n_s_OrderedSet___setstate_cython __pyx_mstate_global->__pyx_n_s_OrderedSet___setstate_cython -#define __pyx_n_s_OrderedSet_add __pyx_mstate_global->__pyx_n_s_OrderedSet_add -#define __pyx_n_s_OrderedSet_clear __pyx_mstate_global->__pyx_n_s_OrderedSet_clear -#define __pyx_n_s_OrderedSet_copy __pyx_mstate_global->__pyx_n_s_OrderedSet_copy -#define __pyx_n_s_OrderedSet_difference __pyx_mstate_global->__pyx_n_s_OrderedSet_difference -#define __pyx_n_s_OrderedSet_difference_update __pyx_mstate_global->__pyx_n_s_OrderedSet_difference_update -#define __pyx_n_s_OrderedSet_discard __pyx_mstate_global->__pyx_n_s_OrderedSet_discard -#define __pyx_n_s_OrderedSet_insert __pyx_mstate_global->__pyx_n_s_OrderedSet_insert -#define __pyx_n_s_OrderedSet_intersection __pyx_mstate_global->__pyx_n_s_OrderedSet_intersection -#define __pyx_n_s_OrderedSet_intersection_update __pyx_mstate_global->__pyx_n_s_OrderedSet_intersection_update -#define __pyx_n_s_OrderedSet_pop __pyx_mstate_global->__pyx_n_s_OrderedSet_pop -#define __pyx_n_s_OrderedSet_remove __pyx_mstate_global->__pyx_n_s_OrderedSet_remove -#define __pyx_n_s_OrderedSet_symmetric_difference __pyx_mstate_global->__pyx_n_s_OrderedSet_symmetric_difference -#define __pyx_n_s_OrderedSet_symmetric_difference_2 __pyx_mstate_global->__pyx_n_s_OrderedSet_symmetric_difference_2 -#define __pyx_n_s_OrderedSet_union __pyx_mstate_global->__pyx_n_s_OrderedSet_union -#define __pyx_n_s_OrderedSet_update __pyx_mstate_global->__pyx_n_s_OrderedSet_update -#define __pyx_n_s_PickleError __pyx_mstate_global->__pyx_n_s_PickleError -#define __pyx_n_s_S __pyx_mstate_global->__pyx_n_s_S -#define __pyx_n_u_S __pyx_mstate_global->__pyx_n_u_S -#define __pyx_n_s_Self __pyx_mstate_global->__pyx_n_s_Self -#define __pyx_n_s_Set __pyx_mstate_global->__pyx_n_s_Set -#define __pyx_n_s_T __pyx_mstate_global->__pyx_n_s_T -#define __pyx_n_u_T __pyx_mstate_global->__pyx_n_u_T -#define __pyx_n_s_Tuple __pyx_mstate_global->__pyx_n_s_Tuple -#define __pyx_n_s_TypeError __pyx_mstate_global->__pyx_n_s_TypeError -#define __pyx_n_s_TypeVar __pyx_mstate_global->__pyx_n_s_TypeVar -#define __pyx_n_s_Union __pyx_mstate_global->__pyx_n_s_Union -#define __pyx_kp_u__2 __pyx_mstate_global->__pyx_kp_u__2 -#define __pyx_kp_u__3 __pyx_mstate_global->__pyx_kp_u__3 -#define __pyx_kp_u__6 __pyx_mstate_global->__pyx_kp_u__6 -#define __pyx_n_s__8 __pyx_mstate_global->__pyx_n_s__8 -#define __pyx_n_s_a __pyx_mstate_global->__pyx_n_s_a -#define __pyx_n_s_add __pyx_mstate_global->__pyx_n_s_add -#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines -#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool -#define __pyx_n_s_class __pyx_mstate_global->__pyx_n_s_class -#define __pyx_n_s_class_getitem __pyx_mstate_global->__pyx_n_s_class_getitem -#define __pyx_n_s_clear __pyx_mstate_global->__pyx_n_s_clear -#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback -#define __pyx_n_s_cls __pyx_mstate_global->__pyx_n_s_cls -#define __pyx_n_s_copy __pyx_mstate_global->__pyx_n_s_copy -#define __pyx_n_s_copy_2 __pyx_mstate_global->__pyx_n_s_copy_2 -#define __pyx_kp_s_cython_Py_ssize_t 
__pyx_mstate_global->__pyx_kp_s_cython_Py_ssize_t -#define __pyx_n_s_d __pyx_mstate_global->__pyx_n_s_d -#define __pyx_n_s_dict __pyx_mstate_global->__pyx_n_s_dict -#define __pyx_n_s_dict_2 __pyx_mstate_global->__pyx_n_s_dict_2 -#define __pyx_n_s_difference __pyx_mstate_global->__pyx_n_s_difference -#define __pyx_n_s_difference_update __pyx_mstate_global->__pyx_n_s_difference_update -#define __pyx_kp_u_disable __pyx_mstate_global->__pyx_kp_u_disable -#define __pyx_n_s_discard __pyx_mstate_global->__pyx_n_s_discard -#define __pyx_n_s_element __pyx_mstate_global->__pyx_n_s_element -#define __pyx_kp_u_enable __pyx_mstate_global->__pyx_kp_u_enable -#define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc -#define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate -#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import -#define __pyx_n_s_init __pyx_mstate_global->__pyx_n_s_init -#define __pyx_n_s_insert __pyx_mstate_global->__pyx_n_s_insert -#define __pyx_n_s_intersection __pyx_mstate_global->__pyx_n_s_intersection -#define __pyx_n_s_intersection_update __pyx_mstate_global->__pyx_n_s_intersection_update -#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled -#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine -#define __pyx_kp_u_isenabled __pyx_mstate_global->__pyx_kp_u_isenabled -#define __pyx_n_s_issubset __pyx_mstate_global->__pyx_n_s_issubset -#define __pyx_n_s_issuperset __pyx_mstate_global->__pyx_n_s_issuperset -#define __pyx_n_s_items __pyx_mstate_global->__pyx_n_s_items -#define __pyx_n_s_iterable __pyx_mstate_global->__pyx_n_s_iterable -#define __pyx_n_s_iterables __pyx_mstate_global->__pyx_n_s_iterables -#define __pyx_n_s_key __pyx_mstate_global->__pyx_n_s_key -#define __pyx_n_s_keys __pyx_mstate_global->__pyx_n_s_keys -#define __pyx_n_u_len __pyx_mstate_global->__pyx_n_u_len -#define __pyx_kp_s_lib_sqlalchemy_util__collections __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_util__collections -#define __pyx_n_u_list __pyx_mstate_global->__pyx_n_u_list -#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main -#define __pyx_n_u_members __pyx_mstate_global->__pyx_n_u_members -#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name -#define __pyx_n_s_new __pyx_mstate_global->__pyx_n_s_new -#define __pyx_n_s_other __pyx_mstate_global->__pyx_n_s_other -#define __pyx_n_s_other_set __pyx_mstate_global->__pyx_n_s_other_set -#define __pyx_n_s_pair __pyx_mstate_global->__pyx_n_s_pair -#define __pyx_n_s_pickle __pyx_mstate_global->__pyx_n_s_pickle -#define __pyx_n_s_pop __pyx_mstate_global->__pyx_n_s_pop -#define __pyx_kp_u_pop_from_an_empty_set __pyx_mstate_global->__pyx_kp_u_pop_from_an_empty_set -#define __pyx_n_s_popitem __pyx_mstate_global->__pyx_n_s_popitem -#define __pyx_n_s_pos __pyx_mstate_global->__pyx_n_s_pos -#define __pyx_n_s_pyx_PickleError __pyx_mstate_global->__pyx_n_s_pyx_PickleError -#define __pyx_n_s_pyx_checksum __pyx_mstate_global->__pyx_n_s_pyx_checksum -#define __pyx_n_s_pyx_result __pyx_mstate_global->__pyx_n_s_pyx_result -#define __pyx_n_s_pyx_state __pyx_mstate_global->__pyx_n_s_pyx_state -#define __pyx_n_s_pyx_type __pyx_mstate_global->__pyx_n_s_pyx_type -#define __pyx_n_s_pyx_unpickle_IdentitySet __pyx_mstate_global->__pyx_n_s_pyx_unpickle_IdentitySet -#define __pyx_n_s_pyx_unpickle_OrderedSet __pyx_mstate_global->__pyx_n_s_pyx_unpickle_OrderedSet -#define __pyx_n_s_pyx_vtable __pyx_mstate_global->__pyx_n_s_pyx_vtable -#define __pyx_n_s_reduce __pyx_mstate_global->__pyx_n_s_reduce -#define 
__pyx_n_s_reduce_cython __pyx_mstate_global->__pyx_n_s_reduce_cython -#define __pyx_n_s_reduce_ex __pyx_mstate_global->__pyx_n_s_reduce_ex -#define __pyx_n_s_remove __pyx_mstate_global->__pyx_n_s_remove -#define __pyx_n_s_repr __pyx_mstate_global->__pyx_n_s_repr -#define __pyx_n_s_result __pyx_mstate_global->__pyx_n_s_result -#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return -#define __pyx_n_s_self __pyx_mstate_global->__pyx_n_s_self -#define __pyx_n_s_seq __pyx_mstate_global->__pyx_n_s_seq -#define __pyx_kp_u_set_objects_are_unhashable __pyx_mstate_global->__pyx_kp_u_set_objects_are_unhashable -#define __pyx_n_s_setstate __pyx_mstate_global->__pyx_n_s_setstate -#define __pyx_n_s_setstate_cython __pyx_mstate_global->__pyx_n_s_setstate_cython -#define __pyx_n_s_slots __pyx_mstate_global->__pyx_n_s_slots -#define __pyx_n_s_sqlalchemy_util__collections_cy __pyx_mstate_global->__pyx_n_s_sqlalchemy_util__collections_cy -#define __pyx_n_s_state __pyx_mstate_global->__pyx_n_s_state -#define __pyx_n_s_str __pyx_mstate_global->__pyx_n_s_str -#define __pyx_kp_s_stringsource __pyx_mstate_global->__pyx_kp_s_stringsource -#define __pyx_n_s_symmetric_difference __pyx_mstate_global->__pyx_n_s_symmetric_difference -#define __pyx_n_s_symmetric_difference_update __pyx_mstate_global->__pyx_n_s_symmetric_difference_update -#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test -#define __pyx_kp_s_type_Self __pyx_mstate_global->__pyx_kp_s_type_Self -#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing -#define __pyx_n_s_union __pyx_mstate_global->__pyx_n_s_union -#define __pyx_n_s_unique_list __pyx_mstate_global->__pyx_n_s_unique_list -#define __pyx_n_s_update __pyx_mstate_global->__pyx_n_s_update -#define __pyx_n_s_use_setstate __pyx_mstate_global->__pyx_n_s_use_setstate -#define __pyx_n_s_value __pyx_mstate_global->__pyx_n_s_value -#define __pyx_n_s_values __pyx_mstate_global->__pyx_n_s_values -#define __pyx_int_61630440 __pyx_mstate_global->__pyx_int_61630440 -#define __pyx_int_75814257 __pyx_mstate_global->__pyx_int_75814257 -#define __pyx_int_143295406 __pyx_mstate_global->__pyx_int_143295406 -#define __pyx_int_183888701 __pyx_mstate_global->__pyx_int_183888701 -#define __pyx_int_197243545 __pyx_mstate_global->__pyx_int_197243545 -#define __pyx_int_242532825 __pyx_mstate_global->__pyx_int_242532825 -#define __pyx_tuple_ __pyx_mstate_global->__pyx_tuple_ -#define __pyx_tuple__4 __pyx_mstate_global->__pyx_tuple__4 -#define __pyx_tuple__5 __pyx_mstate_global->__pyx_tuple__5 -#define __pyx_tuple__7 __pyx_mstate_global->__pyx_tuple__7 -#define __pyx_tuple__10 __pyx_mstate_global->__pyx_tuple__10 -#define __pyx_tuple__11 __pyx_mstate_global->__pyx_tuple__11 -#define __pyx_tuple__12 __pyx_mstate_global->__pyx_tuple__12 -#define __pyx_tuple__14 __pyx_mstate_global->__pyx_tuple__14 -#define __pyx_tuple__15 __pyx_mstate_global->__pyx_tuple__15 -#define __pyx_tuple__17 __pyx_mstate_global->__pyx_tuple__17 -#define __pyx_tuple__19 __pyx_mstate_global->__pyx_tuple__19 -#define __pyx_tuple__22 __pyx_mstate_global->__pyx_tuple__22 -#define __pyx_tuple__24 __pyx_mstate_global->__pyx_tuple__24 -#define __pyx_tuple__28 __pyx_mstate_global->__pyx_tuple__28 -#define __pyx_tuple__30 __pyx_mstate_global->__pyx_tuple__30 -#define __pyx_tuple__32 __pyx_mstate_global->__pyx_tuple__32 -#define __pyx_tuple__34 __pyx_mstate_global->__pyx_tuple__34 -#define __pyx_tuple__37 __pyx_mstate_global->__pyx_tuple__37 -#define __pyx_tuple__41 __pyx_mstate_global->__pyx_tuple__41 -#define __pyx_tuple__43 
__pyx_mstate_global->__pyx_tuple__43 -#define __pyx_tuple__45 __pyx_mstate_global->__pyx_tuple__45 -#define __pyx_tuple__49 __pyx_mstate_global->__pyx_tuple__49 -#define __pyx_tuple__52 __pyx_mstate_global->__pyx_tuple__52 -#define __pyx_tuple__67 __pyx_mstate_global->__pyx_tuple__67 -#define __pyx_codeobj__9 __pyx_mstate_global->__pyx_codeobj__9 -#define __pyx_codeobj__13 __pyx_mstate_global->__pyx_codeobj__13 -#define __pyx_codeobj__16 __pyx_mstate_global->__pyx_codeobj__16 -#define __pyx_codeobj__18 __pyx_mstate_global->__pyx_codeobj__18 -#define __pyx_codeobj__20 __pyx_mstate_global->__pyx_codeobj__20 -#define __pyx_codeobj__21 __pyx_mstate_global->__pyx_codeobj__21 -#define __pyx_codeobj__23 __pyx_mstate_global->__pyx_codeobj__23 -#define __pyx_codeobj__25 __pyx_mstate_global->__pyx_codeobj__25 -#define __pyx_codeobj__26 __pyx_mstate_global->__pyx_codeobj__26 -#define __pyx_codeobj__27 __pyx_mstate_global->__pyx_codeobj__27 -#define __pyx_codeobj__29 __pyx_mstate_global->__pyx_codeobj__29 -#define __pyx_codeobj__31 __pyx_mstate_global->__pyx_codeobj__31 -#define __pyx_codeobj__33 __pyx_mstate_global->__pyx_codeobj__33 -#define __pyx_codeobj__35 __pyx_mstate_global->__pyx_codeobj__35 -#define __pyx_codeobj__36 __pyx_mstate_global->__pyx_codeobj__36 -#define __pyx_codeobj__38 __pyx_mstate_global->__pyx_codeobj__38 -#define __pyx_codeobj__39 __pyx_mstate_global->__pyx_codeobj__39 -#define __pyx_codeobj__40 __pyx_mstate_global->__pyx_codeobj__40 -#define __pyx_codeobj__42 __pyx_mstate_global->__pyx_codeobj__42 -#define __pyx_codeobj__44 __pyx_mstate_global->__pyx_codeobj__44 -#define __pyx_codeobj__46 __pyx_mstate_global->__pyx_codeobj__46 -#define __pyx_codeobj__47 __pyx_mstate_global->__pyx_codeobj__47 -#define __pyx_codeobj__48 __pyx_mstate_global->__pyx_codeobj__48 -#define __pyx_codeobj__50 __pyx_mstate_global->__pyx_codeobj__50 -#define __pyx_codeobj__51 __pyx_mstate_global->__pyx_codeobj__51 -#define __pyx_codeobj__53 __pyx_mstate_global->__pyx_codeobj__53 -#define __pyx_codeobj__54 __pyx_mstate_global->__pyx_codeobj__54 -#define __pyx_codeobj__55 __pyx_mstate_global->__pyx_codeobj__55 -#define __pyx_codeobj__56 __pyx_mstate_global->__pyx_codeobj__56 -#define __pyx_codeobj__57 __pyx_mstate_global->__pyx_codeobj__57 -#define __pyx_codeobj__58 __pyx_mstate_global->__pyx_codeobj__58 -#define __pyx_codeobj__59 __pyx_mstate_global->__pyx_codeobj__59 -#define __pyx_codeobj__60 __pyx_mstate_global->__pyx_codeobj__60 -#define __pyx_codeobj__61 __pyx_mstate_global->__pyx_codeobj__61 -#define __pyx_codeobj__62 __pyx_mstate_global->__pyx_codeobj__62 -#define __pyx_codeobj__63 __pyx_mstate_global->__pyx_codeobj__63 -#define __pyx_codeobj__64 __pyx_mstate_global->__pyx_codeobj__64 -#define __pyx_codeobj__65 __pyx_mstate_global->__pyx_codeobj__65 -#define __pyx_codeobj__66 __pyx_mstate_global->__pyx_codeobj__66 -#define __pyx_codeobj__68 __pyx_mstate_global->__pyx_codeobj__68 -#define __pyx_codeobj__69 __pyx_mstate_global->__pyx_codeobj__69 -/* #### Code section: module_code ### */ - -/* "sqlalchemy/util/_collections_cy.py":38 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -PyDoc_STRVAR(__pyx_doc_10sqlalchemy_4util_15_collections_cy__is_compiled, "Utility function to indicate if this 
module is compiled or not."); -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_4util_15_collections_cy__is_compiled}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy__is_compiled(__pyx_self); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled", 1); - - /* "sqlalchemy/util/_collections_cy.py":40 - * def _is_compiled() -> bool: - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(Py_True); - __pyx_r = Py_True; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":38 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":48 - * - * - * @cython.ccall # <<<<<<<<<<<<<< - * def unique_list(seq: Iterable[_T]) -> List[_T]: - * # this version seems somewhat faster for smaller sizes, but it's - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_3unique_list(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_unique_list(PyObject *__pyx_v_seq, CYTHON_UNUSED int __pyx_skip_dispatch) { - PyObject *__pyx_v_seen = 0; - PyObject *__pyx_7genexpr__pyx_v_x = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - Py_ssize_t __pyx_t_3; - PyObject *(*__pyx_t_4)(PyObject *); - PyObject *__pyx_t_5 = NULL; - int __pyx_t_6; - int __pyx_t_7; - int __pyx_t_8; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("unique_list", 1); - - /* "sqlalchemy/util/_collections_cy.py":55 - * # return PyDict_Keys(w) if cython.compiled else list(w) - * if cython.compiled: - * seen: Set[_T] = set() # <<<<<<<<<<<<<< - * return [x for x in seq if x not in seen and not set.add(seen, x)] - * else: - */ - __pyx_t_1 = PySet_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 55, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_seen = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":56 - * if cython.compiled: - * seen: Set[_T] = set() - * return [x for x in seq if x not in seen and not set.add(seen, x)] # <<<<<<<<<<<<<< - * else: - * return list(dict.fromkeys(seq)) - */ - __Pyx_XDECREF(__pyx_r); - { /* enter inner scope */ - __pyx_t_1 
= PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 56, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_1); - if (likely(PyList_CheckExact(__pyx_v_seq)) || PyTuple_CheckExact(__pyx_v_seq)) { - __pyx_t_2 = __pyx_v_seq; __Pyx_INCREF(__pyx_t_2); - __pyx_t_3 = 0; - __pyx_t_4 = NULL; - } else { - __pyx_t_3 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_seq); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 56, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 56, __pyx_L5_error) - } - for (;;) { - if (likely(!__pyx_t_4)) { - if (likely(PyList_CheckExact(__pyx_t_2))) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 56, __pyx_L5_error) - #endif - if (__pyx_t_3 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_5 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_5); __pyx_t_3++; if (unlikely((0 < 0))) __PYX_ERR(0, 56, __pyx_L5_error) - #else - __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 56, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_5); - #endif - } else { - { - Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 56, __pyx_L5_error) - #endif - if (__pyx_t_3 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_5); __pyx_t_3++; if (unlikely((0 < 0))) __PYX_ERR(0, 56, __pyx_L5_error) - #else - __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 56, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_5); - #endif - } - } else { - __pyx_t_5 = __pyx_t_4(__pyx_t_2); - if (unlikely(!__pyx_t_5)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 56, __pyx_L5_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_5); - } - __Pyx_XDECREF_SET(__pyx_7genexpr__pyx_v_x, __pyx_t_5); - __pyx_t_5 = 0; - __pyx_t_7 = (__Pyx_PySet_ContainsTF(__pyx_7genexpr__pyx_v_x, __pyx_v_seen, Py_NE)); if (unlikely((__pyx_t_7 < 0))) __PYX_ERR(0, 56, __pyx_L5_error) - if (__pyx_t_7) { - } else { - __pyx_t_6 = __pyx_t_7; - goto __pyx_L9_bool_binop_done; - } - __pyx_t_8 = PySet_Add(__pyx_v_seen, __pyx_7genexpr__pyx_v_x); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 56, __pyx_L5_error) - __pyx_t_7 = (!(__pyx_t_8 != 0)); - __pyx_t_6 = __pyx_t_7; - __pyx_L9_bool_binop_done:; - if (__pyx_t_6) { - if (unlikely(__Pyx_ListComp_Append(__pyx_t_1, (PyObject*)__pyx_7genexpr__pyx_v_x))) __PYX_ERR(0, 56, __pyx_L5_error) - } - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_7genexpr__pyx_v_x); __pyx_7genexpr__pyx_v_x = 0; - goto __pyx_L12_exit_scope; - __pyx_L5_error:; - __Pyx_XDECREF(__pyx_7genexpr__pyx_v_x); __pyx_7genexpr__pyx_v_x = 0; - goto __pyx_L1_error; - __pyx_L12_exit_scope:; - } /* exit inner scope */ - __pyx_r = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":48 - * - * - * @cython.ccall # <<<<<<<<<<<<<< - * def unique_list(seq: Iterable[_T]) -> List[_T]: - * # this version seems somewhat faster for smaller sizes, but it's - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - 
__Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.unique_list", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_seen); - __Pyx_XDECREF(__pyx_7genexpr__pyx_v_x); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_3unique_list(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_3unique_list = {"unique_list", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_3unique_list, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_3unique_list(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_seq = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("unique_list (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_seq,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_seq)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 48, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "unique_list") < 0)) __PYX_ERR(0, 48, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_seq = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("unique_list", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 48, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.unique_list", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = 
__pyx_pf_10sqlalchemy_4util_15_collections_cy_2unique_list(__pyx_self, __pyx_v_seq); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_2unique_list(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_seq) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("unique_list", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_unique_list(__pyx_v_seq, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 48, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.unique_list", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":86 - * _list: List[_T] - * - * @classmethod # <<<<<<<<<<<<<< - * def __class_getitem__(cls, key: Any) -> type[Self]: - * return cls - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__(PyObject *__pyx_v_cls, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__ = {"__class_getitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__(PyObject *__pyx_v_cls, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - CYTHON_UNUSED PyObject *__pyx_v_key = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__class_getitem__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - 
kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 86, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__class_getitem__") < 0)) __PYX_ERR(0, 86, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_key = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__class_getitem__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 86, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__class_getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet___class_getitem__(((PyTypeObject*)__pyx_v_cls), __pyx_v_key); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet___class_getitem__(PyTypeObject *__pyx_v_cls, CYTHON_UNUSED PyObject *__pyx_v_key) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__class_getitem__", 1); - - /* "sqlalchemy/util/_collections_cy.py":88 - * @classmethod - * def __class_getitem__(cls, key: Any) -> type[Self]: - * return cls # <<<<<<<<<<<<<< - * - * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_cls); - __pyx_r = ((PyObject *)__pyx_v_cls); - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":86 - * _list: List[_T] - * - * @classmethod # <<<<<<<<<<<<<< - * def __class_getitem__(cls, key: Any) -> type[Self]: - * return cls - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":90 - * return cls - * - * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: # <<<<<<<<<<<<<< - * if d is not None: - * if isinstance(d, set) or isinstance(d, dict): - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_3__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_3__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_d = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if 
(unlikely(__pyx_nargs < 0)) return -1; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_d,0}; - values[0] = __Pyx_Arg_NewRef_VARARGS(((PyObject *)Py_None)); - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_VARARGS(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (kw_args > 0) { - PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_d); - if (value) { values[0] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 90, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 90, __pyx_L3_error) - } - } else { - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_d = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 0, 0, 1, __pyx_nargs); __PYX_ERR(0, 90, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_2__init__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_d); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_2__init__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_d) { - int __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - unsigned int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 1); - - /* "sqlalchemy/util/_collections_cy.py":91 - * - * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: - * if d is not None: # <<<<<<<<<<<<<< - * if isinstance(d, set) or isinstance(d, dict): - * self._list = list(d) - */ - __pyx_t_1 = (__pyx_v_d != Py_None); - if (__pyx_t_1) { - - /* "sqlalchemy/util/_collections_cy.py":92 - * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: - * if d is not None: - * if isinstance(d, set) or isinstance(d, dict): # <<<<<<<<<<<<<< - * self._list = list(d) - * else: - */ - __pyx_t_2 = PySet_Check(__pyx_v_d); - if (!__pyx_t_2) { - } else { - __pyx_t_1 = __pyx_t_2; - goto __pyx_L5_bool_binop_done; - } - __pyx_t_2 = PyDict_Check(__pyx_v_d); - __pyx_t_1 = __pyx_t_2; - 
__pyx_L5_bool_binop_done:; - if (__pyx_t_1) { - - /* "sqlalchemy/util/_collections_cy.py":93 - * if d is not None: - * if isinstance(d, set) or isinstance(d, dict): - * self._list = list(d) # <<<<<<<<<<<<<< - * else: - * self._list = unique_list(d) - */ - __pyx_t_3 = PySequence_List(__pyx_v_d); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_v_self->_list); - __Pyx_DECREF(__pyx_v_self->_list); - __pyx_v_self->_list = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":92 - * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: - * if d is not None: - * if isinstance(d, set) or isinstance(d, dict): # <<<<<<<<<<<<<< - * self._list = list(d) - * else: - */ - goto __pyx_L4; - } - - /* "sqlalchemy/util/_collections_cy.py":95 - * self._list = list(d) - * else: - * self._list = unique_list(d) # <<<<<<<<<<<<<< - * set.__init__(self, self._list) - * else: - */ - /*else*/ { - __pyx_t_3 = __pyx_f_10sqlalchemy_4util_15_collections_cy_unique_list(__pyx_v_d, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 95, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_v_self->_list); - __Pyx_DECREF(__pyx_v_self->_list); - __pyx_v_self->_list = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; - } - __pyx_L4:; - - /* "sqlalchemy/util/_collections_cy.py":96 - * else: - * self._list = unique_list(d) - * set.__init__(self, self._list) # <<<<<<<<<<<<<< - * else: - * self._list = [] - */ - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_init); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 96, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = NULL; - __pyx_t_6 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - __pyx_t_6 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[3] = {__pyx_t_5, ((PyObject *)__pyx_v_self), __pyx_v_self->_list}; - __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_6, 2+__pyx_t_6); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 96, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":91 - * - * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: - * if d is not None: # <<<<<<<<<<<<<< - * if isinstance(d, set) or isinstance(d, dict): - * self._list = list(d) - */ - goto __pyx_L3; - } - - /* "sqlalchemy/util/_collections_cy.py":98 - * set.__init__(self, self._list) - * else: - * self._list = [] # <<<<<<<<<<<<<< - * set.__init__(self) - * - */ - /*else*/ { - __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 98, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_v_self->_list); - __Pyx_DECREF(__pyx_v_self->_list); - __pyx_v_self->_list = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":99 - * else: - * self._list = [] - * set.__init__(self) # <<<<<<<<<<<<<< - * - * def copy(self) -> OrderedSet[_T]: - */ - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_init); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 99, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = NULL; - 
__pyx_t_6 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - __pyx_t_6 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_5, ((PyObject *)__pyx_v_self)}; - __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_6, 1+__pyx_t_6); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 99, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_L3:; - - /* "sqlalchemy/util/_collections_cy.py":90 - * return cls - * - * def __init__(self, d: Optional[Iterable[_T]] = None) -> None: # <<<<<<<<<<<<<< - * if d is not None: - * if isinstance(d, set) or isinstance(d, dict): - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":101 - * set.__init__(self) - * - * def copy(self) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * return self._from_list(list(self._list)) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy = {"copy", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("copy (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("copy", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "copy", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_4copy(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_4copy(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { - 
PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("copy", 1); - - /* "sqlalchemy/util/_collections_cy.py":102 - * - * def copy(self) -> OrderedSet[_T]: - * return self._from_list(list(self._list)) # <<<<<<<<<<<<<< - * - * @cython.final - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PySequence_List(__pyx_v_self->_list); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 102, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(__pyx_v_self, ((PyObject*)__pyx_t_1))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 102, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":101 - * set.__init__(self) - * - * def copy(self) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * return self._from_list(list(self._list)) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.copy", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":104 - * return self._from_list(list(self._list)) - * - * @cython.final # <<<<<<<<<<<<<< - * @cython.cfunc - * @cython.inline - */ - -static CYTHON_INLINE struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_new_list) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_new = 0; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_from_list", 1); - - /* "sqlalchemy/util/_collections_cy.py":108 - * @cython.inline - * def _from_list(self, new_list: List[_T]) -> OrderedSet: # type: ignore[type-arg] # noqa: E501 - * new: OrderedSet = OrderedSet.__new__(OrderedSet) # type: ignore[type-arg] # noqa: E501 # <<<<<<<<<<<<<< - * new._list = new_list - * set.update(new, new_list) - */ - __pyx_t_1 = ((PyObject *)__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_OrderedSet(((PyTypeObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet), __pyx_empty_tuple, NULL)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 108, __pyx_L1_error) - __Pyx_GOTREF((PyObject *)__pyx_t_1); - __pyx_v_new = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":109 - * def _from_list(self, new_list: List[_T]) -> OrderedSet: # type: ignore[type-arg] # noqa: E501 - * new: OrderedSet = OrderedSet.__new__(OrderedSet) # type: ignore[type-arg] # noqa: E501 - * new._list = new_list # <<<<<<<<<<<<<< - * set.update(new, new_list) - * return new - */ - __Pyx_INCREF(__pyx_v_new_list); - __Pyx_GIVEREF(__pyx_v_new_list); - __Pyx_GOTREF(__pyx_v_new->_list); - __Pyx_DECREF(__pyx_v_new->_list); - 
__pyx_v_new->_list = __pyx_v_new_list; - - /* "sqlalchemy/util/_collections_cy.py":110 - * new: OrderedSet = OrderedSet.__new__(OrderedSet) # type: ignore[type-arg] # noqa: E501 - * new._list = new_list - * set.update(new, new_list) # <<<<<<<<<<<<<< - * return new - * - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 110, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[3] = {__pyx_t_3, ((PyObject *)__pyx_v_new), __pyx_v_new_list}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 2+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 110, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":111 - * new._list = new_list - * set.update(new, new_list) - * return new # <<<<<<<<<<<<<< - * - * def add(self, element: _T, /) -> None: - */ - __Pyx_XDECREF((PyObject *)__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_new); - __pyx_r = __pyx_v_new; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":104 - * return self._from_list(list(self._list)) - * - * @cython.final # <<<<<<<<<<<<<< - * @cython.cfunc - * @cython.inline - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet._from_list", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_new); - __Pyx_XGIVEREF((PyObject *)__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":113 - * return new - * - * def add(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element not in self: - * self._list.append(element) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add(PyObject *__pyx_v_self, PyObject *__pyx_v_element); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add = {"add", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add(PyObject *__pyx_v_self, PyObject *__pyx_v_element) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("add (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_6add(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_element)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_6add(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_element) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int 
__pyx_t_1; - int __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("add", 1); - - /* "sqlalchemy/util/_collections_cy.py":114 - * - * def add(self, element: _T, /) -> None: - * if element not in self: # <<<<<<<<<<<<<< - * self._list.append(element) - * set.add(self, element) - */ - __pyx_t_1 = (__Pyx_PySequence_ContainsTF(__pyx_v_element, ((PyObject *)__pyx_v_self), Py_NE)); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(0, 114, __pyx_L1_error) - if (__pyx_t_1) { - - /* "sqlalchemy/util/_collections_cy.py":115 - * def add(self, element: _T, /) -> None: - * if element not in self: - * self._list.append(element) # <<<<<<<<<<<<<< - * set.add(self, element) - * - */ - if (unlikely(__pyx_v_self->_list == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); - __PYX_ERR(0, 115, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_self->_list, __pyx_v_element); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 115, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":116 - * if element not in self: - * self._list.append(element) - * set.add(self, element) # <<<<<<<<<<<<<< - * - * def remove(self, element: _T, /) -> None: - */ - if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { - PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "add", "set"); - __PYX_ERR(0, 116, __pyx_L1_error) - } - __pyx_t_2 = PySet_Add(((PyObject*)__pyx_v_self), __pyx_v_element); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 116, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":114 - * - * def add(self, element: _T, /) -> None: - * if element not in self: # <<<<<<<<<<<<<< - * self._list.append(element) - * set.add(self, element) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":113 - * return new - * - * def add(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element not in self: - * self._list.append(element) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.add", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":118 - * set.add(self, element) - * - * def remove(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * # set.remove will raise if element is not in self - * set.remove(self, element) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove(PyObject *__pyx_v_self, PyObject *__pyx_v_element); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove = {"remove", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove(PyObject *__pyx_v_self, PyObject *__pyx_v_element) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("remove (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_8remove(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_element)); - - /* function exit code */ - 
__Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_8remove(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_element) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("remove", 1); - - /* "sqlalchemy/util/_collections_cy.py":120 - * def remove(self, element: _T, /) -> None: - * # set.remove will raise if element is not in self - * set.remove(self, element) # <<<<<<<<<<<<<< - * self._list.remove(element) - * - */ - if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { - PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "remove", "set"); - __PYX_ERR(0, 120, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PySet_Remove(((PyObject*)__pyx_v_self), __pyx_v_element); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 120, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":121 - * # set.remove will raise if element is not in self - * set.remove(self, element) - * self._list.remove(element) # <<<<<<<<<<<<<< - * - * def pop(self) -> _T: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_list, __pyx_n_s_remove); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 121, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_element}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 121, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":118 - * set.add(self, element) - * - * def remove(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * # set.remove will raise if element is not in self - * set.remove(self, element) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.remove", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":123 - * self._list.remove(element) - * - * def pop(self) -> _T: # <<<<<<<<<<<<<< - * try: - * value = self._list.pop() - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop = {"pop", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pop (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("pop", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "pop", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_10pop(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_10pop(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { - PyObject *__pyx_v_value = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - int __pyx_t_9; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("pop", 1); - - /* "sqlalchemy/util/_collections_cy.py":124 - * - * def pop(self) -> _T: - * try: # <<<<<<<<<<<<<< - * value = self._list.pop() - * except IndexError: - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - /*try:*/ { - - /* "sqlalchemy/util/_collections_cy.py":125 - * def pop(self) -> _T: - * try: - * value = self._list.pop() # <<<<<<<<<<<<<< - * except IndexError: - * raise KeyError("pop from an empty set") from None - */ - if (unlikely(__pyx_v_self->_list == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "pop"); - __PYX_ERR(0, 125, __pyx_L3_error) - } - __pyx_t_4 = __Pyx_PyList_Pop(__pyx_v_self->_list); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 125, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_v_value = __pyx_t_4; - __pyx_t_4 = 0; - - /* "sqlalchemy/util/_collections_cy.py":124 - * - * def pop(self) -> _T: - * try: # <<<<<<<<<<<<<< - * value = self._list.pop() - * except IndexError: - */ - } - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - goto __pyx_L8_try_end; - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - - /* "sqlalchemy/util/_collections_cy.py":126 - * try: - * value = self._list.pop() - * except IndexError: # <<<<<<<<<<<<<< - * raise 
KeyError("pop from an empty set") from None - * set.remove(self, value) - */ - __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_IndexError); - if (__pyx_t_5) { - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_4, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 126, __pyx_L5_except_error) - __Pyx_XGOTREF(__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_6); - __Pyx_XGOTREF(__pyx_t_7); - - /* "sqlalchemy/util/_collections_cy.py":127 - * value = self._list.pop() - * except IndexError: - * raise KeyError("pop from an empty set") from None # <<<<<<<<<<<<<< - * set.remove(self, value) - * return value - */ - __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_KeyError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 127, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_Raise(__pyx_t_8, 0, 0, Py_None); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __PYX_ERR(0, 127, __pyx_L5_except_error) - } - goto __pyx_L5_except_error; - - /* "sqlalchemy/util/_collections_cy.py":124 - * - * def pop(self) -> _T: - * try: # <<<<<<<<<<<<<< - * value = self._list.pop() - * except IndexError: - */ - __pyx_L5_except_error:; - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - goto __pyx_L1_error; - __pyx_L8_try_end:; - } - - /* "sqlalchemy/util/_collections_cy.py":128 - * except IndexError: - * raise KeyError("pop from an empty set") from None - * set.remove(self, value) # <<<<<<<<<<<<<< - * return value - * - */ - if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { - PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "remove", "set"); - __PYX_ERR(0, 128, __pyx_L1_error) - } - __pyx_t_9 = __Pyx_PySet_Remove(((PyObject*)__pyx_v_self), __pyx_v_value); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 128, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":129 - * raise KeyError("pop from an empty set") from None - * set.remove(self, value) - * return value # <<<<<<<<<<<<<< - * - * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_value); - __pyx_r = __pyx_v_value; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":123 - * self._list.remove(element) - * - * def pop(self) -> _T: # <<<<<<<<<<<<<< - * try: - * value = self._list.pop() - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_value); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":131 - * return value - * - * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element not in self: - * self._list.insert(pos, element) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert = {"insert", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - Py_ssize_t __pyx_v_pos; - PyObject *__pyx_v_element = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[2] = {0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("insert (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {0}; - if (__pyx_kwds && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) > 0) { - if (likely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values, 0, "insert") < 0)) __PYX_ERR(0, 131, __pyx_L3_error) - } else if (unlikely(__pyx_nargs != 2)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - } - __pyx_v_pos = __Pyx_PyIndex_AsSsize_t(values[0]); if (unlikely((__pyx_v_pos == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 131, __pyx_L3_error) - __pyx_v_element = values[1]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("insert", 1, 2, 2, __pyx_nargs); __PYX_ERR(0, 131, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_12insert(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_pos, __pyx_v_element); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_12insert(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, Py_ssize_t __pyx_v_pos, PyObject *__pyx_v_element) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("insert", 1); - - /* "sqlalchemy/util/_collections_cy.py":132 - * - * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: - * if element not in self: # <<<<<<<<<<<<<< - * self._list.insert(pos, element) - * set.add(self, element) - */ - __pyx_t_1 = 
(__Pyx_PySequence_ContainsTF(__pyx_v_element, ((PyObject *)__pyx_v_self), Py_NE)); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(0, 132, __pyx_L1_error) - if (__pyx_t_1) { - - /* "sqlalchemy/util/_collections_cy.py":133 - * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: - * if element not in self: - * self._list.insert(pos, element) # <<<<<<<<<<<<<< - * set.add(self, element) - * - */ - if (unlikely(__pyx_v_self->_list == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "insert"); - __PYX_ERR(0, 133, __pyx_L1_error) - } - __pyx_t_2 = PyList_Insert(__pyx_v_self->_list, __pyx_v_pos, __pyx_v_element); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 133, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":134 - * if element not in self: - * self._list.insert(pos, element) - * set.add(self, element) # <<<<<<<<<<<<<< - * - * def discard(self, element: _T, /) -> None: - */ - if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { - PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "add", "set"); - __PYX_ERR(0, 134, __pyx_L1_error) - } - __pyx_t_2 = PySet_Add(((PyObject*)__pyx_v_self), __pyx_v_element); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 134, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":132 - * - * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: - * if element not in self: # <<<<<<<<<<<<<< - * self._list.insert(pos, element) - * set.add(self, element) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":131 - * return value - * - * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element not in self: - * self._list.insert(pos, element) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":136 - * set.add(self, element) - * - * def discard(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element in self: - * set.remove(self, element) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard(PyObject *__pyx_v_self, PyObject *__pyx_v_element); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard = {"discard", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard(PyObject *__pyx_v_self, PyObject *__pyx_v_element) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("discard (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_14discard(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_element)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_14discard(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_element) { - PyObject *__pyx_r = NULL; - 
__Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - unsigned int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("discard", 1); - - /* "sqlalchemy/util/_collections_cy.py":137 - * - * def discard(self, element: _T, /) -> None: - * if element in self: # <<<<<<<<<<<<<< - * set.remove(self, element) - * self._list.remove(element) - */ - __pyx_t_1 = (__Pyx_PySequence_ContainsTF(__pyx_v_element, ((PyObject *)__pyx_v_self), Py_EQ)); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(0, 137, __pyx_L1_error) - if (__pyx_t_1) { - - /* "sqlalchemy/util/_collections_cy.py":138 - * def discard(self, element: _T, /) -> None: - * if element in self: - * set.remove(self, element) # <<<<<<<<<<<<<< - * self._list.remove(element) - * - */ - if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { - PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "remove", "set"); - __PYX_ERR(0, 138, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_PySet_Remove(((PyObject*)__pyx_v_self), __pyx_v_element); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 138, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":139 - * if element in self: - * set.remove(self, element) - * self._list.remove(element) # <<<<<<<<<<<<<< - * - * def clear(self) -> None: - */ - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_list, __pyx_n_s_remove); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 139, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = NULL; - __pyx_t_6 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - __pyx_t_6 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_5, __pyx_v_element}; - __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_4, __pyx_callargs+1-__pyx_t_6, 1+__pyx_t_6); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 139, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":137 - * - * def discard(self, element: _T, /) -> None: - * if element in self: # <<<<<<<<<<<<<< - * set.remove(self, element) - * self._list.remove(element) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":136 - * set.add(self, element) - * - * def discard(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element in self: - * set.remove(self, element) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.discard", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":141 - * self._list.remove(element) - * - * def clear(self) -> None: # <<<<<<<<<<<<<< - * set.clear(self) # type: ignore[arg-type] - * self._list = [] - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear(PyObject *__pyx_v_self, -#if 
CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear = {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("clear (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("clear", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "clear", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_16clear(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_16clear(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("clear", 1); - - /* "sqlalchemy/util/_collections_cy.py":142 - * - * def clear(self) -> None: - * set.clear(self) # type: ignore[arg-type] # <<<<<<<<<<<<<< - * self._list = [] - * - */ - if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { - PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "clear", "set"); - __PYX_ERR(0, 142, __pyx_L1_error) - } - __pyx_t_1 = PySet_Clear(((PyObject*)__pyx_v_self)); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 142, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":143 - * def clear(self) -> None: - * set.clear(self) # type: ignore[arg-type] - * self._list = [] # <<<<<<<<<<<<<< - * - * def __getitem__(self, key: cython.Py_ssize_t) -> _T: - */ - __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 143, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_v_self->_list); - __Pyx_DECREF(__pyx_v_self->_list); - __pyx_v_self->_list = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":141 - * self._list.remove(element) - * - * def clear(self) -> None: # <<<<<<<<<<<<<< - * set.clear(self) # type: ignore[arg-type] - * self._list = [] - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - 
__Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":145 - * self._list = [] - * - * def __getitem__(self, key: cython.Py_ssize_t) -> _T: # <<<<<<<<<<<<<< - * return self._list[key] - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_19__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_arg_key); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_19__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_arg_key) { - Py_ssize_t __pyx_v_key; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - assert(__pyx_arg_key); { - __pyx_v_key = __Pyx_PyIndex_AsSsize_t(__pyx_arg_key); if (unlikely((__pyx_v_key == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 145, __pyx_L3_error) - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_18__getitem__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((Py_ssize_t)__pyx_v_key)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_18__getitem__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, Py_ssize_t __pyx_v_key) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__getitem__", 1); - - /* "sqlalchemy/util/_collections_cy.py":146 - * - * def __getitem__(self, key: cython.Py_ssize_t) -> _T: - * return self._list[key] # <<<<<<<<<<<<<< - * - * def __iter__(self) -> Iterator[_T]: - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_self->_list == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 146, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_List(__pyx_v_self->_list, __pyx_v_key, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 146, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":145 - * self._list = [] - * - * def __getitem__(self, key: cython.Py_ssize_t) -> _T: # <<<<<<<<<<<<<< - * return self._list[key] - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":148 - * return self._list[key] - * - * def __iter__(self) -> Iterator[_T]: # <<<<<<<<<<<<<< - * return iter(self._list) - * - */ - -/* 
Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_21__iter__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_21__iter__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_20__iter__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_20__iter__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__iter__", 1); - - /* "sqlalchemy/util/_collections_cy.py":149 - * - * def __iter__(self) -> Iterator[_T]: - * return iter(self._list) # <<<<<<<<<<<<<< - * - * def __add__(self, other: Iterator[_T]) -> OrderedSet[_T]: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_v_self->_list; - __Pyx_INCREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 149, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":148 - * return self._list[key] - * - * def __iter__(self) -> Iterator[_T]: # <<<<<<<<<<<<<< - * return iter(self._list) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":151 - * return iter(self._list) - * - * def __add__(self, other: Iterator[_T]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * return self.union(other) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_23__add__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_23__add__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__add__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_22__add__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_22__add__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int 
__pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__add__", 1); - - /* "sqlalchemy/util/_collections_cy.py":152 - * - * def __add__(self, other: Iterator[_T]) -> OrderedSet[_T]: - * return self.union(other) # <<<<<<<<<<<<<< - * - * def __repr__(self) -> str: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_union); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 152, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 152, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":151 - * return iter(self._list) - * - * def __add__(self, other: Iterator[_T]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * return self.union(other) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__add__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":154 - * return self.union(other) - * - * def __repr__(self) -> str: # <<<<<<<<<<<<<< - * return "%s(%r)" % (self.__class__.__name__, self._list) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_24__repr__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_24__repr__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - Py_UCS4 __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__repr__", 1); - - /* "sqlalchemy/util/_collections_cy.py":155 - * - * def __repr__(self) -> str: - * return "%s(%r)" % (self.__class__.__name__, self._list) # <<<<<<<<<<<<<< - * - * __str__ = __repr__ - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PyTuple_New(4); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 155, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = 0; - __pyx_t_3 = 127; - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 155, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_name); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 155, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_t_4 = __Pyx_PyObject_FormatSimpleAndDecref(PyObject_Unicode(__pyx_t_5), __pyx_empty_unicode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 155, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; - __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4); - __pyx_t_4 = 0; - __Pyx_INCREF(__pyx_kp_u__2); - __pyx_t_2 += 1; - __Pyx_GIVEREF(__pyx_kp_u__2); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_kp_u__2); - __pyx_t_4 = __Pyx_PyObject_FormatSimpleAndDecref(PyObject_Repr(__pyx_v_self->_list), __pyx_empty_unicode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 155, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; - __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4); - __pyx_t_4 = 0; - __Pyx_INCREF(__pyx_kp_u__3); - __pyx_t_2 += 1; - __Pyx_GIVEREF(__pyx_kp_u__3); - PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_kp_u__3); - __pyx_t_4 = __Pyx_PyUnicode_Join(__pyx_t_1, 4, __pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 155, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":154 - * return self.union(other) - * - * def __repr__(self) -> str: # <<<<<<<<<<<<<< - * return "%s(%r)" % (self.__class__.__name__, self._list) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":160 - * - * # @cython.ccall # cdef function cannot have star argument - * def update(self, *iterables: Iterable[_T]) -> None: # <<<<<<<<<<<<<< - * for iterable in iterables: - * for element in iterable: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update = {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_iterables = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - 
__Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("update (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "update", 0))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_iterables = __pyx_args; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_26update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_iterables); - - /* function exit code */ - __Pyx_DECREF(__pyx_v_iterables); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_26update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_iterables) { - PyObject *__pyx_v_iterable = NULL; - PyObject *__pyx_v_element = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - Py_ssize_t __pyx_t_4; - PyObject *(*__pyx_t_5)(PyObject *); - PyObject *__pyx_t_6 = NULL; - int __pyx_t_7; - int __pyx_t_8; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("update", 1); - - /* "sqlalchemy/util/_collections_cy.py":161 - * # @cython.ccall # cdef function cannot have star argument - * def update(self, *iterables: Iterable[_T]) -> None: - * for iterable in iterables: # <<<<<<<<<<<<<< - * for element in iterable: - * # inline of add. mainly for python, since for cython we - */ - __pyx_t_1 = __pyx_v_iterables; __Pyx_INCREF(__pyx_t_1); - __pyx_t_2 = 0; - for (;;) { - { - Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_1); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 161, __pyx_L1_error) - #endif - if (__pyx_t_2 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely((0 < 0))) __PYX_ERR(0, 161, __pyx_L1_error) - #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 161, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - __Pyx_XDECREF_SET(__pyx_v_iterable, __pyx_t_3); - __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":162 - * def update(self, *iterables: Iterable[_T]) -> None: - * for iterable in iterables: - * for element in iterable: # <<<<<<<<<<<<<< - * # inline of add. 
mainly for python, since for cython we - * # could create an @cfunc @inline _add function that would - */ - if (likely(PyList_CheckExact(__pyx_v_iterable)) || PyTuple_CheckExact(__pyx_v_iterable)) { - __pyx_t_3 = __pyx_v_iterable; __Pyx_INCREF(__pyx_t_3); - __pyx_t_4 = 0; - __pyx_t_5 = NULL; - } else { - __pyx_t_4 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_iterable); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 162, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_5 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 162, __pyx_L1_error) - } - for (;;) { - if (likely(!__pyx_t_5)) { - if (likely(PyList_CheckExact(__pyx_t_3))) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 162, __pyx_L1_error) - #endif - if (__pyx_t_4 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_6 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_6); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 162, __pyx_L1_error) - #else - __pyx_t_6 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 162, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - #endif - } else { - { - Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_3); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 162, __pyx_L1_error) - #endif - if (__pyx_t_4 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_6); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 162, __pyx_L1_error) - #else - __pyx_t_6 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 162, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - #endif - } - } else { - __pyx_t_6 = __pyx_t_5(__pyx_t_3); - if (unlikely(!__pyx_t_6)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 162, __pyx_L1_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_6); - } - __Pyx_XDECREF_SET(__pyx_v_element, __pyx_t_6); - __pyx_t_6 = 0; - - /* "sqlalchemy/util/_collections_cy.py":166 - * # could create an @cfunc @inline _add function that would - * # perform the same - * if element not in self: # <<<<<<<<<<<<<< - * self._list.append(element) - * set.add(self, element) - */ - __pyx_t_7 = (__Pyx_PySequence_ContainsTF(__pyx_v_element, ((PyObject *)__pyx_v_self), Py_NE)); if (unlikely((__pyx_t_7 < 0))) __PYX_ERR(0, 166, __pyx_L1_error) - if (__pyx_t_7) { - - /* "sqlalchemy/util/_collections_cy.py":167 - * # perform the same - * if element not in self: - * self._list.append(element) # <<<<<<<<<<<<<< - * set.add(self, element) - * - */ - if (unlikely(__pyx_v_self->_list == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); - __PYX_ERR(0, 167, __pyx_L1_error) - } - __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_self->_list, __pyx_v_element); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 167, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":168 - * if element not in self: - * self._list.append(element) - * set.add(self, element) # <<<<<<<<<<<<<< - * - * def __ior__( - */ - if (unlikely(((PyObject *)__pyx_v_self) == Py_None)) { - PyErr_Format(PyExc_TypeError, "descriptor '%s' requires a '%s' object but received a 'NoneType'", "add", 
"set"); - __PYX_ERR(0, 168, __pyx_L1_error) - } - __pyx_t_8 = PySet_Add(((PyObject*)__pyx_v_self), __pyx_v_element); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 168, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":166 - * # could create an @cfunc @inline _add function that would - * # perform the same - * if element not in self: # <<<<<<<<<<<<<< - * self._list.append(element) - * set.add(self, element) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":162 - * def update(self, *iterables: Iterable[_T]) -> None: - * for iterable in iterables: - * for element in iterable: # <<<<<<<<<<<<<< - * # inline of add. mainly for python, since for cython we - * # could create an @cfunc @inline _add function that would - */ - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":161 - * # @cython.ccall # cdef function cannot have star argument - * def update(self, *iterables: Iterable[_T]) -> None: - * for iterable in iterables: # <<<<<<<<<<<<<< - * for element in iterable: - * # inline of add. mainly for python, since for cython we - */ - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":160 - * - * # @cython.ccall # cdef function cannot have star argument - * def update(self, *iterables: Iterable[_T]) -> None: # <<<<<<<<<<<<<< - * for iterable in iterables: - * for element in iterable: - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_iterable); - __Pyx_XDECREF(__pyx_v_element); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":170 - * set.add(self, element) - * - * def __ior__( # <<<<<<<<<<<<<< - * self: OrderedSet[Union[_T, _S]], iterable: AbstractSet[_S] - * ) -> OrderedSet[Union[_T, _S]]: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_29__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_29__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__ior__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_28__ior__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_28__ior__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_iterable) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__ior__", 1); - - /* "sqlalchemy/util/_collections_cy.py":173 - * self: OrderedSet[Union[_T, _S]], iterable: AbstractSet[_S] - * ) -> 
OrderedSet[Union[_T, _S]]: - * self.update(iterable) # <<<<<<<<<<<<<< - * return self - * - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 173, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_iterable}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 173, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":174 - * ) -> OrderedSet[Union[_T, _S]]: - * self.update(iterable) - * return self # <<<<<<<<<<<<<< - * - * # @cython.ccall # cdef function cannot have star argument - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":170 - * set.add(self, element) - * - * def __ior__( # <<<<<<<<<<<<<< - * self: OrderedSet[Union[_T, _S]], iterable: AbstractSet[_S] - * ) -> OrderedSet[Union[_T, _S]]: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__ior__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":177 - * - * # @cython.ccall # cdef function cannot have star argument - * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< - * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) - * result.update(*other) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union = {"union", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_other = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("union (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "union", 0))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_other = __pyx_args; - __pyx_r = 
__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_30union(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_other); - - /* function exit code */ - __Pyx_DECREF(__pyx_v_other); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_30union(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_result = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("union", 1); - - /* "sqlalchemy/util/_collections_cy.py":178 - * # @cython.ccall # cdef function cannot have star argument - * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: - * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) # <<<<<<<<<<<<<< - * result.update(*other) - * return result - */ - __pyx_t_1 = PySequence_List(__pyx_v_self->_list); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 178, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(__pyx_v_self, ((PyObject*)__pyx_t_1))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 178, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":179 - * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: - * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) - * result.update(*other) # <<<<<<<<<<<<<< - * return result - * - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_result), __pyx_n_s_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 179, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_v_other, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 179, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":180 - * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) - * result.update(*other) - * return result # <<<<<<<<<<<<<< - * - * def __or__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_result); - __pyx_r = ((PyObject *)__pyx_v_result); - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":177 - * - * # @cython.ccall # cdef function cannot have star argument - * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< - * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) - * result.update(*other) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.union", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":182 - * return result - * - * def __or__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, 
_S]]: # <<<<<<<<<<<<<< - * return self.union(other) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_33__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_33__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__or__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_32__or__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_32__or__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__or__", 1); - - /* "sqlalchemy/util/_collections_cy.py":183 - * - * def __or__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: - * return self.union(other) # <<<<<<<<<<<<<< - * - * # @cython.ccall # cdef function cannot have star argument - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_union); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 183, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 183, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":182 - * return result - * - * def __or__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< - * return self.union(other) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__or__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":186 - * - * # @cython.ccall # cdef function cannot have star argument - * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * other_set: Set[Any] = set.intersection(self, *other) - * return self._from_list([a for a in self._list if a in other_set]) - */ - -/* Python wrapper */ -static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection = {"intersection", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_other = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("intersection (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "intersection", 0))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_other = __pyx_args; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_34intersection(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_other); - - /* function exit code */ - __Pyx_DECREF(__pyx_v_other); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_34intersection(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_v_other_set = 0; - PyObject *__pyx_8genexpr1__pyx_v_a = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - Py_ssize_t __pyx_t_4; - int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("intersection", 1); - - /* "sqlalchemy/util/_collections_cy.py":187 - * # @cython.ccall # cdef function cannot have star argument - * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: - * other_set: Set[Any] = set.intersection(self, *other) # <<<<<<<<<<<<<< - * return self._from_list([a for a in self._list if a in other_set]) - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_intersection); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 187, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 187, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __Pyx_GIVEREF((PyObject *)__pyx_v_self); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_self))) __PYX_ERR(0, 187, __pyx_L1_error); - __pyx_t_3 = PyNumber_Add(__pyx_t_2, __pyx_v_other); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 187, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 187, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(PySet_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None) || __Pyx_RaiseUnexpectedTypeError("set", __pyx_t_2))) 
__PYX_ERR(0, 187, __pyx_L1_error) - __pyx_v_other_set = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":188 - * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: - * other_set: Set[Any] = set.intersection(self, *other) - * return self._from_list([a for a in self._list if a in other_set]) # <<<<<<<<<<<<<< - * - * def __and__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: - */ - __Pyx_XDECREF(__pyx_r); - { /* enter inner scope */ - __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 188, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_2); - if (unlikely(__pyx_v_self->_list == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 188, __pyx_L5_error) - } - __pyx_t_3 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_3); - __pyx_t_4 = 0; - for (;;) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 188, __pyx_L5_error) - #endif - if (__pyx_t_4 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 188, __pyx_L5_error) - #else - __pyx_t_1 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 188, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_1); - #endif - __Pyx_XDECREF_SET(__pyx_8genexpr1__pyx_v_a, __pyx_t_1); - __pyx_t_1 = 0; - if (unlikely(__pyx_v_other_set == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 188, __pyx_L5_error) - } - __pyx_t_5 = (__Pyx_PySet_ContainsTF(__pyx_8genexpr1__pyx_v_a, __pyx_v_other_set, Py_EQ)); if (unlikely((__pyx_t_5 < 0))) __PYX_ERR(0, 188, __pyx_L5_error) - if (__pyx_t_5) { - if (unlikely(__Pyx_ListComp_Append(__pyx_t_2, (PyObject*)__pyx_8genexpr1__pyx_v_a))) __PYX_ERR(0, 188, __pyx_L5_error) - } - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_XDECREF(__pyx_8genexpr1__pyx_v_a); __pyx_8genexpr1__pyx_v_a = 0; - goto __pyx_L10_exit_scope; - __pyx_L5_error:; - __Pyx_XDECREF(__pyx_8genexpr1__pyx_v_a); __pyx_8genexpr1__pyx_v_a = 0; - goto __pyx_L1_error; - __pyx_L10_exit_scope:; - } /* exit inner scope */ - __pyx_t_3 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(__pyx_v_self, ((PyObject*)__pyx_t_2))); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 188, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":186 - * - * # @cython.ccall # cdef function cannot have star argument - * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * other_set: Set[Any] = set.intersection(self, *other) - * return self._from_list([a for a in self._list if a in other_set]) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.intersection", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_other_set); - __Pyx_XDECREF(__pyx_8genexpr1__pyx_v_a); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":190 - * return self._from_list([a for a in self._list if a in other_set]) - * - * 
def __and__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * return self.intersection(other) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_37__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_37__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__and__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_36__and__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_36__and__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__and__", 1); - - /* "sqlalchemy/util/_collections_cy.py":191 - * - * def __and__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: - * return self.intersection(other) # <<<<<<<<<<<<<< - * - * @cython.ccall - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_intersection); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 191, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 191, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":190 - * return self._from_list([a for a in self._list if a in other_set]) - * - * def __and__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * return self.intersection(other) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__and__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":193 - * return self.intersection(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * @cython.annotation_typing(False) # avoid cython crash from generic return - * def symmetric_difference( - */ - -static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_skip_dispatch) { - PyObject *__pyx_v_collection = 0; - PyObject *__pyx_v_other_set = 0; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_result = NULL; - PyObject *__pyx_8genexpr2__pyx_v_a = NULL; - PyObject *__pyx_8genexpr3__pyx_v_a = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_t_6; - Py_ssize_t __pyx_t_7; - PyObject *(*__pyx_t_8)(PyObject *); - PyObject *__pyx_t_9 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("symmetric_difference", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_symmetric_difference); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 193, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_other}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 193, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } 
- #endif - } - - /* "sqlalchemy/util/_collections_cy.py":200 - * collection: Iterable[Any] - * other_set: Set[_S] - * if isinstance(other, set): # <<<<<<<<<<<<<< - * other_set = cython.cast(set, other) - * collection = other_set - */ - __pyx_t_6 = PySet_Check(__pyx_v_other); - if (__pyx_t_6) { - - /* "sqlalchemy/util/_collections_cy.py":201 - * other_set: Set[_S] - * if isinstance(other, set): - * other_set = cython.cast(set, other) # <<<<<<<<<<<<<< - * collection = other_set - * elif hasattr(other, "__len__"): - */ - __pyx_t_1 = __pyx_v_other; - __Pyx_INCREF(__pyx_t_1); - __pyx_v_other_set = __pyx_t_1; - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":202 - * if isinstance(other, set): - * other_set = cython.cast(set, other) - * collection = other_set # <<<<<<<<<<<<<< - * elif hasattr(other, "__len__"): - * collection = other - */ - __Pyx_INCREF(__pyx_v_other_set); - __pyx_v_collection = __pyx_v_other_set; - - /* "sqlalchemy/util/_collections_cy.py":200 - * collection: Iterable[Any] - * other_set: Set[_S] - * if isinstance(other, set): # <<<<<<<<<<<<<< - * other_set = cython.cast(set, other) - * collection = other_set - */ - goto __pyx_L3; - } - - /* "sqlalchemy/util/_collections_cy.py":203 - * other_set = cython.cast(set, other) - * collection = other_set - * elif hasattr(other, "__len__"): # <<<<<<<<<<<<<< - * collection = other - * other_set = set(other) - */ - __pyx_t_6 = __Pyx_HasAttr(__pyx_v_other, __pyx_n_u_len); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(0, 203, __pyx_L1_error) - if (__pyx_t_6) { - - /* "sqlalchemy/util/_collections_cy.py":204 - * collection = other_set - * elif hasattr(other, "__len__"): - * collection = other # <<<<<<<<<<<<<< - * other_set = set(other) - * else: - */ - __Pyx_INCREF(__pyx_v_other); - __pyx_v_collection = __pyx_v_other; - - /* "sqlalchemy/util/_collections_cy.py":205 - * elif hasattr(other, "__len__"): - * collection = other - * other_set = set(other) # <<<<<<<<<<<<<< - * else: - * collection = list(other) - */ - __pyx_t_1 = PySet_New(__pyx_v_other); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 205, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_other_set = __pyx_t_1; - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":203 - * other_set = cython.cast(set, other) - * collection = other_set - * elif hasattr(other, "__len__"): # <<<<<<<<<<<<<< - * collection = other - * other_set = set(other) - */ - goto __pyx_L3; - } - - /* "sqlalchemy/util/_collections_cy.py":207 - * other_set = set(other) - * else: - * collection = list(other) # <<<<<<<<<<<<<< - * other_set = set(collection) - * result: OrderedSet[Union[_T, _S]] = self._from_list( - */ - /*else*/ { - __pyx_t_1 = PySequence_List(__pyx_v_other); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 207, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_collection = __pyx_t_1; - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":208 - * else: - * collection = list(other) - * other_set = set(collection) # <<<<<<<<<<<<<< - * result: OrderedSet[Union[_T, _S]] = self._from_list( - * [a for a in self._list if a not in other_set] - */ - __pyx_t_1 = PySet_New(__pyx_v_collection); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 208, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_other_set = __pyx_t_1; - __pyx_t_1 = 0; - } - __pyx_L3:; - - /* "sqlalchemy/util/_collections_cy.py":209 - * collection = list(other) - * other_set = set(collection) - * result: OrderedSet[Union[_T, _S]] = self._from_list( # <<<<<<<<<<<<<< - * [a for a in self._list if a not in other_set] - * ) - */ - { /* enter 
inner scope */ - - /* "sqlalchemy/util/_collections_cy.py":210 - * other_set = set(collection) - * result: OrderedSet[Union[_T, _S]] = self._from_list( - * [a for a in self._list if a not in other_set] # <<<<<<<<<<<<<< - * ) - * result.update([a for a in collection if a not in self]) - */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 210, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_1); - if (unlikely(__pyx_v_self->_list == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 210, __pyx_L6_error) - } - __pyx_t_2 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_2); - __pyx_t_7 = 0; - for (;;) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 210, __pyx_L6_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 210, __pyx_L6_error) - #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 210, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - __Pyx_XDECREF_SET(__pyx_8genexpr2__pyx_v_a, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_8genexpr2__pyx_v_a, __pyx_v_other_set, Py_NE)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 210, __pyx_L6_error) - if (__pyx_t_6) { - if (unlikely(__Pyx_ListComp_Append(__pyx_t_1, (PyObject*)__pyx_8genexpr2__pyx_v_a))) __PYX_ERR(0, 210, __pyx_L6_error) - } - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_8genexpr2__pyx_v_a); __pyx_8genexpr2__pyx_v_a = 0; - goto __pyx_L11_exit_scope; - __pyx_L6_error:; - __Pyx_XDECREF(__pyx_8genexpr2__pyx_v_a); __pyx_8genexpr2__pyx_v_a = 0; - goto __pyx_L1_error; - __pyx_L11_exit_scope:; - } /* exit inner scope */ - - /* "sqlalchemy/util/_collections_cy.py":209 - * collection = list(other) - * other_set = set(collection) - * result: OrderedSet[Union[_T, _S]] = self._from_list( # <<<<<<<<<<<<<< - * [a for a in self._list if a not in other_set] - * ) - */ - __pyx_t_2 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(__pyx_v_self, ((PyObject*)__pyx_t_1))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 209, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":212 - * [a for a in self._list if a not in other_set] - * ) - * result.update([a for a in collection if a not in self]) # <<<<<<<<<<<<<< - * return result - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_result), __pyx_n_s_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 212, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - { /* enter inner scope */ - __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 212, __pyx_L14_error) - __Pyx_GOTREF(__pyx_t_3); - if (likely(PyList_CheckExact(__pyx_v_collection)) || PyTuple_CheckExact(__pyx_v_collection)) { - __pyx_t_4 = __pyx_v_collection; __Pyx_INCREF(__pyx_t_4); - __pyx_t_7 = 0; - __pyx_t_8 = NULL; - } else { - __pyx_t_7 = -1; __pyx_t_4 = PyObject_GetIter(__pyx_v_collection); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 212, __pyx_L14_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_4); if 
(unlikely(!__pyx_t_8)) __PYX_ERR(0, 212, __pyx_L14_error) - } - for (;;) { - if (likely(!__pyx_t_8)) { - if (likely(PyList_CheckExact(__pyx_t_4))) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_4); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 212, __pyx_L14_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_9 = PyList_GET_ITEM(__pyx_t_4, __pyx_t_7); __Pyx_INCREF(__pyx_t_9); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 212, __pyx_L14_error) - #else - __pyx_t_9 = __Pyx_PySequence_ITEM(__pyx_t_4, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 212, __pyx_L14_error) - __Pyx_GOTREF(__pyx_t_9); - #endif - } else { - { - Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_4); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 212, __pyx_L14_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_9 = PyTuple_GET_ITEM(__pyx_t_4, __pyx_t_7); __Pyx_INCREF(__pyx_t_9); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 212, __pyx_L14_error) - #else - __pyx_t_9 = __Pyx_PySequence_ITEM(__pyx_t_4, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 212, __pyx_L14_error) - __Pyx_GOTREF(__pyx_t_9); - #endif - } - } else { - __pyx_t_9 = __pyx_t_8(__pyx_t_4); - if (unlikely(!__pyx_t_9)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 212, __pyx_L14_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_9); - } - __Pyx_XDECREF_SET(__pyx_8genexpr3__pyx_v_a, __pyx_t_9); - __pyx_t_9 = 0; - __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_8genexpr3__pyx_v_a, ((PyObject *)__pyx_v_self), Py_NE)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 212, __pyx_L14_error) - if (__pyx_t_6) { - if (unlikely(__Pyx_ListComp_Append(__pyx_t_3, (PyObject*)__pyx_8genexpr3__pyx_v_a))) __PYX_ERR(0, 212, __pyx_L14_error) - } - } - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_XDECREF(__pyx_8genexpr3__pyx_v_a); __pyx_8genexpr3__pyx_v_a = 0; - goto __pyx_L19_exit_scope; - __pyx_L14_error:; - __Pyx_XDECREF(__pyx_8genexpr3__pyx_v_a); __pyx_8genexpr3__pyx_v_a = 0; - goto __pyx_L1_error; - __pyx_L19_exit_scope:; - } /* exit inner scope */ - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_1))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_1, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_t_3}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_1, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 212, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":213 - * ) - * result.update([a for a in collection if a not in self]) - * return result # <<<<<<<<<<<<<< - * - * def __xor__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_result); - __pyx_r = ((PyObject *)__pyx_v_result); - goto __pyx_L0; - - /* 
"sqlalchemy/util/_collections_cy.py":193 - * return self.intersection(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * @cython.annotation_typing(False) # avoid cython crash from generic return - * def symmetric_difference( - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.symmetric_difference", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_collection); - __Pyx_XDECREF(__pyx_v_other_set); - __Pyx_XDECREF((PyObject *)__pyx_v_result); - __Pyx_XDECREF(__pyx_8genexpr2__pyx_v_a); - __Pyx_XDECREF(__pyx_8genexpr3__pyx_v_a); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference = {"symmetric_difference", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("symmetric_difference (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_38symmetric_difference(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_38symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("symmetric_difference", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference(__pyx_v_self, __pyx_v_other, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 193, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.symmetric_difference", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":215 - * return result - * - * def __xor__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< - * return self.symmetric_difference(other) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_41__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_41__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - 
CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__xor__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_40__xor__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_40__xor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__xor__", 1); - - /* "sqlalchemy/util/_collections_cy.py":216 - * - * def __xor__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: - * return self.symmetric_difference(other) # <<<<<<<<<<<<<< - * - * # @cython.ccall # cdef function cannot have star argument - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self->__pyx_vtab)->symmetric_difference(__pyx_v_self, __pyx_v_other, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 216, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":215 - * return result - * - * def __xor__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< - * return self.symmetric_difference(other) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__xor__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":219 - * - * # @cython.ccall # cdef function cannot have star argument - * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * other_set: Set[Any] = set.difference(self, *other) - * return self._from_list([a for a in self._list if a in other_set]) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference = {"difference", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_other = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("difference (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && 
unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "difference", 0))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_other = __pyx_args; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_42difference(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_other); - - /* function exit code */ - __Pyx_DECREF(__pyx_v_other); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_42difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_v_other_set = 0; - PyObject *__pyx_8genexpr4__pyx_v_a = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - Py_ssize_t __pyx_t_4; - int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("difference", 1); - - /* "sqlalchemy/util/_collections_cy.py":220 - * # @cython.ccall # cdef function cannot have star argument - * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: - * other_set: Set[Any] = set.difference(self, *other) # <<<<<<<<<<<<<< - * return self._from_list([a for a in self._list if a in other_set]) - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_difference); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 220, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 220, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __Pyx_GIVEREF((PyObject *)__pyx_v_self); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_self))) __PYX_ERR(0, 220, __pyx_L1_error); - __pyx_t_3 = PyNumber_Add(__pyx_t_2, __pyx_v_other); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 220, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 220, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(PySet_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None) || __Pyx_RaiseUnexpectedTypeError("set", __pyx_t_2))) __PYX_ERR(0, 220, __pyx_L1_error) - __pyx_v_other_set = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":221 - * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: - * other_set: Set[Any] = set.difference(self, *other) - * return self._from_list([a for a in self._list if a in other_set]) # <<<<<<<<<<<<<< - * - * def __sub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: - */ - __Pyx_XDECREF(__pyx_r); - { /* enter inner scope */ - __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 221, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_2); - if (unlikely(__pyx_v_self->_list == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 221, __pyx_L5_error) - } - __pyx_t_3 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_3); - __pyx_t_4 = 0; - for (;;) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 221, __pyx_L5_error) - #endif - if (__pyx_t_4 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = 
PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 221, __pyx_L5_error) - #else - __pyx_t_1 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 221, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_1); - #endif - __Pyx_XDECREF_SET(__pyx_8genexpr4__pyx_v_a, __pyx_t_1); - __pyx_t_1 = 0; - if (unlikely(__pyx_v_other_set == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 221, __pyx_L5_error) - } - __pyx_t_5 = (__Pyx_PySet_ContainsTF(__pyx_8genexpr4__pyx_v_a, __pyx_v_other_set, Py_EQ)); if (unlikely((__pyx_t_5 < 0))) __PYX_ERR(0, 221, __pyx_L5_error) - if (__pyx_t_5) { - if (unlikely(__Pyx_ListComp_Append(__pyx_t_2, (PyObject*)__pyx_8genexpr4__pyx_v_a))) __PYX_ERR(0, 221, __pyx_L5_error) - } - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_XDECREF(__pyx_8genexpr4__pyx_v_a); __pyx_8genexpr4__pyx_v_a = 0; - goto __pyx_L10_exit_scope; - __pyx_L5_error:; - __Pyx_XDECREF(__pyx_8genexpr4__pyx_v_a); __pyx_8genexpr4__pyx_v_a = 0; - goto __pyx_L1_error; - __pyx_L10_exit_scope:; - } /* exit inner scope */ - __pyx_t_3 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list(__pyx_v_self, ((PyObject*)__pyx_t_2))); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 221, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":219 - * - * # @cython.ccall # cdef function cannot have star argument - * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * other_set: Set[Any] = set.difference(self, *other) - * return self._from_list([a for a in self._list if a in other_set]) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.difference", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_other_set); - __Pyx_XDECREF(__pyx_8genexpr4__pyx_v_a); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":223 - * return self._from_list([a for a in self._list if a in other_set]) - * - * def __sub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * return self.difference(other) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_45__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_45__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__sub__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_44__sub__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_44__sub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - 
__Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__sub__", 1); - - /* "sqlalchemy/util/_collections_cy.py":224 - * - * def __sub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: - * return self.difference(other) # <<<<<<<<<<<<<< - * - * # @cython.ccall # cdef function cannot have star argument - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_difference); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 224, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 224, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":223 - * return self._from_list([a for a in self._list if a in other_set]) - * - * def __sub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * return self.difference(other) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__sub__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":227 - * - * # @cython.ccall # cdef function cannot have star argument - * def intersection_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< - * set.intersection_update(self, *other) - * self._list = [a for a in self._list if a in self] - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update = {"intersection_update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_other = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("intersection_update (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if 
(unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "intersection_update", 0))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_other = __pyx_args; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_46intersection_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_other); - - /* function exit code */ - __Pyx_DECREF(__pyx_v_other); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_46intersection_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_8genexpr5__pyx_v_a = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - Py_ssize_t __pyx_t_4; - int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("intersection_update", 1); - - /* "sqlalchemy/util/_collections_cy.py":228 - * # @cython.ccall # cdef function cannot have star argument - * def intersection_update(self, *other: Iterable[Hashable]) -> None: - * set.intersection_update(self, *other) # <<<<<<<<<<<<<< - * self._list = [a for a in self._list if a in self] - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_intersection_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 228, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 228, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __Pyx_GIVEREF((PyObject *)__pyx_v_self); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_self))) __PYX_ERR(0, 228, __pyx_L1_error); - __pyx_t_3 = PyNumber_Add(__pyx_t_2, __pyx_v_other); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 228, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 228, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":229 - * def intersection_update(self, *other: Iterable[Hashable]) -> None: - * set.intersection_update(self, *other) - * self._list = [a for a in self._list if a in self] # <<<<<<<<<<<<<< - * - * def __iand__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: - */ - { /* enter inner scope */ - __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 229, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_2); - if (unlikely(__pyx_v_self->_list == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 229, __pyx_L5_error) - } - __pyx_t_3 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_3); - __pyx_t_4 = 0; - for (;;) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 229, __pyx_L5_error) - #endif - if (__pyx_t_4 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 229, __pyx_L5_error) - #else - __pyx_t_1 = __Pyx_PySequence_ITEM(__pyx_t_3, 
__pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 229, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_1); - #endif - __Pyx_XDECREF_SET(__pyx_8genexpr5__pyx_v_a, __pyx_t_1); - __pyx_t_1 = 0; - __pyx_t_5 = (__Pyx_PySequence_ContainsTF(__pyx_8genexpr5__pyx_v_a, ((PyObject *)__pyx_v_self), Py_EQ)); if (unlikely((__pyx_t_5 < 0))) __PYX_ERR(0, 229, __pyx_L5_error) - if (__pyx_t_5) { - if (unlikely(__Pyx_ListComp_Append(__pyx_t_2, (PyObject*)__pyx_8genexpr5__pyx_v_a))) __PYX_ERR(0, 229, __pyx_L5_error) - } - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_XDECREF(__pyx_8genexpr5__pyx_v_a); __pyx_8genexpr5__pyx_v_a = 0; - goto __pyx_L10_exit_scope; - __pyx_L5_error:; - __Pyx_XDECREF(__pyx_8genexpr5__pyx_v_a); __pyx_8genexpr5__pyx_v_a = 0; - goto __pyx_L1_error; - __pyx_L10_exit_scope:; - } /* exit inner scope */ - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_v_self->_list); - __Pyx_DECREF(__pyx_v_self->_list); - __pyx_v_self->_list = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":227 - * - * # @cython.ccall # cdef function cannot have star argument - * def intersection_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< - * set.intersection_update(self, *other) - * self._list = [a for a in self._list if a in self] - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.intersection_update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_8genexpr5__pyx_v_a); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":231 - * self._list = [a for a in self._list if a in self] - * - * def __iand__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * self.intersection_update(other) - * return self - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_49__iand__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_49__iand__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__iand__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_48__iand__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_48__iand__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__iand__", 1); - - /* "sqlalchemy/util/_collections_cy.py":232 - * - * def __iand__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: - * self.intersection_update(other) # <<<<<<<<<<<<<< - * return self - * - */ - 
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_intersection_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 232, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 232, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":233 - * def __iand__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: - * self.intersection_update(other) - * return self # <<<<<<<<<<<<<< - * - * @cython.ccall - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":231 - * self._list = [a for a in self._list if a in self] - * - * def __iand__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * self.intersection_update(other) - * return self - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__iand__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":235 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * @cython.annotation_typing(False) # avoid cython crash from generic return - * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_skip_dispatch) { - PyObject *__pyx_v_collection = NULL; - PyObject *__pyx_8genexpr6__pyx_v_a = NULL; - PyObject *__pyx_8genexpr7__pyx_v_a = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_t_6; - Py_ssize_t __pyx_t_7; - PyObject *(*__pyx_t_8)(PyObject *); - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("symmetric_difference_update", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = 
__PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_symmetric_difference_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 235, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_other}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 235, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":238 - * @cython.annotation_typing(False) # avoid cython crash from generic return - * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: - * collection = other if hasattr(other, "__len__") else list(other) # <<<<<<<<<<<<<< - * set.symmetric_difference_update(self, collection) - * self._list = [a for a in self._list if a in self] - */ - __pyx_t_6 = __Pyx_HasAttr(__pyx_v_other, __pyx_n_u_len); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(0, 238, __pyx_L1_error) - if (__pyx_t_6) { - __Pyx_INCREF(__pyx_v_other); - __pyx_t_1 = __pyx_v_other; - } else { - __pyx_t_2 = PySequence_List(__pyx_v_other); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 238, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = __pyx_t_2; - __pyx_t_2 = 0; - } - __pyx_v_collection = __pyx_t_1; - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":239 - * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: - * collection = other if hasattr(other, "__len__") else list(other) - * set.symmetric_difference_update(self, collection) # <<<<<<<<<<<<<< - * self._list = [a for a in self._list if a in self] - * self._list += [a for a in collection if a in self] - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_symmetric_difference_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 239, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_5 = 0; - #if 
CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[3] = {__pyx_t_3, ((PyObject *)__pyx_v_self), __pyx_v_collection}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 2+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 239, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":240 - * collection = other if hasattr(other, "__len__") else list(other) - * set.symmetric_difference_update(self, collection) - * self._list = [a for a in self._list if a in self] # <<<<<<<<<<<<<< - * self._list += [a for a in collection if a in self] - * - */ - { /* enter inner scope */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 240, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_1); - if (unlikely(__pyx_v_self->_list == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 240, __pyx_L5_error) - } - __pyx_t_2 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_2); - __pyx_t_7 = 0; - for (;;) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 240, __pyx_L5_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 240, __pyx_L5_error) - #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 240, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - __Pyx_XDECREF_SET(__pyx_8genexpr6__pyx_v_a, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_8genexpr6__pyx_v_a, ((PyObject *)__pyx_v_self), Py_EQ)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 240, __pyx_L5_error) - if (__pyx_t_6) { - if (unlikely(__Pyx_ListComp_Append(__pyx_t_1, (PyObject*)__pyx_8genexpr6__pyx_v_a))) __PYX_ERR(0, 240, __pyx_L5_error) - } - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_8genexpr6__pyx_v_a); __pyx_8genexpr6__pyx_v_a = 0; - goto __pyx_L10_exit_scope; - __pyx_L5_error:; - __Pyx_XDECREF(__pyx_8genexpr6__pyx_v_a); __pyx_8genexpr6__pyx_v_a = 0; - goto __pyx_L1_error; - __pyx_L10_exit_scope:; - } /* exit inner scope */ - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->_list); - __Pyx_DECREF(__pyx_v_self->_list); - __pyx_v_self->_list = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":241 - * set.symmetric_difference_update(self, collection) - * self._list = [a for a in self._list if a in self] - * self._list += [a for a in collection if a in self] # <<<<<<<<<<<<<< - * - * def __ixor__( - */ - { /* enter inner scope */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 241, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_1); - if (likely(PyList_CheckExact(__pyx_v_collection)) || PyTuple_CheckExact(__pyx_v_collection)) { - __pyx_t_2 = __pyx_v_collection; __Pyx_INCREF(__pyx_t_2); - __pyx_t_7 = 0; - __pyx_t_8 = NULL; - } else { - __pyx_t_7 = -1; __pyx_t_2 = 
PyObject_GetIter(__pyx_v_collection); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 241, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 241, __pyx_L13_error) - } - for (;;) { - if (likely(!__pyx_t_8)) { - if (likely(PyList_CheckExact(__pyx_t_2))) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 241, __pyx_L13_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 241, __pyx_L13_error) - #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 241, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - } else { - { - Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 241, __pyx_L13_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 241, __pyx_L13_error) - #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 241, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - } - } else { - __pyx_t_3 = __pyx_t_8(__pyx_t_2); - if (unlikely(!__pyx_t_3)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 241, __pyx_L13_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_3); - } - __Pyx_XDECREF_SET(__pyx_8genexpr7__pyx_v_a, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_8genexpr7__pyx_v_a, ((PyObject *)__pyx_v_self), Py_EQ)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 241, __pyx_L13_error) - if (__pyx_t_6) { - if (unlikely(__Pyx_ListComp_Append(__pyx_t_1, (PyObject*)__pyx_8genexpr7__pyx_v_a))) __PYX_ERR(0, 241, __pyx_L13_error) - } - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_8genexpr7__pyx_v_a); __pyx_8genexpr7__pyx_v_a = 0; - goto __pyx_L18_exit_scope; - __pyx_L13_error:; - __Pyx_XDECREF(__pyx_8genexpr7__pyx_v_a); __pyx_8genexpr7__pyx_v_a = 0; - goto __pyx_L1_error; - __pyx_L18_exit_scope:; - } /* exit inner scope */ - __pyx_t_2 = PyNumber_InPlaceAdd(__pyx_v_self->_list, __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 241, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_v_self->_list); - __Pyx_DECREF(__pyx_v_self->_list); - __pyx_v_self->_list = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":235 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * @cython.annotation_typing(False) # avoid cython crash from generic return - * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.symmetric_difference_update", __pyx_clineno, 
__pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_collection); - __Pyx_XDECREF(__pyx_8genexpr6__pyx_v_a); - __Pyx_XDECREF(__pyx_8genexpr7__pyx_v_a); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update = {"symmetric_difference_update", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("symmetric_difference_update (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_50symmetric_difference_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_50symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("symmetric_difference_update", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference_update(__pyx_v_self, __pyx_v_other, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 235, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.symmetric_difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":243 - * self._list += [a for a in collection if a in self] - * - * def __ixor__( # <<<<<<<<<<<<<< - * self: OrderedSet[Union[_T, _S]], other: AbstractSet[_S] - * ) -> OrderedSet[Union[_T, _S]]: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_53__ixor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_53__ixor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__ixor__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_52__ixor__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; 
-} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_52__ixor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__ixor__", 1); - - /* "sqlalchemy/util/_collections_cy.py":246 - * self: OrderedSet[Union[_T, _S]], other: AbstractSet[_S] - * ) -> OrderedSet[Union[_T, _S]]: - * self.symmetric_difference_update(other) # <<<<<<<<<<<<<< - * return self - * - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self->__pyx_vtab)->symmetric_difference_update(__pyx_v_self, __pyx_v_other, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 246, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":247 - * ) -> OrderedSet[Union[_T, _S]]: - * self.symmetric_difference_update(other) - * return self # <<<<<<<<<<<<<< - * - * # @cython.ccall # cdef function cannot have star argument - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":243 - * self._list += [a for a in collection if a in self] - * - * def __ixor__( # <<<<<<<<<<<<<< - * self: OrderedSet[Union[_T, _S]], other: AbstractSet[_S] - * ) -> OrderedSet[Union[_T, _S]]: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__ixor__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":250 - * - * # @cython.ccall # cdef function cannot have star argument - * def difference_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< - * set.difference_update(self, *other) - * self._list = [a for a in self._list if a in self] - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update = {"difference_update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_other = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("difference_update (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "difference_update", 0))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_other = __pyx_args; - __pyx_r = 
__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_54difference_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v_other); - - /* function exit code */ - __Pyx_DECREF(__pyx_v_other); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_54difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_8genexpr8__pyx_v_a = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - Py_ssize_t __pyx_t_4; - int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("difference_update", 1); - - /* "sqlalchemy/util/_collections_cy.py":251 - * # @cython.ccall # cdef function cannot have star argument - * def difference_update(self, *other: Iterable[Hashable]) -> None: - * set.difference_update(self, *other) # <<<<<<<<<<<<<< - * self._list = [a for a in self._list if a in self] - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PySet_Type)), __pyx_n_s_difference_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 251, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 251, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __Pyx_GIVEREF((PyObject *)__pyx_v_self); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_self))) __PYX_ERR(0, 251, __pyx_L1_error); - __pyx_t_3 = PyNumber_Add(__pyx_t_2, __pyx_v_other); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 251, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 251, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":252 - * def difference_update(self, *other: Iterable[Hashable]) -> None: - * set.difference_update(self, *other) - * self._list = [a for a in self._list if a in self] # <<<<<<<<<<<<<< - * - * def __isub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: - */ - { /* enter inner scope */ - __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 252, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_2); - if (unlikely(__pyx_v_self->_list == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 252, __pyx_L5_error) - } - __pyx_t_3 = __pyx_v_self->_list; __Pyx_INCREF(__pyx_t_3); - __pyx_t_4 = 0; - for (;;) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_3); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 252, __pyx_L5_error) - #endif - if (__pyx_t_4 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 252, __pyx_L5_error) - #else - __pyx_t_1 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 252, __pyx_L5_error) - __Pyx_GOTREF(__pyx_t_1); - #endif - __Pyx_XDECREF_SET(__pyx_8genexpr8__pyx_v_a, __pyx_t_1); - __pyx_t_1 = 0; - __pyx_t_5 = 
(__Pyx_PySequence_ContainsTF(__pyx_8genexpr8__pyx_v_a, ((PyObject *)__pyx_v_self), Py_EQ)); if (unlikely((__pyx_t_5 < 0))) __PYX_ERR(0, 252, __pyx_L5_error) - if (__pyx_t_5) { - if (unlikely(__Pyx_ListComp_Append(__pyx_t_2, (PyObject*)__pyx_8genexpr8__pyx_v_a))) __PYX_ERR(0, 252, __pyx_L5_error) - } - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_XDECREF(__pyx_8genexpr8__pyx_v_a); __pyx_8genexpr8__pyx_v_a = 0; - goto __pyx_L10_exit_scope; - __pyx_L5_error:; - __Pyx_XDECREF(__pyx_8genexpr8__pyx_v_a); __pyx_8genexpr8__pyx_v_a = 0; - goto __pyx_L1_error; - __pyx_L10_exit_scope:; - } /* exit inner scope */ - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_v_self->_list); - __Pyx_DECREF(__pyx_v_self->_list); - __pyx_v_self->_list = ((PyObject*)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":250 - * - * # @cython.ccall # cdef function cannot have star argument - * def difference_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< - * set.difference_update(self, *other) - * self._list = [a for a in self._list if a in self] - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_8genexpr8__pyx_v_a); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":254 - * self._list = [a for a in self._list if a in self] - * - * def __isub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * self.difference_update(other) - * return self - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_57__isub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_57__isub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__isub__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_56__isub__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_56__isub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__isub__", 1); - - /* "sqlalchemy/util/_collections_cy.py":255 - * - * def __isub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: - * self.difference_update(other) # <<<<<<<<<<<<<< - * return self - * - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_difference_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 255, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if 
CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_other}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 255, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":256 - * def __isub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: - * self.difference_update(other) - * return self # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":254 - * self._list = [a for a in self._list if a in self] - * - * def __isub__(self, other: AbstractSet[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * self.difference_update(other) - * return self - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__isub__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__ = {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; - __pyx_r = 
__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_58__reduce_cython__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_58__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self) { - PyObject *__pyx_v_state = 0; - PyObject *__pyx_v__dict = 0; - int __pyx_v_use_setstate; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 1); - - /* "(tree fragment)":5 - * cdef object _dict - * cdef bint use_setstate - * state = (self._list,) # <<<<<<<<<<<<<< - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - */ - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v_self->_list); - __Pyx_GIVEREF(__pyx_v_self->_list); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->_list)) __PYX_ERR(1, 5, __pyx_L1_error); - __pyx_v_state = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "(tree fragment)":6 - * cdef bint use_setstate - * state = (self._list,) - * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< - * if _dict is not None: - * state += (_dict,) - */ - __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v__dict = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":7 - * state = (self._list,) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - __pyx_t_2 = (__pyx_v__dict != Py_None); - if (__pyx_t_2) { - - /* "(tree fragment)":8 - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - * state += (_dict,) # <<<<<<<<<<<<<< - * use_setstate = True - * else: - */ - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v__dict); - __Pyx_GIVEREF(__pyx_v__dict); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict)) __PYX_ERR(1, 8, __pyx_L1_error); - __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3)); - __pyx_t_3 = 0; - - /* "(tree fragment)":9 - * if _dict is not None: - * state += (_dict,) - * use_setstate = True # <<<<<<<<<<<<<< - * else: - * use_setstate = self._list is not None - */ - __pyx_v_use_setstate = 1; - - /* "(tree fragment)":7 - * state = (self._list,) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - goto __pyx_L3; - } - - /* "(tree fragment)":11 - * use_setstate = True - * else: - * use_setstate = self._list is not None # <<<<<<<<<<<<<< - * if use_setstate: - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, None), state - */ - /*else*/ { - __pyx_t_2 = (__pyx_v_self->_list != ((PyObject*)Py_None)); - __pyx_v_use_setstate = __pyx_t_2; - } - __pyx_L3:; - - /* "(tree fragment)":12 - * else: - * use_setstate = 
self._list is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, None), state - * else: - */ - if (__pyx_v_use_setstate) { - - /* "(tree fragment)":13 - * use_setstate = self._list is not None - * if use_setstate: - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, None), state # <<<<<<<<<<<<<< - * else: - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_OrderedSet); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(__pyx_int_242532825); - __Pyx_GIVEREF(__pyx_int_242532825); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_242532825)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None)) __PYX_ERR(1, 13, __pyx_L1_error); - __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_3); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_state)) __PYX_ERR(1, 13, __pyx_L1_error); - __pyx_t_3 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "(tree fragment)":12 - * else: - * use_setstate = self._list is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, None), state - * else: - */ - } - - /* "(tree fragment)":15 - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, None), state - * else: - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_OrderedSet); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_INCREF(__pyx_int_242532825); - __Pyx_GIVEREF(__pyx_int_242532825); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_242532825)) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state)) __PYX_ERR(1, 15, __pyx_L1_error); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_4); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, 
__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error); - __pyx_t_4 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - } - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_state); - __Pyx_XDECREF(__pyx_v__dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":16 - * else: - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v___pyx_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 16, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(1, 16, 
__pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v___pyx_state = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(1, 16, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_60__setstate_cython__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v_self), __pyx_v___pyx_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_10OrderedSet_60__setstate_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 1); - - /* "(tree fragment)":17 - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) # <<<<<<<<<<<<<< - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 17, __pyx_L1_error) - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_OrderedSet__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.OrderedSet.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":261 - * if cython.compiled: - * - * @cython.cfunc # <<<<<<<<<<<<<< - * @cython.inline - * def _get_id(item: object, /) -> cython.ulonglong: - */ - -static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(PyObject *__pyx_v_item) { - unsigned PY_LONG_LONG __pyx_r; - - /* "sqlalchemy/util/_collections_cy.py":264 - * @cython.inline - * def _get_id(item: object, /) -> cython.ulonglong: - * return cython.cast( # 
<<<<<<<<<<<<<< - * cython.ulonglong, - * cython.cast(cython.pointer(cython.void), item), - */ - __pyx_r = ((unsigned PY_LONG_LONG)((void *)__pyx_v_item)); - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":261 - * if cython.compiled: - * - * @cython.cfunc # <<<<<<<<<<<<<< - * @cython.inline - * def _get_id(item: object, /) -> cython.ulonglong: - */ - - /* function exit code */ - __pyx_L0:; - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":285 - * _members: Dict[int, Any] - * - * def __init__(self, iterable: Optional[Iterable[Any]] = None): # <<<<<<<<<<<<<< - * # the code assumes this class is ordered - * self._members = {} - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_iterable = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return -1; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_iterable,0}; - values[0] = __Pyx_Arg_NewRef_VARARGS(((PyObject *)Py_None)); - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_VARARGS(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (kw_args > 0) { - PyObject* value = __Pyx_GetKwValue_VARARGS(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_iterable); - if (value) { values[0] = __Pyx_Arg_NewRef_VARARGS(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 285, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__init__") < 0)) __PYX_ERR(0, 285, __pyx_L3_error) - } - } else { - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_VARARGS(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_iterable = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 0, 0, 1, __pyx_nargs); __PYX_ERR(0, 285, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet___init__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), __pyx_v_iterable); - - /* function 
exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_VARARGS(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet___init__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__init__", 1); - - /* "sqlalchemy/util/_collections_cy.py":287 - * def __init__(self, iterable: Optional[Iterable[Any]] = None): - * # the code assumes this class is ordered - * self._members = {} # <<<<<<<<<<<<<< - * if iterable: - * self.update(iterable) - */ - __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 287, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->_members); - __Pyx_DECREF(__pyx_v_self->_members); - __pyx_v_self->_members = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":288 - * # the code assumes this class is ordered - * self._members = {} - * if iterable: # <<<<<<<<<<<<<< - * self.update(iterable) - * - */ - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_iterable); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(0, 288, __pyx_L1_error) - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":289 - * self._members = {} - * if iterable: - * self.update(iterable) # <<<<<<<<<<<<<< - * - * def add(self, value: Any, /) -> None: - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->update(__pyx_v_self, __pyx_v_iterable, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 289, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":288 - * # the code assumes this class is ordered - * self._members = {} - * if iterable: # <<<<<<<<<<<<<< - * self.update(iterable) - * - */ - } - - /* "sqlalchemy/util/_collections_cy.py":285 - * _members: Dict[int, Any] - * - * def __init__(self, iterable: Optional[Iterable[Any]] = None): # <<<<<<<<<<<<<< - * # the code assumes this class is ordered - * self._members = {} - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":291 - * self.update(iterable) - * - * def add(self, value: Any, /) -> None: # <<<<<<<<<<<<<< - * self._members[_get_id(value)] = value - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add = {"add", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - 
__Pyx_RefNannySetupContext("add (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_2add(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_2add(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - unsigned PY_LONG_LONG __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("add", 1); - - /* "sqlalchemy/util/_collections_cy.py":292 - * - * def add(self, value: Any, /) -> None: - * self._members[_get_id(value)] = value # <<<<<<<<<<<<<< - * - * def __contains__(self, value) -> bool: - */ - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 292, __pyx_L1_error) - } - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_v_value); if (unlikely(__pyx_t_1 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 292, __pyx_L1_error) - __pyx_t_2 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 292, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (unlikely((PyDict_SetItem(__pyx_v_self->_members, __pyx_t_2, __pyx_v_value) < 0))) __PYX_ERR(0, 292, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":291 - * self.update(iterable) - * - * def add(self, value: Any, /) -> None: # <<<<<<<<<<<<<< - * self._members[_get_id(value)] = value - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.add", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":294 - * self._members[_get_id(value)] = value - * - * def __contains__(self, value) -> bool: # <<<<<<<<<<<<<< - * return _get_id(value) in self._members - * - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_5__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ -static int __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_5__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__contains__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_4__contains__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_4__contains__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value) { - int __pyx_r; - __Pyx_RefNannyDeclarations - unsigned PY_LONG_LONG 
__pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__contains__", 1); - - /* "sqlalchemy/util/_collections_cy.py":295 - * - * def __contains__(self, value) -> bool: - * return _get_id(value) in self._members # <<<<<<<<<<<<<< - * - * @cython.ccall - */ - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_v_value); if (unlikely(__pyx_t_1 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 295, __pyx_L1_error) - __pyx_t_2 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 295, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 295, __pyx_L1_error) - } - __pyx_t_3 = (__Pyx_PyDict_ContainsTF(__pyx_t_2, __pyx_v_self->_members, Py_EQ)); if (unlikely((__pyx_t_3 < 0))) __PYX_ERR(0, 295, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":294 - * self._members[_get_id(value)] = value - * - * def __contains__(self, value) -> bool: # <<<<<<<<<<<<<< - * return _get_id(value) in self._members - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__contains__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":297 - * return _get_id(value) in self._members - * - * @cython.ccall # <<<<<<<<<<<<<< - * def remove(self, value: Any, /): - * del self._members[_get_id(value)] - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ -static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_remove(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value, int __pyx_skip_dispatch) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - unsigned PY_LONG_LONG __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("remove", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_remove); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 297, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) 
__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_value}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 297, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":299 - * @cython.ccall - * def remove(self, value: Any, /): - * del self._members[_get_id(value)] # <<<<<<<<<<<<<< - * - * def discard(self, value, /) -> None: - */ - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 299, __pyx_L1_error) - } - __pyx_t_6 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_v_value); if (unlikely(__pyx_t_6 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 299, __pyx_L1_error) - __pyx_t_1 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 299, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (unlikely((PyDict_DelItem(__pyx_v_self->_members, __pyx_t_1) < 0))) __PYX_ERR(0, 299, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":297 - * return _get_id(value) in self._members - * - * @cython.ccall # <<<<<<<<<<<<<< - * def remove(self, value: Any, /): - * del self._members[_get_id(value)] - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.remove", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove = {"remove", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { - CYTHON_UNUSED 
PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("remove (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_6remove(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_6remove(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("remove", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_remove(__pyx_v_self, __pyx_v_value, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 297, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.remove", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":301 - * del self._members[_get_id(value)] - * - * def discard(self, value, /) -> None: # <<<<<<<<<<<<<< - * try: - * self.remove(value) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard = {"discard", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("discard (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_8discard(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_8discard(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("discard", 1); - - /* "sqlalchemy/util/_collections_cy.py":302 - * - * def discard(self, value, /) -> None: - * try: # <<<<<<<<<<<<<< - * self.remove(value) - * except KeyError: - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_t_2); 
- __Pyx_XGOTREF(__pyx_t_3); - /*try:*/ { - - /* "sqlalchemy/util/_collections_cy.py":303 - * def discard(self, value, /) -> None: - * try: - * self.remove(value) # <<<<<<<<<<<<<< - * except KeyError: - * pass - */ - __pyx_t_4 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->remove(__pyx_v_self, __pyx_v_value, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 303, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - - /* "sqlalchemy/util/_collections_cy.py":302 - * - * def discard(self, value, /) -> None: - * try: # <<<<<<<<<<<<<< - * self.remove(value) - * except KeyError: - */ - } - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - goto __pyx_L8_try_end; - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - - /* "sqlalchemy/util/_collections_cy.py":304 - * try: - * self.remove(value) - * except KeyError: # <<<<<<<<<<<<<< - * pass - * - */ - __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_KeyError); - if (__pyx_t_5) { - __Pyx_ErrRestore(0,0,0); - goto __pyx_L4_exception_handled; - } - goto __pyx_L5_except_error; - - /* "sqlalchemy/util/_collections_cy.py":302 - * - * def discard(self, value, /) -> None: - * try: # <<<<<<<<<<<<<< - * self.remove(value) - * except KeyError: - */ - __pyx_L5_except_error:; - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - goto __pyx_L1_error; - __pyx_L4_exception_handled:; - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - __pyx_L8_try_end:; - } - - /* "sqlalchemy/util/_collections_cy.py":301 - * del self._members[_get_id(value)] - * - * def discard(self, value, /) -> None: # <<<<<<<<<<<<<< - * try: - * self.remove(value) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.discard", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":307 - * pass - * - * def pop(self) -> Any: # <<<<<<<<<<<<<< - * pair: Tuple[Any, Any] - * try: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop = {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pop (wrapper)", 0); - #if 
!CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("pop", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "pop", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_10pop(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_10pop(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { - PyObject *__pyx_v_pair = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - unsigned int __pyx_t_7; - int __pyx_t_8; - PyObject *__pyx_t_9 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("pop", 1); - - /* "sqlalchemy/util/_collections_cy.py":309 - * def pop(self) -> Any: - * pair: Tuple[Any, Any] - * try: # <<<<<<<<<<<<<< - * pair = self._members.popitem() - * return pair[1] - */ - { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); - __Pyx_XGOTREF(__pyx_t_1); - __Pyx_XGOTREF(__pyx_t_2); - __Pyx_XGOTREF(__pyx_t_3); - /*try:*/ { - - /* "sqlalchemy/util/_collections_cy.py":310 - * pair: Tuple[Any, Any] - * try: - * pair = self._members.popitem() # <<<<<<<<<<<<<< - * return pair[1] - * except KeyError: - */ - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_members, __pyx_n_s_popitem); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 310, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = NULL; - __pyx_t_7 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_5))) { - __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); - if (likely(__pyx_t_6)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); - __Pyx_INCREF(__pyx_t_6); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_5, function); - __pyx_t_7 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_6, NULL}; - __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_5, __pyx_callargs+1-__pyx_t_7, 0+__pyx_t_7); - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 310, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - } - if (!(likely(PyTuple_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_t_4))) __PYX_ERR(0, 310, __pyx_L3_error) - __pyx_v_pair = ((PyObject*)__pyx_t_4); - __pyx_t_4 = 0; - - /* "sqlalchemy/util/_collections_cy.py":311 - * try: - * pair = self._members.popitem() - * return pair[1] # <<<<<<<<<<<<<< - * except KeyError: - * raise KeyError("pop from an empty set") - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_pair == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 311, __pyx_L3_error) - } - __pyx_t_4 = __Pyx_GetItemInt_Tuple(__pyx_v_pair, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 
1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 311, __pyx_L3_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L7_try_return; - - /* "sqlalchemy/util/_collections_cy.py":309 - * def pop(self) -> Any: - * pair: Tuple[Any, Any] - * try: # <<<<<<<<<<<<<< - * pair = self._members.popitem() - * return pair[1] - */ - } - __pyx_L3_error:; - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - - /* "sqlalchemy/util/_collections_cy.py":312 - * pair = self._members.popitem() - * return pair[1] - * except KeyError: # <<<<<<<<<<<<<< - * raise KeyError("pop from an empty set") - * - */ - __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_KeyError); - if (__pyx_t_8) { - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_4, &__pyx_t_5, &__pyx_t_6) < 0) __PYX_ERR(0, 312, __pyx_L5_except_error) - __Pyx_XGOTREF(__pyx_t_4); - __Pyx_XGOTREF(__pyx_t_5); - __Pyx_XGOTREF(__pyx_t_6); - - /* "sqlalchemy/util/_collections_cy.py":313 - * return pair[1] - * except KeyError: - * raise KeyError("pop from an empty set") # <<<<<<<<<<<<<< - * - * def clear(self) -> None: - */ - __pyx_t_9 = __Pyx_PyObject_Call(__pyx_builtin_KeyError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 313, __pyx_L5_except_error) - __Pyx_GOTREF(__pyx_t_9); - __Pyx_Raise(__pyx_t_9, 0, 0, 0); - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - __PYX_ERR(0, 313, __pyx_L5_except_error) - } - goto __pyx_L5_except_error; - - /* "sqlalchemy/util/_collections_cy.py":309 - * def pop(self) -> Any: - * pair: Tuple[Any, Any] - * try: # <<<<<<<<<<<<<< - * pair = self._members.popitem() - * return pair[1] - */ - __pyx_L5_except_error:; - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - goto __pyx_L1_error; - __pyx_L7_try_return:; - __Pyx_XGIVEREF(__pyx_t_1); - __Pyx_XGIVEREF(__pyx_t_2); - __Pyx_XGIVEREF(__pyx_t_3); - __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); - goto __pyx_L0; - } - - /* "sqlalchemy/util/_collections_cy.py":307 - * pass - * - * def pop(self) -> Any: # <<<<<<<<<<<<<< - * pair: Tuple[Any, Any] - * try: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_pair); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":315 - * raise KeyError("pop from an empty set") - * - * def clear(self) -> None: # <<<<<<<<<<<<<< - * self._members.clear() - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear = {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("clear (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("clear", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "clear", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_12clear(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_12clear(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("clear", 1); - - /* "sqlalchemy/util/_collections_cy.py":316 - * - * def clear(self) -> None: - * self._members.clear() # <<<<<<<<<<<<<< - * - * def __eq__(self, other: Any) -> bool: - */ - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "clear"); - __PYX_ERR(0, 316, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyDict_Clear(__pyx_v_self->_members); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 316, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":315 - * raise KeyError("pop from an empty set") - * - * def clear(self) -> None: # <<<<<<<<<<<<<< - * self._members.clear() - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":318 - * self._members.clear() - * - * def __eq__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * other_: IdentitySet - * if isinstance(other, IdentitySet): - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_15__eq__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_15__eq__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__eq__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_14__eq__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet 
*)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_14__eq__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other_ = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__eq__", 1); - - /* "sqlalchemy/util/_collections_cy.py":320 - * def __eq__(self, other: Any) -> bool: - * other_: IdentitySet - * if isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * other_ = other - * return self._members == other_._members - */ - __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (__pyx_t_1) { - - /* "sqlalchemy/util/_collections_cy.py":321 - * other_: IdentitySet - * if isinstance(other, IdentitySet): - * other_ = other # <<<<<<<<<<<<<< - * return self._members == other_._members - * else: - */ - if (!(likely(((__pyx_v_other) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 321, __pyx_L1_error) - __pyx_t_2 = __pyx_v_other; - __Pyx_INCREF(__pyx_t_2); - __pyx_v_other_ = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":322 - * if isinstance(other, IdentitySet): - * other_ = other - * return self._members == other_._members # <<<<<<<<<<<<<< - * else: - * return False - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = PyObject_RichCompare(__pyx_v_self->_members, __pyx_v_other_->_members, Py_EQ); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 322, __pyx_L1_error) - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":320 - * def __eq__(self, other: Any) -> bool: - * other_: IdentitySet - * if isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * other_ = other - * return self._members == other_._members - */ - } - - /* "sqlalchemy/util/_collections_cy.py":324 - * return self._members == other_._members - * else: - * return False # <<<<<<<<<<<<<< - * - * def __ne__(self, other: Any) -> bool: - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(Py_False); - __pyx_r = Py_False; - goto __pyx_L0; - } - - /* "sqlalchemy/util/_collections_cy.py":318 - * self._members.clear() - * - * def __eq__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * other_: IdentitySet - * if isinstance(other, IdentitySet): - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__eq__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_other_); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":326 - * return False - * - * def __ne__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * other_: IdentitySet - * if isinstance(other, IdentitySet): - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_17__ne__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_17__ne__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__ne__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_16__ne__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_16__ne__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other_ = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__ne__", 1); - - /* "sqlalchemy/util/_collections_cy.py":328 - * def __ne__(self, other: Any) -> bool: - * other_: IdentitySet - * if isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * other_ = other - * return self._members != other_._members - */ - __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (__pyx_t_1) { - - /* "sqlalchemy/util/_collections_cy.py":329 - * other_: IdentitySet - * if isinstance(other, IdentitySet): - * other_ = other # <<<<<<<<<<<<<< - * return self._members != other_._members - * else: - */ - if (!(likely(((__pyx_v_other) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 329, __pyx_L1_error) - __pyx_t_2 = __pyx_v_other; - __Pyx_INCREF(__pyx_t_2); - __pyx_v_other_ = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":330 - * if isinstance(other, IdentitySet): - * other_ = other - * return self._members != other_._members # <<<<<<<<<<<<<< - * else: - * return True - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = PyObject_RichCompare(__pyx_v_self->_members, __pyx_v_other_->_members, Py_NE); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 330, __pyx_L1_error) - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":328 - * def __ne__(self, other: Any) -> bool: - * other_: IdentitySet - * if isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * other_ = other - * return self._members != other_._members - */ - } - - /* "sqlalchemy/util/_collections_cy.py":332 - * return self._members != other_._members - * else: - * return True # <<<<<<<<<<<<<< - * - * @cython.ccall - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(Py_True); - __pyx_r = Py_True; - goto __pyx_L0; - } - - /* "sqlalchemy/util/_collections_cy.py":326 - * return False - * - * def __ne__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * other_: IdentitySet - * if isinstance(other, IdentitySet): - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__ne__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_other_); - __Pyx_XGIVEREF(__pyx_r); - 
__Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":334 - * return True - * - * @cython.ccall # <<<<<<<<<<<<<< - * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static int __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issubset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("issubset", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_issubset); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 334, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset)) { - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 334, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 334, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_6; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - 
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":337 - * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * other = iterable - * else: - */ - __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (__pyx_t_6) { - - /* "sqlalchemy/util/_collections_cy.py":338 - * other: IdentitySet - * if isinstance(iterable, IdentitySet): - * other = iterable # <<<<<<<<<<<<<< - * else: - * other = self.__class__(iterable) - */ - if (!(likely(((__pyx_v_iterable) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 338, __pyx_L1_error) - __pyx_t_1 = __pyx_v_iterable; - __Pyx_INCREF(__pyx_t_1); - __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":337 - * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * other = iterable - * else: - */ - goto __pyx_L3; - } - - /* "sqlalchemy/util/_collections_cy.py":340 - * other = iterable - * else: - * other = self.__class__(iterable) # <<<<<<<<<<<<<< - * - * return self._members.keys() <= other._members.keys() - */ - /*else*/ { - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 340, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_iterable}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 340, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 340, __pyx_L1_error) - __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - } - __pyx_L3:; - - /* "sqlalchemy/util/_collections_cy.py":342 - * other = self.__class__(iterable) - * - * return self._members.keys() <= other._members.keys() # <<<<<<<<<<<<<< - * - * def __le__(self, other: Any) -> bool: - */ - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "keys"); - __PYX_ERR(0, 342, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyDict_Keys(__pyx_v_self->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 342, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (unlikely(__pyx_v_other->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "keys"); - __PYX_ERR(0, 342, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_PyDict_Keys(__pyx_v_other->_members); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 342, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); 
- __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_t_2, Py_LE); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 342, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 342, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_6; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":334 - * return True - * - * @cython.ccall # <<<<<<<<<<<<<< - * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.issubset", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_other); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset = {"issubset", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("issubset (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_18issubset(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_18issubset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("issubset", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issubset(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 334, __pyx_L1_error) - __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 334, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.issubset", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":344 - * return self._members.keys() <= other._members.keys() - * - * def __le__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_21__le__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_21__le__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__le__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_20__le__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_20__le__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__le__", 1); - - /* "sqlalchemy/util/_collections_cy.py":345 - * - * def __le__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return self.issubset(other) - */ - __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":346 - * def __le__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * return self.issubset(other) - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":345 - * - * def __le__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return self.issubset(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":347 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * return self.issubset(other) # <<<<<<<<<<<<<< - * - * def __lt__(self, other: Any) -> bool: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->issubset(__pyx_v_self, __pyx_v_other, 0); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 347, __pyx_L1_error) - __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 347, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":344 - * return self._members.keys() <= other._members.keys() - * - * def __le__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__le__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":349 - * return self.issubset(other) - * - * def __lt__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * if not isinstance(other, 
IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_23__lt__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_23__lt__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__lt__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_22__lt__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_22__lt__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - Py_ssize_t __pyx_t_4; - Py_ssize_t __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__lt__", 1); - - /* "sqlalchemy/util/_collections_cy.py":350 - * - * def __lt__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return len(self) < len(other) and self.issubset(other) - */ - __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":351 - * def __lt__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * return len(self) < len(other) and self.issubset(other) - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":350 - * - * def __lt__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return len(self) < len(other) and self.issubset(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":352 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * return len(self) < len(other) and self.issubset(other) # <<<<<<<<<<<<<< - * - * @cython.ccall - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = PyObject_Length(((PyObject *)__pyx_v_self)); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(0, 352, __pyx_L1_error) - __pyx_t_5 = PyObject_Length(__pyx_v_other); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(0, 352, __pyx_L1_error) - __pyx_t_2 = (__pyx_t_4 < __pyx_t_5); - if (__pyx_t_2) { - } else { - __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 352, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_3 = __pyx_t_6; - __pyx_t_6 = 0; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_2 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->issubset(__pyx_v_self, __pyx_v_other, 0); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 352, __pyx_L1_error) - __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 352, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_6); - __pyx_t_3 = __pyx_t_6; - __pyx_t_6 = 0; - __pyx_L4_bool_binop_done:; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":349 - * return self.issubset(other) - * - * def __lt__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__lt__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":354 - * return len(self) < len(other) and self.issubset(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static int __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issuperset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other = 0; - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_t_6; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("issuperset", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_issuperset); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 354, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset)) { - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 354, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - 
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 354, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_6; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":357 - * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * other = iterable - * else: - */ - __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (__pyx_t_6) { - - /* "sqlalchemy/util/_collections_cy.py":358 - * other: IdentitySet - * if isinstance(iterable, IdentitySet): - * other = iterable # <<<<<<<<<<<<<< - * else: - * other = self.__class__(iterable) - */ - if (!(likely(((__pyx_v_iterable) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 358, __pyx_L1_error) - __pyx_t_1 = __pyx_v_iterable; - __Pyx_INCREF(__pyx_t_1); - __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":357 - * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * other = iterable - * else: - */ - goto __pyx_L3; - } - - /* "sqlalchemy/util/_collections_cy.py":360 - * other = iterable - * else: - * other = self.__class__(iterable) # <<<<<<<<<<<<<< - * - * return self._members.keys() >= other._members.keys() - */ - /*else*/ { - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 360, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_iterable}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 360, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 360, __pyx_L1_error) - __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - } - __pyx_L3:; - - /* "sqlalchemy/util/_collections_cy.py":362 - * other = self.__class__(iterable) - * - * return self._members.keys() >= other._members.keys() # 
<<<<<<<<<<<<<< - * - * def __ge__(self, other: Any) -> bool: - */ - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "keys"); - __PYX_ERR(0, 362, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyDict_Keys(__pyx_v_self->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 362, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (unlikely(__pyx_v_other->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "keys"); - __PYX_ERR(0, 362, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_PyDict_Keys(__pyx_v_other->_members); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 362, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_t_2, Py_GE); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 362, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 362, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_6; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":354 - * return len(self) < len(other) and self.issubset(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.issuperset", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_other); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset = {"issuperset", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("issuperset (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_24issuperset(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_24issuperset(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("issuperset", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issuperset(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 354, __pyx_L1_error) - __pyx_t_2 = 
__Pyx_PyBool_FromLong(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 354, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.issuperset", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":364 - * return self._members.keys() >= other._members.keys() - * - * def __ge__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_27__ge__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_27__ge__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__ge__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_26__ge__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_26__ge__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__ge__", 1); - - /* "sqlalchemy/util/_collections_cy.py":365 - * - * def __ge__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return self.issuperset(other) - */ - __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":366 - * def __ge__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * return self.issuperset(other) - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":365 - * - * def __ge__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return self.issuperset(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":367 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * return self.issuperset(other) # <<<<<<<<<<<<<< - * - * def __gt__(self, other: Any) -> bool: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->issuperset(__pyx_v_self, __pyx_v_other, 0); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 367, __pyx_L1_error) - __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 367, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - 
__pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":364 - * return self._members.keys() >= other._members.keys() - * - * def __ge__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__ge__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":369 - * return self.issuperset(other) - * - * def __gt__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_29__gt__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_29__gt__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__gt__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_28__gt__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_28__gt__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - Py_ssize_t __pyx_t_4; - Py_ssize_t __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__gt__", 1); - - /* "sqlalchemy/util/_collections_cy.py":370 - * - * def __gt__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return len(self) > len(other) and self.issuperset(other) - */ - __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":371 - * def __gt__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * return len(self) > len(other) and self.issuperset(other) - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":370 - * - * def __gt__(self, other: Any) -> bool: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return len(self) > len(other) and self.issuperset(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":372 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * return len(self) > len(other) and self.issuperset(other) # <<<<<<<<<<<<<< - * - * @cython.ccall - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = PyObject_Length(((PyObject *)__pyx_v_self)); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(0, 
372, __pyx_L1_error) - __pyx_t_5 = PyObject_Length(__pyx_v_other); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(0, 372, __pyx_L1_error) - __pyx_t_2 = (__pyx_t_4 > __pyx_t_5); - if (__pyx_t_2) { - } else { - __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 372, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_3 = __pyx_t_6; - __pyx_t_6 = 0; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_2 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->issuperset(__pyx_v_self, __pyx_v_other, 0); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 372, __pyx_L1_error) - __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 372, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_3 = __pyx_t_6; - __pyx_t_6 = 0; - __pyx_L4_bool_binop_done:; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":369 - * return self.issuperset(other) - * - * def __gt__(self, other: Any) -> bool: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__gt__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":374 - * return len(self) > len(other) and self.issuperset(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def union(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__class__() - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_union(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_result = 0; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("union", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_union); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 374, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - 
if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union)) { - __Pyx_XDECREF((PyObject *)__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 374, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 374, __pyx_L1_error) - __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":376 - * @cython.ccall - * def union(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__class__() # <<<<<<<<<<<<<< - * result._members.update(self._members) - * result.update(iterable) - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 376, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, NULL}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 0+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 376, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 376, __pyx_L1_error) - __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":377 - * def union(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__class__() - * result._members.update(self._members) # <<<<<<<<<<<<<< - * result.update(iterable) - * return result - */ - __pyx_t_2 = 
__Pyx_PyObject_GetAttrStr(__pyx_v_result->_members, __pyx_n_s_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 377, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_v_self->_members}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 377, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":378 - * result: IdentitySet = self.__class__() - * result._members.update(self._members) - * result.update(iterable) # <<<<<<<<<<<<<< - * return result - * - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_result->__pyx_vtab)->update(__pyx_v_result, __pyx_v_iterable, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 378, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":379 - * result._members.update(self._members) - * result.update(iterable) - * return result # <<<<<<<<<<<<<< - * - * def __or__(self, other: Any) -> IdentitySet: - */ - __Pyx_XDECREF((PyObject *)__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_result); - __pyx_r = __pyx_v_result; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":374 - * return len(self) > len(other) and self.issuperset(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def union(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__class__() - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.union", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_result); - __Pyx_XGIVEREF((PyObject *)__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union = {"union", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("union (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_30union(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject 
*__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_30union(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("union", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_union(__pyx_v_self, __pyx_v_iterable, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 374, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.union", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":381 - * return result - * - * def __or__(self, other: Any) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_33__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_33__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__or__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_32__or__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_32__or__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__or__", 1); - - /* "sqlalchemy/util/_collections_cy.py":382 - * - * def __or__(self, other: Any) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return self.union(other) - */ - __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":383 - * def __or__(self, other: Any) -> IdentitySet: - * if not isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * return self.union(other) - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":382 - * - * def __or__(self, other: Any) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return self.union(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":384 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * return 
self.union(other) # <<<<<<<<<<<<<< - * - * @cython.ccall - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->__pyx_union(__pyx_v_self, __pyx_v_other, 0)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 384, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":381 - * return result - * - * def __or__(self, other: Any) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__or__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":386 - * return self.union(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def update(self, iterable: Iterable[Any], /): - * members: Dict[int, Any] = self._members - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { - PyObject *__pyx_v_members = 0; - PyObject *__pyx_v_obj = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_t_6; - Py_ssize_t __pyx_t_7; - PyObject *(*__pyx_t_8)(PyObject *); - unsigned PY_LONG_LONG __pyx_t_9; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("update", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 386, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, 
__pyx_v_iterable}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 386, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":388 - * @cython.ccall - * def update(self, iterable: Iterable[Any], /): - * members: Dict[int, Any] = self._members # <<<<<<<<<<<<<< - * if isinstance(iterable, IdentitySet): - * members.update(cython.cast(IdentitySet, iterable)._members) - */ - __pyx_t_1 = __pyx_v_self->_members; - __Pyx_INCREF(__pyx_t_1); - __pyx_v_members = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":389 - * def update(self, iterable: Iterable[Any], /): - * members: Dict[int, Any] = self._members - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * members.update(cython.cast(IdentitySet, iterable)._members) - * else: - */ - __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (__pyx_t_6) { - - /* "sqlalchemy/util/_collections_cy.py":390 - * members: Dict[int, Any] = self._members - * if isinstance(iterable, IdentitySet): - * members.update(cython.cast(IdentitySet, iterable)._members) # <<<<<<<<<<<<<< - * else: - * for obj in iterable: - */ - __pyx_t_1 = __Pyx_CallUnboundCMethod1(&__pyx_umethod_PyDict_Type_update, __pyx_v_members, ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_iterable)->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 390, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":389 - * def update(self, iterable: Iterable[Any], /): - * members: Dict[int, Any] = self._members - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * members.update(cython.cast(IdentitySet, iterable)._members) - * else: - */ - goto __pyx_L3; - } - - /* "sqlalchemy/util/_collections_cy.py":392 - * members.update(cython.cast(IdentitySet, iterable)._members) - * else: - * for obj in iterable: # <<<<<<<<<<<<<< - * members[_get_id(obj)] = obj - * - */ - /*else*/ { - if (likely(PyList_CheckExact(__pyx_v_iterable)) || PyTuple_CheckExact(__pyx_v_iterable)) { - __pyx_t_1 = __pyx_v_iterable; __Pyx_INCREF(__pyx_t_1); - __pyx_t_7 = 0; - __pyx_t_8 = NULL; - } else { - __pyx_t_7 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_iterable); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 392, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 392, __pyx_L1_error) - } - for (;;) { - if (likely(!__pyx_t_8)) { - if (likely(PyList_CheckExact(__pyx_t_1))) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_1); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) 
__PYX_ERR(0, 392, __pyx_L1_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_2 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_2); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 392, __pyx_L1_error) - #else - __pyx_t_2 = __Pyx_PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 392, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - #endif - } else { - { - Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_1); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 392, __pyx_L1_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_2 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_2); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 392, __pyx_L1_error) - #else - __pyx_t_2 = __Pyx_PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 392, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - #endif - } - } else { - __pyx_t_2 = __pyx_t_8(__pyx_t_1); - if (unlikely(!__pyx_t_2)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 392, __pyx_L1_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_2); - } - __Pyx_XDECREF_SET(__pyx_v_obj, __pyx_t_2); - __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":393 - * else: - * for obj in iterable: - * members[_get_id(obj)] = obj # <<<<<<<<<<<<<< - * - * def __ior__(self, other: Any) -> IdentitySet: - */ - if (unlikely(__pyx_v_members == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(0, 393, __pyx_L1_error) - } - __pyx_t_9 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_v_obj); if (unlikely(__pyx_t_9 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 393, __pyx_L1_error) - __pyx_t_2 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_9); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 393, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (unlikely((PyDict_SetItem(__pyx_v_members, __pyx_t_2, __pyx_v_obj) < 0))) __PYX_ERR(0, 393, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":392 - * members.update(cython.cast(IdentitySet, iterable)._members) - * else: - * for obj in iterable: # <<<<<<<<<<<<<< - * members[_get_id(obj)] = obj - * - */ - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - } - __pyx_L3:; - - /* "sqlalchemy/util/_collections_cy.py":386 - * return self.union(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def update(self, iterable: Iterable[Any], /): - * members: Dict[int, Any] = self._members - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_members); - __Pyx_XDECREF(__pyx_v_obj); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyMethodDef 
__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update = {"update", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("update (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_34update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_34update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("update", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_update(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 386, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":395 - * members[_get_id(obj)] = obj - * - * def __ior__(self, other: Any) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_37__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_37__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__ior__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_36__ior__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_36__ior__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__ior__", 1); - - /* "sqlalchemy/util/_collections_cy.py":396 - * - * def __ior__(self, other: Any) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * self.update(other) - */ - __pyx_t_1 
= __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":397 - * def __ior__(self, other: Any) -> IdentitySet: - * if not isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * self.update(other) - * return self - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":396 - * - * def __ior__(self, other: Any) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * self.update(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":398 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * self.update(other) # <<<<<<<<<<<<<< - * return self - * - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->update(__pyx_v_self, __pyx_v_other, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 398, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":399 - * return NotImplemented - * self.update(other) - * return self # <<<<<<<<<<<<<< - * - * @cython.ccall - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":395 - * members[_get_id(obj)] = obj - * - * def __ior__(self, other: Any) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__ior__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":401 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_result = 0; - PyObject *__pyx_v_other = NULL; - PyObject *__pyx_8genexpr9__pyx_v_obj = NULL; - PyObject *__pyx_9genexpr10__pyx_v_k = NULL; - PyObject *__pyx_9genexpr10__pyx_v_v = NULL; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_t_6; - Py_ssize_t __pyx_t_7; - PyObject *(*__pyx_t_8)(PyObject *); - unsigned PY_LONG_LONG __pyx_t_9; - Py_ssize_t __pyx_t_10; - int __pyx_t_11; - int __pyx_t_12; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("difference", 1); - /* Check if called by wrapper */ - if 
(unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_difference); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 401, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference)) { - __Pyx_XDECREF((PyObject *)__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 401, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 401, __pyx_L1_error) - __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":403 - * @cython.ccall - * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) # <<<<<<<<<<<<<< - * if isinstance(iterable, IdentitySet): - * other = cython.cast(IdentitySet, iterable)._members.keys() - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 403, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 403, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_4)) { - PyObject* function = 
PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_t_3}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 403, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 403, __pyx_L1_error) - __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":404 - * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * other = cython.cast(IdentitySet, iterable)._members.keys() - * else: - */ - __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (__pyx_t_6) { - - /* "sqlalchemy/util/_collections_cy.py":405 - * result: IdentitySet = self.__new__(self.__class__) - * if isinstance(iterable, IdentitySet): - * other = cython.cast(IdentitySet, iterable)._members.keys() # <<<<<<<<<<<<<< - * else: - * other = {_get_id(obj) for obj in iterable} - */ - if (unlikely(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_iterable)->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "keys"); - __PYX_ERR(0, 405, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyDict_Keys(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_iterable)->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 405, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_other = __pyx_t_1; - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":404 - * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * other = cython.cast(IdentitySet, iterable)._members.keys() - * else: - */ - goto __pyx_L3; - } - - /* "sqlalchemy/util/_collections_cy.py":407 - * other = cython.cast(IdentitySet, iterable)._members.keys() - * else: - * other = {_get_id(obj) for obj in iterable} # <<<<<<<<<<<<<< - * - * result._members = { - */ - /*else*/ { - { /* enter inner scope */ - __pyx_t_1 = PySet_New(NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 407, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_1); - if (likely(PyList_CheckExact(__pyx_v_iterable)) || PyTuple_CheckExact(__pyx_v_iterable)) { - __pyx_t_2 = __pyx_v_iterable; __Pyx_INCREF(__pyx_t_2); - __pyx_t_7 = 0; - __pyx_t_8 = NULL; - } else { - __pyx_t_7 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_iterable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 407, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 407, __pyx_L6_error) - } - for (;;) { - if (likely(!__pyx_t_8)) { - if (likely(PyList_CheckExact(__pyx_t_2))) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 407, __pyx_L6_error) - #endif - if (__pyx_t_7 >= __pyx_temp) 
break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 407, __pyx_L6_error) - #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 407, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - } else { - { - Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 407, __pyx_L6_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 407, __pyx_L6_error) - #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 407, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - } - } else { - __pyx_t_3 = __pyx_t_8(__pyx_t_2); - if (unlikely(!__pyx_t_3)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 407, __pyx_L6_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_3); - } - __Pyx_XDECREF_SET(__pyx_8genexpr9__pyx_v_obj, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_t_9 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_8genexpr9__pyx_v_obj); if (unlikely(__pyx_t_9 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 407, __pyx_L6_error) - __pyx_t_3 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_9); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 407, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_3); - if (unlikely(PySet_Add(__pyx_t_1, (PyObject*)__pyx_t_3))) __PYX_ERR(0, 407, __pyx_L6_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_8genexpr9__pyx_v_obj); __pyx_8genexpr9__pyx_v_obj = 0; - goto __pyx_L10_exit_scope; - __pyx_L6_error:; - __Pyx_XDECREF(__pyx_8genexpr9__pyx_v_obj); __pyx_8genexpr9__pyx_v_obj = 0; - goto __pyx_L1_error; - __pyx_L10_exit_scope:; - } /* exit inner scope */ - __pyx_v_other = __pyx_t_1; - __pyx_t_1 = 0; - } - __pyx_L3:; - - /* "sqlalchemy/util/_collections_cy.py":409 - * other = {_get_id(obj) for obj in iterable} - * - * result._members = { # <<<<<<<<<<<<<< - * k: v for k, v in self._members.items() if k not in other - * } - */ - { /* enter inner scope */ - __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 409, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_1); - - /* "sqlalchemy/util/_collections_cy.py":410 - * - * result._members = { - * k: v for k, v in self._members.items() if k not in other # <<<<<<<<<<<<<< - * } - * return result - */ - __pyx_t_7 = 0; - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "items"); - __PYX_ERR(0, 410, __pyx_L13_error) - } - __pyx_t_3 = __Pyx_dict_iterator(__pyx_v_self->_members, 1, __pyx_n_s_items, (&__pyx_t_10), (&__pyx_t_11)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 410, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_2); - __pyx_t_2 = __pyx_t_3; - __pyx_t_3 = 0; - while (1) { - __pyx_t_12 = __Pyx_dict_iter_next(__pyx_t_2, __pyx_t_10, &__pyx_t_7, &__pyx_t_3, &__pyx_t_4, NULL, __pyx_t_11); - if (unlikely(__pyx_t_12 == 0)) break; - if (unlikely(__pyx_t_12 == -1)) __PYX_ERR(0, 410, __pyx_L13_error) - 
__Pyx_GOTREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_t_4); - __Pyx_XDECREF_SET(__pyx_9genexpr10__pyx_v_k, __pyx_t_3); - __pyx_t_3 = 0; - __Pyx_XDECREF_SET(__pyx_9genexpr10__pyx_v_v, __pyx_t_4); - __pyx_t_4 = 0; - __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_9genexpr10__pyx_v_k, __pyx_v_other, Py_NE)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 410, __pyx_L13_error) - if (__pyx_t_6) { - if (unlikely(PyDict_SetItem(__pyx_t_1, (PyObject*)__pyx_9genexpr10__pyx_v_k, (PyObject*)__pyx_9genexpr10__pyx_v_v))) __PYX_ERR(0, 410, __pyx_L13_error) - } - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_k); __pyx_9genexpr10__pyx_v_k = 0; - __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_v); __pyx_9genexpr10__pyx_v_v = 0; - goto __pyx_L17_exit_scope; - __pyx_L13_error:; - __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_k); __pyx_9genexpr10__pyx_v_k = 0; - __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_v); __pyx_9genexpr10__pyx_v_v = 0; - goto __pyx_L1_error; - __pyx_L17_exit_scope:; - } /* exit inner scope */ - - /* "sqlalchemy/util/_collections_cy.py":409 - * other = {_get_id(obj) for obj in iterable} - * - * result._members = { # <<<<<<<<<<<<<< - * k: v for k, v in self._members.items() if k not in other - * } - */ - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_result->_members); - __Pyx_DECREF(__pyx_v_result->_members); - __pyx_v_result->_members = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":412 - * k: v for k, v in self._members.items() if k not in other - * } - * return result # <<<<<<<<<<<<<< - * - * def __sub__(self, other: IdentitySet) -> IdentitySet: - */ - __Pyx_XDECREF((PyObject *)__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_result); - __pyx_r = __pyx_v_result; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":401 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.difference", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_result); - __Pyx_XDECREF(__pyx_v_other); - __Pyx_XDECREF(__pyx_8genexpr9__pyx_v_obj); - __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_k); - __Pyx_XDECREF(__pyx_9genexpr10__pyx_v_v); - __Pyx_XGIVEREF((PyObject *)__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference = {"difference", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("difference (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_38difference(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject 
*)__pyx_v_iterable)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_38difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("difference", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference(__pyx_v_self, __pyx_v_iterable, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 401, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.difference", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":414 - * return result - * - * def __sub__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_41__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_41__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__sub__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, 0, "other", 0))) __PYX_ERR(0, 414, __pyx_L1_error) - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_40__sub__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_other)); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = NULL; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_40__sub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__sub__", 1); - - /* "sqlalchemy/util/_collections_cy.py":415 - * - * def __sub__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return self.difference(other) - */ - __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":416 - * def __sub__(self, other: 
IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * return self.difference(other) - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":415 - * - * def __sub__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return self.difference(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":417 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * return self.difference(other) # <<<<<<<<<<<<<< - * - * # def difference_update(self, iterable: Iterable[Any]) -> None: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->difference(__pyx_v_self, ((PyObject *)__pyx_v_other), 0)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 417, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":414 - * return result - * - * def __sub__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__sub__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":420 - * - * # def difference_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.difference(iterable) - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("difference_update", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject 
*)__pyx_v_self), __pyx_n_s_difference_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 420, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 420, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":422 - * @cython.ccall - * def difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.difference(iterable) # <<<<<<<<<<<<<< - * self._members = other._members - * - */ - __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->difference(__pyx_v_self, __pyx_v_iterable, 0)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 422, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":423 - * def difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.difference(iterable) - * self._members = other._members # <<<<<<<<<<<<<< - * - * def __isub__(self, other: IdentitySet) -> IdentitySet: - */ - __pyx_t_1 = __pyx_v_other->_members; - __Pyx_INCREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->_members); - __Pyx_DECREF(__pyx_v_self->_members); - __pyx_v_self->_members = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":420 - * - * # def difference_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.difference(iterable) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_other); - 
__Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update = {"difference_update", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("difference_update (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_42difference_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_42difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("difference_update", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference_update(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 420, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":425 - * self._members = other._members - * - * def __isub__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_45__isub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_45__isub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__isub__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, 0, "other", 0))) __PYX_ERR(0, 425, __pyx_L1_error) - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_44__isub__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_other)); - - /* function exit code */ - 
goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = NULL; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_44__isub__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__isub__", 1); - - /* "sqlalchemy/util/_collections_cy.py":426 - * - * def __isub__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * self.difference_update(other) - */ - __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":427 - * def __isub__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * self.difference_update(other) - * return self - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":426 - * - * def __isub__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * self.difference_update(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":428 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * self.difference_update(other) # <<<<<<<<<<<<<< - * return self - * - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->difference_update(__pyx_v_self, ((PyObject *)__pyx_v_other), 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 428, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":429 - * return NotImplemented - * self.difference_update(other) - * return self # <<<<<<<<<<<<<< - * - * @cython.ccall - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":425 - * self._members = other._members - * - * def __isub__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__isub__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":431 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection(struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_result = 0; - PyObject *__pyx_v_other = NULL; - PyObject *__pyx_9genexpr11__pyx_v_obj = NULL; - PyObject *__pyx_9genexpr12__pyx_v_k = NULL; - PyObject *__pyx_9genexpr12__pyx_v_v = NULL; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_t_6; - Py_ssize_t __pyx_t_7; - PyObject *(*__pyx_t_8)(PyObject *); - unsigned PY_LONG_LONG __pyx_t_9; - Py_ssize_t __pyx_t_10; - int __pyx_t_11; - int __pyx_t_12; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("intersection", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_intersection); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 431, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection)) { - __Pyx_XDECREF((PyObject *)__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 431, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 431, __pyx_L1_error) - __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif 
- __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":433 - * @cython.ccall - * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) # <<<<<<<<<<<<<< - * if isinstance(iterable, IdentitySet): - * other = cython.cast(IdentitySet, iterable)._members - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 433, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 433, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_t_3}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 433, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 433, __pyx_L1_error) - __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":434 - * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * other = cython.cast(IdentitySet, iterable)._members - * else: - */ - __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (__pyx_t_6) { - - /* "sqlalchemy/util/_collections_cy.py":435 - * result: IdentitySet = self.__new__(self.__class__) - * if isinstance(iterable, IdentitySet): - * other = cython.cast(IdentitySet, iterable)._members # <<<<<<<<<<<<<< - * else: - * other = {_get_id(obj) for obj in iterable} - */ - __pyx_t_1 = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_iterable)->_members; - __Pyx_INCREF(__pyx_t_1); - __pyx_v_other = __pyx_t_1; - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":434 - * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * other = cython.cast(IdentitySet, iterable)._members - * else: - */ - goto __pyx_L3; - } - - /* "sqlalchemy/util/_collections_cy.py":437 - * other = cython.cast(IdentitySet, iterable)._members - * else: - * other = {_get_id(obj) for obj in iterable} # <<<<<<<<<<<<<< - * result._members = { - * k: v for k, v in self._members.items() if k in other - */ - /*else*/ { - { /* enter inner scope */ - __pyx_t_1 = PySet_New(NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 437, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_1); - if (likely(PyList_CheckExact(__pyx_v_iterable)) || 
PyTuple_CheckExact(__pyx_v_iterable)) { - __pyx_t_2 = __pyx_v_iterable; __Pyx_INCREF(__pyx_t_2); - __pyx_t_7 = 0; - __pyx_t_8 = NULL; - } else { - __pyx_t_7 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_iterable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 437, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 437, __pyx_L6_error) - } - for (;;) { - if (likely(!__pyx_t_8)) { - if (likely(PyList_CheckExact(__pyx_t_2))) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 437, __pyx_L6_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 437, __pyx_L6_error) - #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 437, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - } else { - { - Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 437, __pyx_L6_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 437, __pyx_L6_error) - #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 437, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - } - } else { - __pyx_t_3 = __pyx_t_8(__pyx_t_2); - if (unlikely(!__pyx_t_3)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 437, __pyx_L6_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_3); - } - __Pyx_XDECREF_SET(__pyx_9genexpr11__pyx_v_obj, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_t_9 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_9genexpr11__pyx_v_obj); if (unlikely(__pyx_t_9 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 437, __pyx_L6_error) - __pyx_t_3 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_9); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 437, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_3); - if (unlikely(PySet_Add(__pyx_t_1, (PyObject*)__pyx_t_3))) __PYX_ERR(0, 437, __pyx_L6_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_9genexpr11__pyx_v_obj); __pyx_9genexpr11__pyx_v_obj = 0; - goto __pyx_L10_exit_scope; - __pyx_L6_error:; - __Pyx_XDECREF(__pyx_9genexpr11__pyx_v_obj); __pyx_9genexpr11__pyx_v_obj = 0; - goto __pyx_L1_error; - __pyx_L10_exit_scope:; - } /* exit inner scope */ - __pyx_v_other = __pyx_t_1; - __pyx_t_1 = 0; - } - __pyx_L3:; - - /* "sqlalchemy/util/_collections_cy.py":438 - * else: - * other = {_get_id(obj) for obj in iterable} - * result._members = { # <<<<<<<<<<<<<< - * k: v for k, v in self._members.items() if k in other - * } - */ - { /* enter inner scope */ - __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 438, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_1); - - /* "sqlalchemy/util/_collections_cy.py":439 - * other = {_get_id(obj) for obj in iterable} - * result._members = { - * k: v for k, v in self._members.items() if k in other # 
<<<<<<<<<<<<<< - * } - * return result - */ - __pyx_t_7 = 0; - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "items"); - __PYX_ERR(0, 439, __pyx_L13_error) - } - __pyx_t_3 = __Pyx_dict_iterator(__pyx_v_self->_members, 1, __pyx_n_s_items, (&__pyx_t_10), (&__pyx_t_11)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 439, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_2); - __pyx_t_2 = __pyx_t_3; - __pyx_t_3 = 0; - while (1) { - __pyx_t_12 = __Pyx_dict_iter_next(__pyx_t_2, __pyx_t_10, &__pyx_t_7, &__pyx_t_3, &__pyx_t_4, NULL, __pyx_t_11); - if (unlikely(__pyx_t_12 == 0)) break; - if (unlikely(__pyx_t_12 == -1)) __PYX_ERR(0, 439, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_t_4); - __Pyx_XDECREF_SET(__pyx_9genexpr12__pyx_v_k, __pyx_t_3); - __pyx_t_3 = 0; - __Pyx_XDECREF_SET(__pyx_9genexpr12__pyx_v_v, __pyx_t_4); - __pyx_t_4 = 0; - __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_9genexpr12__pyx_v_k, __pyx_v_other, Py_EQ)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 439, __pyx_L13_error) - if (__pyx_t_6) { - if (unlikely(PyDict_SetItem(__pyx_t_1, (PyObject*)__pyx_9genexpr12__pyx_v_k, (PyObject*)__pyx_9genexpr12__pyx_v_v))) __PYX_ERR(0, 439, __pyx_L13_error) - } - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_k); __pyx_9genexpr12__pyx_v_k = 0; - __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_v); __pyx_9genexpr12__pyx_v_v = 0; - goto __pyx_L17_exit_scope; - __pyx_L13_error:; - __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_k); __pyx_9genexpr12__pyx_v_k = 0; - __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_v); __pyx_9genexpr12__pyx_v_v = 0; - goto __pyx_L1_error; - __pyx_L17_exit_scope:; - } /* exit inner scope */ - - /* "sqlalchemy/util/_collections_cy.py":438 - * else: - * other = {_get_id(obj) for obj in iterable} - * result._members = { # <<<<<<<<<<<<<< - * k: v for k, v in self._members.items() if k in other - * } - */ - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_result->_members); - __Pyx_DECREF(__pyx_v_result->_members); - __pyx_v_result->_members = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":441 - * k: v for k, v in self._members.items() if k in other - * } - * return result # <<<<<<<<<<<<<< - * - * def __and__(self, other): - */ - __Pyx_XDECREF((PyObject *)__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_result); - __pyx_r = __pyx_v_result; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":431 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.intersection", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_result); - __Pyx_XDECREF(__pyx_v_other); - __Pyx_XDECREF(__pyx_9genexpr11__pyx_v_obj); - __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_k); - __Pyx_XDECREF(__pyx_9genexpr12__pyx_v_v); - __Pyx_XGIVEREF((PyObject *)__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyMethodDef 
__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection = {"intersection", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("intersection (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_46intersection(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_46intersection(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("intersection", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection(__pyx_v_self, __pyx_v_iterable, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 431, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.intersection", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":443 - * return result - * - * def __and__(self, other): # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_49__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_49__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__and__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_48__and__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_other)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_48__and__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__and__", 1); - - /* "sqlalchemy/util/_collections_cy.py":444 - * - * def __and__(self, other): - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return 
self.intersection(other) - */ - __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":445 - * def __and__(self, other): - * if not isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * return self.intersection(other) - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":444 - * - * def __and__(self, other): - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return self.intersection(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":446 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * return self.intersection(other) # <<<<<<<<<<<<<< - * - * # def intersection_update(self, iterable: Iterable[Any]) -> None: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->intersection(__pyx_v_self, __pyx_v_other, 0)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 446, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":443 - * return result - * - * def __and__(self, other): # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__and__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":449 - * - * # def intersection_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def intersection_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.intersection(iterable) - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("intersection_update", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, 
__pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_intersection_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 449, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 449, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":451 - * @cython.ccall - * def intersection_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.intersection(iterable) # <<<<<<<<<<<<<< - * self._members = other._members - * - */ - __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->intersection(__pyx_v_self, __pyx_v_iterable, 0)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 451, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":452 - * def intersection_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.intersection(iterable) - * self._members = other._members # <<<<<<<<<<<<<< - * - * def __iand__(self, other: IdentitySet) -> IdentitySet: - */ - __pyx_t_1 = __pyx_v_other->_members; - __Pyx_INCREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->_members); - __Pyx_DECREF(__pyx_v_self->_members); - __pyx_v_self->_members = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":449 - * - * # def intersection_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def intersection_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.intersection(iterable) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - 
__Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.intersection_update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_other); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update = {"intersection_update", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("intersection_update (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_50intersection_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_50intersection_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("intersection_update", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection_update(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 449, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.intersection_update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":454 - * self._members = other._members - * - * def __iand__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_53__iand__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_53__iand__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__iand__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, 0, "other", 0))) __PYX_ERR(0, 454, __pyx_L1_error) - __pyx_r = 
__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_52__iand__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_other)); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = NULL; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_52__iand__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__iand__", 1); - - /* "sqlalchemy/util/_collections_cy.py":455 - * - * def __iand__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * self.intersection_update(other) - */ - __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":456 - * def __iand__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * self.intersection_update(other) - * return self - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":455 - * - * def __iand__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * self.intersection_update(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":457 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * self.intersection_update(other) # <<<<<<<<<<<<<< - * return self - * - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->intersection_update(__pyx_v_self, ((PyObject *)__pyx_v_other), 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 457, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":458 - * return NotImplemented - * self.intersection_update(other) - * return self # <<<<<<<<<<<<<< - * - * @cython.ccall - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":454 - * self._members = other._members - * - * def __iand__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__iand__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":460 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def symmetric_difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - -static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_result = 0; - PyObject *__pyx_v_other = 0; - PyObject *__pyx_9genexpr13__pyx_v_obj = NULL; - PyObject *__pyx_9genexpr14__pyx_v_k = NULL; - PyObject *__pyx_9genexpr14__pyx_v_v = NULL; - PyObject *__pyx_9genexpr15__pyx_v_k = NULL; - PyObject *__pyx_9genexpr15__pyx_v_v = NULL; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_t_6; - Py_ssize_t __pyx_t_7; - PyObject *(*__pyx_t_8)(PyObject *); - unsigned PY_LONG_LONG __pyx_t_9; - Py_ssize_t __pyx_t_10; - int __pyx_t_11; - int __pyx_t_12; - PyObject *__pyx_t_13 = NULL; - PyObject *__pyx_t_14 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("symmetric_difference", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_symmetric_difference); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 460, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference)) { - __Pyx_XDECREF((PyObject *)__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 460, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 460, __pyx_L1_error) - __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); - 
__pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":462 - * @cython.ccall - * def symmetric_difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) # <<<<<<<<<<<<<< - * other: Dict[int, Any] - * if isinstance(iterable, IdentitySet): - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 462, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 462, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_t_3}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 462, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 462, __pyx_L1_error) - __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":464 - * result: IdentitySet = self.__new__(self.__class__) - * other: Dict[int, Any] - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * other = cython.cast(IdentitySet, iterable)._members - * else: - */ - __pyx_t_6 = __Pyx_TypeCheck(__pyx_v_iterable, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (__pyx_t_6) { - - /* "sqlalchemy/util/_collections_cy.py":465 - * other: Dict[int, Any] - * if isinstance(iterable, IdentitySet): - * other = cython.cast(IdentitySet, iterable)._members # <<<<<<<<<<<<<< - * else: - * other = {_get_id(obj): obj for obj in iterable} - */ - __pyx_t_1 = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_iterable)->_members; - __Pyx_INCREF(__pyx_t_1); - __pyx_v_other = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":464 - * result: IdentitySet = self.__new__(self.__class__) - * other: Dict[int, Any] - * if isinstance(iterable, IdentitySet): # <<<<<<<<<<<<<< - * other = cython.cast(IdentitySet, iterable)._members - * else: - */ - goto __pyx_L3; - } - - /* "sqlalchemy/util/_collections_cy.py":467 - * other = cython.cast(IdentitySet, iterable)._members - * else: - * other = {_get_id(obj): 
obj for obj in iterable} # <<<<<<<<<<<<<< - * result._members = { - * k: v for k, v in self._members.items() if k not in other - */ - /*else*/ { - { /* enter inner scope */ - __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 467, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_1); - if (likely(PyList_CheckExact(__pyx_v_iterable)) || PyTuple_CheckExact(__pyx_v_iterable)) { - __pyx_t_2 = __pyx_v_iterable; __Pyx_INCREF(__pyx_t_2); - __pyx_t_7 = 0; - __pyx_t_8 = NULL; - } else { - __pyx_t_7 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_iterable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 467, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_8 = __Pyx_PyObject_GetIterNextFunc(__pyx_t_2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 467, __pyx_L6_error) - } - for (;;) { - if (likely(!__pyx_t_8)) { - if (likely(PyList_CheckExact(__pyx_t_2))) { - { - Py_ssize_t __pyx_temp = __Pyx_PyList_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 467, __pyx_L6_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 467, __pyx_L6_error) - #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 467, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - } else { - { - Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_2); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 467, __pyx_L6_error) - #endif - if (__pyx_t_7 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely((0 < 0))) __PYX_ERR(0, 467, __pyx_L6_error) - #else - __pyx_t_3 = __Pyx_PySequence_ITEM(__pyx_t_2, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 467, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_3); - #endif - } - } else { - __pyx_t_3 = __pyx_t_8(__pyx_t_2); - if (unlikely(!__pyx_t_3)) { - PyObject* exc_type = PyErr_Occurred(); - if (exc_type) { - if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 467, __pyx_L6_error) - } - break; - } - __Pyx_GOTREF(__pyx_t_3); - } - __Pyx_XDECREF_SET(__pyx_9genexpr13__pyx_v_obj, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_t_9 = __pyx_f_10sqlalchemy_4util_15_collections_cy__get_id(__pyx_9genexpr13__pyx_v_obj); if (unlikely(__pyx_t_9 == ((unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 467, __pyx_L6_error) - __pyx_t_3 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_t_9); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 467, __pyx_L6_error) - __Pyx_GOTREF(__pyx_t_3); - if (unlikely(PyDict_SetItem(__pyx_t_1, (PyObject*)__pyx_t_3, (PyObject*)__pyx_9genexpr13__pyx_v_obj))) __PYX_ERR(0, 467, __pyx_L6_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_9genexpr13__pyx_v_obj); __pyx_9genexpr13__pyx_v_obj = 0; - goto __pyx_L10_exit_scope; - __pyx_L6_error:; - __Pyx_XDECREF(__pyx_9genexpr13__pyx_v_obj); __pyx_9genexpr13__pyx_v_obj = 0; - goto __pyx_L1_error; - __pyx_L10_exit_scope:; - } /* exit inner scope */ - __pyx_v_other = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - } - __pyx_L3:; - - /* "sqlalchemy/util/_collections_cy.py":468 - * else: - * other = {_get_id(obj): obj for obj in iterable} - * result._members = { # 
<<<<<<<<<<<<<< - * k: v for k, v in self._members.items() if k not in other - * } - */ - { /* enter inner scope */ - __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 468, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_1); - - /* "sqlalchemy/util/_collections_cy.py":469 - * other = {_get_id(obj): obj for obj in iterable} - * result._members = { - * k: v for k, v in self._members.items() if k not in other # <<<<<<<<<<<<<< - * } - * result._members.update( - */ - __pyx_t_7 = 0; - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "items"); - __PYX_ERR(0, 469, __pyx_L13_error) - } - __pyx_t_3 = __Pyx_dict_iterator(__pyx_v_self->_members, 1, __pyx_n_s_items, (&__pyx_t_10), (&__pyx_t_11)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 469, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_2); - __pyx_t_2 = __pyx_t_3; - __pyx_t_3 = 0; - while (1) { - __pyx_t_12 = __Pyx_dict_iter_next(__pyx_t_2, __pyx_t_10, &__pyx_t_7, &__pyx_t_3, &__pyx_t_4, NULL, __pyx_t_11); - if (unlikely(__pyx_t_12 == 0)) break; - if (unlikely(__pyx_t_12 == -1)) __PYX_ERR(0, 469, __pyx_L13_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GOTREF(__pyx_t_4); - __Pyx_XDECREF_SET(__pyx_9genexpr14__pyx_v_k, __pyx_t_3); - __pyx_t_3 = 0; - __Pyx_XDECREF_SET(__pyx_9genexpr14__pyx_v_v, __pyx_t_4); - __pyx_t_4 = 0; - if (unlikely(__pyx_v_other == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 469, __pyx_L13_error) - } - __pyx_t_6 = (__Pyx_PyDict_ContainsTF(__pyx_9genexpr14__pyx_v_k, __pyx_v_other, Py_NE)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 469, __pyx_L13_error) - if (__pyx_t_6) { - if (unlikely(PyDict_SetItem(__pyx_t_1, (PyObject*)__pyx_9genexpr14__pyx_v_k, (PyObject*)__pyx_9genexpr14__pyx_v_v))) __PYX_ERR(0, 469, __pyx_L13_error) - } - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_k); __pyx_9genexpr14__pyx_v_k = 0; - __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_v); __pyx_9genexpr14__pyx_v_v = 0; - goto __pyx_L17_exit_scope; - __pyx_L13_error:; - __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_k); __pyx_9genexpr14__pyx_v_k = 0; - __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_v); __pyx_9genexpr14__pyx_v_v = 0; - goto __pyx_L1_error; - __pyx_L17_exit_scope:; - } /* exit inner scope */ - - /* "sqlalchemy/util/_collections_cy.py":468 - * else: - * other = {_get_id(obj): obj for obj in iterable} - * result._members = { # <<<<<<<<<<<<<< - * k: v for k, v in self._members.items() if k not in other - * } - */ - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_result->_members); - __Pyx_DECREF(__pyx_v_result->_members); - __pyx_v_result->_members = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":471 - * k: v for k, v in self._members.items() if k not in other - * } - * result._members.update( # <<<<<<<<<<<<<< - * [(k, v) for k, v in other.items() if k not in self._members] - * ) - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_result->_members, __pyx_n_s_update); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 471, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - { /* enter inner scope */ - - /* "sqlalchemy/util/_collections_cy.py":472 - * } - * result._members.update( - * [(k, v) for k, v in other.items() if k not in self._members] # <<<<<<<<<<<<<< - * ) - * return result - */ - __pyx_t_4 = PyList_New(0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 472, __pyx_L20_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_10 = 0; - if (unlikely(__pyx_v_other == 
Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "items"); - __PYX_ERR(0, 472, __pyx_L20_error) - } - __pyx_t_13 = __Pyx_dict_iterator(__pyx_v_other, 1, __pyx_n_s_items, (&__pyx_t_7), (&__pyx_t_11)); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 472, __pyx_L20_error) - __Pyx_GOTREF(__pyx_t_13); - __Pyx_XDECREF(__pyx_t_3); - __pyx_t_3 = __pyx_t_13; - __pyx_t_13 = 0; - while (1) { - __pyx_t_12 = __Pyx_dict_iter_next(__pyx_t_3, __pyx_t_7, &__pyx_t_10, &__pyx_t_13, &__pyx_t_14, NULL, __pyx_t_11); - if (unlikely(__pyx_t_12 == 0)) break; - if (unlikely(__pyx_t_12 == -1)) __PYX_ERR(0, 472, __pyx_L20_error) - __Pyx_GOTREF(__pyx_t_13); - __Pyx_GOTREF(__pyx_t_14); - __Pyx_XDECREF_SET(__pyx_9genexpr15__pyx_v_k, __pyx_t_13); - __pyx_t_13 = 0; - __Pyx_XDECREF_SET(__pyx_9genexpr15__pyx_v_v, __pyx_t_14); - __pyx_t_14 = 0; - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); - __PYX_ERR(0, 472, __pyx_L20_error) - } - __pyx_t_6 = (__Pyx_PyDict_ContainsTF(__pyx_9genexpr15__pyx_v_k, __pyx_v_self->_members, Py_NE)); if (unlikely((__pyx_t_6 < 0))) __PYX_ERR(0, 472, __pyx_L20_error) - if (__pyx_t_6) { - __pyx_t_14 = PyTuple_New(2); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 472, __pyx_L20_error) - __Pyx_GOTREF(__pyx_t_14); - __Pyx_INCREF(__pyx_9genexpr15__pyx_v_k); - __Pyx_GIVEREF(__pyx_9genexpr15__pyx_v_k); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_9genexpr15__pyx_v_k)) __PYX_ERR(0, 472, __pyx_L20_error); - __Pyx_INCREF(__pyx_9genexpr15__pyx_v_v); - __Pyx_GIVEREF(__pyx_9genexpr15__pyx_v_v); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_14, 1, __pyx_9genexpr15__pyx_v_v)) __PYX_ERR(0, 472, __pyx_L20_error); - if (unlikely(__Pyx_ListComp_Append(__pyx_t_4, (PyObject*)__pyx_t_14))) __PYX_ERR(0, 472, __pyx_L20_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - } - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_k); __pyx_9genexpr15__pyx_v_k = 0; - __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_v); __pyx_9genexpr15__pyx_v_v = 0; - goto __pyx_L24_exit_scope; - __pyx_L20_error:; - __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_k); __pyx_9genexpr15__pyx_v_k = 0; - __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_v); __pyx_9genexpr15__pyx_v_v = 0; - goto __pyx_L1_error; - __pyx_L24_exit_scope:; - } /* exit inner scope */ - __pyx_t_3 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, __pyx_t_4}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 471, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":474 - * [(k, v) for k, v in other.items() if k not in self._members] - * ) - * return result # <<<<<<<<<<<<<< - * - * def __xor__(self, other: IdentitySet) -> IdentitySet: - */ - __Pyx_XDECREF((PyObject *)__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_result); - __pyx_r = __pyx_v_result; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":460 - * return self - * - * 
@cython.ccall # <<<<<<<<<<<<<< - * def symmetric_difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_13); - __Pyx_XDECREF(__pyx_t_14); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.symmetric_difference", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_result); - __Pyx_XDECREF(__pyx_v_other); - __Pyx_XDECREF(__pyx_9genexpr13__pyx_v_obj); - __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_k); - __Pyx_XDECREF(__pyx_9genexpr14__pyx_v_v); - __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_k); - __Pyx_XDECREF(__pyx_9genexpr15__pyx_v_v); - __Pyx_XGIVEREF((PyObject *)__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference = {"symmetric_difference", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("symmetric_difference (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_54symmetric_difference(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_54symmetric_difference(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("symmetric_difference", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference(__pyx_v_self, __pyx_v_iterable, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 460, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.symmetric_difference", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":476 - * return result - * - * def __xor__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_57__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject 
*__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_57__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__xor__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, 0, "other", 0))) __PYX_ERR(0, 476, __pyx_L1_error) - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_56__xor__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_other)); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = NULL; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_56__xor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__xor__", 1); - - /* "sqlalchemy/util/_collections_cy.py":477 - * - * def __xor__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return self.symmetric_difference(other) - */ - __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":478 - * def __xor__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * return self.symmetric_difference(other) - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":477 - * - * def __xor__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * return self.symmetric_difference(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":479 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * return self.symmetric_difference(other) # <<<<<<<<<<<<<< - * - * # def symmetric_difference_update(self, iterable: Iterable[Any]) -> None: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->symmetric_difference(__pyx_v_self, ((PyObject *)__pyx_v_other), 0)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 479, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":476 - * return result - * - * def __xor__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - 
__Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__xor__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":482 - * - * # def symmetric_difference_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def symmetric_difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.symmetric_difference(iterable) - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable, int __pyx_skip_dispatch) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("symmetric_difference_update", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_symmetric_difference_update); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 482, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v_iterable}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 482, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = 
__Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":484 - * @cython.ccall - * def symmetric_difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.symmetric_difference(iterable) # <<<<<<<<<<<<<< - * self._members = other._members - * - */ - __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->symmetric_difference(__pyx_v_self, __pyx_v_iterable, 0)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 484, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v_other = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":485 - * def symmetric_difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.symmetric_difference(iterable) - * self._members = other._members # <<<<<<<<<<<<<< - * - * def __ixor__(self, other: IdentitySet) -> IdentitySet: - */ - __pyx_t_1 = __pyx_v_other->_members; - __Pyx_INCREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_self->_members); - __Pyx_DECREF(__pyx_v_self->_members); - __pyx_v_self->_members = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":482 - * - * # def symmetric_difference_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def symmetric_difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.symmetric_difference(iterable) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.symmetric_difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_other); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update = {"symmetric_difference_update", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update, METH_O, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update(PyObject *__pyx_v_self, PyObject *__pyx_v_iterable) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("symmetric_difference_update (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_58symmetric_difference_update(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((PyObject *)__pyx_v_iterable)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject 
*__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_58symmetric_difference_update(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v_iterable) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("symmetric_difference_update", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference_update(__pyx_v_self, __pyx_v_iterable, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 482, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.symmetric_difference_update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":487 - * self._members = other._members - * - * def __ixor__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_61__ixor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_61__ixor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__ixor__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, 0, "other", 0))) __PYX_ERR(0, 487, __pyx_L1_error) - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_60__ixor__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_other)); - - /* function exit code */ - goto __pyx_L0; - __pyx_L1_error:; - __pyx_r = NULL; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_60__ixor__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_other) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__ixor__", 1); - - /* "sqlalchemy/util/_collections_cy.py":488 - * - * def __ixor__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * self.symmetric_difference(other) - */ - __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_other), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_collections_cy.py":489 - * def __ixor__(self, other: IdentitySet) -> IdentitySet: - * if not 
isinstance(other, IdentitySet): - * return NotImplemented # <<<<<<<<<<<<<< - * self.symmetric_difference(other) - * return self - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_NotImplemented); - __pyx_r = __pyx_builtin_NotImplemented; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":488 - * - * def __ixor__(self, other: IdentitySet) -> IdentitySet: - * if not isinstance(other, IdentitySet): # <<<<<<<<<<<<<< - * return NotImplemented - * self.symmetric_difference(other) - */ - } - - /* "sqlalchemy/util/_collections_cy.py":490 - * if not isinstance(other, IdentitySet): - * return NotImplemented - * self.symmetric_difference(other) # <<<<<<<<<<<<<< - * return self - * - */ - __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->symmetric_difference(__pyx_v_self, ((PyObject *)__pyx_v_other), 0)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 490, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":491 - * return NotImplemented - * self.symmetric_difference(other) - * return self # <<<<<<<<<<<<<< - * - * @cython.ccall - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":487 - * self._members = other._members - * - * def __ixor__(self, other: IdentitySet) -> IdentitySet: # <<<<<<<<<<<<<< - * if not isinstance(other, IdentitySet): - * return NotImplemented - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__ixor__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":493 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def copy(self) -> IdentitySet: - * cp: IdentitySet = self.__new__(self.__class__) - */ - -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_copy(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, int __pyx_skip_dispatch) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_cp = 0; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("copy", 1); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || __Pyx_PyType_HasFeature(Py_TYPE(((PyObject *)__pyx_v_self)), (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, 
__pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_typedict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_copy); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 493, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!__Pyx_IsSameCFunction(__pyx_t_1, (void*) __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy)) { - __Pyx_XDECREF((PyObject *)__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, NULL}; - __pyx_t_2 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 0+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 493, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 493, __pyx_L1_error) - __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_2); - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_typedict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sqlalchemy/util/_collections_cy.py":495 - * @cython.ccall - * def copy(self) -> IdentitySet: - * cp: IdentitySet = self.__new__(self.__class__) # <<<<<<<<<<<<<< - * cp._members = self._members.copy() - * return cp - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 495, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 495, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_t_3}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 495, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - 
if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet))))) __PYX_ERR(0, 495, __pyx_L1_error) - __pyx_v_cp = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":496 - * def copy(self) -> IdentitySet: - * cp: IdentitySet = self.__new__(self.__class__) - * cp._members = self._members.copy() # <<<<<<<<<<<<<< - * return cp - * - */ - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "copy"); - __PYX_ERR(0, 496, __pyx_L1_error) - } - __pyx_t_1 = PyDict_Copy(__pyx_v_self->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v_cp->_members); - __Pyx_DECREF(__pyx_v_cp->_members); - __pyx_v_cp->_members = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "sqlalchemy/util/_collections_cy.py":497 - * cp: IdentitySet = self.__new__(self.__class__) - * cp._members = self._members.copy() - * return cp # <<<<<<<<<<<<<< - * - * def __copy__(self) -> IdentitySet: - */ - __Pyx_XDECREF((PyObject *)__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_cp); - __pyx_r = __pyx_v_cp; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":493 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def copy(self) -> IdentitySet: - * cp: IdentitySet = self.__new__(self.__class__) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.copy", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_cp); - __Pyx_XGIVEREF((PyObject *)__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy = {"copy", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("copy (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("copy", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && 
unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "copy", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_62copy(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_62copy(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("copy", 1); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = ((PyObject *)__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_copy(__pyx_v_self, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 493, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.copy", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":499 - * return cp - * - * def __copy__(self) -> IdentitySet: # <<<<<<<<<<<<<< - * return self.copy() - * - */ - -/* Python wrapper */ -static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__ = {"__copy__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__copy__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("__copy__", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__copy__", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_64__copy__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_64__copy__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__copy__", 1); - - /* "sqlalchemy/util/_collections_cy.py":500 - * - * def __copy__(self) -> IdentitySet: - * return self.copy() # <<<<<<<<<<<<<< - * - * def __len__(self) -> int: - */ - __Pyx_XDECREF((PyObject *)__pyx_r); - __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self->__pyx_vtab)->copy(__pyx_v_self, 0)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 500, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_t_1); - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":499 - * return cp - * - * def __copy__(self) -> IdentitySet: # <<<<<<<<<<<<<< - * return self.copy() - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__copy__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF((PyObject *)__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":502 - * return self.copy() - * - * def __len__(self) -> int: # <<<<<<<<<<<<<< - * return len(self._members) - * - */ - -/* Python wrapper */ -static Py_ssize_t __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__(PyObject *__pyx_v_self); /*proto*/ -static Py_ssize_t __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - Py_ssize_t __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_66__len__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static Py_ssize_t __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_66__len__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { - Py_ssize_t __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__len__", 1); - - /* "sqlalchemy/util/_collections_cy.py":503 - * - * def __len__(self) -> int: - * return len(self._members) # <<<<<<<<<<<<<< - * - * def __iter__(self) -> Iterator[Any]: - */ - __pyx_t_1 = __pyx_v_self->_members; - __Pyx_INCREF(__pyx_t_1); - if (unlikely(__pyx_t_1 == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(0, 503, __pyx_L1_error) - } - __pyx_t_2 = PyDict_Size(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 503, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":502 - * return self.copy() - * - * def __len__(self) -> int: 
# <<<<<<<<<<<<<< - * return len(self._members) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__len__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":505 - * return len(self._members) - * - * def __iter__(self) -> Iterator[Any]: # <<<<<<<<<<<<<< - * return iter(self._members.values()) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_69__iter__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_69__iter__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_68__iter__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_68__iter__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__iter__", 1); - - /* "sqlalchemy/util/_collections_cy.py":506 - * - * def __iter__(self) -> Iterator[Any]: - * return iter(self._members.values()) # <<<<<<<<<<<<<< - * - * def __hash__(self) -> NoReturn: - */ - __Pyx_XDECREF(__pyx_r); - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "values"); - __PYX_ERR(0, 506, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_PyDict_Values(__pyx_v_self->_members); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 506, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 506, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":505 - * return len(self._members) - * - * def __iter__(self) -> Iterator[Any]: # <<<<<<<<<<<<<< - * return iter(self._members.values()) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":508 - * return iter(self._members.values()) - * - * def __hash__(self) -> NoReturn: # <<<<<<<<<<<<<< - * raise TypeError("set objects are unhashable") - * - */ - -/* Python wrapper */ -static Py_hash_t __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_71__hash__(PyObject *__pyx_v_self); /*proto*/ -static Py_hash_t __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_71__hash__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - Py_hash_t __pyx_r; - 
__Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__hash__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_70__hash__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static Py_hash_t __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_70__hash__(CYTHON_UNUSED struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { - Py_hash_t __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__hash__", 1); - - /* "sqlalchemy/util/_collections_cy.py":509 - * - * def __hash__(self) -> NoReturn: - * raise TypeError("set objects are unhashable") # <<<<<<<<<<<<<< - * - * def __repr__(self) -> str: - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 509, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 509, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":508 - * return iter(self._members.values()) - * - * def __hash__(self) -> NoReturn: # <<<<<<<<<<<<<< - * raise TypeError("set objects are unhashable") - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__hash__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - if (unlikely(__pyx_r == -1) && !PyErr_Occurred()) __pyx_r = -2; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_collections_cy.py":511 - * raise TypeError("set objects are unhashable") - * - * def __repr__(self) -> str: # <<<<<<<<<<<<<< - * return "%s(%r)" % ( - * self.__class__.__name__, - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_72__repr__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_72__repr__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; - Py_UCS4 __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__repr__", 1); - - /* "sqlalchemy/util/_collections_cy.py":512 - * - * def __repr__(self) -> str: - * return "%s(%r)" % ( # <<<<<<<<<<<<<< - * self.__class__.__name__, - * list(self._members.values()), - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PyTuple_New(4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 512, 
__pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = 0; - __pyx_t_3 = 127; - - /* "sqlalchemy/util/_collections_cy.py":513 - * def __repr__(self) -> str: - * return "%s(%r)" % ( - * self.__class__.__name__, # <<<<<<<<<<<<<< - * list(self._members.values()), - * ) - */ - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 513, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_name); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 513, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_t_4 = __Pyx_PyObject_FormatSimpleAndDecref(PyObject_Unicode(__pyx_t_5), __pyx_empty_unicode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 513, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; - __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4); - __pyx_t_4 = 0; - __Pyx_INCREF(__pyx_kp_u__2); - __pyx_t_2 += 1; - __Pyx_GIVEREF(__pyx_kp_u__2); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_kp_u__2); - - /* "sqlalchemy/util/_collections_cy.py":514 - * return "%s(%r)" % ( - * self.__class__.__name__, - * list(self._members.values()), # <<<<<<<<<<<<<< - * ) - */ - if (unlikely(__pyx_v_self->_members == Py_None)) { - PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "values"); - __PYX_ERR(0, 514, __pyx_L1_error) - } - __pyx_t_4 = __Pyx_PyDict_Values(__pyx_v_self->_members); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = __Pyx_PySequence_ListKeepNew(__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_t_4 = __Pyx_PyObject_FormatSimpleAndDecref(PyObject_Repr(__pyx_t_5), __pyx_empty_unicode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; - __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4); - __pyx_t_4 = 0; - __Pyx_INCREF(__pyx_kp_u__3); - __pyx_t_2 += 1; - __Pyx_GIVEREF(__pyx_kp_u__3); - PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_kp_u__3); - - /* "sqlalchemy/util/_collections_cy.py":512 - * - * def __repr__(self) -> str: - * return "%s(%r)" % ( # <<<<<<<<<<<<<< - * self.__class__.__name__, - * list(self._members.values()), - */ - __pyx_t_4 = __Pyx_PyUnicode_Join(__pyx_t_1, 4, __pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 512, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_collections_cy.py":511 - * raise TypeError("set objects are unhashable") - * - * def __repr__(self) -> str: # <<<<<<<<<<<<<< - * return "%s(%r)" % ( - * self.__class__.__name__, - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__ = {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_74__reduce_cython__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject 
*__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_74__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self) { - PyObject *__pyx_v_state = 0; - PyObject *__pyx_v__dict = 0; - int __pyx_v_use_setstate; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 1); - - /* "(tree fragment)":5 - * cdef object _dict - * cdef bint use_setstate - * state = (self._members,) # <<<<<<<<<<<<<< - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - */ - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v_self->_members); - __Pyx_GIVEREF(__pyx_v_self->_members); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->_members)) __PYX_ERR(1, 5, __pyx_L1_error); - __pyx_v_state = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "(tree fragment)":6 - * cdef bint use_setstate - * state = (self._members,) - * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< - * if _dict is not None: - * state += (_dict,) - */ - __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v__dict = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":7 - * state = (self._members,) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - __pyx_t_2 = (__pyx_v__dict != Py_None); - if (__pyx_t_2) { - - /* "(tree fragment)":8 - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - * state += (_dict,) # <<<<<<<<<<<<<< - * use_setstate = True - * else: - */ - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v__dict); - __Pyx_GIVEREF(__pyx_v__dict); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict)) __PYX_ERR(1, 8, __pyx_L1_error); - __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3)); - __pyx_t_3 = 0; - - /* "(tree fragment)":9 - * if _dict is not None: - * state += (_dict,) - * use_setstate = True # <<<<<<<<<<<<<< - * else: - * use_setstate = self._members is not None - */ - __pyx_v_use_setstate = 1; - - /* "(tree fragment)":7 - * state = (self._members,) - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - goto __pyx_L3; - } - - /* "(tree fragment)":11 - * use_setstate = True - * else: - * use_setstate = self._members is not None # <<<<<<<<<<<<<< - * if use_setstate: - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, None), state - */ - /*else*/ { - __pyx_t_2 = (__pyx_v_self->_members != ((PyObject*)Py_None)); - __pyx_v_use_setstate = __pyx_t_2; - } - __pyx_L3:; - - /* "(tree fragment)":12 - * else: - * use_setstate = self._members is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, None), state - * else: - */ - if (__pyx_v_use_setstate) { - - /* "(tree fragment)":13 - * use_setstate = 
self._members is not None - * if use_setstate: - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, None), state # <<<<<<<<<<<<<< - * else: - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_IdentitySet); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(__pyx_int_183888701); - __Pyx_GIVEREF(__pyx_int_183888701); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_183888701)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None)) __PYX_ERR(1, 13, __pyx_L1_error); - __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_3); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_state)) __PYX_ERR(1, 13, __pyx_L1_error); - __pyx_t_3 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "(tree fragment)":12 - * else: - * use_setstate = self._members is not None - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, None), state - * else: - */ - } - - /* "(tree fragment)":15 - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, None), state - * else: - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_IdentitySet); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_INCREF(__pyx_int_183888701); - __Pyx_GIVEREF(__pyx_int_183888701); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_183888701)) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state)) __PYX_ERR(1, 15, __pyx_L1_error); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_4); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error); - __pyx_t_4 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_3; - 
__pyx_t_3 = 0; - goto __pyx_L0; - } - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_state); - __Pyx_XDECREF(__pyx_v__dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":16 - * else: - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v___pyx_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 16, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(1, 16, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v___pyx_state = values[0]; - } - goto __pyx_L6_skip; - 
__pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(1, 16, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_76__setstate_cython__(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v_self), __pyx_v___pyx_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_11IdentitySet_76__setstate_cython__(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 1); - - /* "(tree fragment)":17 - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) # <<<<<<<<<<<<<< - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 17, __pyx_L1_error) - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_IdentitySet__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.IdentitySet.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __pyx_unpickle_OrderedSet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_5__pyx_unpickle_OrderedSet(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_5__pyx_unpickle_OrderedSet = {"__pyx_unpickle_OrderedSet", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_5__pyx_unpickle_OrderedSet, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_5__pyx_unpickle_OrderedSet(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v___pyx_type = 0; - long __pyx_v___pyx_checksum; - PyObject *__pyx_v___pyx_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[3] = {0,0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__pyx_unpickle_OrderedSet (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_type)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_checksum)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_OrderedSet", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_OrderedSet", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__pyx_unpickle_OrderedSet") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 3)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - } - __pyx_v___pyx_type = values[0]; - __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - 
__pyx_v___pyx_state = values[2]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_OrderedSet", 1, 3, 3, __pyx_nargs); __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_OrderedSet", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_4__pyx_unpickle_OrderedSet(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_4__pyx_unpickle_OrderedSet(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_v___pyx_PickleError = 0; - PyObject *__pyx_v___pyx_result = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_OrderedSet", 1); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0xe74c1d9, 0x484d571, 0xbc1b299): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum - */ - __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__5, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_2) { - - /* "(tree fragment)":5 - * cdef object __pyx_result - * if __pyx_checksum not in (0xe74c1d9, 0x484d571, 0xbc1b299): - * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum - * __pyx_result = OrderedSet.__new__(__pyx_type) - */ - __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_s_PickleError); - __Pyx_GIVEREF(__pyx_n_s_PickleError); - if (__Pyx_PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError)) __PYX_ERR(1, 5, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_t_1); - __pyx_v___pyx_PickleError = __pyx_t_1; - __Pyx_DECREF(__pyx_t_1); 
__pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":6 - * if __pyx_checksum not in (0xe74c1d9, 0x484d571, 0xbc1b299): - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum # <<<<<<<<<<<<<< - * __pyx_result = OrderedSet.__new__(__pyx_type) - * if __pyx_state is not None: - */ - __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_v___pyx_PickleError, __pyx_t_1, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(1, 6, __pyx_L1_error) - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0xe74c1d9, 0x484d571, 0xbc1b299): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum - */ - } - - /* "(tree fragment)":7 - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum - * __pyx_result = OrderedSet.__new__(__pyx_type) # <<<<<<<<<<<<<< - * if __pyx_state is not None: - * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet), __pyx_n_s_new); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v___pyx_type}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_v___pyx_result = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum - * __pyx_result = OrderedSet.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - __pyx_t_2 = (__pyx_v___pyx_state != Py_None); - if (__pyx_t_2) { - - /* "(tree fragment)":9 - * __pyx_result = OrderedSet.__new__(__pyx_type) - * if __pyx_state is not None: - * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< - * return __pyx_result - * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 9, 
__pyx_L1_error) - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_OrderedSet__set_state(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum - * __pyx_result = OrderedSet.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - } - - /* "(tree fragment)":10 - * if __pyx_state is not None: - * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) - * return __pyx_result # <<<<<<<<<<<<<< - * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): - * __pyx_result._list = __pyx_state[0] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v___pyx_result); - __pyx_r = __pyx_v___pyx_result; - goto __pyx_L0; - - /* "(tree fragment)":1 - * def __pyx_unpickle_OrderedSet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_OrderedSet", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v___pyx_PickleError); - __Pyx_XDECREF(__pyx_v___pyx_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":11 - * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._list = __pyx_state[0] - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): - */ - -static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_OrderedSet__set_state(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - Py_ssize_t __pyx_t_3; - int __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - unsigned int __pyx_t_8; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_OrderedSet__set_state", 1); - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): - * __pyx_result._list = __pyx_state[0] # <<<<<<<<<<<<<< - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[1]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(PyList_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || 
__Pyx_RaiseUnexpectedTypeError("list", __pyx_t_1))) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->_list); - __Pyx_DECREF(__pyx_v___pyx_result->_list); - __pyx_v___pyx_result->_list = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): - * __pyx_result._list = __pyx_state[0] - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[1]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(1, 13, __pyx_L1_error) - } - __pyx_t_3 = __Pyx_PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_4 = (__pyx_t_3 > 1); - if (__pyx_t_4) { - } else { - __pyx_t_2 = __pyx_t_4; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_2 = __pyx_t_4; - __pyx_L4_bool_binop_done:; - if (__pyx_t_2) { - - /* "(tree fragment)":14 - * __pyx_result._list = __pyx_state[0] - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[1]) # <<<<<<<<<<<<<< - */ - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 14, __pyx_L1_error) - } - __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_7 = NULL; - __pyx_t_8 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - __pyx_t_8 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_5}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): - * __pyx_result._list = __pyx_state[0] - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[1]) - */ - } - - /* "(tree fragment)":11 - * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._list = __pyx_state[0] - * if len(__pyx_state) > 1 and 
hasattr(__pyx_result, '__dict__'): - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_OrderedSet__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __pyx_unpickle_IdentitySet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_7__pyx_unpickle_IdentitySet(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_15_collections_cy_7__pyx_unpickle_IdentitySet = {"__pyx_unpickle_IdentitySet", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_7__pyx_unpickle_IdentitySet, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_15_collections_cy_7__pyx_unpickle_IdentitySet(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v___pyx_type = 0; - long __pyx_v___pyx_checksum; - PyObject *__pyx_v___pyx_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[3] = {0,0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__pyx_unpickle_IdentitySet (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_type)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_checksum)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_IdentitySet", 1, 3, 
3, 1); __PYX_ERR(1, 1, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_IdentitySet", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__pyx_unpickle_IdentitySet") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 3)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - } - __pyx_v___pyx_type = values[0]; - __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_v___pyx_state = values[2]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_IdentitySet", 1, 3, 3, __pyx_nargs); __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_IdentitySet", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_15_collections_cy_6__pyx_unpickle_IdentitySet(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_15_collections_cy_6__pyx_unpickle_IdentitySet(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_v___pyx_PickleError = 0; - PyObject *__pyx_v___pyx_result = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_IdentitySet", 1); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0xaf5eb3d, 0x88a83ae, 0x3ac67e8): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum - */ - __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__7, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) - 
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_2) { - - /* "(tree fragment)":5 - * cdef object __pyx_result - * if __pyx_checksum not in (0xaf5eb3d, 0x88a83ae, 0x3ac67e8): - * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum - * __pyx_result = IdentitySet.__new__(__pyx_type) - */ - __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_s_PickleError); - __Pyx_GIVEREF(__pyx_n_s_PickleError); - if (__Pyx_PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError)) __PYX_ERR(1, 5, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_t_1); - __pyx_v___pyx_PickleError = __pyx_t_1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":6 - * if __pyx_checksum not in (0xaf5eb3d, 0x88a83ae, 0x3ac67e8): - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum # <<<<<<<<<<<<<< - * __pyx_result = IdentitySet.__new__(__pyx_type) - * if __pyx_state is not None: - */ - __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_v___pyx_PickleError, __pyx_t_1, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(1, 6, __pyx_L1_error) - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0xaf5eb3d, 0x88a83ae, 0x3ac67e8): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum - */ - } - - /* "(tree fragment)":7 - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum - * __pyx_result = IdentitySet.__new__(__pyx_type) # <<<<<<<<<<<<<< - * if __pyx_state is not None: - * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet), __pyx_n_s_new); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v___pyx_type}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, 
__pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_v___pyx_result = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum - * __pyx_result = IdentitySet.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - __pyx_t_2 = (__pyx_v___pyx_state != Py_None); - if (__pyx_t_2) { - - /* "(tree fragment)":9 - * __pyx_result = IdentitySet.__new__(__pyx_type) - * if __pyx_state is not None: - * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< - * return __pyx_result - * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 9, __pyx_L1_error) - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_IdentitySet__set_state(((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xaf5eb3d, 0x88a83ae, 0x3ac67e8) = (_members))" % __pyx_checksum - * __pyx_result = IdentitySet.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - } - - /* "(tree fragment)":10 - * if __pyx_state is not None: - * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) - * return __pyx_result # <<<<<<<<<<<<<< - * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): - * __pyx_result._members = __pyx_state[0] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v___pyx_result); - __pyx_r = __pyx_v___pyx_result; - goto __pyx_L0; - - /* "(tree fragment)":1 - * def __pyx_unpickle_IdentitySet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_IdentitySet", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v___pyx_PickleError); - __Pyx_XDECREF(__pyx_v___pyx_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":11 - * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._members = __pyx_state[0] - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): - */ - -static PyObject *__pyx_f_10sqlalchemy_4util_15_collections_cy___pyx_unpickle_IdentitySet__set_state(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *__pyx_v___pyx_result, PyObject 
*__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - Py_ssize_t __pyx_t_3; - int __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - unsigned int __pyx_t_8; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_IdentitySet__set_state", 1); - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): - * __pyx_result._members = __pyx_state[0] # <<<<<<<<<<<<<< - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[1]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!(likely(PyDict_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None) || __Pyx_RaiseUnexpectedTypeError("dict", __pyx_t_1))) __PYX_ERR(1, 12, __pyx_L1_error) - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_GOTREF(__pyx_v___pyx_result->_members); - __Pyx_DECREF(__pyx_v___pyx_result->_members); - __pyx_v___pyx_result->_members = ((PyObject*)__pyx_t_1); - __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): - * __pyx_result._members = __pyx_state[0] - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[1]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(1, 13, __pyx_L1_error) - } - __pyx_t_3 = __Pyx_PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_4 = (__pyx_t_3 > 1); - if (__pyx_t_4) { - } else { - __pyx_t_2 = __pyx_t_4; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error) - __pyx_t_2 = __pyx_t_4; - __pyx_L4_bool_binop_done:; - if (__pyx_t_2) { - - /* "(tree fragment)":14 - * __pyx_result._members = __pyx_state[0] - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[1]) # <<<<<<<<<<<<<< - */ - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); - __PYX_ERR(1, 14, __pyx_L1_error) - } - __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_7 = NULL; - __pyx_t_8 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); - if 
(likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - __pyx_t_8 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_5}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): - * __pyx_result._members = __pyx_state[0] - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[1]) - */ - } - - /* "(tree fragment)":11 - * __pyx_unpickle_IdentitySet__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_IdentitySet__set_state(IdentitySet __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._members = __pyx_state[0] - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("sqlalchemy.util._collections_cy.__pyx_unpickle_IdentitySet__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} -static struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_OrderedSet __pyx_vtable_10sqlalchemy_4util_15_collections_cy_OrderedSet; - -static PyObject *__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyTypeObject *t, PyObject *a, PyObject *k) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *p; - PyObject *o = __Pyx_PyType_GetSlot((&PySet_Type), tp_new, newfunc)(t, a, k); - if (unlikely(!o)) return 0; - p = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)o); - p->__pyx_vtab = __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_OrderedSet; - p->_list = ((PyObject*)Py_None); Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *o) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *p = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely((PY_VERSION_HEX >= 0x03080000 || __Pyx_PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE)) && __Pyx_PyObject_GetSlot(o, tp_finalize, destructor)) && !__Pyx_PyObject_GC_IsFinalized(o)) { - if (__Pyx_PyObject_GetSlot(o, tp_dealloc, destructor) == __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_OrderedSet) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - } - #endif - PyObject_GC_UnTrack(o); - __Pyx_TRASHCAN_BEGIN(o, __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_OrderedSet) - Py_CLEAR(p->_list); - PyObject_GC_Track(o); - __Pyx_PyType_GetSlot((&PySet_Type), tp_dealloc, destructor)(o); - __Pyx_TRASHCAN_END -} - -static int __pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *p = (struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)o; - if (!(&PySet_Type)->tp_traverse); else { e = (&PySet_Type)->tp_traverse(o,v,a); if (e) return e; } - if (p->_list) { - e = (*v)(p->_list, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *p = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *)o; - if (!(&PySet_Type)->tp_clear); else (&PySet_Type)->tp_clear(o); - tmp = ((PyObject*)p->_list); - p->_list = ((PyObject*)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} -static PyObject *__pyx_sq_item_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *o, Py_ssize_t i) { - PyObject *r; - PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0; - r = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, x); - Py_DECREF(x); - return r; -} - -static CYTHON_INLINE PyObject *__pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { - binaryfunc slot; -#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY - slot = type->tp_as_number ? type->tp_as_number->nb_add : NULL; -#else - slot = (binaryfunc) PyType_GetSlot(type, Py_nb_add); -#endif - return slot ? slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); -} -static PyObject *__pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *left, PyObject *right ) { - int maybe_self_is_left, maybe_self_is_right = 0; - maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_add == &__pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet) -#endif - || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - if (maybe_self_is_left) { - PyObject *res; - res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_23__add__(left, right); - if (res != Py_NotImplemented) return res; - Py_DECREF(res); - } - maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_add == &__pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet) -#endif - || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - if (maybe_self_is_right) { - return __pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, tp_base, PyTypeObject*), left, right ); - } - return __Pyx_NewRef(Py_NotImplemented); -} - - - -static CYTHON_INLINE PyObject *__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { - binaryfunc slot; -#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY - slot = type->tp_as_number ? type->tp_as_number->nb_subtract : NULL; -#else - slot = (binaryfunc) PyType_GetSlot(type, Py_nb_subtract); -#endif - return slot ? 
slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); -} -static PyObject *__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *left, PyObject *right ) { - int maybe_self_is_left, maybe_self_is_right = 0; - maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_subtract == &__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet) -#endif - || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - if (maybe_self_is_left) { - PyObject *res; - res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_45__sub__(left, right); - if (res != Py_NotImplemented) return res; - Py_DECREF(res); - } - maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_subtract == &__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet) -#endif - || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - if (maybe_self_is_right) { - return __pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, tp_base, PyTypeObject*), left, right ); - } - return __Pyx_NewRef(Py_NotImplemented); -} - - - -static CYTHON_INLINE PyObject *__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { - binaryfunc slot; -#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY - slot = type->tp_as_number ? type->tp_as_number->nb_and : NULL; -#else - slot = (binaryfunc) PyType_GetSlot(type, Py_nb_and); -#endif - return slot ? slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); -} -static PyObject *__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *left, PyObject *right ) { - int maybe_self_is_left, maybe_self_is_right = 0; - maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_and == &__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet) -#endif - || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - if (maybe_self_is_left) { - PyObject *res; - res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_37__and__(left, right); - if (res != Py_NotImplemented) return res; - Py_DECREF(res); - } - maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_and == &__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet) -#endif - || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - if (maybe_self_is_right) { - return __pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, tp_base, PyTypeObject*), left, right ); - } - return __Pyx_NewRef(Py_NotImplemented); -} - - - -static CYTHON_INLINE PyObject *__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { - binaryfunc slot; -#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY - slot = type->tp_as_number ? 
type->tp_as_number->nb_xor : NULL; -#else - slot = (binaryfunc) PyType_GetSlot(type, Py_nb_xor); -#endif - return slot ? slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); -} -static PyObject *__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *left, PyObject *right ) { - int maybe_self_is_left, maybe_self_is_right = 0; - maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_xor == &__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet) -#endif - || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - if (maybe_self_is_left) { - PyObject *res; - res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_41__xor__(left, right); - if (res != Py_NotImplemented) return res; - Py_DECREF(res); - } - maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_xor == &__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet) -#endif - || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - if (maybe_self_is_right) { - return __pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, tp_base, PyTypeObject*), left, right ); - } - return __Pyx_NewRef(Py_NotImplemented); -} - - - -static CYTHON_INLINE PyObject *__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { - binaryfunc slot; -#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY - slot = type->tp_as_number ? type->tp_as_number->nb_or : NULL; -#else - slot = (binaryfunc) PyType_GetSlot(type, Py_nb_or); -#endif - return slot ? 
slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); -} -static PyObject *__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet(PyObject *left, PyObject *right ) { - int maybe_self_is_left, maybe_self_is_right = 0; - maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet) -#endif - || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - if (maybe_self_is_left) { - PyObject *res; - res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_33__or__(left, right); - if (res != Py_NotImplemented) return res; - Py_DECREF(res); - } - maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet) -#endif - || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - if (maybe_self_is_right) { - return __pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, tp_base, PyTypeObject*), left, right ); - } - return __Pyx_NewRef(Py_NotImplemented); -} - - - -static PyObject *__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__(PyObject *self, CYTHON_UNUSED PyObject *arg) { - return __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__(self); -} - -static PyMethodDef __pyx_methods_10sqlalchemy_4util_15_collections_cy_OrderedSet[] = { - {"__class_getitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"copy", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"add", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add, METH_O, 0}, - {"remove", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove, METH_O, 0}, - {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"insert", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"discard", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard, METH_O, 0}, - {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__repr__", (PyCFunction)__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__, METH_NOARGS|METH_COEXIST, 0}, - {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update, METH_VARARGS|METH_KEYWORDS, 0}, - {"union", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union, METH_VARARGS|METH_KEYWORDS, 0}, - {"intersection", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection, METH_VARARGS|METH_KEYWORDS, 
0}, - {"difference", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference, METH_VARARGS|METH_KEYWORDS, 0}, - {"intersection_update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update, METH_VARARGS|METH_KEYWORDS, 0}, - {"difference_update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update, METH_VARARGS|METH_KEYWORDS, 0}, - {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {0, 0, 0, 0} -}; -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet_slots[] = { - {Py_tp_dealloc, (void *)__pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_OrderedSet}, - {Py_tp_repr, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__}, - {Py_nb_add, (void *)__pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet}, - {Py_nb_subtract, (void *)__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet}, - {Py_nb_and, (void *)__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet}, - {Py_nb_xor, (void *)__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet}, - {Py_nb_or, (void *)__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet}, - {Py_nb_inplace_subtract, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_57__isub__}, - {Py_nb_inplace_and, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_49__iand__}, - {Py_nb_inplace_xor, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_53__ixor__}, - {Py_nb_inplace_or, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_29__ior__}, - {Py_sq_item, (void *)__pyx_sq_item_10sqlalchemy_4util_15_collections_cy_OrderedSet}, - {Py_mp_subscript, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_19__getitem__}, - {Py_tp_doc, (void *)PyDoc_STR("A set implementation that maintains insertion order.")}, - {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_OrderedSet}, - {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_OrderedSet}, - {Py_tp_iter, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_21__iter__}, - {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_4util_15_collections_cy_OrderedSet}, - {Py_tp_init, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_3__init__}, - {Py_tp_new, (void *)__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_OrderedSet}, - {0, 0}, -}; -static PyType_Spec __pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet_spec = { - "sqlalchemy.util._collections_cy.OrderedSet", - sizeof(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet), - 0, - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, - __pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet_slots, -}; -#else - -static PyNumberMethods __pyx_tp_as_number_OrderedSet = { - __pyx_nb_add_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*nb_add*/ - 
__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*nb_subtract*/ - 0, /*nb_multiply*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_divide*/ - #endif - 0, /*nb_remainder*/ - 0, /*nb_divmod*/ - 0, /*nb_power*/ - 0, /*nb_negative*/ - 0, /*nb_positive*/ - 0, /*nb_absolute*/ - 0, /*nb_bool*/ - 0, /*nb_invert*/ - 0, /*nb_lshift*/ - 0, /*nb_rshift*/ - __pyx_nb_and_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*nb_and*/ - __pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*nb_xor*/ - __pyx_nb_or_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*nb_or*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_coerce*/ - #endif - 0, /*nb_int*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_long*/ - #else - 0, /*reserved*/ - #endif - 0, /*nb_float*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_oct*/ - #endif - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_hex*/ - #endif - 0, /*nb_inplace_add*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_57__isub__, /*nb_inplace_subtract*/ - 0, /*nb_inplace_multiply*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_inplace_divide*/ - #endif - 0, /*nb_inplace_remainder*/ - 0, /*nb_inplace_power*/ - 0, /*nb_inplace_lshift*/ - 0, /*nb_inplace_rshift*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_49__iand__, /*nb_inplace_and*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_53__ixor__, /*nb_inplace_xor*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_29__ior__, /*nb_inplace_or*/ - 0, /*nb_floor_divide*/ - 0, /*nb_true_divide*/ - 0, /*nb_inplace_floor_divide*/ - 0, /*nb_inplace_true_divide*/ - 0, /*nb_index*/ - #if PY_VERSION_HEX >= 0x03050000 - 0, /*nb_matrix_multiply*/ - #endif - #if PY_VERSION_HEX >= 0x03050000 - 0, /*nb_inplace_matrix_multiply*/ - #endif -}; - -static PySequenceMethods __pyx_tp_as_sequence_OrderedSet = { - 0, /*sq_length*/ - 0, /*sq_concat*/ - 0, /*sq_repeat*/ - __pyx_sq_item_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*sq_item*/ - 0, /*sq_slice*/ - 0, /*sq_ass_item*/ - 0, /*sq_ass_slice*/ - 0, /*sq_contains*/ - 0, /*sq_inplace_concat*/ - 0, /*sq_inplace_repeat*/ -}; - -static PyMappingMethods __pyx_tp_as_mapping_OrderedSet = { - 0, /*mp_length*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_19__getitem__, /*mp_subscript*/ - 0, /*mp_ass_subscript*/ -}; - -static PyTypeObject __pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet = { - PyVarObject_HEAD_INIT(0, 0) - "sqlalchemy.util._collections_cy.""OrderedSet", /*tp_name*/ - sizeof(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_25__repr__, /*tp_repr*/ - &__pyx_tp_as_number_OrderedSet, /*tp_as_number*/ - &__pyx_tp_as_sequence_OrderedSet, /*tp_as_sequence*/ - &__pyx_tp_as_mapping_OrderedSet, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, 
/*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ - PyDoc_STR("A set implementation that maintains insertion order."), /*tp_doc*/ - __pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*tp_traverse*/ - __pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_21__iter__, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - #if !CYTHON_USE_TYPE_SPECS - 0, /*tp_dictoffset*/ - #endif - __pyx_pw_10sqlalchemy_4util_15_collections_cy_10OrderedSet_3__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_10sqlalchemy_4util_15_collections_cy_OrderedSet, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - #if CYTHON_USE_TP_FINALIZE - 0, /*tp_finalize*/ - #else - NULL, /*tp_finalize*/ - #endif - #endif - #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, /*tp_vectorcall*/ - #endif - #if __PYX_NEED_TP_PRINT_SLOT == 1 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030C0000 - 0, /*tp_watched*/ - #endif - #if PY_VERSION_HEX >= 0x030d00A4 - 0, /*tp_versions_used*/ - #endif - #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, /*tp_pypy_flags*/ - #endif -}; -#endif -static struct __pyx_vtabstruct_10sqlalchemy_4util_15_collections_cy_IdentitySet __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet; - -static PyObject *__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *p; - PyObject *o; - #if CYTHON_COMPILING_IN_LIMITED_API - allocfunc alloc_func = (allocfunc)PyType_GetSlot(t, Py_tp_alloc); - o = alloc_func(t, 0); - #else - if (likely(!__Pyx_PyType_HasFeature(t, Py_TPFLAGS_IS_ABSTRACT))) { - o = (*t->tp_alloc)(t, 0); - } else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - #endif - p = ((struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)o); - p->__pyx_vtab = __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_IdentitySet; - p->_members = ((PyObject*)Py_None); Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *o) { - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *p = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely((PY_VERSION_HEX >= 0x03080000 || __Pyx_PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE)) && __Pyx_PyObject_GetSlot(o, tp_finalize, destructor)) && !__Pyx_PyObject_GC_IsFinalized(o)) { - if (__Pyx_PyObject_GetSlot(o, tp_dealloc, destructor) == __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_IdentitySet) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - } - #endif - PyObject_GC_UnTrack(o); - Py_CLEAR(p->_members); - #if 
CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY - (*Py_TYPE(o)->tp_free)(o); - #else - { - freefunc tp_free = (freefunc)PyType_GetSlot(Py_TYPE(o), Py_tp_free); - if (tp_free) tp_free(o); - } - #endif -} - -static int __pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *p = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)o; - if (p->_members) { - e = (*v)(p->_members, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *o) { - PyObject* tmp; - struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *p = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *)o; - tmp = ((PyObject*)p->_members); - p->_members = ((PyObject*)Py_None); Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyObject *__pyx_tp_richcompare_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *o1, PyObject *o2, int op) { - switch (op) { - case Py_EQ: { - return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_15__eq__(o1, o2); - } - case Py_NE: { - return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_17__ne__(o1, o2); - } - case Py_LT: { - return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_23__lt__(o1, o2); - } - case Py_GT: { - return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_29__gt__(o1, o2); - } - case Py_LE: { - return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_21__le__(o1, o2); - } - case Py_GE: { - return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_27__ge__(o1, o2); - } - default: { - return __Pyx_NewRef(Py_NotImplemented); - } - } -} - -static CYTHON_INLINE PyObject *__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { - binaryfunc slot; -#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY - slot = type->tp_as_number ? type->tp_as_number->nb_subtract : NULL; -#else - slot = (binaryfunc) PyType_GetSlot(type, Py_nb_subtract); -#endif - return slot ? 
slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); -} -static PyObject *__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *left, PyObject *right ) { - int maybe_self_is_left, maybe_self_is_right = 0; - maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_subtract == &__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet) -#endif - || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (maybe_self_is_left) { - PyObject *res; - res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_41__sub__(left, right); - if (res != Py_NotImplemented) return res; - Py_DECREF(res); - } - maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_subtract == &__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet) -#endif - || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (maybe_self_is_right) { - return __pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, tp_base, PyTypeObject*), left, right ); - } - return __Pyx_NewRef(Py_NotImplemented); -} - - - -static CYTHON_INLINE PyObject *__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { - binaryfunc slot; -#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY - slot = type->tp_as_number ? type->tp_as_number->nb_and : NULL; -#else - slot = (binaryfunc) PyType_GetSlot(type, Py_nb_and); -#endif - return slot ? slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); -} -static PyObject *__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *left, PyObject *right ) { - int maybe_self_is_left, maybe_self_is_right = 0; - maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_and == &__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet) -#endif - || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (maybe_self_is_left) { - PyObject *res; - res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_49__and__(left, right); - if (res != Py_NotImplemented) return res; - Py_DECREF(res); - } - maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_and == &__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet) -#endif - || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (maybe_self_is_right) { - return __pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, tp_base, PyTypeObject*), left, right ); - } - return __Pyx_NewRef(Py_NotImplemented); -} - - - -static CYTHON_INLINE PyObject *__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { - binaryfunc slot; -#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY - slot = type->tp_as_number ? 
type->tp_as_number->nb_xor : NULL; -#else - slot = (binaryfunc) PyType_GetSlot(type, Py_nb_xor); -#endif - return slot ? slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); -} -static PyObject *__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *left, PyObject *right ) { - int maybe_self_is_left, maybe_self_is_right = 0; - maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_xor == &__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet) -#endif - || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (maybe_self_is_left) { - PyObject *res; - res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_57__xor__(left, right); - if (res != Py_NotImplemented) return res; - Py_DECREF(res); - } - maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_xor == &__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet) -#endif - || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (maybe_self_is_right) { - return __pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, tp_base, PyTypeObject*), left, right ); - } - return __Pyx_NewRef(Py_NotImplemented); -} - - - -static CYTHON_INLINE PyObject *__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { - binaryfunc slot; -#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY - slot = type->tp_as_number ? type->tp_as_number->nb_or : NULL; -#else - slot = (binaryfunc) PyType_GetSlot(type, Py_nb_or); -#endif - return slot ? 
slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); -} -static PyObject *__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet(PyObject *left, PyObject *right ) { - int maybe_self_is_left, maybe_self_is_right = 0; - maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet) -#endif - || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (maybe_self_is_left) { - PyObject *res; - res = __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_33__or__(left, right); - if (res != Py_NotImplemented) return res; - Py_DECREF(res); - } - maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet) -#endif - || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - if (maybe_self_is_right) { - return __pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet_maybe_call_slot(__Pyx_PyType_GetSlot(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, tp_base, PyTypeObject*), left, right ); - } - return __Pyx_NewRef(Py_NotImplemented); -} - - - -static PyObject *__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__(PyObject *self, CYTHON_UNUSED PyObject *arg) { - return __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__(self); -} - -static PyMethodDef __pyx_methods_10sqlalchemy_4util_15_collections_cy_IdentitySet[] = { - {"add", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add, METH_O, 0}, - {"discard", (PyCFunction)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard, METH_O, 0}, - {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__copy__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__repr__", (PyCFunction)__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__, METH_NOARGS|METH_COEXIST, 0}, - {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {0, 0, 0, 0} -}; -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet_slots[] = { - {Py_tp_dealloc, (void *)__pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_IdentitySet}, - {Py_tp_repr, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__}, - {Py_nb_subtract, (void *)__pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet}, - {Py_nb_and, (void *)__pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet}, - {Py_nb_xor, (void 
*)__pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet}, - {Py_nb_or, (void *)__pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet}, - {Py_nb_inplace_subtract, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_45__isub__}, - {Py_nb_inplace_and, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_53__iand__}, - {Py_nb_inplace_xor, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_61__ixor__}, - {Py_nb_inplace_or, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_37__ior__}, - {Py_sq_length, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__}, - {Py_sq_contains, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_5__contains__}, - {Py_mp_length, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__}, - {Py_tp_hash, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_71__hash__}, - {Py_tp_doc, (void *)PyDoc_STR("A set that considers only object id() for uniqueness.\n\n This strategy has edge cases for builtin types- it's possible to have\n two 'foo' strings in one of these sets, for example. Use sparingly.\n\n ")}, - {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_IdentitySet}, - {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_IdentitySet}, - {Py_tp_richcompare, (void *)__pyx_tp_richcompare_10sqlalchemy_4util_15_collections_cy_IdentitySet}, - {Py_tp_iter, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_69__iter__}, - {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_4util_15_collections_cy_IdentitySet}, - {Py_tp_init, (void *)__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_1__init__}, - {Py_tp_new, (void *)__pyx_tp_new_10sqlalchemy_4util_15_collections_cy_IdentitySet}, - {0, 0}, -}; -static PyType_Spec __pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet_spec = { - "sqlalchemy.util._collections_cy.IdentitySet", - sizeof(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet), - 0, - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, - __pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet_slots, -}; -#else - -static PyNumberMethods __pyx_tp_as_number_IdentitySet = { - 0, /*nb_add*/ - __pyx_nb_subtract_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*nb_subtract*/ - 0, /*nb_multiply*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_divide*/ - #endif - 0, /*nb_remainder*/ - 0, /*nb_divmod*/ - 0, /*nb_power*/ - 0, /*nb_negative*/ - 0, /*nb_positive*/ - 0, /*nb_absolute*/ - 0, /*nb_bool*/ - 0, /*nb_invert*/ - 0, /*nb_lshift*/ - 0, /*nb_rshift*/ - __pyx_nb_and_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*nb_and*/ - __pyx_nb_xor_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*nb_xor*/ - __pyx_nb_or_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*nb_or*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_coerce*/ - #endif - 0, /*nb_int*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_long*/ - #else - 0, /*reserved*/ - #endif - 0, /*nb_float*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_oct*/ - #endif - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_hex*/ - #endif - 0, /*nb_inplace_add*/ - 
__pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_45__isub__, /*nb_inplace_subtract*/ - 0, /*nb_inplace_multiply*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_inplace_divide*/ - #endif - 0, /*nb_inplace_remainder*/ - 0, /*nb_inplace_power*/ - 0, /*nb_inplace_lshift*/ - 0, /*nb_inplace_rshift*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_53__iand__, /*nb_inplace_and*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_61__ixor__, /*nb_inplace_xor*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_37__ior__, /*nb_inplace_or*/ - 0, /*nb_floor_divide*/ - 0, /*nb_true_divide*/ - 0, /*nb_inplace_floor_divide*/ - 0, /*nb_inplace_true_divide*/ - 0, /*nb_index*/ - #if PY_VERSION_HEX >= 0x03050000 - 0, /*nb_matrix_multiply*/ - #endif - #if PY_VERSION_HEX >= 0x03050000 - 0, /*nb_inplace_matrix_multiply*/ - #endif -}; - -static PySequenceMethods __pyx_tp_as_sequence_IdentitySet = { - __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__, /*sq_length*/ - 0, /*sq_concat*/ - 0, /*sq_repeat*/ - 0, /*sq_item*/ - 0, /*sq_slice*/ - 0, /*sq_ass_item*/ - 0, /*sq_ass_slice*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_5__contains__, /*sq_contains*/ - 0, /*sq_inplace_concat*/ - 0, /*sq_inplace_repeat*/ -}; - -static PyMappingMethods __pyx_tp_as_mapping_IdentitySet = { - __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_67__len__, /*mp_length*/ - 0, /*mp_subscript*/ - 0, /*mp_ass_subscript*/ -}; - -static PyTypeObject __pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet = { - PyVarObject_HEAD_INIT(0, 0) - "sqlalchemy.util._collections_cy.""IdentitySet", /*tp_name*/ - sizeof(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_73__repr__, /*tp_repr*/ - &__pyx_tp_as_number_IdentitySet, /*tp_as_number*/ - &__pyx_tp_as_sequence_IdentitySet, /*tp_as_sequence*/ - &__pyx_tp_as_mapping_IdentitySet, /*tp_as_mapping*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_71__hash__, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - PyDoc_STR("A set that considers only object id() for uniqueness.\n\n This strategy has edge cases for builtin types- it's possible to have\n two 'foo' strings in one of these sets, for example. 
Use sparingly.\n\n "), /*tp_doc*/ - __pyx_tp_traverse_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_traverse*/ - __pyx_tp_clear_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_clear*/ - __pyx_tp_richcompare_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_69__iter__, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - #if !CYTHON_USE_TYPE_SPECS - 0, /*tp_dictoffset*/ - #endif - __pyx_pw_10sqlalchemy_4util_15_collections_cy_11IdentitySet_1__init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_10sqlalchemy_4util_15_collections_cy_IdentitySet, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - #if CYTHON_USE_TP_FINALIZE - 0, /*tp_finalize*/ - #else - NULL, /*tp_finalize*/ - #endif - #endif - #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, /*tp_vectorcall*/ - #endif - #if __PYX_NEED_TP_PRINT_SLOT == 1 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030C0000 - 0, /*tp_watched*/ - #endif - #if PY_VERSION_HEX >= 0x030d00A4 - 0, /*tp_versions_used*/ - #endif - #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, /*tp_pypy_flags*/ - #endif -}; -#endif - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif -/* #### Code section: pystring_table ### */ - -static int __Pyx_CreateStringTabAndInitStrings(void) { - __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_n_s_AbstractSet, __pyx_k_AbstractSet, sizeof(__pyx_k_AbstractSet), 0, 0, 1, 1}, - {&__pyx_n_s_Any, __pyx_k_Any, sizeof(__pyx_k_Any), 0, 0, 1, 1}, - {&__pyx_n_s_Dict, __pyx_k_Dict, sizeof(__pyx_k_Dict), 0, 0, 1, 1}, - {&__pyx_n_s_Hashable, __pyx_k_Hashable, sizeof(__pyx_k_Hashable), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet, __pyx_k_IdentitySet, sizeof(__pyx_k_IdentitySet), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet___copy, __pyx_k_IdentitySet___copy, sizeof(__pyx_k_IdentitySet___copy), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet___reduce_cython, __pyx_k_IdentitySet___reduce_cython, sizeof(__pyx_k_IdentitySet___reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet___setstate_cython, __pyx_k_IdentitySet___setstate_cython, sizeof(__pyx_k_IdentitySet___setstate_cython), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_add, __pyx_k_IdentitySet_add, sizeof(__pyx_k_IdentitySet_add), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_clear, __pyx_k_IdentitySet_clear, sizeof(__pyx_k_IdentitySet_clear), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_copy, __pyx_k_IdentitySet_copy, sizeof(__pyx_k_IdentitySet_copy), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_difference, __pyx_k_IdentitySet_difference, sizeof(__pyx_k_IdentitySet_difference), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_difference_update, __pyx_k_IdentitySet_difference_update, sizeof(__pyx_k_IdentitySet_difference_update), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_discard, __pyx_k_IdentitySet_discard, 
sizeof(__pyx_k_IdentitySet_discard), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_intersection, __pyx_k_IdentitySet_intersection, sizeof(__pyx_k_IdentitySet_intersection), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_intersection_update, __pyx_k_IdentitySet_intersection_update, sizeof(__pyx_k_IdentitySet_intersection_update), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_issubset, __pyx_k_IdentitySet_issubset, sizeof(__pyx_k_IdentitySet_issubset), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_issuperset, __pyx_k_IdentitySet_issuperset, sizeof(__pyx_k_IdentitySet_issuperset), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_pop, __pyx_k_IdentitySet_pop, sizeof(__pyx_k_IdentitySet_pop), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_remove, __pyx_k_IdentitySet_remove, sizeof(__pyx_k_IdentitySet_remove), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_symmetric_difference, __pyx_k_IdentitySet_symmetric_difference, sizeof(__pyx_k_IdentitySet_symmetric_difference), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_symmetric_difference_2, __pyx_k_IdentitySet_symmetric_difference_2, sizeof(__pyx_k_IdentitySet_symmetric_difference_2), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_union, __pyx_k_IdentitySet_union, sizeof(__pyx_k_IdentitySet_union), 0, 0, 1, 1}, - {&__pyx_n_s_IdentitySet_update, __pyx_k_IdentitySet_update, sizeof(__pyx_k_IdentitySet_update), 0, 0, 1, 1}, - {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0}, - {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0_2, __pyx_k_Incompatible_checksums_0x_x_vs_0_2, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0_2), 0, 0, 1, 0}, - {&__pyx_n_s_IndexError, __pyx_k_IndexError, sizeof(__pyx_k_IndexError), 0, 0, 1, 1}, - {&__pyx_n_s_Iterable, __pyx_k_Iterable, sizeof(__pyx_k_Iterable), 0, 0, 1, 1}, - {&__pyx_kp_s_Iterable_Any, __pyx_k_Iterable_Any, sizeof(__pyx_k_Iterable_Any), 0, 0, 1, 0}, - {&__pyx_kp_s_Iterable_Hashable, __pyx_k_Iterable_Hashable, sizeof(__pyx_k_Iterable_Hashable), 0, 0, 1, 0}, - {&__pyx_kp_s_Iterable__S, __pyx_k_Iterable__S, sizeof(__pyx_k_Iterable__S), 0, 0, 1, 0}, - {&__pyx_kp_s_Iterable__T, __pyx_k_Iterable__T, sizeof(__pyx_k_Iterable__T), 0, 0, 1, 0}, - {&__pyx_n_s_Iterator, __pyx_k_Iterator, sizeof(__pyx_k_Iterator), 0, 0, 1, 1}, - {&__pyx_n_s_KeyError, __pyx_k_KeyError, sizeof(__pyx_k_KeyError), 0, 0, 1, 1}, - {&__pyx_n_s_List, __pyx_k_List, sizeof(__pyx_k_List), 0, 0, 1, 1}, - {&__pyx_n_s_NoReturn, __pyx_k_NoReturn, sizeof(__pyx_k_NoReturn), 0, 0, 1, 1}, - {&__pyx_n_s_None, __pyx_k_None, sizeof(__pyx_k_None), 0, 0, 1, 1}, - {&__pyx_n_s_NotImplemented, __pyx_k_NotImplemented, sizeof(__pyx_k_NotImplemented), 0, 0, 1, 1}, - {&__pyx_n_s_Optional, __pyx_k_Optional, sizeof(__pyx_k_Optional), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet, __pyx_k_OrderedSet, sizeof(__pyx_k_OrderedSet), 0, 0, 1, 1}, - {&__pyx_kp_s_OrderedSet_Union__T__S, __pyx_k_OrderedSet_Union__T__S, sizeof(__pyx_k_OrderedSet_Union__T__S), 0, 0, 1, 0}, - {&__pyx_kp_s_OrderedSet__T, __pyx_k_OrderedSet__T, sizeof(__pyx_k_OrderedSet__T), 0, 0, 1, 0}, - {&__pyx_n_s_OrderedSet___class_getitem, __pyx_k_OrderedSet___class_getitem, sizeof(__pyx_k_OrderedSet___class_getitem), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet___reduce_cython, __pyx_k_OrderedSet___reduce_cython, sizeof(__pyx_k_OrderedSet___reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet___setstate_cython, __pyx_k_OrderedSet___setstate_cython, sizeof(__pyx_k_OrderedSet___setstate_cython), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_add, __pyx_k_OrderedSet_add, sizeof(__pyx_k_OrderedSet_add), 0, 0, 1, 
1}, - {&__pyx_n_s_OrderedSet_clear, __pyx_k_OrderedSet_clear, sizeof(__pyx_k_OrderedSet_clear), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_copy, __pyx_k_OrderedSet_copy, sizeof(__pyx_k_OrderedSet_copy), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_difference, __pyx_k_OrderedSet_difference, sizeof(__pyx_k_OrderedSet_difference), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_difference_update, __pyx_k_OrderedSet_difference_update, sizeof(__pyx_k_OrderedSet_difference_update), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_discard, __pyx_k_OrderedSet_discard, sizeof(__pyx_k_OrderedSet_discard), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_insert, __pyx_k_OrderedSet_insert, sizeof(__pyx_k_OrderedSet_insert), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_intersection, __pyx_k_OrderedSet_intersection, sizeof(__pyx_k_OrderedSet_intersection), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_intersection_update, __pyx_k_OrderedSet_intersection_update, sizeof(__pyx_k_OrderedSet_intersection_update), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_pop, __pyx_k_OrderedSet_pop, sizeof(__pyx_k_OrderedSet_pop), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_remove, __pyx_k_OrderedSet_remove, sizeof(__pyx_k_OrderedSet_remove), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_symmetric_difference, __pyx_k_OrderedSet_symmetric_difference, sizeof(__pyx_k_OrderedSet_symmetric_difference), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_symmetric_difference_2, __pyx_k_OrderedSet_symmetric_difference_2, sizeof(__pyx_k_OrderedSet_symmetric_difference_2), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_union, __pyx_k_OrderedSet_union, sizeof(__pyx_k_OrderedSet_union), 0, 0, 1, 1}, - {&__pyx_n_s_OrderedSet_update, __pyx_k_OrderedSet_update, sizeof(__pyx_k_OrderedSet_update), 0, 0, 1, 1}, - {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_S, __pyx_k_S, sizeof(__pyx_k_S), 0, 0, 1, 1}, - {&__pyx_n_u_S, __pyx_k_S, sizeof(__pyx_k_S), 0, 1, 0, 1}, - {&__pyx_n_s_Self, __pyx_k_Self, sizeof(__pyx_k_Self), 0, 0, 1, 1}, - {&__pyx_n_s_Set, __pyx_k_Set, sizeof(__pyx_k_Set), 0, 0, 1, 1}, - {&__pyx_n_s_T, __pyx_k_T, sizeof(__pyx_k_T), 0, 0, 1, 1}, - {&__pyx_n_u_T, __pyx_k_T, sizeof(__pyx_k_T), 0, 1, 0, 1}, - {&__pyx_n_s_Tuple, __pyx_k_Tuple, sizeof(__pyx_k_Tuple), 0, 0, 1, 1}, - {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, - {&__pyx_n_s_TypeVar, __pyx_k_TypeVar, sizeof(__pyx_k_TypeVar), 0, 0, 1, 1}, - {&__pyx_n_s_Union, __pyx_k_Union, sizeof(__pyx_k_Union), 0, 0, 1, 1}, - {&__pyx_kp_u__2, __pyx_k__2, sizeof(__pyx_k__2), 0, 1, 0, 0}, - {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0}, - {&__pyx_kp_u__6, __pyx_k__6, sizeof(__pyx_k__6), 0, 1, 0, 0}, - {&__pyx_n_s__8, __pyx_k__8, sizeof(__pyx_k__8), 0, 0, 1, 1}, - {&__pyx_n_s_a, __pyx_k_a, sizeof(__pyx_k_a), 0, 0, 1, 1}, - {&__pyx_n_s_add, __pyx_k_add, sizeof(__pyx_k_add), 0, 0, 1, 1}, - {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, - {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, - {&__pyx_n_s_class, __pyx_k_class, sizeof(__pyx_k_class), 0, 0, 1, 1}, - {&__pyx_n_s_class_getitem, __pyx_k_class_getitem, sizeof(__pyx_k_class_getitem), 0, 0, 1, 1}, - {&__pyx_n_s_clear, __pyx_k_clear, sizeof(__pyx_k_clear), 0, 0, 1, 1}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_cls, __pyx_k_cls, sizeof(__pyx_k_cls), 0, 0, 1, 1}, - {&__pyx_n_s_copy, __pyx_k_copy, sizeof(__pyx_k_copy), 0, 0, 1, 1}, - {&__pyx_n_s_copy_2, __pyx_k_copy_2, 
sizeof(__pyx_k_copy_2), 0, 0, 1, 1}, - {&__pyx_kp_s_cython_Py_ssize_t, __pyx_k_cython_Py_ssize_t, sizeof(__pyx_k_cython_Py_ssize_t), 0, 0, 1, 0}, - {&__pyx_n_s_d, __pyx_k_d, sizeof(__pyx_k_d), 0, 0, 1, 1}, - {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, - {&__pyx_n_s_dict_2, __pyx_k_dict_2, sizeof(__pyx_k_dict_2), 0, 0, 1, 1}, - {&__pyx_n_s_difference, __pyx_k_difference, sizeof(__pyx_k_difference), 0, 0, 1, 1}, - {&__pyx_n_s_difference_update, __pyx_k_difference_update, sizeof(__pyx_k_difference_update), 0, 0, 1, 1}, - {&__pyx_kp_u_disable, __pyx_k_disable, sizeof(__pyx_k_disable), 0, 1, 0, 0}, - {&__pyx_n_s_discard, __pyx_k_discard, sizeof(__pyx_k_discard), 0, 0, 1, 1}, - {&__pyx_n_s_element, __pyx_k_element, sizeof(__pyx_k_element), 0, 0, 1, 1}, - {&__pyx_kp_u_enable, __pyx_k_enable, sizeof(__pyx_k_enable), 0, 1, 0, 0}, - {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, - {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_n_s_init, __pyx_k_init, sizeof(__pyx_k_init), 0, 0, 1, 1}, - {&__pyx_n_s_insert, __pyx_k_insert, sizeof(__pyx_k_insert), 0, 0, 1, 1}, - {&__pyx_n_s_intersection, __pyx_k_intersection, sizeof(__pyx_k_intersection), 0, 0, 1, 1}, - {&__pyx_n_s_intersection_update, __pyx_k_intersection_update, sizeof(__pyx_k_intersection_update), 0, 0, 1, 1}, - {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, - {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, - {&__pyx_kp_u_isenabled, __pyx_k_isenabled, sizeof(__pyx_k_isenabled), 0, 1, 0, 0}, - {&__pyx_n_s_issubset, __pyx_k_issubset, sizeof(__pyx_k_issubset), 0, 0, 1, 1}, - {&__pyx_n_s_issuperset, __pyx_k_issuperset, sizeof(__pyx_k_issuperset), 0, 0, 1, 1}, - {&__pyx_n_s_items, __pyx_k_items, sizeof(__pyx_k_items), 0, 0, 1, 1}, - {&__pyx_n_s_iterable, __pyx_k_iterable, sizeof(__pyx_k_iterable), 0, 0, 1, 1}, - {&__pyx_n_s_iterables, __pyx_k_iterables, sizeof(__pyx_k_iterables), 0, 0, 1, 1}, - {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, - {&__pyx_n_s_keys, __pyx_k_keys, sizeof(__pyx_k_keys), 0, 0, 1, 1}, - {&__pyx_n_u_len, __pyx_k_len, sizeof(__pyx_k_len), 0, 1, 0, 1}, - {&__pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_k_lib_sqlalchemy_util__collections, sizeof(__pyx_k_lib_sqlalchemy_util__collections), 0, 0, 1, 0}, - {&__pyx_n_u_list, __pyx_k_list, sizeof(__pyx_k_list), 0, 1, 0, 1}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_u_members, __pyx_k_members, sizeof(__pyx_k_members), 0, 1, 0, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, - {&__pyx_n_s_other, __pyx_k_other, sizeof(__pyx_k_other), 0, 0, 1, 1}, - {&__pyx_n_s_other_set, __pyx_k_other_set, sizeof(__pyx_k_other_set), 0, 0, 1, 1}, - {&__pyx_n_s_pair, __pyx_k_pair, sizeof(__pyx_k_pair), 0, 0, 1, 1}, - {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, - {&__pyx_n_s_pop, __pyx_k_pop, sizeof(__pyx_k_pop), 0, 0, 1, 1}, - {&__pyx_kp_u_pop_from_an_empty_set, __pyx_k_pop_from_an_empty_set, sizeof(__pyx_k_pop_from_an_empty_set), 0, 1, 0, 0}, - {&__pyx_n_s_popitem, __pyx_k_popitem, sizeof(__pyx_k_popitem), 0, 0, 1, 1}, - {&__pyx_n_s_pos, __pyx_k_pos, sizeof(__pyx_k_pos), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, - 
{&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_unpickle_IdentitySet, __pyx_k_pyx_unpickle_IdentitySet, sizeof(__pyx_k_pyx_unpickle_IdentitySet), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_unpickle_OrderedSet, __pyx_k_pyx_unpickle_OrderedSet, sizeof(__pyx_k_pyx_unpickle_OrderedSet), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, - {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, - {&__pyx_n_s_remove, __pyx_k_remove, sizeof(__pyx_k_remove), 0, 0, 1, 1}, - {&__pyx_n_s_repr, __pyx_k_repr, sizeof(__pyx_k_repr), 0, 0, 1, 1}, - {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, - {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, - {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, - {&__pyx_n_s_seq, __pyx_k_seq, sizeof(__pyx_k_seq), 0, 0, 1, 1}, - {&__pyx_kp_u_set_objects_are_unhashable, __pyx_k_set_objects_are_unhashable, sizeof(__pyx_k_set_objects_are_unhashable), 0, 1, 0, 0}, - {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, - {&__pyx_n_s_slots, __pyx_k_slots, sizeof(__pyx_k_slots), 0, 0, 1, 1}, - {&__pyx_n_s_sqlalchemy_util__collections_cy, __pyx_k_sqlalchemy_util__collections_cy, sizeof(__pyx_k_sqlalchemy_util__collections_cy), 0, 0, 1, 1}, - {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, - {&__pyx_n_s_str, __pyx_k_str, sizeof(__pyx_k_str), 0, 0, 1, 1}, - {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, - {&__pyx_n_s_symmetric_difference, __pyx_k_symmetric_difference, sizeof(__pyx_k_symmetric_difference), 0, 0, 1, 1}, - {&__pyx_n_s_symmetric_difference_update, __pyx_k_symmetric_difference_update, sizeof(__pyx_k_symmetric_difference_update), 0, 0, 1, 1}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_kp_s_type_Self, __pyx_k_type_Self, sizeof(__pyx_k_type_Self), 0, 0, 1, 0}, - {&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, - {&__pyx_n_s_union, __pyx_k_union, sizeof(__pyx_k_union), 0, 0, 1, 1}, - {&__pyx_n_s_unique_list, __pyx_k_unique_list, sizeof(__pyx_k_unique_list), 0, 0, 1, 1}, - {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, - {&__pyx_n_s_use_setstate, __pyx_k_use_setstate, sizeof(__pyx_k_use_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1}, - {&__pyx_n_s_values, __pyx_k_values, sizeof(__pyx_k_values), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} - }; - return __Pyx_InitStrings(__pyx_string_tab); -} -/* #### Code section: cached_builtins ### */ -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_IndexError = __Pyx_GetBuiltinName(__pyx_n_s_IndexError); if (!__pyx_builtin_IndexError) __PYX_ERR(0, 126, __pyx_L1_error) - __pyx_builtin_KeyError = __Pyx_GetBuiltinName(__pyx_n_s_KeyError); if (!__pyx_builtin_KeyError) __PYX_ERR(0, 127, 
__pyx_L1_error) - __pyx_builtin_NotImplemented = __Pyx_GetBuiltinName(__pyx_n_s_NotImplemented); if (!__pyx_builtin_NotImplemented) __PYX_ERR(0, 346, __pyx_L1_error) - __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 509, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: cached_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "sqlalchemy/util/_collections_cy.py":127 - * value = self._list.pop() - * except IndexError: - * raise KeyError("pop from an empty set") from None # <<<<<<<<<<<<<< - * set.remove(self, value) - * return value - */ - __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_u_pop_from_an_empty_set); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 127, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple_); - __Pyx_GIVEREF(__pyx_tuple_); - - /* "sqlalchemy/util/_collections_cy.py":509 - * - * def __hash__(self) -> NoReturn: - * raise TypeError("set objects are unhashable") # <<<<<<<<<<<<<< - * - * def __repr__(self) -> str: - */ - __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_u_set_objects_are_unhashable); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 509, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__4); - __Pyx_GIVEREF(__pyx_tuple__4); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0xe74c1d9, 0x484d571, 0xbc1b299): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe74c1d9, 0x484d571, 0xbc1b299) = (_list))" % __pyx_checksum - */ - __pyx_tuple__5 = PyTuple_Pack(3, __pyx_int_242532825, __pyx_int_75814257, __pyx_int_197243545); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__5); - __Pyx_GIVEREF(__pyx_tuple__5); - __pyx_tuple__7 = PyTuple_Pack(3, __pyx_int_183888701, __pyx_int_143295406, __pyx_int_61630440); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__7); - __Pyx_GIVEREF(__pyx_tuple__7); - - /* "sqlalchemy/util/_collections_cy.py":38 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - __pyx_codeobj__9 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_is_compiled, 38, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__9)) __PYX_ERR(0, 38, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":44 - * - * # END GENERATED CYTHON IMPORT - * _T = TypeVar("_T") # <<<<<<<<<<<<<< - * _S = TypeVar("_S") - * - */ - __pyx_tuple__10 = PyTuple_Pack(1, __pyx_n_u_T); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(0, 44, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__10); - __Pyx_GIVEREF(__pyx_tuple__10); - - /* "sqlalchemy/util/_collections_cy.py":45 - * # END GENERATED CYTHON IMPORT - * _T = TypeVar("_T") - * _S = TypeVar("_S") # <<<<<<<<<<<<<< - * - * - */ - __pyx_tuple__11 = PyTuple_Pack(1, __pyx_n_u_S); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__11); - __Pyx_GIVEREF(__pyx_tuple__11); - - /* "sqlalchemy/util/_collections_cy.py":48 - * - * - * @cython.ccall # <<<<<<<<<<<<<< - * def 
unique_list(seq: Iterable[_T]) -> List[_T]: - * # this version seems somewhat faster for smaller sizes, but it's - */ - __pyx_tuple__12 = PyTuple_Pack(1, __pyx_n_s_seq); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 48, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__12); - __Pyx_GIVEREF(__pyx_tuple__12); - __pyx_codeobj__13 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__12, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_unique_list, 48, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__13)) __PYX_ERR(0, 48, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":83 - * """A set implementation that maintains insertion order.""" - * - * __slots__ = ("_list",) # <<<<<<<<<<<<<< - * _list: List[_T] - * - */ - __pyx_tuple__14 = PyTuple_Pack(1, __pyx_n_u_list); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(0, 83, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__14); - __Pyx_GIVEREF(__pyx_tuple__14); - - /* "sqlalchemy/util/_collections_cy.py":86 - * _list: List[_T] - * - * @classmethod # <<<<<<<<<<<<<< - * def __class_getitem__(cls, key: Any) -> type[Self]: - * return cls - */ - __pyx_tuple__15 = PyTuple_Pack(2, __pyx_n_s_cls, __pyx_n_s_key); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__15); - __Pyx_GIVEREF(__pyx_tuple__15); - __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_class_getitem, 86, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(0, 86, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":101 - * set.__init__(self) - * - * def copy(self) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * return self._from_list(list(self._list)) - * - */ - __pyx_tuple__17 = PyTuple_Pack(1, __pyx_n_s_self); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 101, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__17); - __Pyx_GIVEREF(__pyx_tuple__17); - __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_copy, 101, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(0, 101, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":113 - * return new - * - * def add(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element not in self: - * self._list.append(element) - */ - __pyx_tuple__19 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_element); if (unlikely(!__pyx_tuple__19)) __PYX_ERR(0, 113, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__19); - __Pyx_GIVEREF(__pyx_tuple__19); - __pyx_codeobj__20 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_add, 113, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__20)) __PYX_ERR(0, 113, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":118 - * set.add(self, element) - * - * def remove(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * # set.remove will raise if element is not in self - * set.remove(self, element) - */ - __pyx_codeobj__21 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 
2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_remove, 118, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__21)) __PYX_ERR(0, 118, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":123 - * self._list.remove(element) - * - * def pop(self) -> _T: # <<<<<<<<<<<<<< - * try: - * value = self._list.pop() - */ - __pyx_tuple__22 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_value); if (unlikely(!__pyx_tuple__22)) __PYX_ERR(0, 123, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__22); - __Pyx_GIVEREF(__pyx_tuple__22); - __pyx_codeobj__23 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_pop, 123, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__23)) __PYX_ERR(0, 123, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":131 - * return value - * - * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element not in self: - * self._list.insert(pos, element) - */ - __pyx_tuple__24 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_pos, __pyx_n_s_element); if (unlikely(!__pyx_tuple__24)) __PYX_ERR(0, 131, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__24); - __Pyx_GIVEREF(__pyx_tuple__24); - __pyx_codeobj__25 = (PyObject*)__Pyx_PyCode_New(3, 3, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__24, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_insert, 131, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__25)) __PYX_ERR(0, 131, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":136 - * set.add(self, element) - * - * def discard(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element in self: - * set.remove(self, element) - */ - __pyx_codeobj__26 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_discard, 136, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__26)) __PYX_ERR(0, 136, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":141 - * self._list.remove(element) - * - * def clear(self) -> None: # <<<<<<<<<<<<<< - * set.clear(self) # type: ignore[arg-type] - * self._list = [] - */ - __pyx_codeobj__27 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_clear, 141, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__27)) __PYX_ERR(0, 141, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":160 - * - * # @cython.ccall # cdef function cannot have star argument - * def update(self, *iterables: Iterable[_T]) -> None: # <<<<<<<<<<<<<< - * for iterable in iterables: - * for element in iterable: - */ - __pyx_tuple__28 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_iterables, __pyx_n_s_iterable, __pyx_n_s_element); if (unlikely(!__pyx_tuple__28)) __PYX_ERR(0, 160, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__28); - __Pyx_GIVEREF(__pyx_tuple__28); - __pyx_codeobj__29 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, 
CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__28, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_update, 160, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__29)) __PYX_ERR(0, 160, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":177 - * - * # @cython.ccall # cdef function cannot have star argument - * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< - * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) - * result.update(*other) - */ - __pyx_tuple__30 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_other, __pyx_n_s_result); if (unlikely(!__pyx_tuple__30)) __PYX_ERR(0, 177, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__30); - __Pyx_GIVEREF(__pyx_tuple__30); - __pyx_codeobj__31 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__30, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_union, 177, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__31)) __PYX_ERR(0, 177, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":186 - * - * # @cython.ccall # cdef function cannot have star argument - * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * other_set: Set[Any] = set.intersection(self, *other) - * return self._from_list([a for a in self._list if a in other_set]) - */ - __pyx_tuple__32 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_other, __pyx_n_s_other_set, __pyx_n_s_a); if (unlikely(!__pyx_tuple__32)) __PYX_ERR(0, 186, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__32); - __Pyx_GIVEREF(__pyx_tuple__32); - __pyx_codeobj__33 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__32, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_intersection, 186, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__33)) __PYX_ERR(0, 186, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":193 - * return self.intersection(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * @cython.annotation_typing(False) # avoid cython crash from generic return - * def symmetric_difference( - */ - __pyx_tuple__34 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_other); if (unlikely(!__pyx_tuple__34)) __PYX_ERR(0, 193, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__34); - __Pyx_GIVEREF(__pyx_tuple__34); - __pyx_codeobj__35 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__34, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_symmetric_difference, 193, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__35)) __PYX_ERR(0, 193, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":219 - * - * # @cython.ccall # cdef function cannot have star argument - * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * other_set: Set[Any] = set.difference(self, *other) - * return self._from_list([a for a in self._list if a in other_set]) - */ - __pyx_codeobj__36 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__32, __pyx_empty_tuple, __pyx_empty_tuple, 
__pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_difference, 219, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__36)) __PYX_ERR(0, 219, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":227 - * - * # @cython.ccall # cdef function cannot have star argument - * def intersection_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< - * set.intersection_update(self, *other) - * self._list = [a for a in self._list if a in self] - */ - __pyx_tuple__37 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_other, __pyx_n_s_a); if (unlikely(!__pyx_tuple__37)) __PYX_ERR(0, 227, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__37); - __Pyx_GIVEREF(__pyx_tuple__37); - __pyx_codeobj__38 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__37, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_intersection_update, 227, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__38)) __PYX_ERR(0, 227, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":235 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * @cython.annotation_typing(False) # avoid cython crash from generic return - * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: - */ - __pyx_codeobj__39 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__34, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_symmetric_difference_update, 235, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__39)) __PYX_ERR(0, 235, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":250 - * - * # @cython.ccall # cdef function cannot have star argument - * def difference_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< - * set.difference_update(self, *other) - * self._list = [a for a in self._list if a in self] - */ - __pyx_codeobj__40 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__37, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_difference_update, 250, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__40)) __PYX_ERR(0, 250, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - __pyx_tuple__41 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_dict_2, __pyx_n_s_use_setstate); if (unlikely(!__pyx_tuple__41)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__41); - __Pyx_GIVEREF(__pyx_tuple__41); - __pyx_codeobj__42 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__41, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__42)) __PYX_ERR(1, 1, __pyx_L1_error) - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) - */ - __pyx_tuple__43 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pyx_state); if (unlikely(!__pyx_tuple__43)) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__43); - __Pyx_GIVEREF(__pyx_tuple__43); - 
__pyx_codeobj__44 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__43, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__44)) __PYX_ERR(1, 16, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":282 - * """ - * - * __slots__ = ("_members",) # <<<<<<<<<<<<<< - * _members: Dict[int, Any] - * - */ - __pyx_tuple__45 = PyTuple_Pack(1, __pyx_n_u_members); if (unlikely(!__pyx_tuple__45)) __PYX_ERR(0, 282, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__45); - __Pyx_GIVEREF(__pyx_tuple__45); - - /* "sqlalchemy/util/_collections_cy.py":291 - * self.update(iterable) - * - * def add(self, value: Any, /) -> None: # <<<<<<<<<<<<<< - * self._members[_get_id(value)] = value - * - */ - __pyx_codeobj__46 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_add, 291, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__46)) __PYX_ERR(0, 291, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":297 - * return _get_id(value) in self._members - * - * @cython.ccall # <<<<<<<<<<<<<< - * def remove(self, value: Any, /): - * del self._members[_get_id(value)] - */ - __pyx_codeobj__47 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_remove, 297, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__47)) __PYX_ERR(0, 297, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":301 - * del self._members[_get_id(value)] - * - * def discard(self, value, /) -> None: # <<<<<<<<<<<<<< - * try: - * self.remove(value) - */ - __pyx_codeobj__48 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_discard, 301, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__48)) __PYX_ERR(0, 301, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":307 - * pass - * - * def pop(self) -> Any: # <<<<<<<<<<<<<< - * pair: Tuple[Any, Any] - * try: - */ - __pyx_tuple__49 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pair); if (unlikely(!__pyx_tuple__49)) __PYX_ERR(0, 307, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__49); - __Pyx_GIVEREF(__pyx_tuple__49); - __pyx_codeobj__50 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__49, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_pop, 307, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__50)) __PYX_ERR(0, 307, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":315 - * raise KeyError("pop from an empty set") - * - * def clear(self) -> None: # <<<<<<<<<<<<<< - * self._members.clear() - * - */ - __pyx_codeobj__51 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_clear, 315, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__51)) 
__PYX_ERR(0, 315, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":334 - * return True - * - * @cython.ccall # <<<<<<<<<<<<<< - * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - */ - __pyx_tuple__52 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_iterable); if (unlikely(!__pyx_tuple__52)) __PYX_ERR(0, 334, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__52); - __Pyx_GIVEREF(__pyx_tuple__52); - __pyx_codeobj__53 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_issubset, 334, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__53)) __PYX_ERR(0, 334, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":354 - * return len(self) < len(other) and self.issubset(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - */ - __pyx_codeobj__54 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_issuperset, 354, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__54)) __PYX_ERR(0, 354, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":374 - * return len(self) > len(other) and self.issuperset(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def union(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__class__() - */ - __pyx_codeobj__55 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_union, 374, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__55)) __PYX_ERR(0, 374, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":386 - * return self.union(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def update(self, iterable: Iterable[Any], /): - * members: Dict[int, Any] = self._members - */ - __pyx_codeobj__56 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_update, 386, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__56)) __PYX_ERR(0, 386, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":401 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - __pyx_codeobj__57 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_difference, 401, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__57)) __PYX_ERR(0, 401, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":420 - * - * # def difference_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.difference(iterable) - */ - __pyx_codeobj__58 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, 
CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_difference_update, 420, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__58)) __PYX_ERR(0, 420, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":431 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - __pyx_codeobj__59 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_intersection, 431, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__59)) __PYX_ERR(0, 431, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":449 - * - * # def intersection_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def intersection_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.intersection(iterable) - */ - __pyx_codeobj__60 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_intersection_update, 449, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__60)) __PYX_ERR(0, 449, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":460 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def symmetric_difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - __pyx_codeobj__61 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_symmetric_difference, 460, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__61)) __PYX_ERR(0, 460, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":482 - * - * # def symmetric_difference_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def symmetric_difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.symmetric_difference(iterable) - */ - __pyx_codeobj__62 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__52, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_symmetric_difference_update, 482, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__62)) __PYX_ERR(0, 482, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":493 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def copy(self) -> IdentitySet: - * cp: IdentitySet = self.__new__(self.__class__) - */ - __pyx_codeobj__63 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_copy, 493, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__63)) __PYX_ERR(0, 493, __pyx_L1_error) - - /* "sqlalchemy/util/_collections_cy.py":499 - * return cp - * - * def __copy__(self) -> IdentitySet: # <<<<<<<<<<<<<< - * return self.copy() - 
* - */ - __pyx_codeobj__64 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__collections, __pyx_n_s_copy_2, 499, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__64)) __PYX_ERR(0, 499, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - __pyx_codeobj__65 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__41, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__65)) __PYX_ERR(1, 1, __pyx_L1_error) - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) - */ - __pyx_codeobj__66 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__43, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__66)) __PYX_ERR(1, 16, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __pyx_unpickle_OrderedSet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_tuple__67 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__67)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__67); - __Pyx_GIVEREF(__pyx_tuple__67); - __pyx_codeobj__68 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__67, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_OrderedSet, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__68)) __PYX_ERR(1, 1, __pyx_L1_error) - __pyx_codeobj__69 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__67, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_IdentitySet, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__69)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} -/* #### Code section: init_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { - __pyx_umethod_PyDict_Type_keys.type = (PyObject*)&PyDict_Type; - __pyx_umethod_PyDict_Type_keys.method_name = &__pyx_n_s_keys; - __pyx_umethod_PyDict_Type_update.type = (PyObject*)&PyDict_Type; - __pyx_umethod_PyDict_Type_update.method_name = &__pyx_n_s_update; - __pyx_umethod_PyDict_Type_values.type = (PyObject*)&PyDict_Type; - __pyx_umethod_PyDict_Type_values.method_name = &__pyx_n_s_values; - __pyx_umethod_PyList_Type_pop.type = (PyObject*)&PyList_Type; - __pyx_umethod_PyList_Type_pop.method_name = &__pyx_n_s_pop; - if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); - __pyx_int_61630440 = PyInt_FromLong(61630440L); if (unlikely(!__pyx_int_61630440)) __PYX_ERR(0, 1, __pyx_L1_error) - 
__pyx_int_75814257 = PyInt_FromLong(75814257L); if (unlikely(!__pyx_int_75814257)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_143295406 = PyInt_FromLong(143295406L); if (unlikely(!__pyx_int_143295406)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_183888701 = PyInt_FromLong(183888701L); if (unlikely(!__pyx_int_183888701)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_197243545 = PyInt_FromLong(197243545L); if (unlikely(!__pyx_int_197243545)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_242532825 = PyInt_FromLong(242532825L); if (unlikely(!__pyx_int_242532825)) __PYX_ERR(0, 1, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: init_globals ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - return 0; -} -/* #### Code section: init_module ### */ - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - if (__Pyx_ExportFunction("_get_id", (void (*)(void))__pyx_f_10sqlalchemy_4util_15_collections_cy__get_id, "unsigned PY_LONG_LONG (PyObject *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_OrderedSet = &__pyx_vtable_10sqlalchemy_4util_15_collections_cy_OrderedSet; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_OrderedSet._from_list = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *, PyObject *))__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet__from_list; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_OrderedSet.symmetric_difference = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_OrderedSet.symmetric_difference_update = (PyObject *(*)(struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_10OrderedSet_symmetric_difference_update; - #if CYTHON_USE_TYPE_SPECS - __pyx_t_1 = PyTuple_Pack(1, (PyObject *)(&PySet_Type)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 80, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet_spec, __pyx_t_1); - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - if (unlikely(!__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet)) __PYX_ERR(0, 80, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet_spec, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) - #else - __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet = &__pyx_type_10sqlalchemy_4util_15_collections_cy_OrderedSet; - #endif - if (sizeof(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_OrderedSet) != sizeof(PySetObject)) { - if (__Pyx_validate_extern_base((&PySet_Type)) < 0) __PYX_ERR(0, 80, __pyx_L1_error) - } - #if !CYTHON_COMPILING_IN_LIMITED_API - __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet->tp_base = (&PySet_Type); - #endif - #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) - #endif - #if PY_MAJOR_VERSION < 3 - __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet->tp_print = 0; - #endif - #if !CYTHON_COMPILING_IN_LIMITED_API - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet->tp_dictoffset && __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet->tp_getattro == PyObject_GenericGetAttr)) { - __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet->tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - #endif - if (__Pyx_SetVtable(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) - #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_MergeVtables(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) - #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_OrderedSet, (PyObject *) __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) - #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet) < 0) __PYX_ERR(0, 80, __pyx_L1_error) - #endif - __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_IdentitySet = &__pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.remove = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_remove; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.issubset = (int (*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issubset; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.issuperset = (int (*)(struct 
__pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_issuperset; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.__pyx_union = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_union; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.update = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_update; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.difference = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.difference_update = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_difference_update; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.intersection = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.intersection_update = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_intersection_update; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.symmetric_difference = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.symmetric_difference_update = (PyObject *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, PyObject *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_symmetric_difference_update; - __pyx_vtable_10sqlalchemy_4util_15_collections_cy_IdentitySet.copy = (struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *(*)(struct __pyx_obj_10sqlalchemy_4util_15_collections_cy_IdentitySet *, int __pyx_skip_dispatch))__pyx_f_10sqlalchemy_4util_15_collections_cy_11IdentitySet_copy; - #if CYTHON_USE_TYPE_SPECS - __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet_spec, NULL); if (unlikely(!__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet)) __PYX_ERR(0, 274, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet_spec, __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) - #else - __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet = 
&__pyx_type_10sqlalchemy_4util_15_collections_cy_IdentitySet; - #endif - #if !CYTHON_COMPILING_IN_LIMITED_API - #endif - #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) - #endif - #if PY_MAJOR_VERSION < 3 - __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet->tp_print = 0; - #endif - #if !CYTHON_COMPILING_IN_LIMITED_API - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet->tp_dictoffset && __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet->tp_getattro == PyObject_GenericGetAttr)) { - __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet->tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - #endif - if (__Pyx_SetVtable(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_vtabptr_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) - #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_MergeVtables(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) - #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_IdentitySet, (PyObject *) __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) - #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet) < 0) __PYX_ERR(0, 274, __pyx_L1_error) - #endif - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_type_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); - /*--- Type import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); - /*--- Variable import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); - /*--- Function import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ -static int __pyx_pymod_exec__collections_cy(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec__collections_cy}, - {0, NULL} -}; -#endif - -#ifdef __cplusplus -namespace { - struct PyModuleDef __pyx_moduledef = - #else - static struct PyModuleDef __pyx_moduledef = - #endif - { - PyModuleDef_HEAD_INIT, - "_collections_cy", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #elif CYTHON_USE_MODULE_STATE - sizeof(__pyx_mstate), /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - #if CYTHON_USE_MODULE_STATE - __pyx_m_traverse, /* m_traverse */ - __pyx_m_clear, /* m_clear */ - NULL /* m_free */ - #else - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ - #endif - }; - #ifdef __cplusplus -} /* anonymous namespace */ 
-#endif -#endif - -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else -#define __Pyx_PyMODINIT_FUNC void -#endif -#else -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else -#define __Pyx_PyMODINIT_FUNC PyObject * -#endif -#endif - - -#if PY_MAJOR_VERSION < 3 -__Pyx_PyMODINIT_FUNC init_collections_cy(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC init_collections_cy(void) -#else -__Pyx_PyMODINIT_FUNC PyInit__collections_cy(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC PyInit__collections_cy(void) -#if CYTHON_PEP489_MULTI_PHASE_INIT -{ - return PyModuleDef_Init(&__pyx_moduledef); -} -static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { - #if PY_VERSION_HEX >= 0x030700A1 - static PY_INT64_T main_interpreter_id = -1; - PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); - if (main_interpreter_id == -1) { - main_interpreter_id = current_id; - return (unlikely(current_id == -1)) ? -1 : 0; - } else if (unlikely(main_interpreter_id != current_id)) - #else - static PyInterpreterState *main_interpreter = NULL; - PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; - if (!main_interpreter) { - main_interpreter = current_interpreter; - } else if (unlikely(main_interpreter != current_interpreter)) - #endif - { - PyErr_SetString( - PyExc_ImportError, - "Interpreter change detected - this module can only be loaded into one interpreter per process."); - return -1; - } - return 0; -} -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) -#else -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) -#endif -{ - PyObject *value = PyObject_GetAttrString(spec, from_name); - int result = 0; - if (likely(value)) { - if (allow_none || value != Py_None) { -#if CYTHON_COMPILING_IN_LIMITED_API - result = PyModule_AddObject(module, to_name, value); -#else - result = PyDict_SetItemString(moddict, to_name, value); -#endif - } - Py_DECREF(value); - } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - } else { - result = -1; - } - return result; -} -static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { - PyObject *module = NULL, *moddict, *modname; - CYTHON_UNUSED_VAR(def); - if (__Pyx_check_single_interpreter()) - return NULL; - if (__pyx_m) - return __Pyx_NewRef(__pyx_m); - modname = PyObject_GetAttrString(spec, "name"); - if (unlikely(!modname)) goto bad; - module = PyModule_NewObject(modname); - Py_DECREF(modname); - if (unlikely(!module)) goto bad; -#if CYTHON_COMPILING_IN_LIMITED_API - moddict = module; -#else - moddict = PyModule_GetDict(module); - if (unlikely(!moddict)) goto bad; -#endif - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; - return module; -bad: - Py_XDECREF(module); - return NULL; -} - - -static CYTHON_SMALL_CODE int 
__pyx_pymod_exec__collections_cy(PyObject *__pyx_pyinit_module) -#endif -#endif -{ - int stringtab_initialized = 0; - #if CYTHON_USE_MODULE_STATE - int pystate_addmodule_run = 0; - #endif - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - #if CYTHON_PEP489_MULTI_PHASE_INIT - if (__pyx_m) { - if (__pyx_m == __pyx_pyinit_module) return 0; - PyErr_SetString(PyExc_RuntimeError, "Module '_collections_cy' has already been imported. Re-initialisation is not supported."); - return -1; - } - #elif PY_MAJOR_VERSION >= 3 - if (__pyx_m) return __Pyx_NewRef(__pyx_m); - #endif - /*--- Module creation code ---*/ - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_m = __pyx_pyinit_module; - Py_INCREF(__pyx_m); - #else - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4("_collections_cy", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #elif CYTHON_USE_MODULE_STATE - __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) - { - int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); - __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_collections_cy" pseudovariable */ - if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - pystate_addmodule_run = 1; - } - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #endif - CYTHON_UNUSED_VAR(__pyx_t_1); - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_d); - __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if CYTHON_REFNANNY -__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); -if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); -} -#endif - __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__collections_cy(void)", 0); - if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pxy_PyFrame_Initialize_Offsets - __Pxy_PyFrame_Initialize_Offsets(); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Generator_USED - if 
(__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - PyEval_InitThreads(); - #endif - /*--- Initialize various global constants etc. ---*/ - if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - stringtab_initialized = 1; - if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) - if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - if (__pyx_module_is_main_sqlalchemy__util___collections_cy) { - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - } - #if PY_MAJOR_VERSION >= 3 - { - PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) - if (!PyDict_GetItemString(modules, "sqlalchemy.util._collections_cy")) { - if (unlikely((PyDict_SetItemString(modules, "sqlalchemy.util._collections_cy", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - } - } - #endif - /*--- Builtin init code ---*/ - if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Constants init code ---*/ - if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Global type/function init code ---*/ - (void)__Pyx_modinit_global_init_code(); - (void)__Pyx_modinit_variable_export_code(); - if (unlikely((__Pyx_modinit_function_export_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - if (unlikely((__Pyx_modinit_type_init_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - (void)__Pyx_modinit_type_import_code(); - (void)__Pyx_modinit_variable_import_code(); - (void)__Pyx_modinit_function_import_code(); - /*--- Execution code ---*/ - #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - - /* "sqlalchemy/util/_collections_cy.py":11 - * from __future__ import annotations - * - * from typing import AbstractSet # <<<<<<<<<<<<<< - * from typing import Any - * from typing import Dict - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_AbstractSet); - __Pyx_GIVEREF(__pyx_n_s_AbstractSet); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_AbstractSet)) __PYX_ERR(0, 11, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_AbstractSet); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_AbstractSet, __pyx_t_2) < 0) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":12 - * - * from typing import AbstractSet - * from typing import Any # <<<<<<<<<<<<<< - * from typing import Dict - * from typing import Hashable - */ - 
__pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Any); - __Pyx_GIVEREF(__pyx_n_s_Any); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Any)) __PYX_ERR(0, 12, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Any); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Any, __pyx_t_3) < 0) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":13 - * from typing import AbstractSet - * from typing import Any - * from typing import Dict # <<<<<<<<<<<<<< - * from typing import Hashable - * from typing import Iterable - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Dict); - __Pyx_GIVEREF(__pyx_n_s_Dict); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Dict)) __PYX_ERR(0, 13, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Dict); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Dict, __pyx_t_2) < 0) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":14 - * from typing import Any - * from typing import Dict - * from typing import Hashable # <<<<<<<<<<<<<< - * from typing import Iterable - * from typing import Iterator - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Hashable); - __Pyx_GIVEREF(__pyx_n_s_Hashable); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Hashable)) __PYX_ERR(0, 14, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Hashable); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Hashable, __pyx_t_3) < 0) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":15 - * from typing import Dict - * from typing import Hashable - * from typing import Iterable # <<<<<<<<<<<<<< - * from typing import Iterator - * from typing import List - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Iterable); - __Pyx_GIVEREF(__pyx_n_s_Iterable); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Iterable)) __PYX_ERR(0, 15, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, 
__pyx_n_s_Iterable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Iterable, __pyx_t_2) < 0) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":16 - * from typing import Hashable - * from typing import Iterable - * from typing import Iterator # <<<<<<<<<<<<<< - * from typing import List - * from typing import NoReturn - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Iterator); - __Pyx_GIVEREF(__pyx_n_s_Iterator); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Iterator)) __PYX_ERR(0, 16, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Iterator); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Iterator, __pyx_t_3) < 0) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":17 - * from typing import Iterable - * from typing import Iterator - * from typing import List # <<<<<<<<<<<<<< - * from typing import NoReturn - * from typing import Optional - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_List); - __Pyx_GIVEREF(__pyx_n_s_List); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_List)) __PYX_ERR(0, 17, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_List); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_List, __pyx_t_2) < 0) __PYX_ERR(0, 17, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":18 - * from typing import Iterator - * from typing import List - * from typing import NoReturn # <<<<<<<<<<<<<< - * from typing import Optional - * from typing import Set - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_NoReturn); - __Pyx_GIVEREF(__pyx_n_s_NoReturn); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_NoReturn)) __PYX_ERR(0, 18, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_NoReturn); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_NoReturn, __pyx_t_3) < 0) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":19 - * from typing import List - * from typing import NoReturn - * from typing import Optional # <<<<<<<<<<<<<< - * from typing import Set - * from typing 
import Tuple - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 19, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Optional); - __Pyx_GIVEREF(__pyx_n_s_Optional); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Optional)) __PYX_ERR(0, 19, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 19, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Optional); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 19, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Optional, __pyx_t_2) < 0) __PYX_ERR(0, 19, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":20 - * from typing import NoReturn - * from typing import Optional - * from typing import Set # <<<<<<<<<<<<<< - * from typing import Tuple - * from typing import TypeVar - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 20, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Set); - __Pyx_GIVEREF(__pyx_n_s_Set); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Set)) __PYX_ERR(0, 20, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 20, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Set); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 20, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Set, __pyx_t_3) < 0) __PYX_ERR(0, 20, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":21 - * from typing import Optional - * from typing import Set - * from typing import Tuple # <<<<<<<<<<<<<< - * from typing import TypeVar - * from typing import Union - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Tuple); - __Pyx_GIVEREF(__pyx_n_s_Tuple); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Tuple)) __PYX_ERR(0, 21, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Tuple); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Tuple, __pyx_t_2) < 0) __PYX_ERR(0, 21, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":22 - * from typing import Set - * from typing import Tuple - * from typing import TypeVar # <<<<<<<<<<<<<< - * from typing import Union - * - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_TypeVar); - __Pyx_GIVEREF(__pyx_n_s_TypeVar); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_TypeVar)) __PYX_ERR(0, 22, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_TypeVar); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_TypeVar, __pyx_t_3) < 0) __PYX_ERR(0, 22, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":23 - * from typing import Tuple - * from typing import TypeVar - * from typing import Union # <<<<<<<<<<<<<< - * - * from .typing import Self - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Union); - __Pyx_GIVEREF(__pyx_n_s_Union); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Union)) __PYX_ERR(0, 23, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Union); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Union, __pyx_t_2) < 0) __PYX_ERR(0, 23, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":25 - * from typing import Union - * - * from .typing import Self # <<<<<<<<<<<<<< - * - * # START GENERATED CYTHON IMPORT - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 25, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Self); - __Pyx_GIVEREF(__pyx_n_s_Self); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Self)) __PYX_ERR(0, 25, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 25, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 25, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Self, __pyx_t_3) < 0) __PYX_ERR(0, 25, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":29 - * # START GENERATED CYTHON IMPORT - * # This section is automatically generated by the script tools/cython_imports.py - * try: # <<<<<<<<<<<<<< - * # NOTE: the cython compiler needs this "import cython" in the file, it - * # can't be only "from sqlalchemy.util import cython" with the fallback - */ - { - (void)__pyx_t_1; (void)__pyx_t_4; (void)__pyx_t_5; /* mark used */ - /*try:*/ { - - /* "sqlalchemy/util/_collections_cy.py":33 - * # can't be only "from sqlalchemy.util import cython" with the fallback - * # in that module - * import cython # <<<<<<<<<<<<<< - * except ModuleNotFoundError: - * from sqlalchemy.util import cython - */ - } - } - - /* "sqlalchemy/util/_collections_cy.py":38 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 38, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_bool) < 0) __PYX_ERR(0, 38, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_1_is_compiled, 0, __pyx_n_s_is_compiled, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, 
((PyObject *)__pyx_codeobj__9)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 38, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_compiled, __pyx_t_3) < 0) __PYX_ERR(0, 38, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":44 - * - * # END GENERATED CYTHON IMPORT - * _T = TypeVar("_T") # <<<<<<<<<<<<<< - * _S = TypeVar("_S") - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_TypeVar); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 44, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 44, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_T, __pyx_t_2) < 0) __PYX_ERR(0, 44, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":45 - * # END GENERATED CYTHON IMPORT - * _T = TypeVar("_T") - * _S = TypeVar("_S") # <<<<<<<<<<<<<< - * - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_TypeVar); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_S, __pyx_t_3) < 0) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":48 - * - * - * @cython.ccall # <<<<<<<<<<<<<< - * def unique_list(seq: Iterable[_T]) -> List[_T]: - * # this version seems somewhat faster for smaller sizes, but it's - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 48, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_seq, __pyx_kp_s_Iterable__T) < 0) __PYX_ERR(0, 48, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_3unique_list, 0, __pyx_n_s_unique_list, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__13)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 48, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_unique_list, __pyx_t_2) < 0) __PYX_ERR(0, 48, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_collections_cy.py":83 - * """A set implementation that maintains insertion order.""" - * - * __slots__ = ("_list",) # <<<<<<<<<<<<<< - * _list: List[_T] - * - */ - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_slots, __pyx_tuple__14) < 0) __PYX_ERR(0, 83, __pyx_L1_error) - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":86 - * _list: List[_T] - * - * @classmethod # <<<<<<<<<<<<<< - * def __class_getitem__(cls, key: Any) -> type[Self]: - * return cls - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 86, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, 
__pyx_kp_s_type_Self) < 0) __PYX_ERR(0, 86, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_1__class_getitem__, __Pyx_CYFUNCTION_CLASSMETHOD | __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet___class_getitem, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__16)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_class_getitem, __pyx_t_3) < 0) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - __Pyx_GetNameInClass(__pyx_t_3, (PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_class_getitem); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_Method_ClassMethod(__pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_class_getitem, __pyx_t_2) < 0) __PYX_ERR(0, 86, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":101 - * set.__init__(self) - * - * def copy(self) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * return self._from_list(list(self._list)) - * - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 101, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_OrderedSet__T) < 0) __PYX_ERR(0, 101, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_5copy, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_copy, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__18)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 101, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_copy, __pyx_t_3) < 0) __PYX_ERR(0, 101, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":113 - * return new - * - * def add(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element not in self: - * self._list.append(element) - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 113, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_element, __pyx_n_s_T) < 0) __PYX_ERR(0, 113, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 113, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_7add, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_add, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__20)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 113, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_add, __pyx_t_2) < 0) __PYX_ERR(0, 113, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":118 - * set.add(self, element) - * - * def remove(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * # set.remove will raise if element is not in self - * set.remove(self, element) - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 118, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_element, __pyx_n_s_T) < 0) __PYX_ERR(0, 118, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 118, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_9remove, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_remove, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__21)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 118, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_remove, __pyx_t_3) < 0) __PYX_ERR(0, 118, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":123 - * self._list.remove(element) - * - * def pop(self) -> _T: # <<<<<<<<<<<<<< - * try: - * value = self._list.pop() - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 123, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_T) < 0) __PYX_ERR(0, 123, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_11pop, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_pop, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__23)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 123, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_pop, __pyx_t_2) < 0) __PYX_ERR(0, 123, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":131 - * return value - * - * def insert(self, pos: cython.Py_ssize_t, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element not in self: - * self._list.insert(pos, element) - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 131, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_pos, __pyx_kp_s_cython_Py_ssize_t) < 0) __PYX_ERR(0, 131, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_element, __pyx_n_s_T) < 0) __PYX_ERR(0, 131, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 131, __pyx_L1_error) - __pyx_t_3 = 
__Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_13insert, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_insert, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__25)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 131, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_insert, __pyx_t_3) < 0) __PYX_ERR(0, 131, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":136 - * set.add(self, element) - * - * def discard(self, element: _T, /) -> None: # <<<<<<<<<<<<<< - * if element in self: - * set.remove(self, element) - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 136, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_element, __pyx_n_s_T) < 0) __PYX_ERR(0, 136, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 136, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_15discard, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_discard, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__26)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 136, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_discard, __pyx_t_2) < 0) __PYX_ERR(0, 136, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":141 - * self._list.remove(element) - * - * def clear(self) -> None: # <<<<<<<<<<<<<< - * set.clear(self) # type: ignore[arg-type] - * self._list = [] - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 141, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 141, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_17clear, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_clear, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__27)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 141, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_clear, __pyx_t_3) < 0) __PYX_ERR(0, 141, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":157 - * return "%s(%r)" % (self.__class__.__name__, self._list) - * - * __str__ = __repr__ # <<<<<<<<<<<<<< - * - * # @cython.ccall # cdef function cannot have star argument - */ - __Pyx_GetNameInClass(__pyx_t_3, (PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_repr); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 
157, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_str, __pyx_t_3) < 0) __PYX_ERR(0, 157, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":160 - * - * # @cython.ccall # cdef function cannot have star argument - * def update(self, *iterables: Iterable[_T]) -> None: # <<<<<<<<<<<<<< - * for iterable in iterables: - * for element in iterable: - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 160, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterables, __pyx_kp_s_Iterable__T) < 0) __PYX_ERR(0, 160, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 160, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_27update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__29)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 160, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_update, __pyx_t_2) < 0) __PYX_ERR(0, 160, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":177 - * - * # @cython.ccall # cdef function cannot have star argument - * def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]: # <<<<<<<<<<<<<< - * result: OrderedSet[Union[_T, _S]] = self._from_list(list(self._list)) - * result.update(*other) - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 177, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_other, __pyx_kp_s_Iterable__S) < 0) __PYX_ERR(0, 177, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_OrderedSet_Union__T__S) < 0) __PYX_ERR(0, 177, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_31union, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_union, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__31)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 177, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_union, __pyx_t_3) < 0) __PYX_ERR(0, 177, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":186 - * - * # @cython.ccall # cdef function cannot have star argument - * def intersection(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * other_set: Set[Any] = set.intersection(self, *other) - * return self._from_list([a for a in self._list if a in other_set]) - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 186, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - 
if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_other, __pyx_kp_s_Iterable_Hashable) < 0) __PYX_ERR(0, 186, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_OrderedSet__T) < 0) __PYX_ERR(0, 186, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_35intersection, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_intersection, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__33)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 186, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_intersection, __pyx_t_2) < 0) __PYX_ERR(0, 186, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":193 - * return self.intersection(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * @cython.annotation_typing(False) # avoid cython crash from generic return - * def symmetric_difference( - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 193, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_other, __pyx_kp_s_Iterable__S) < 0) __PYX_ERR(0, 193, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_39symmetric_difference, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_symmetric_difference, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__35)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 193, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_symmetric_difference, __pyx_t_3) < 0) __PYX_ERR(0, 193, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":219 - * - * # @cython.ccall # cdef function cannot have star argument - * def difference(self, *other: Iterable[Hashable]) -> OrderedSet[_T]: # <<<<<<<<<<<<<< - * other_set: Set[Any] = set.difference(self, *other) - * return self._from_list([a for a in self._list if a in other_set]) - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 219, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_other, __pyx_kp_s_Iterable_Hashable) < 0) __PYX_ERR(0, 219, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_kp_s_OrderedSet__T) < 0) __PYX_ERR(0, 219, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_43difference, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_difference, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__36)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 219, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_difference, __pyx_t_2) < 0) __PYX_ERR(0, 219, 
__pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":227 - * - * # @cython.ccall # cdef function cannot have star argument - * def intersection_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< - * set.intersection_update(self, *other) - * self._list = [a for a in self._list if a in self] - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 227, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_other, __pyx_kp_s_Iterable_Hashable) < 0) __PYX_ERR(0, 227, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 227, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_47intersection_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_intersection_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__38)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 227, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_intersection_update, __pyx_t_3) < 0) __PYX_ERR(0, 227, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":235 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * @cython.annotation_typing(False) # avoid cython crash from generic return - * def symmetric_difference_update(self, other: Iterable[_T], /) -> None: - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 235, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_other, __pyx_kp_s_Iterable__T) < 0) __PYX_ERR(0, 235, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_51symmetric_difference_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_symmetric_difference_2, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__39)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 235, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_symmetric_difference_update, __pyx_t_2) < 0) __PYX_ERR(0, 235, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":250 - * - * # @cython.ccall # cdef function cannot have star argument - * def difference_update(self, *other: Iterable[Hashable]) -> None: # <<<<<<<<<<<<<< - * set.difference_update(self, *other) - * self._list = [a for a in self._list if a in self] - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 250, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_other, __pyx_kp_s_Iterable_Hashable) < 0) __PYX_ERR(0, 250, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 250, __pyx_L1_error) - __pyx_t_3 = 
__Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_55difference_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet_difference_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__40)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 250, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_difference_update, __pyx_t_3) < 0) __PYX_ERR(0, 250, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_59__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet___reduce_cython, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__42)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_reduce_cython, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_OrderedSet, (type(self), 0xe74c1d9, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_OrderedSet__set_state(self, __pyx_state) - */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_10OrderedSet_61__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_OrderedSet___setstate_cython, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__44)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet, __pyx_n_s_setstate_cython, __pyx_t_3) < 0) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_OrderedSet); - - /* "sqlalchemy/util/_collections_cy.py":282 - * """ - * - * __slots__ = ("_members",) # <<<<<<<<<<<<<< - * _members: Dict[int, Any] - * - */ - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_slots, __pyx_tuple__45) < 0) __PYX_ERR(0, 282, __pyx_L1_error) - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":291 - * self.update(iterable) - * - * def add(self, value: Any, /) -> None: # <<<<<<<<<<<<<< - * self._members[_get_id(value)] = value - * - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 291, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 291, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 291, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_3add, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_add, NULL, 
__pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__46)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 291, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_add, __pyx_t_2) < 0) __PYX_ERR(0, 291, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":297 - * return _get_id(value) in self._members - * - * @cython.ccall # <<<<<<<<<<<<<< - * def remove(self, value: Any, /): - * del self._members[_get_id(value)] - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 297, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 297, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_7remove, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_remove, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__47)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 297, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_remove, __pyx_t_3) < 0) __PYX_ERR(0, 297, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":301 - * del self._members[_get_id(value)] - * - * def discard(self, value, /) -> None: # <<<<<<<<<<<<<< - * try: - * self.remove(value) - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 301, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 301, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_9discard, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_discard, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__48)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 301, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_discard, __pyx_t_2) < 0) __PYX_ERR(0, 301, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":307 - * pass - * - * def pop(self) -> Any: # <<<<<<<<<<<<<< - * pair: Tuple[Any, Any] - * try: - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 307, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_Any) < 0) __PYX_ERR(0, 307, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_11pop, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_pop, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__50)); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 307, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_pop, __pyx_t_3) < 0) __PYX_ERR(0, 307, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":315 - * raise KeyError("pop from an empty set") - * - * def clear(self) -> None: # <<<<<<<<<<<<<< - * self._members.clear() - * - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 315, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_None) < 0) __PYX_ERR(0, 315, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_13clear, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_clear, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__51)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 315, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_clear, __pyx_t_2) < 0) __PYX_ERR(0, 315, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":334 - * return True - * - * @cython.ccall # <<<<<<<<<<<<<< - * def issubset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 334, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 334, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_19issubset, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_issubset, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__53)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 334, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_issubset, __pyx_t_3) < 0) __PYX_ERR(0, 334, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":354 - * return len(self) < len(other) and self.issubset(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def issuperset(self, iterable: Iterable[Any], /) -> cython.bint: - * other: IdentitySet - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 354, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 354, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_25issuperset, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_issuperset, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject 
*)__pyx_codeobj__54)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 354, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_issuperset, __pyx_t_2) < 0) __PYX_ERR(0, 354, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":374 - * return len(self) > len(other) and self.issuperset(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def union(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__class__() - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 374, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 374, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_31union, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_union, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__55)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 374, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_union, __pyx_t_3) < 0) __PYX_ERR(0, 374, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":386 - * return self.union(other) - * - * @cython.ccall # <<<<<<<<<<<<<< - * def update(self, iterable: Iterable[Any], /): - * members: Dict[int, Any] = self._members - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 386, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 386, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_35update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__56)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 386, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_update, __pyx_t_2) < 0) __PYX_ERR(0, 386, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":401 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 401, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 401, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_39difference, 
__Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_difference, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__57)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 401, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_difference, __pyx_t_3) < 0) __PYX_ERR(0, 401, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":420 - * - * # def difference_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.difference(iterable) - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 420, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 420, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_43difference_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_difference_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__58)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 420, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_difference_update, __pyx_t_2) < 0) __PYX_ERR(0, 420, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":431 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def intersection(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 431, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 431, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_47intersection, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_intersection, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__59)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 431, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_intersection, __pyx_t_3) < 0) __PYX_ERR(0, 431, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":449 - * - * # def intersection_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def intersection_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.intersection(iterable) - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 449, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 449, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_51intersection_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_intersection_update, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__60)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 449, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_intersection_update, __pyx_t_2) < 0) __PYX_ERR(0, 449, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":460 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def symmetric_difference(self, iterable: Iterable[Any], /) -> IdentitySet: - * result: IdentitySet = self.__new__(self.__class__) - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 460, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 460, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_55symmetric_difference, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_symmetric_difference, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__61)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 460, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_symmetric_difference, __pyx_t_3) < 0) __PYX_ERR(0, 460, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":482 - * - * # def symmetric_difference_update(self, iterable: Iterable[Any]) -> None: - * @cython.ccall # <<<<<<<<<<<<<< - * def symmetric_difference_update(self, iterable: Iterable[Any], /): - * other: IdentitySet = self.symmetric_difference(iterable) - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 482, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_iterable, __pyx_kp_s_Iterable_Any) < 0) __PYX_ERR(0, 482, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_59symmetric_difference_update, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_symmetric_difference_2, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__62)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 482, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_symmetric_difference_update, __pyx_t_2) < 0) __PYX_ERR(0, 482, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* 
"sqlalchemy/util/_collections_cy.py":493 - * return self - * - * @cython.ccall # <<<<<<<<<<<<<< - * def copy(self) -> IdentitySet: - * cp: IdentitySet = self.__new__(self.__class__) - */ - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_63copy, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet_copy, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__63)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 493, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_copy, __pyx_t_2) < 0) __PYX_ERR(0, 493, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "sqlalchemy/util/_collections_cy.py":499 - * return cp - * - * def __copy__(self) -> IdentitySet: # <<<<<<<<<<<<<< - * return self.copy() - * - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 499, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_IdentitySet) < 0) __PYX_ERR(0, 499, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_65__copy__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet___copy, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__64)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 499, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_copy_2, __pyx_t_3) < 0) __PYX_ERR(0, 499, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_75__reduce_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet___reduce_cython, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__65)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_reduce_cython, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_IdentitySet, (type(self), 0xaf5eb3d, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_IdentitySet__set_state(self, __pyx_state) - */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_11IdentitySet_77__setstate_cython__, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentitySet___setstate_cython, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__66)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet, __pyx_n_s_setstate_cython, __pyx_t_3) < 0) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); 
__pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_15_collections_cy_IdentitySet); - - /* "(tree fragment)":1 - * def __pyx_unpickle_OrderedSet(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_5__pyx_unpickle_OrderedSet, 0, __pyx_n_s_pyx_unpickle_OrderedSet, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__68)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_OrderedSet, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":11 - * __pyx_unpickle_OrderedSet__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_OrderedSet__set_state(OrderedSet __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * __pyx_result._list = __pyx_state[0] - * if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'): - */ - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_15_collections_cy_7__pyx_unpickle_IdentitySet, 0, __pyx_n_s_pyx_unpickle_IdentitySet, NULL, __pyx_n_s_sqlalchemy_util__collections_cy, __pyx_d, ((PyObject *)__pyx_codeobj__69)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_IdentitySet, __pyx_t_3) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_collections_cy.py":1 - * # util/_collections_cy.py # <<<<<<<<<<<<<< - * # Copyright (C) 2010-2025 the SQLAlchemy authors and contributors - * # - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_3) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /*--- Wrapped vars code ---*/ - - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - if (__pyx_m) { - if (__pyx_d && stringtab_initialized) { - __Pyx_AddTraceback("init sqlalchemy.util._collections_cy", __pyx_clineno, __pyx_lineno, __pyx_filename); - } - #if !CYTHON_USE_MODULE_STATE - Py_CLEAR(__pyx_m); - #else - Py_DECREF(__pyx_m); - if (pystate_addmodule_run) { - PyObject *tp, *value, *tb; - PyErr_Fetch(&tp, &value, &tb); - PyState_RemoveModule(&__pyx_moduledef); - PyErr_Restore(tp, value, tb); - } - #endif - } else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "init sqlalchemy.util._collections_cy"); - } - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - #if CYTHON_PEP489_MULTI_PHASE_INIT - return (__pyx_m != NULL) ? 
0 : -1; - #elif PY_MAJOR_VERSION >= 3 - return __pyx_m; - #else - return; - #endif -} -/* #### Code section: cleanup_globals ### */ -/* #### Code section: cleanup_module ### */ -/* #### Code section: main_method ### */ -/* #### Code section: utility_code_pragmas ### */ -#ifdef _MSC_VER -#pragma warning( push ) -/* Warning 4127: conditional expression is constant - * Cython uses constant conditional expressions to allow in inline functions to be optimized at - * compile-time, so this warning is not useful - */ -#pragma warning( disable : 4127 ) -#endif - - - -/* #### Code section: utility_code_def ### */ - -/* --- Runtime support code --- */ -/* Refnanny */ -#if CYTHON_REFNANNY -static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule(modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, "RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); -end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; -} -#endif - -/* PyErrExceptionMatches */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i= 0x030C00A6 - PyObject *current_exception = tstate->current_exception; - if (unlikely(!current_exception)) return 0; - exc_type = (PyObject*) Py_TYPE(current_exception); - if (exc_type == err) return 1; -#else - exc_type = tstate->curexc_type; - if (exc_type == err) return 1; - if (unlikely(!exc_type)) return 0; -#endif - #if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(exc_type); - #endif - if (unlikely(PyTuple_Check(err))) { - result = __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); - } else { - result = __Pyx_PyErr_GivenExceptionMatches(exc_type, err); - } - #if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(exc_type); - #endif - return result; -} -#endif - -/* PyErrFetchRestore */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { -#if PY_VERSION_HEX >= 0x030C00A6 - PyObject *tmp_value; - assert(type == NULL || (value != NULL && type == (PyObject*) Py_TYPE(value))); - if (value) { - #if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(((PyBaseExceptionObject*) value)->traceback != tb)) - #endif - PyException_SetTraceback(value, tb); - } - tmp_value = tstate->current_exception; - tstate->current_exception = value; - Py_XDECREF(tmp_value); - Py_XDECREF(type); - Py_XDECREF(tb); -#else - PyObject *tmp_type, *tmp_value, *tmp_tb; - tmp_type = tstate->curexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -#endif -} -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { -#if PY_VERSION_HEX >= 0x030C00A6 - PyObject* exc_value; - exc_value = tstate->current_exception; - tstate->current_exception = 0; - *value = exc_value; - *type = NULL; - *tb = NULL; - if (exc_value) { - *type = (PyObject*) Py_TYPE(exc_value); - Py_INCREF(*type); - #if CYTHON_COMPILING_IN_CPYTHON - *tb = ((PyBaseExceptionObject*) exc_value)->traceback; - Py_XINCREF(*tb); - #else - *tb = PyException_GetTraceback(exc_value); - #endif - } -#else - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb 
= tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -#endif -} -#endif - -/* PyObjectGetAttrStr */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro)) - return tp->tp_getattro(obj, attr_name); -#if PY_MAJOR_VERSION < 3 - if (likely(tp->tp_getattr)) - return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); -#endif - return PyObject_GetAttr(obj, attr_name); -} -#endif - -/* PyObjectGetAttrStrNoError */ -#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 -static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - __Pyx_PyErr_Clear(); -} -#endif -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { - PyObject *result; -#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 - (void) PyObject_GetOptionalAttr(obj, attr_name, &result); - return result; -#else -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { - return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); - } -#endif - result = __Pyx_PyObject_GetAttrStr(obj, attr_name); - if (unlikely(!result)) { - __Pyx_PyObject_GetAttrStr_ClearAttributeError(); - } - return result; -#endif -} - -/* GetBuiltinName */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name) { - PyObject* result = __Pyx_PyObject_GetAttrStrNoError(__pyx_b, name); - if (unlikely(!result) && !PyErr_Occurred()) { - PyErr_Format(PyExc_NameError, -#if PY_MAJOR_VERSION >= 3 - "name '%U' is not defined", name); -#else - "name '%.200s' is not defined", PyString_AS_STRING(name)); -#endif - } - return result; -} - -/* TupleAndListFromArray */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE void __Pyx_copy_object_array(PyObject *const *CYTHON_RESTRICT src, PyObject** CYTHON_RESTRICT dest, Py_ssize_t length) { - PyObject *v; - Py_ssize_t i; - for (i = 0; i < length; i++) { - v = dest[i] = src[i]; - Py_INCREF(v); - } -} -static CYTHON_INLINE PyObject * -__Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) -{ - PyObject *res; - if (n <= 0) { - Py_INCREF(__pyx_empty_tuple); - return __pyx_empty_tuple; - } - res = PyTuple_New(n); - if (unlikely(res == NULL)) return NULL; - __Pyx_copy_object_array(src, ((PyTupleObject*)res)->ob_item, n); - return res; -} -static CYTHON_INLINE PyObject * -__Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n) -{ - PyObject *res; - if (n <= 0) { - return PyList_New(0); - } - res = PyList_New(n); - if (unlikely(res == NULL)) return NULL; - __Pyx_copy_object_array(src, ((PyListObject*)res)->ob_item, n); - return res; -} -#endif - -/* BytesEquals */ -static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { -#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API - return PyObject_RichCompareBool(s1, s2, equals); -#else - if (s1 == s2) { - return (equals == Py_EQ); - } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { - const char *ps1, *ps2; - Py_ssize_t length = PyBytes_GET_SIZE(s1); - if (length != PyBytes_GET_SIZE(s2)) - return (equals == Py_NE); - ps1 = PyBytes_AS_STRING(s1); - ps2 = PyBytes_AS_STRING(s2); - if (ps1[0] != ps2[0]) { - return (equals == Py_NE); - } else if (length == 1) { - 
return (equals == Py_EQ); - } else { - int result; -#if CYTHON_USE_UNICODE_INTERNALS && (PY_VERSION_HEX < 0x030B0000) - Py_hash_t hash1, hash2; - hash1 = ((PyBytesObject*)s1)->ob_shash; - hash2 = ((PyBytesObject*)s2)->ob_shash; - if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { - return (equals == Py_NE); - } -#endif - result = memcmp(ps1, ps2, (size_t)length); - return (equals == Py_EQ) ? (result == 0) : (result != 0); - } - } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { - return (equals == Py_NE); - } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { - return (equals == Py_NE); - } else { - int result; - PyObject* py_result = PyObject_RichCompare(s1, s2, equals); - if (!py_result) - return -1; - result = __Pyx_PyObject_IsTrue(py_result); - Py_DECREF(py_result); - return result; - } -#endif -} - -/* UnicodeEquals */ -static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { -#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API - return PyObject_RichCompareBool(s1, s2, equals); -#else -#if PY_MAJOR_VERSION < 3 - PyObject* owned_ref = NULL; -#endif - int s1_is_unicode, s2_is_unicode; - if (s1 == s2) { - goto return_eq; - } - s1_is_unicode = PyUnicode_CheckExact(s1); - s2_is_unicode = PyUnicode_CheckExact(s2); -#if PY_MAJOR_VERSION < 3 - if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { - owned_ref = PyUnicode_FromObject(s2); - if (unlikely(!owned_ref)) - return -1; - s2 = owned_ref; - s2_is_unicode = 1; - } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { - owned_ref = PyUnicode_FromObject(s1); - if (unlikely(!owned_ref)) - return -1; - s1 = owned_ref; - s1_is_unicode = 1; - } else if (((!s2_is_unicode) & (!s1_is_unicode))) { - return __Pyx_PyBytes_Equals(s1, s2, equals); - } -#endif - if (s1_is_unicode & s2_is_unicode) { - Py_ssize_t length; - int kind; - void *data1, *data2; - if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) - return -1; - length = __Pyx_PyUnicode_GET_LENGTH(s1); - if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { - goto return_ne; - } -#if CYTHON_USE_UNICODE_INTERNALS - { - Py_hash_t hash1, hash2; - #if CYTHON_PEP393_ENABLED - hash1 = ((PyASCIIObject*)s1)->hash; - hash2 = ((PyASCIIObject*)s2)->hash; - #else - hash1 = ((PyUnicodeObject*)s1)->hash; - hash2 = ((PyUnicodeObject*)s2)->hash; - #endif - if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { - goto return_ne; - } - } -#endif - kind = __Pyx_PyUnicode_KIND(s1); - if (kind != __Pyx_PyUnicode_KIND(s2)) { - goto return_ne; - } - data1 = __Pyx_PyUnicode_DATA(s1); - data2 = __Pyx_PyUnicode_DATA(s2); - if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { - goto return_ne; - } else if (length == 1) { - goto return_eq; - } else { - int result = memcmp(data1, data2, (size_t)(length * kind)); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_EQ) ? 
(result == 0) : (result != 0); - } - } else if ((s1 == Py_None) & s2_is_unicode) { - goto return_ne; - } else if ((s2 == Py_None) & s1_is_unicode) { - goto return_ne; - } else { - int result; - PyObject* py_result = PyObject_RichCompare(s1, s2, equals); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - if (!py_result) - return -1; - result = __Pyx_PyObject_IsTrue(py_result); - Py_DECREF(py_result); - return result; - } -return_eq: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_EQ); -return_ne: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(owned_ref); - #endif - return (equals == Py_NE); -#endif -} - -/* fastcall */ -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s) -{ - Py_ssize_t i, n = PyTuple_GET_SIZE(kwnames); - for (i = 0; i < n; i++) - { - if (s == PyTuple_GET_ITEM(kwnames, i)) return kwvalues[i]; - } - for (i = 0; i < n; i++) - { - int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ); - if (unlikely(eq != 0)) { - if (unlikely(eq < 0)) return NULL; - return kwvalues[i]; - } - } - return NULL; -} -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 -CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues) { - Py_ssize_t i, nkwargs = PyTuple_GET_SIZE(kwnames); - PyObject *dict; - dict = PyDict_New(); - if (unlikely(!dict)) - return NULL; - for (i=0; i= 0x031000A1) || likely(PySet_GET_SIZE(result))) - return result; - Py_DECREF(result); -#endif - } -#if CYTHON_USE_TYPE_SLOTS - return PyFrozenSet_Type.tp_new(&PyFrozenSet_Type, __pyx_empty_tuple, NULL); -#else - return PyObject_Call((PyObject*)&PyFrozenSet_Type, __pyx_empty_tuple, NULL); -#endif -} - -/* PySetContains */ -static int __Pyx_PySet_ContainsUnhashable(PyObject *set, PyObject *key) { - int result = -1; - if (PySet_Check(key) && PyErr_ExceptionMatches(PyExc_TypeError)) { - PyObject *tmpkey; - PyErr_Clear(); - tmpkey = __Pyx_PyFrozenSet_New(key); - if (tmpkey != NULL) { - result = PySet_Contains(set, tmpkey); - Py_DECREF(tmpkey); - } - } - return result; -} -static CYTHON_INLINE int __Pyx_PySet_ContainsTF(PyObject* key, PyObject* set, int eq) { - int result = PySet_Contains(set, key); - if (unlikely(result < 0)) { - result = __Pyx_PySet_ContainsUnhashable(set, key); - } - return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); -} - -/* RaiseDoubleKeywords */ -static void __Pyx_RaiseDoubleKeywordsError( - const char* func_name, - PyObject* kw_name) -{ - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION >= 3 - "%s() got multiple values for keyword argument '%U'", func_name, kw_name); - #else - "%s() got multiple values for keyword argument '%s'", func_name, - PyString_AsString(kw_name)); - #endif -} - -/* ParseKeywords */ -static int __Pyx_ParseOptionalKeywords( - PyObject *kwds, - PyObject *const *kwvalues, - PyObject **argnames[], - PyObject *kwds2, - PyObject *values[], - Py_ssize_t num_pos_args, - const char* function_name) -{ - PyObject *key = 0, *value = 0; - Py_ssize_t pos = 0; - PyObject*** name; - PyObject*** first_kw_arg = argnames + num_pos_args; - int kwds_is_tuple = CYTHON_METH_FASTCALL && likely(PyTuple_Check(kwds)); - while (1) { - Py_XDECREF(key); key = NULL; - Py_XDECREF(value); value = NULL; - if (kwds_is_tuple) { - Py_ssize_t size; -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(kwds); -#else - size = PyTuple_Size(kwds); - if (size < 0) goto bad; -#endif - if (pos >= size) break; -#if CYTHON_AVOID_BORROWED_REFS - key = __Pyx_PySequence_ITEM(kwds, pos); - if (!key) goto bad; -#elif CYTHON_ASSUME_SAFE_MACROS - key = PyTuple_GET_ITEM(kwds, pos); -#else - key = PyTuple_GetItem(kwds, pos); - if (!key) goto bad; -#endif - value = kwvalues[pos]; - pos++; - } - else - { - if (!PyDict_Next(kwds, &pos, &key, &value)) break; -#if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(key); -#endif - } - name = first_kw_arg; - while (*name && (**name != key)) name++; - if (*name) { - values[name-argnames] = value; -#if CYTHON_AVOID_BORROWED_REFS - Py_INCREF(value); - Py_DECREF(key); -#endif - key = NULL; - value = NULL; - continue; - } -#if !CYTHON_AVOID_BORROWED_REFS - Py_INCREF(key); -#endif - Py_INCREF(value); - name = first_kw_arg; - #if PY_MAJOR_VERSION < 3 - if (likely(PyString_Check(key))) { - while (*name) { - if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) - && _PyString_Eq(**name, key)) { - values[name-argnames] = value; -#if CYTHON_AVOID_BORROWED_REFS - value = NULL; -#endif - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - if ((**argname == key) || ( - (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) - && _PyString_Eq(**argname, key))) { - goto arg_passed_twice; - } - argname++; - } - } - } else - #endif - if (likely(PyUnicode_Check(key))) { - while (*name) { - int cmp = ( - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**name, key) - ); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) { - values[name-argnames] = value; -#if CYTHON_AVOID_BORROWED_REFS - value = NULL; -#endif - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - int cmp = (**argname == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 
1 : - #endif - PyUnicode_Compare(**argname, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) goto arg_passed_twice; - argname++; - } - } - } else - goto invalid_keyword_type; - if (kwds2) { - if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; - } else { - goto invalid_keyword; - } - } - Py_XDECREF(key); - Py_XDECREF(value); - return 0; -arg_passed_twice: - __Pyx_RaiseDoubleKeywordsError(function_name, key); - goto bad; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - goto bad; -invalid_keyword: - #if PY_MAJOR_VERSION < 3 - PyErr_Format(PyExc_TypeError, - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - PyErr_Format(PyExc_TypeError, - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif -bad: - Py_XDECREF(key); - Py_XDECREF(value); - return -1; -} - -/* RaiseArgTupleInvalid */ -static void __Pyx_RaiseArgtupleInvalid( - const char* func_name, - int exact, - Py_ssize_t num_min, - Py_ssize_t num_max, - Py_ssize_t num_found) -{ - Py_ssize_t num_expected; - const char *more_or_less; - if (num_found < num_min) { - num_expected = num_min; - more_or_less = "at least"; - } else { - num_expected = num_max; - more_or_less = "at most"; - } - if (exact) { - more_or_less = "exactly"; - } - PyErr_Format(PyExc_TypeError, - "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", - func_name, more_or_less, num_expected, - (num_expected == 1) ? "" : "s", num_found); -} - -/* PyFunctionFastCall */ -#if CYTHON_FAST_PYCALL && !CYTHON_VECTORCALL -static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, - PyObject *globals) { - PyFrameObject *f; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject **fastlocals; - Py_ssize_t i; - PyObject *result; - assert(globals != NULL); - /* XXX Perhaps we should create a specialized - PyFrame_New() that doesn't take locals, but does - take builtins without sanity checking them. - */ - assert(tstate != NULL); - f = PyFrame_New(tstate, co, globals, NULL); - if (f == NULL) { - return NULL; - } - fastlocals = __Pyx_PyFrame_GetLocalsplus(f); - for (i = 0; i < na; i++) { - Py_INCREF(*args); - fastlocals[i] = *args++; - } - result = PyEval_EvalFrameEx(f,0); - ++tstate->recursion_depth; - Py_DECREF(f); - --tstate->recursion_depth; - return result; -} -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { - PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); - PyObject *globals = PyFunction_GET_GLOBALS(func); - PyObject *argdefs = PyFunction_GET_DEFAULTS(func); - PyObject *closure; -#if PY_MAJOR_VERSION >= 3 - PyObject *kwdefs; -#endif - PyObject *kwtuple, **k; - PyObject **d; - Py_ssize_t nd; - Py_ssize_t nk; - PyObject *result; - assert(kwargs == NULL || PyDict_Check(kwargs)); - nk = kwargs ? 
PyDict_Size(kwargs) : 0; - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) { - return NULL; - } - #else - if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) { - return NULL; - } - #endif - if ( -#if PY_MAJOR_VERSION >= 3 - co->co_kwonlyargcount == 0 && -#endif - likely(kwargs == NULL || nk == 0) && - co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { - if (argdefs == NULL && co->co_argcount == nargs) { - result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); - goto done; - } - else if (nargs == 0 && argdefs != NULL - && co->co_argcount == Py_SIZE(argdefs)) { - /* function called with no arguments, but all parameters have - a default value: use default values as arguments .*/ - args = &PyTuple_GET_ITEM(argdefs, 0); - result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); - goto done; - } - } - if (kwargs != NULL) { - Py_ssize_t pos, i; - kwtuple = PyTuple_New(2 * nk); - if (kwtuple == NULL) { - result = NULL; - goto done; - } - k = &PyTuple_GET_ITEM(kwtuple, 0); - pos = i = 0; - while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { - Py_INCREF(k[i]); - Py_INCREF(k[i+1]); - i += 2; - } - nk = i / 2; - } - else { - kwtuple = NULL; - k = NULL; - } - closure = PyFunction_GET_CLOSURE(func); -#if PY_MAJOR_VERSION >= 3 - kwdefs = PyFunction_GET_KW_DEFAULTS(func); -#endif - if (argdefs != NULL) { - d = &PyTuple_GET_ITEM(argdefs, 0); - nd = Py_SIZE(argdefs); - } - else { - d = NULL; - nd = 0; - } -#if PY_MAJOR_VERSION >= 3 - result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, kwdefs, closure); -#else - result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, closure); -#endif - Py_XDECREF(kwtuple); -done: - Py_LeaveRecursiveCall(); - return result; -} -#endif - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = Py_TYPE(func)->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - #else - if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) - return NULL; - #endif - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCallMethO */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { - PyObject *self, *result; - PyCFunction cfunc; - cfunc = __Pyx_CyOrPyCFunction_GET_FUNCTION(func); - self = __Pyx_CyOrPyCFunction_GET_SELF(func); - #if PY_MAJOR_VERSION < 3 - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - #else - if (unlikely(Py_EnterRecursiveCall(" while calling a Python object"))) - return NULL; - #endif - result = cfunc(self, arg); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectFastCall */ -#if PY_VERSION_HEX < 0x03090000 || CYTHON_COMPILING_IN_LIMITED_API -static PyObject* 
__Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs) { - PyObject *argstuple; - PyObject *result = 0; - size_t i; - argstuple = PyTuple_New((Py_ssize_t)nargs); - if (unlikely(!argstuple)) return NULL; - for (i = 0; i < nargs; i++) { - Py_INCREF(args[i]); - if (__Pyx_PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]) < 0) goto bad; - } - result = __Pyx_PyObject_Call(func, argstuple, kwargs); - bad: - Py_DECREF(argstuple); - return result; -} -#endif -static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t _nargs, PyObject *kwargs) { - Py_ssize_t nargs = __Pyx_PyVectorcall_NARGS(_nargs); -#if CYTHON_COMPILING_IN_CPYTHON - if (nargs == 0 && kwargs == NULL) { - if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_NOARGS)) - return __Pyx_PyObject_CallMethO(func, NULL); - } - else if (nargs == 1 && kwargs == NULL) { - if (__Pyx_CyOrPyCFunction_Check(func) && likely( __Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_O)) - return __Pyx_PyObject_CallMethO(func, args[0]); - } -#endif - #if PY_VERSION_HEX < 0x030800B1 - #if CYTHON_FAST_PYCCALL - if (PyCFunction_Check(func)) { - if (kwargs) { - return _PyCFunction_FastCallDict(func, args, nargs, kwargs); - } else { - return _PyCFunction_FastCallKeywords(func, args, nargs, NULL); - } - } - #if PY_VERSION_HEX >= 0x030700A1 - if (!kwargs && __Pyx_IS_TYPE(func, &PyMethodDescr_Type)) { - return _PyMethodDescr_FastCallKeywords(func, args, nargs, NULL); - } - #endif - #endif - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs); - } - #endif - #endif - if (kwargs == NULL) { - #if CYTHON_VECTORCALL - #if PY_VERSION_HEX < 0x03090000 - vectorcallfunc f = _PyVectorcall_Function(func); - #else - vectorcallfunc f = PyVectorcall_Function(func); - #endif - if (f) { - return f(func, args, (size_t)nargs, NULL); - } - #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL - if (__Pyx_CyFunction_CheckExact(func)) { - __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func); - if (f) return f(func, args, (size_t)nargs, NULL); - } - #endif - } - if (nargs == 0) { - return __Pyx_PyObject_Call(func, __pyx_empty_tuple, kwargs); - } - #if PY_VERSION_HEX >= 0x03090000 && !CYTHON_COMPILING_IN_LIMITED_API - return PyObject_VectorcallDict(func, args, (size_t)nargs, kwargs); - #else - return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs); - #endif -} - -/* KeywordStringCheck */ -static int __Pyx_CheckKeywordStrings( - PyObject *kw, - const char* function_name, - int kw_allowed) -{ - PyObject* key = 0; - Py_ssize_t pos = 0; -#if CYTHON_COMPILING_IN_PYPY - if (!kw_allowed && PyDict_Next(kw, &pos, &key, 0)) - goto invalid_keyword; - return 1; -#else - if (CYTHON_METH_FASTCALL && likely(PyTuple_Check(kw))) { - Py_ssize_t kwsize; -#if CYTHON_ASSUME_SAFE_MACROS - kwsize = PyTuple_GET_SIZE(kw); -#else - kwsize = PyTuple_Size(kw); - if (kwsize < 0) return 0; -#endif - if (unlikely(kwsize == 0)) - return 1; - if (!kw_allowed) { -#if CYTHON_ASSUME_SAFE_MACROS - key = PyTuple_GET_ITEM(kw, 0); -#else - key = PyTuple_GetItem(kw, pos); - if (!key) return 0; -#endif - goto invalid_keyword; - } -#if PY_VERSION_HEX < 0x03090000 - for (pos = 0; pos < kwsize; pos++) { -#if CYTHON_ASSUME_SAFE_MACROS - key = PyTuple_GET_ITEM(kw, pos); -#else - key = PyTuple_GetItem(kw, pos); - if (!key) return 0; -#endif - if (unlikely(!PyUnicode_Check(key))) - goto invalid_keyword_type; 
- } -#endif - return 1; - } - while (PyDict_Next(kw, &pos, &key, 0)) { - #if PY_MAJOR_VERSION < 3 - if (unlikely(!PyString_Check(key))) - #endif - if (unlikely(!PyUnicode_Check(key))) - goto invalid_keyword_type; - } - if (!kw_allowed && unlikely(key)) - goto invalid_keyword; - return 1; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - return 0; -#endif -invalid_keyword: - #if PY_MAJOR_VERSION < 3 - PyErr_Format(PyExc_TypeError, - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - PyErr_Format(PyExc_TypeError, - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif - return 0; -} - -/* py_set_discard_unhashable */ -static int __Pyx_PySet_DiscardUnhashable(PyObject *set, PyObject *key) { - PyObject *tmpkey; - int rv; - if (likely(!PySet_Check(key) || !PyErr_ExceptionMatches(PyExc_TypeError))) - return -1; - PyErr_Clear(); - tmpkey = __Pyx_PyFrozenSet_New(key); - if (tmpkey == NULL) - return -1; - rv = PySet_Discard(set, tmpkey); - Py_DECREF(tmpkey); - return rv; -} - -/* py_set_remove */ -static int __Pyx_PySet_RemoveNotFound(PyObject *set, PyObject *key, int found) { - if (unlikely(found < 0)) { - found = __Pyx_PySet_DiscardUnhashable(set, key); - } - if (likely(found == 0)) { - PyObject *tup; - tup = PyTuple_Pack(1, key); - if (!tup) - return -1; - PyErr_SetObject(PyExc_KeyError, tup); - Py_DECREF(tup); - return -1; - } - return found; -} -static CYTHON_INLINE int __Pyx_PySet_Remove(PyObject *set, PyObject *key) { - int found = PySet_Discard(set, key); - if (unlikely(found != 1)) { - return __Pyx_PySet_RemoveNotFound(set, key, found); - } - return 0; -} - -/* PyObjectCallNoArg */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { - PyObject *arg[2] = {NULL, NULL}; - return __Pyx_PyObject_FastCall(func, arg + 1, 0 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); -} - -/* PyObjectCallOneArg */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *args[2] = {NULL, arg}; - return __Pyx_PyObject_FastCall(func, args+1, 1 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); -} - -/* PyObjectGetMethod */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) { - PyObject *attr; -#if CYTHON_UNPACK_METHODS && CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_PYTYPE_LOOKUP - __Pyx_TypeName type_name; - PyTypeObject *tp = Py_TYPE(obj); - PyObject *descr; - descrgetfunc f = NULL; - PyObject **dictptr, *dict; - int meth_found = 0; - assert (*method == NULL); - if (unlikely(tp->tp_getattro != PyObject_GenericGetAttr)) { - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; - } - if (unlikely(tp->tp_dict == NULL) && unlikely(PyType_Ready(tp) < 0)) { - return 0; - } - descr = _PyType_Lookup(tp, name); - if (likely(descr != NULL)) { - Py_INCREF(descr); -#if defined(Py_TPFLAGS_METHOD_DESCRIPTOR) && Py_TPFLAGS_METHOD_DESCRIPTOR - if (__Pyx_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)) -#elif PY_MAJOR_VERSION >= 3 - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr) || __Pyx_IS_TYPE(descr, &PyMethodDescr_Type))) - #endif -#else - #ifdef __Pyx_CyFunction_USED - if (likely(PyFunction_Check(descr) || __Pyx_CyFunction_Check(descr))) - #else - if (likely(PyFunction_Check(descr))) - #endif -#endif - { - meth_found = 1; - } else 
{ - f = Py_TYPE(descr)->tp_descr_get; - if (f != NULL && PyDescr_IsData(descr)) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - } - } - dictptr = _PyObject_GetDictPtr(obj); - if (dictptr != NULL && (dict = *dictptr) != NULL) { - Py_INCREF(dict); - attr = __Pyx_PyDict_GetItemStr(dict, name); - if (attr != NULL) { - Py_INCREF(attr); - Py_DECREF(dict); - Py_XDECREF(descr); - goto try_unpack; - } - Py_DECREF(dict); - } - if (meth_found) { - *method = descr; - return 1; - } - if (f != NULL) { - attr = f(descr, obj, (PyObject *)Py_TYPE(obj)); - Py_DECREF(descr); - goto try_unpack; - } - if (likely(descr != NULL)) { - *method = descr; - return 0; - } - type_name = __Pyx_PyType_GetName(tp); - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", - type_name, name); -#else - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", - type_name, PyString_AS_STRING(name)); -#endif - __Pyx_DECREF_TypeName(type_name); - return 0; -#else - attr = __Pyx_PyObject_GetAttrStr(obj, name); - goto try_unpack; -#endif -try_unpack: -#if CYTHON_UNPACK_METHODS - if (likely(attr) && PyMethod_Check(attr) && likely(PyMethod_GET_SELF(attr) == obj)) { - PyObject *function = PyMethod_GET_FUNCTION(attr); - Py_INCREF(function); - Py_DECREF(attr); - *method = function; - return 1; - } -#endif - *method = attr; - return 0; -} - -/* PyObjectCallMethod0 */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name) { - PyObject *method = NULL, *result = NULL; - int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method); - if (likely(is_method)) { - result = __Pyx_PyObject_CallOneArg(method, obj); - Py_DECREF(method); - return result; - } - if (unlikely(!method)) goto bad; - result = __Pyx_PyObject_CallNoArg(method); - Py_DECREF(method); -bad: - return result; -} - -/* UnpackUnboundCMethod */ -static PyObject *__Pyx_SelflessCall(PyObject *method, PyObject *args, PyObject *kwargs) { - PyObject *result; - PyObject *selfless_args = PyTuple_GetSlice(args, 1, PyTuple_Size(args)); - if (unlikely(!selfless_args)) return NULL; - result = PyObject_Call(method, selfless_args, kwargs); - Py_DECREF(selfless_args); - return result; -} -static PyMethodDef __Pyx_UnboundCMethod_Def = { - "CythonUnboundCMethod", - __PYX_REINTERPRET_FUNCION(PyCFunction, __Pyx_SelflessCall), - METH_VARARGS | METH_KEYWORDS, - NULL -}; -static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) { - PyObject *method; - method = __Pyx_PyObject_GetAttrStr(target->type, *target->method_name); - if (unlikely(!method)) - return -1; - target->method = method; -#if CYTHON_COMPILING_IN_CPYTHON - #if PY_MAJOR_VERSION >= 3 - if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type))) - #else - if (likely(!__Pyx_CyOrPyCFunction_Check(method))) - #endif - { - PyMethodDescrObject *descr = (PyMethodDescrObject*) method; - target->func = descr->d_method->ml_meth; - target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_STACKLESS); - } else -#endif -#if CYTHON_COMPILING_IN_PYPY -#else - if (PyCFunction_Check(method)) -#endif - { - PyObject *self; - int self_found; -#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY - self = PyObject_GetAttrString(method, "__self__"); - if (!self) { - PyErr_Clear(); - } -#else - self = PyCFunction_GET_SELF(method); -#endif - self_found = (self && self != Py_None); -#if CYTHON_COMPILING_IN_LIMITED_API || CYTHON_COMPILING_IN_PYPY - 
Py_XDECREF(self); -#endif - if (self_found) { - PyObject *unbound_method = PyCFunction_New(&__Pyx_UnboundCMethod_Def, method); - if (unlikely(!unbound_method)) return -1; - Py_DECREF(method); - target->method = unbound_method; - } - } - return 0; -} - -/* CallUnboundCMethod0 */ -static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self) { - PyObject *args, *result = NULL; - if (unlikely(!cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; -#if CYTHON_ASSUME_SAFE_MACROS - args = PyTuple_New(1); - if (unlikely(!args)) goto bad; - Py_INCREF(self); - PyTuple_SET_ITEM(args, 0, self); -#else - args = PyTuple_Pack(1, self); - if (unlikely(!args)) goto bad; -#endif - result = __Pyx_PyObject_Call(cfunc->method, args, NULL); - Py_DECREF(args); -bad: - return result; -} - -/* pop */ -static CYTHON_INLINE PyObject* __Pyx__PyObject_Pop(PyObject* L) { - if (__Pyx_IS_TYPE(L, &PySet_Type)) { - return PySet_Pop(L); - } - return __Pyx_PyObject_CallMethod0(L, __pyx_n_s_pop); -} -#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS -static CYTHON_INLINE PyObject* __Pyx_PyList_Pop(PyObject* L) { - if (likely(PyList_GET_SIZE(L) > (((PyListObject*)L)->allocated >> 1))) { - __Pyx_SET_SIZE(L, Py_SIZE(L) - 1); - return PyList_GET_ITEM(L, PyList_GET_SIZE(L)); - } - return __Pyx_CallUnboundCMethod0(&__pyx_umethod_PyList_Type_pop, L); -} -#endif - -/* GetTopmostException */ -#if CYTHON_USE_EXC_INFO_STACK && CYTHON_FAST_THREAD_STATE -static _PyErr_StackItem * -__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) -{ - _PyErr_StackItem *exc_info = tstate->exc_info; - while ((exc_info->exc_value == NULL || exc_info->exc_value == Py_None) && - exc_info->previous_item != NULL) - { - exc_info = exc_info->previous_item; - } - return exc_info; -} -#endif - -/* SaveResetException */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if CYTHON_USE_EXC_INFO_STACK && PY_VERSION_HEX >= 0x030B00a4 - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - PyObject *exc_value = exc_info->exc_value; - if (exc_value == NULL || exc_value == Py_None) { - *value = NULL; - *type = NULL; - *tb = NULL; - } else { - *value = exc_value; - Py_INCREF(*value); - *type = (PyObject*) Py_TYPE(exc_value); - Py_INCREF(*type); - *tb = PyException_GetTraceback(exc_value); - } - #elif CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); - *type = exc_info->exc_type; - *value = exc_info->exc_value; - *tb = exc_info->exc_traceback; - Py_XINCREF(*type); - Py_XINCREF(*value); - Py_XINCREF(*tb); - #else - *type = tstate->exc_type; - *value = tstate->exc_value; - *tb = tstate->exc_traceback; - Py_XINCREF(*type); - Py_XINCREF(*value); - Py_XINCREF(*tb); - #endif -} -static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - #if CYTHON_USE_EXC_INFO_STACK && PY_VERSION_HEX >= 0x030B00a4 - _PyErr_StackItem *exc_info = tstate->exc_info; - PyObject *tmp_value = exc_info->exc_value; - exc_info->exc_value = value; - Py_XDECREF(tmp_value); - Py_XDECREF(type); - Py_XDECREF(tb); - #else - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if CYTHON_USE_EXC_INFO_STACK - _PyErr_StackItem *exc_info = tstate->exc_info; - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = type; - exc_info->exc_value = value; - 
exc_info->exc_traceback = tb; - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = type; - tstate->exc_value = value; - tstate->exc_traceback = tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); - #endif -} -#endif - -/* GetException */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) -#else -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) -#endif -{ - PyObject *local_type = NULL, *local_value, *local_tb = NULL; -#if CYTHON_FAST_THREAD_STATE - PyObject *tmp_type, *tmp_value, *tmp_tb; - #if PY_VERSION_HEX >= 0x030C00A6 - local_value = tstate->current_exception; - tstate->current_exception = 0; - if (likely(local_value)) { - local_type = (PyObject*) Py_TYPE(local_value); - Py_INCREF(local_type); - local_tb = PyException_GetTraceback(local_value); - } - #else - local_type = tstate->curexc_type; - local_value = tstate->curexc_value; - local_tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; - #endif -#else - PyErr_Fetch(&local_type, &local_value, &local_tb); -#endif - PyErr_NormalizeException(&local_type, &local_value, &local_tb); -#if CYTHON_FAST_THREAD_STATE && PY_VERSION_HEX >= 0x030C00A6 - if (unlikely(tstate->current_exception)) -#elif CYTHON_FAST_THREAD_STATE - if (unlikely(tstate->curexc_type)) -#else - if (unlikely(PyErr_Occurred())) -#endif - goto bad; - #if PY_MAJOR_VERSION >= 3 - if (local_tb) { - if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) - goto bad; - } - #endif - Py_XINCREF(local_tb); - Py_XINCREF(local_type); - Py_XINCREF(local_value); - *type = local_type; - *value = local_value; - *tb = local_tb; -#if CYTHON_FAST_THREAD_STATE - #if CYTHON_USE_EXC_INFO_STACK - { - _PyErr_StackItem *exc_info = tstate->exc_info; - #if PY_VERSION_HEX >= 0x030B00a4 - tmp_value = exc_info->exc_value; - exc_info->exc_value = local_value; - tmp_type = NULL; - tmp_tb = NULL; - Py_XDECREF(local_type); - Py_XDECREF(local_tb); - #else - tmp_type = exc_info->exc_type; - tmp_value = exc_info->exc_value; - tmp_tb = exc_info->exc_traceback; - exc_info->exc_type = local_type; - exc_info->exc_value = local_value; - exc_info->exc_traceback = local_tb; - #endif - } - #else - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = local_type; - tstate->exc_value = local_value; - tstate->exc_traceback = local_tb; - #endif - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -#else - PyErr_SetExcInfo(local_type, local_value, local_tb); -#endif - return 0; -bad: - *type = 0; - *value = 0; - *tb = 0; - Py_XDECREF(local_type); - Py_XDECREF(local_value); - Py_XDECREF(local_tb); - return -1; -} - -/* RaiseException */ -#if PY_MAJOR_VERSION < 3 -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - __Pyx_PyThreadState_declare - CYTHON_UNUSED_VAR(cause); - Py_XINCREF(type); - if (!value || value == Py_None) - value = NULL; - else - Py_INCREF(value); - if (!tb || tb == Py_None) - tb = NULL; - else { - Py_INCREF(tb); - if (!PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto raise_error; - } - } - if (PyType_Check(type)) { -#if CYTHON_COMPILING_IN_PYPY - if (!value) { - Py_INCREF(Py_None); - value = Py_None; - } -#endif - 
PyErr_NormalizeException(&type, &value, &tb); - } else { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto raise_error; - } - value = type; - type = (PyObject*) Py_TYPE(type); - Py_INCREF(type); - if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto raise_error; - } - } - __Pyx_PyThreadState_assign - __Pyx_ErrRestore(type, value, tb); - return; -raise_error: - Py_XDECREF(value); - Py_XDECREF(type); - Py_XDECREF(tb); - return; -} -#else -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - PyObject* owned_instance = NULL; - if (tb == Py_None) { - tb = 0; - } else if (tb && !PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto bad; - } - if (value == Py_None) - value = 0; - if (PyExceptionInstance_Check(type)) { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto bad; - } - value = type; - type = (PyObject*) Py_TYPE(value); - } else if (PyExceptionClass_Check(type)) { - PyObject *instance_class = NULL; - if (value && PyExceptionInstance_Check(value)) { - instance_class = (PyObject*) Py_TYPE(value); - if (instance_class != type) { - int is_subclass = PyObject_IsSubclass(instance_class, type); - if (!is_subclass) { - instance_class = NULL; - } else if (unlikely(is_subclass == -1)) { - goto bad; - } else { - type = instance_class; - } - } - } - if (!instance_class) { - PyObject *args; - if (!value) - args = PyTuple_New(0); - else if (PyTuple_Check(value)) { - Py_INCREF(value); - args = value; - } else - args = PyTuple_Pack(1, value); - if (!args) - goto bad; - owned_instance = PyObject_Call(type, args, NULL); - Py_DECREF(args); - if (!owned_instance) - goto bad; - value = owned_instance; - if (!PyExceptionInstance_Check(value)) { - PyErr_Format(PyExc_TypeError, - "calling %R should have returned an instance of " - "BaseException, not %R", - type, Py_TYPE(value)); - goto bad; - } - } - } else { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto bad; - } - if (cause) { - PyObject *fixed_cause; - if (cause == Py_None) { - fixed_cause = NULL; - } else if (PyExceptionClass_Check(cause)) { - fixed_cause = PyObject_CallObject(cause, NULL); - if (fixed_cause == NULL) - goto bad; - } else if (PyExceptionInstance_Check(cause)) { - fixed_cause = cause; - Py_INCREF(fixed_cause); - } else { - PyErr_SetString(PyExc_TypeError, - "exception causes must derive from " - "BaseException"); - goto bad; - } - PyException_SetCause(value, fixed_cause); - } - PyErr_SetObject(type, value); - if (tb) { - #if PY_VERSION_HEX >= 0x030C00A6 - PyException_SetTraceback(value, tb); - #elif CYTHON_FAST_THREAD_STATE - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* tmp_tb = tstate->curexc_traceback; - if (tb != tmp_tb) { - Py_INCREF(tb); - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_tb); - } -#else - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); - Py_INCREF(tb); - PyErr_Restore(tmp_type, tmp_value, tb); - Py_XDECREF(tmp_tb); -#endif - } -bad: - Py_XDECREF(owned_instance); - return; -} -#endif - -/* GetItemInt */ -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { - PyObject *r; - if (unlikely(!j)) return NULL; - r = PyObject_GetItem(o, j); - 
Py_DECREF(j); - return r; -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyList_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) { - PyObject *r = PyList_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - Py_ssize_t wrapped_i = i; - if (wraparound & unlikely(i < 0)) { - wrapped_i += PyTuple_GET_SIZE(o); - } - if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); - Py_INCREF(r); - return r; - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -#else - return PySequence_GetItem(o, i); -#endif -} -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, - CYTHON_NCP_UNUSED int wraparound, - CYTHON_NCP_UNUSED int boundscheck) { -#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS - if (is_list || PyList_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); - if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) { - PyObject *r = PyList_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } - else if (PyTuple_CheckExact(o)) { - Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); - if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, n); - Py_INCREF(r); - return r; - } - } else { - PyMappingMethods *mm = Py_TYPE(o)->tp_as_mapping; - PySequenceMethods *sm = Py_TYPE(o)->tp_as_sequence; - if (mm && mm->mp_subscript) { - PyObject *r, *key = PyInt_FromSsize_t(i); - if (unlikely(!key)) return NULL; - r = mm->mp_subscript(o, key); - Py_DECREF(key); - return r; - } - if (likely(sm && sm->sq_item)) { - if (wraparound && unlikely(i < 0) && likely(sm->sq_length)) { - Py_ssize_t l = sm->sq_length(o); - if (likely(l >= 0)) { - i += l; - } else { - if (!PyErr_ExceptionMatches(PyExc_OverflowError)) - return NULL; - PyErr_Clear(); - } - } - return sm->sq_item(o, i); - } - } -#else - if (is_list || !PyMapping_Check(o)) { - return PySequence_GetItem(o, i); - } -#endif - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -} - -/* PyObjectFormatAndDecref */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatSimpleAndDecref(PyObject* s, PyObject* f) { - if (unlikely(!s)) return NULL; - if (likely(PyUnicode_CheckExact(s))) return s; - #if PY_MAJOR_VERSION < 3 - if (likely(PyString_CheckExact(s))) { - PyObject *result = PyUnicode_FromEncodedObject(s, NULL, "strict"); - Py_DECREF(s); - return result; - } - #endif - return __Pyx_PyObject_FormatAndDecref(s, f); -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatAndDecref(PyObject* s, PyObject* f) { - PyObject *result; - if (unlikely(!s)) return NULL; - result = PyObject_Format(s, f); - Py_DECREF(s); - return result; -} - -/* JoinPyUnicode */ -static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, - Py_UCS4 max_char) { -#if CYTHON_USE_UNICODE_INTERNALS && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - PyObject *result_uval; - int result_ukind, kind_shift; - Py_ssize_t i, char_pos; - void *result_udata; - CYTHON_MAYBE_UNUSED_VAR(max_char); -#if CYTHON_PEP393_ENABLED - result_uval = PyUnicode_New(result_ulength, max_char); - if (unlikely(!result_uval)) return NULL; - result_ukind = (max_char <= 255) ? PyUnicode_1BYTE_KIND : (max_char <= 65535) ? PyUnicode_2BYTE_KIND : PyUnicode_4BYTE_KIND; - kind_shift = (result_ukind == PyUnicode_4BYTE_KIND) ? 2 : result_ukind - 1; - result_udata = PyUnicode_DATA(result_uval); -#else - result_uval = PyUnicode_FromUnicode(NULL, result_ulength); - if (unlikely(!result_uval)) return NULL; - result_ukind = sizeof(Py_UNICODE); - kind_shift = (result_ukind == 4) ? 
2 : result_ukind - 1; - result_udata = PyUnicode_AS_UNICODE(result_uval); -#endif - assert(kind_shift == 2 || kind_shift == 1 || kind_shift == 0); - char_pos = 0; - for (i=0; i < value_count; i++) { - int ukind; - Py_ssize_t ulength; - void *udata; - PyObject *uval = PyTuple_GET_ITEM(value_tuple, i); - if (unlikely(__Pyx_PyUnicode_READY(uval))) - goto bad; - ulength = __Pyx_PyUnicode_GET_LENGTH(uval); - if (unlikely(!ulength)) - continue; - if (unlikely((PY_SSIZE_T_MAX >> kind_shift) - ulength < char_pos)) - goto overflow; - ukind = __Pyx_PyUnicode_KIND(uval); - udata = __Pyx_PyUnicode_DATA(uval); - if (!CYTHON_PEP393_ENABLED || ukind == result_ukind) { - memcpy((char *)result_udata + (char_pos << kind_shift), udata, (size_t) (ulength << kind_shift)); - } else { - #if PY_VERSION_HEX >= 0x030d0000 - if (unlikely(PyUnicode_CopyCharacters(result_uval, char_pos, uval, 0, ulength) < 0)) goto bad; - #elif CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 || defined(_PyUnicode_FastCopyCharacters) - _PyUnicode_FastCopyCharacters(result_uval, char_pos, uval, 0, ulength); - #else - Py_ssize_t j; - for (j=0; j < ulength; j++) { - Py_UCS4 uchar = __Pyx_PyUnicode_READ(ukind, udata, j); - __Pyx_PyUnicode_WRITE(result_ukind, result_udata, char_pos+j, uchar); - } - #endif - } - char_pos += ulength; - } - return result_uval; -overflow: - PyErr_SetString(PyExc_OverflowError, "join() result is too long for a Python string"); -bad: - Py_DECREF(result_uval); - return NULL; -#else - CYTHON_UNUSED_VAR(max_char); - CYTHON_UNUSED_VAR(result_ulength); - CYTHON_UNUSED_VAR(value_count); - return PyUnicode_Join(__pyx_empty_unicode, value_tuple); -#endif -} - -/* RaiseUnexpectedTypeError */ -static int -__Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj) -{ - __Pyx_TypeName obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); - PyErr_Format(PyExc_TypeError, "Expected %s, got " __Pyx_FMT_TYPENAME, - expected, obj_type_name); - __Pyx_DECREF_TypeName(obj_type_name); - return 0; -} - -/* PyDictVersioning */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { - PyObject **dictptr = NULL; - Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; - if (offset) { -#if CYTHON_COMPILING_IN_CPYTHON - dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); -#else - dictptr = _PyObject_GetDictPtr(obj); -#endif - } - return (dictptr && *dictptr) ? 
__PYX_GET_DICT_VERSION(*dictptr) : 0; -} -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) - return 0; - return obj_dict_version == __Pyx_get_object_dict_version(obj); -} -#endif - -/* GetAttr */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { -#if CYTHON_USE_TYPE_SLOTS -#if PY_MAJOR_VERSION >= 3 - if (likely(PyUnicode_Check(n))) -#else - if (likely(PyString_Check(n))) -#endif - return __Pyx_PyObject_GetAttrStr(o, n); -#endif - return PyObject_GetAttr(o, n); -} - -/* HasAttr */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { - PyObject *r; - if (unlikely(!__Pyx_PyBaseString_Check(n))) { - PyErr_SetString(PyExc_TypeError, - "hasattr(): attribute name must be string"); - return -1; - } - r = __Pyx_GetAttr(o, n); - if (!r) { - PyErr_Clear(); - return 0; - } else { - Py_DECREF(r); - return 1; - } -} - -/* GetAttr3 */ -#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1 -static PyObject *__Pyx_GetAttr3Default(PyObject *d) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - return NULL; - __Pyx_PyErr_Clear(); - Py_INCREF(d); - return d; -} -#endif -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { - PyObject *r; -#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 - int res = PyObject_GetOptionalAttr(o, n, &r); - return (res != 0) ? r : __Pyx_NewRef(d); -#else - #if CYTHON_USE_TYPE_SLOTS - if (likely(PyString_Check(n))) { - r = __Pyx_PyObject_GetAttrStrNoError(o, n); - if (unlikely(!r) && likely(!PyErr_Occurred())) { - r = __Pyx_NewRef(d); - } - return r; - } - #endif - r = PyObject_GetAttr(o, n); - return (likely(r)) ? 
r : __Pyx_GetAttr3Default(d); -#endif -} - -/* GetModuleGlobalName */ -#if CYTHON_USE_DICT_VERSIONS -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) -#else -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) -#endif -{ - PyObject *result; -#if !CYTHON_AVOID_BORROWED_REFS -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 - result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } else if (unlikely(PyErr_Occurred())) { - return NULL; - } -#elif CYTHON_COMPILING_IN_LIMITED_API - if (unlikely(!__pyx_m)) { - return NULL; - } - result = PyObject_GetAttr(__pyx_m, name); - if (likely(result)) { - return result; - } -#else - result = PyDict_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } -#endif -#else - result = PyObject_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } - PyErr_Clear(); -#endif - return __Pyx_GetBuiltinName(name); -} - -/* ExtTypeTest */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { - __Pyx_TypeName obj_type_name; - __Pyx_TypeName type_name; - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - if (likely(__Pyx_TypeCheck(obj, type))) - return 1; - obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); - type_name = __Pyx_PyType_GetName(type); - PyErr_Format(PyExc_TypeError, - "Cannot convert " __Pyx_FMT_TYPENAME " to " __Pyx_FMT_TYPENAME, - obj_type_name, type_name); - __Pyx_DECREF_TypeName(obj_type_name); - __Pyx_DECREF_TypeName(type_name); - return 0; -} - -/* py_dict_keys */ -static CYTHON_INLINE PyObject* __Pyx_PyDict_Keys(PyObject* d) { - if (PY_MAJOR_VERSION >= 3) - return __Pyx_CallUnboundCMethod0(&__pyx_umethod_PyDict_Type_keys, d); - else - return PyDict_Keys(d); -} - -/* CallUnboundCMethod1 */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) { - if (likely(cfunc->func)) { - int flag = cfunc->flag; - if (flag == METH_O) { - return (*(cfunc->func))(self, arg); - } else if ((PY_VERSION_HEX >= 0x030600B1) && flag == METH_FASTCALL) { - #if PY_VERSION_HEX >= 0x030700A0 - return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1); - #else - return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); - #endif - } else if ((PY_VERSION_HEX >= 0x030700A0) && flag == (METH_FASTCALL | METH_KEYWORDS)) { - return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL); - } - } - return __Pyx__CallUnboundCMethod1(cfunc, self, arg); -} -#endif -static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){ - PyObject *args, *result = NULL; - if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; -#if CYTHON_COMPILING_IN_CPYTHON - if (cfunc->func && (cfunc->flag & METH_VARARGS)) { - args = PyTuple_New(1); - if (unlikely(!args)) goto bad; - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 0, arg); - if 
(cfunc->flag & METH_KEYWORDS) - result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL); - else - result = (*cfunc->func)(self, args); - } else { - args = PyTuple_New(2); - if (unlikely(!args)) goto bad; - Py_INCREF(self); - PyTuple_SET_ITEM(args, 0, self); - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 1, arg); - result = __Pyx_PyObject_Call(cfunc->method, args, NULL); - } -#else - args = PyTuple_Pack(2, self, arg); - if (unlikely(!args)) goto bad; - result = __Pyx_PyObject_Call(cfunc->method, args, NULL); -#endif -bad: - Py_XDECREF(args); - return result; -} - -/* IterFinish */ -static CYTHON_INLINE int __Pyx_IterFinish(void) { - PyObject* exc_type; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - exc_type = __Pyx_PyErr_CurrentExceptionType(); - if (unlikely(exc_type)) { - if (unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) - return -1; - __Pyx_PyErr_Clear(); - return 0; - } - return 0; -} - -/* RaiseNeedMoreValuesToUnpack */ -static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { - PyErr_Format(PyExc_ValueError, - "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", - index, (index == 1) ? "" : "s"); -} - -/* RaiseTooManyValuesToUnpack */ -static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { - PyErr_Format(PyExc_ValueError, - "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); -} - -/* UnpackItemEndCheck */ -static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { - if (unlikely(retval)) { - Py_DECREF(retval); - __Pyx_RaiseTooManyValuesError(expected); - return -1; - } - return __Pyx_IterFinish(); -} - -/* RaiseNoneIterError */ -static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); -} - -/* UnpackTupleError */ -static void __Pyx_UnpackTupleError(PyObject *t, Py_ssize_t index) { - if (t == Py_None) { - __Pyx_RaiseNoneNotIterableError(); - } else if (PyTuple_GET_SIZE(t) < index) { - __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(t)); - } else { - __Pyx_RaiseTooManyValuesError(index); - } -} - -/* UnpackTuple2 */ -static CYTHON_INLINE int __Pyx_unpack_tuple2_exact( - PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, int decref_tuple) { - PyObject *value1 = NULL, *value2 = NULL; -#if CYTHON_COMPILING_IN_PYPY - value1 = PySequence_ITEM(tuple, 0); if (unlikely(!value1)) goto bad; - value2 = PySequence_ITEM(tuple, 1); if (unlikely(!value2)) goto bad; -#else - value1 = PyTuple_GET_ITEM(tuple, 0); Py_INCREF(value1); - value2 = PyTuple_GET_ITEM(tuple, 1); Py_INCREF(value2); -#endif - if (decref_tuple) { - Py_DECREF(tuple); - } - *pvalue1 = value1; - *pvalue2 = value2; - return 0; -#if CYTHON_COMPILING_IN_PYPY -bad: - Py_XDECREF(value1); - Py_XDECREF(value2); - if (decref_tuple) { Py_XDECREF(tuple); } - return -1; -#endif -} -static int __Pyx_unpack_tuple2_generic(PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, - int has_known_size, int decref_tuple) { - Py_ssize_t index; - PyObject *value1 = NULL, *value2 = NULL, *iter = NULL; - iternextfunc iternext; - iter = PyObject_GetIter(tuple); - if (unlikely(!iter)) goto bad; - if (decref_tuple) { Py_DECREF(tuple); tuple = NULL; } - iternext = __Pyx_PyObject_GetIterNextFunc(iter); - value1 = iternext(iter); if (unlikely(!value1)) { index = 0; goto unpacking_failed; } - value2 = iternext(iter); if (unlikely(!value2)) { index = 1; goto unpacking_failed; } - if (!has_known_size && 
unlikely(__Pyx_IternextUnpackEndCheck(iternext(iter), 2))) goto bad; - Py_DECREF(iter); - *pvalue1 = value1; - *pvalue2 = value2; - return 0; -unpacking_failed: - if (!has_known_size && __Pyx_IterFinish() == 0) - __Pyx_RaiseNeedMoreValuesError(index); -bad: - Py_XDECREF(iter); - Py_XDECREF(value1); - Py_XDECREF(value2); - if (decref_tuple) { Py_XDECREF(tuple); } - return -1; -} - -/* dict_iter */ -#if CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 -#include -#endif -static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* iterable, int is_dict, PyObject* method_name, - Py_ssize_t* p_orig_length, int* p_source_is_dict) { - is_dict = is_dict || likely(PyDict_CheckExact(iterable)); - *p_source_is_dict = is_dict; - if (is_dict) { -#if !CYTHON_COMPILING_IN_PYPY - *p_orig_length = PyDict_Size(iterable); - Py_INCREF(iterable); - return iterable; -#elif PY_MAJOR_VERSION >= 3 - static PyObject *py_items = NULL, *py_keys = NULL, *py_values = NULL; - PyObject **pp = NULL; - if (method_name) { - const char *name = PyUnicode_AsUTF8(method_name); - if (strcmp(name, "iteritems") == 0) pp = &py_items; - else if (strcmp(name, "iterkeys") == 0) pp = &py_keys; - else if (strcmp(name, "itervalues") == 0) pp = &py_values; - if (pp) { - if (!*pp) { - *pp = PyUnicode_FromString(name + 4); - if (!*pp) - return NULL; - } - method_name = *pp; - } - } -#endif - } - *p_orig_length = 0; - if (method_name) { - PyObject* iter; - iterable = __Pyx_PyObject_CallMethod0(iterable, method_name); - if (!iterable) - return NULL; -#if !CYTHON_COMPILING_IN_PYPY - if (PyTuple_CheckExact(iterable) || PyList_CheckExact(iterable)) - return iterable; -#endif - iter = PyObject_GetIter(iterable); - Py_DECREF(iterable); - return iter; - } - return PyObject_GetIter(iterable); -} -static CYTHON_INLINE int __Pyx_dict_iter_next( - PyObject* iter_obj, CYTHON_NCP_UNUSED Py_ssize_t orig_length, CYTHON_NCP_UNUSED Py_ssize_t* ppos, - PyObject** pkey, PyObject** pvalue, PyObject** pitem, int source_is_dict) { - PyObject* next_item; -#if !CYTHON_COMPILING_IN_PYPY - if (source_is_dict) { - PyObject *key, *value; - if (unlikely(orig_length != PyDict_Size(iter_obj))) { - PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration"); - return -1; - } - if (unlikely(!PyDict_Next(iter_obj, ppos, &key, &value))) { - return 0; - } - if (pitem) { - PyObject* tuple = PyTuple_New(2); - if (unlikely(!tuple)) { - return -1; - } - Py_INCREF(key); - Py_INCREF(value); - PyTuple_SET_ITEM(tuple, 0, key); - PyTuple_SET_ITEM(tuple, 1, value); - *pitem = tuple; - } else { - if (pkey) { - Py_INCREF(key); - *pkey = key; - } - if (pvalue) { - Py_INCREF(value); - *pvalue = value; - } - } - return 1; - } else if (PyTuple_CheckExact(iter_obj)) { - Py_ssize_t pos = *ppos; - if (unlikely(pos >= PyTuple_GET_SIZE(iter_obj))) return 0; - *ppos = pos + 1; - next_item = PyTuple_GET_ITEM(iter_obj, pos); - Py_INCREF(next_item); - } else if (PyList_CheckExact(iter_obj)) { - Py_ssize_t pos = *ppos; - if (unlikely(pos >= PyList_GET_SIZE(iter_obj))) return 0; - *ppos = pos + 1; - next_item = PyList_GET_ITEM(iter_obj, pos); - Py_INCREF(next_item); - } else -#endif - { - next_item = PyIter_Next(iter_obj); - if (unlikely(!next_item)) { - return __Pyx_IterFinish(); - } - } - if (pitem) { - *pitem = next_item; - } else if (pkey && pvalue) { - if (__Pyx_unpack_tuple2(next_item, pkey, pvalue, source_is_dict, source_is_dict, 1)) - return -1; - } else if (pkey) { - *pkey = next_item; - } else { - *pvalue = next_item; - } - return 1; -} - -/* ArgTypeTest */ -static int 
__Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) -{ - __Pyx_TypeName type_name; - __Pyx_TypeName obj_type_name; - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - else if (exact) { - #if PY_MAJOR_VERSION == 2 - if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; - #endif - } - else { - if (likely(__Pyx_TypeCheck(obj, type))) return 1; - } - type_name = __Pyx_PyType_GetName(type); - obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj)); - PyErr_Format(PyExc_TypeError, - "Argument '%.200s' has incorrect type (expected " __Pyx_FMT_TYPENAME - ", got " __Pyx_FMT_TYPENAME ")", name, type_name, obj_type_name); - __Pyx_DECREF_TypeName(type_name); - __Pyx_DECREF_TypeName(obj_type_name); - return 0; -} - -/* py_dict_values */ -static CYTHON_INLINE PyObject* __Pyx_PyDict_Values(PyObject* d) { - if (PY_MAJOR_VERSION >= 3) - return __Pyx_CallUnboundCMethod0(&__pyx_umethod_PyDict_Type_values, d); - else - return PyDict_Values(d); -} - -/* Import */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { - PyObject *module = 0; - PyObject *empty_dict = 0; - PyObject *empty_list = 0; - #if PY_MAJOR_VERSION < 3 - PyObject *py_import; - py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); - if (unlikely(!py_import)) - goto bad; - if (!from_list) { - empty_list = PyList_New(0); - if (unlikely(!empty_list)) - goto bad; - from_list = empty_list; - } - #endif - empty_dict = PyDict_New(); - if (unlikely(!empty_dict)) - goto bad; - { - #if PY_MAJOR_VERSION >= 3 - if (level == -1) { - if (strchr(__Pyx_MODULE_NAME, '.') != NULL) { - module = PyImport_ImportModuleLevelObject( - name, __pyx_d, empty_dict, from_list, 1); - if (unlikely(!module)) { - if (unlikely(!PyErr_ExceptionMatches(PyExc_ImportError))) - goto bad; - PyErr_Clear(); - } - } - level = 0; - } - #endif - if (!module) { - #if PY_MAJOR_VERSION < 3 - PyObject *py_level = PyInt_FromLong(level); - if (unlikely(!py_level)) - goto bad; - module = PyObject_CallFunctionObjArgs(py_import, - name, __pyx_d, empty_dict, from_list, py_level, (PyObject *)NULL); - Py_DECREF(py_level); - #else - module = PyImport_ImportModuleLevelObject( - name, __pyx_d, empty_dict, from_list, level); - #endif - } - } -bad: - Py_XDECREF(empty_dict); - Py_XDECREF(empty_list); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_import); - #endif - return module; -} - -/* ImportFrom */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { - PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); - if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { - const char* module_name_str = 0; - PyObject* module_name = 0; - PyObject* module_dot = 0; - PyObject* full_name = 0; - PyErr_Clear(); - module_name_str = PyModule_GetName(module); - if (unlikely(!module_name_str)) { goto modbad; } - module_name = PyUnicode_FromString(module_name_str); - if (unlikely(!module_name)) { goto modbad; } - module_dot = PyUnicode_Concat(module_name, __pyx_kp_u__6); - if (unlikely(!module_dot)) { goto modbad; } - full_name = PyUnicode_Concat(module_dot, name); - if (unlikely(!full_name)) { goto modbad; } - #if PY_VERSION_HEX < 0x030700A1 || (CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM < 0x07030400) - { - PyObject *modules = PyImport_GetModuleDict(); - if (unlikely(!modules)) - goto modbad; - value = PyObject_GetItem(modules, full_name); - } - #else - value = PyImport_GetModule(full_name); - #endif - modbad: - 
Py_XDECREF(full_name); - Py_XDECREF(module_dot); - Py_XDECREF(module_name); - } - if (unlikely(!value)) { - PyErr_Format(PyExc_ImportError, - #if PY_MAJOR_VERSION < 3 - "cannot import name %.230s", PyString_AS_STRING(name)); - #else - "cannot import name %S", name); - #endif - } - return value; -} - -/* FixUpExtensionType */ -#if CYTHON_USE_TYPE_SPECS -static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) { -#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - CYTHON_UNUSED_VAR(spec); - CYTHON_UNUSED_VAR(type); -#else - const PyType_Slot *slot = spec->slots; - while (slot && slot->slot && slot->slot != Py_tp_members) - slot++; - if (slot && slot->slot == Py_tp_members) { - int changed = 0; -#if !(PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON) - const -#endif - PyMemberDef *memb = (PyMemberDef*) slot->pfunc; - while (memb && memb->name) { - if (memb->name[0] == '_' && memb->name[1] == '_') { -#if PY_VERSION_HEX < 0x030900b1 - if (strcmp(memb->name, "__weaklistoffset__") == 0) { - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); - type->tp_weaklistoffset = memb->offset; - changed = 1; - } - else if (strcmp(memb->name, "__dictoffset__") == 0) { - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); - type->tp_dictoffset = memb->offset; - changed = 1; - } -#if CYTHON_METH_FASTCALL - else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); -#if PY_VERSION_HEX >= 0x030800b4 - type->tp_vectorcall_offset = memb->offset; -#else - type->tp_print = (printfunc) memb->offset; -#endif - changed = 1; - } -#endif -#else - if ((0)); -#endif -#if PY_VERSION_HEX <= 0x030900b1 && CYTHON_COMPILING_IN_CPYTHON - else if (strcmp(memb->name, "__module__") == 0) { - PyObject *descr; - assert(memb->type == T_OBJECT); - assert(memb->flags == 0 || memb->flags == READONLY); - descr = PyDescr_NewMember(type, memb); - if (unlikely(!descr)) - return -1; - if (unlikely(PyDict_SetItem(type->tp_dict, PyDescr_NAME(descr), descr) < 0)) { - Py_DECREF(descr); - return -1; - } - Py_DECREF(descr); - changed = 1; - } -#endif - } - memb++; - } - if (changed) - PyType_Modified(type); - } -#endif - return 0; -} -#endif - -/* FormatTypeName */ -#if CYTHON_COMPILING_IN_LIMITED_API -static __Pyx_TypeName -__Pyx_PyType_GetName(PyTypeObject* tp) -{ - PyObject *name = __Pyx_PyObject_GetAttrStr((PyObject *)tp, - __pyx_n_s_name); - if (unlikely(name == NULL) || unlikely(!PyUnicode_Check(name))) { - PyErr_Clear(); - Py_XDECREF(name); - name = __Pyx_NewRef(__pyx_n_s__8); - } - return name; -} -#endif - -/* ValidateExternBase */ -static int __Pyx_validate_extern_base(PyTypeObject *base) { - Py_ssize_t itemsize; -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *py_itemsize; -#endif -#if !CYTHON_COMPILING_IN_LIMITED_API - itemsize = ((PyTypeObject *)base)->tp_itemsize; -#else - py_itemsize = PyObject_GetAttrString((PyObject*)base, "__itemsize__"); - if (!py_itemsize) - return -1; - itemsize = PyLong_AsSsize_t(py_itemsize); - Py_DECREF(py_itemsize); - py_itemsize = 0; - if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) - return -1; -#endif - if (itemsize) { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(base); - PyErr_Format(PyExc_TypeError, - "inheritance from PyVarObject types like '" __Pyx_FMT_TYPENAME "' not currently supported", b_name); - __Pyx_DECREF_TypeName(b_name); - return -1; - } - return 0; -} - -/* ValidateBasesTuple */ -#if CYTHON_COMPILING_IN_CPYTHON || 
CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS -static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases) { - Py_ssize_t i, n; -#if CYTHON_ASSUME_SAFE_MACROS - n = PyTuple_GET_SIZE(bases); -#else - n = PyTuple_Size(bases); - if (n < 0) return -1; -#endif - for (i = 1; i < n; i++) - { -#if CYTHON_AVOID_BORROWED_REFS - PyObject *b0 = PySequence_GetItem(bases, i); - if (!b0) return -1; -#elif CYTHON_ASSUME_SAFE_MACROS - PyObject *b0 = PyTuple_GET_ITEM(bases, i); -#else - PyObject *b0 = PyTuple_GetItem(bases, i); - if (!b0) return -1; -#endif - PyTypeObject *b; -#if PY_MAJOR_VERSION < 3 - if (PyClass_Check(b0)) - { - PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class", - PyString_AS_STRING(((PyClassObject*)b0)->cl_name)); -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } -#endif - b = (PyTypeObject*) b0; - if (!__Pyx_PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE)) - { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); - PyErr_Format(PyExc_TypeError, - "base class '" __Pyx_FMT_TYPENAME "' is not a heap type", b_name); - __Pyx_DECREF_TypeName(b_name); -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } - if (dictoffset == 0) - { - Py_ssize_t b_dictoffset = 0; -#if CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY - b_dictoffset = b->tp_dictoffset; -#else - PyObject *py_b_dictoffset = PyObject_GetAttrString((PyObject*)b, "__dictoffset__"); - if (!py_b_dictoffset) goto dictoffset_return; - b_dictoffset = PyLong_AsSsize_t(py_b_dictoffset); - Py_DECREF(py_b_dictoffset); - if (b_dictoffset == -1 && PyErr_Occurred()) goto dictoffset_return; -#endif - if (b_dictoffset) { - { - __Pyx_TypeName b_name = __Pyx_PyType_GetName(b); - PyErr_Format(PyExc_TypeError, - "extension type '%.200s' has no __dict__ slot, " - "but base type '" __Pyx_FMT_TYPENAME "' has: " - "either add 'cdef dict __dict__' to the extension type " - "or add '__slots__ = [...]' to the base type", - type_name, b_name); - __Pyx_DECREF_TypeName(b_name); - } -#if !(CYTHON_USE_TYPE_SLOTS || CYTHON_COMPILING_IN_PYPY) - dictoffset_return: -#endif -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - return -1; - } - } -#if CYTHON_AVOID_BORROWED_REFS - Py_DECREF(b0); -#endif - } - return 0; -} -#endif - -/* PyType_Ready */ -static int __Pyx_PyType_Ready(PyTypeObject *t) { -#if CYTHON_USE_TYPE_SPECS || !(CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API) || defined(PYSTON_MAJOR_VERSION) - (void)__Pyx_PyObject_CallMethod0; -#if CYTHON_USE_TYPE_SPECS - (void)__Pyx_validate_bases_tuple; -#endif - return PyType_Ready(t); -#else - int r; - PyObject *bases = __Pyx_PyType_GetSlot(t, tp_bases, PyObject*); - if (bases && unlikely(__Pyx_validate_bases_tuple(t->tp_name, t->tp_dictoffset, bases) == -1)) - return -1; -#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) - { - int gc_was_enabled; - #if PY_VERSION_HEX >= 0x030A00b1 - gc_was_enabled = PyGC_Disable(); - (void)__Pyx_PyObject_CallMethod0; - #else - PyObject *ret, *py_status; - PyObject *gc = NULL; - #if PY_VERSION_HEX >= 0x030700a1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM+0 >= 0x07030400) - gc = PyImport_GetModule(__pyx_kp_u_gc); - #endif - if (unlikely(!gc)) gc = PyImport_Import(__pyx_kp_u_gc); - if (unlikely(!gc)) return -1; - py_status = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_isenabled); - if (unlikely(!py_status)) { - Py_DECREF(gc); - return -1; - } - gc_was_enabled = __Pyx_PyObject_IsTrue(py_status); - Py_DECREF(py_status); - if 
(gc_was_enabled > 0) { - ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_disable); - if (unlikely(!ret)) { - Py_DECREF(gc); - return -1; - } - Py_DECREF(ret); - } else if (unlikely(gc_was_enabled == -1)) { - Py_DECREF(gc); - return -1; - } - #endif - t->tp_flags |= Py_TPFLAGS_HEAPTYPE; -#if PY_VERSION_HEX >= 0x030A0000 - t->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; -#endif -#else - (void)__Pyx_PyObject_CallMethod0; -#endif - r = PyType_Ready(t); -#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) - t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; - #if PY_VERSION_HEX >= 0x030A00b1 - if (gc_was_enabled) - PyGC_Enable(); - #else - if (gc_was_enabled) { - PyObject *tp, *v, *tb; - PyErr_Fetch(&tp, &v, &tb); - ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_enable); - if (likely(ret || r == -1)) { - Py_XDECREF(ret); - PyErr_Restore(tp, v, tb); - } else { - Py_XDECREF(tp); - Py_XDECREF(v); - Py_XDECREF(tb); - r = -1; - } - } - Py_DECREF(gc); - #endif - } -#endif - return r; -#endif -} - -/* PyObject_GenericGetAttrNoDict */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { - __Pyx_TypeName type_name = __Pyx_PyType_GetName(tp); - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", - type_name, attr_name); -#else - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", - type_name, PyString_AS_STRING(attr_name)); -#endif - __Pyx_DECREF_TypeName(type_name); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { - PyObject *descr; - PyTypeObject *tp = Py_TYPE(obj); - if (unlikely(!PyString_Check(attr_name))) { - return PyObject_GenericGetAttr(obj, attr_name); - } - assert(!tp->tp_dictoffset); - descr = _PyType_Lookup(tp, attr_name); - if (unlikely(!descr)) { - return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); - } - Py_INCREF(descr); - #if PY_MAJOR_VERSION < 3 - if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) - #endif - { - descrgetfunc f = Py_TYPE(descr)->tp_descr_get; - if (unlikely(f)) { - PyObject *res = f(descr, obj, (PyObject *)tp); - Py_DECREF(descr); - return res; - } - } - return descr; -} -#endif - -/* PyObject_GenericGetAttr */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { - if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { - return PyObject_GenericGetAttr(obj, attr_name); - } - return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); -} -#endif - -/* SetVTable */ -static int __Pyx_SetVtable(PyTypeObject *type, void *vtable) { - PyObject *ob = PyCapsule_New(vtable, 0, 0); - if (unlikely(!ob)) - goto bad; -#if CYTHON_COMPILING_IN_LIMITED_API - if (unlikely(PyObject_SetAttr((PyObject *) type, __pyx_n_s_pyx_vtable, ob) < 0)) -#else - if (unlikely(PyDict_SetItem(type->tp_dict, __pyx_n_s_pyx_vtable, ob) < 0)) -#endif - goto bad; - Py_DECREF(ob); - return 0; -bad: - Py_XDECREF(ob); - return -1; -} - -/* GetVTable */ -static void* __Pyx_GetVtable(PyTypeObject *type) { - void* ptr; -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *ob = PyObject_GetAttr((PyObject *)type, __pyx_n_s_pyx_vtable); -#else - PyObject *ob = PyObject_GetItem(type->tp_dict, __pyx_n_s_pyx_vtable); -#endif - if (!ob) - goto bad; - ptr = PyCapsule_GetPointer(ob, 0); - if (!ptr && !PyErr_Occurred()) - 
PyErr_SetString(PyExc_RuntimeError, "invalid vtable found for imported type"); - Py_DECREF(ob); - return ptr; -bad: - Py_XDECREF(ob); - return NULL; -} - -/* MergeVTables */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_MergeVtables(PyTypeObject *type) { - int i; - void** base_vtables; - __Pyx_TypeName tp_base_name; - __Pyx_TypeName base_name; - void* unknown = (void*)-1; - PyObject* bases = type->tp_bases; - int base_depth = 0; - { - PyTypeObject* base = type->tp_base; - while (base) { - base_depth += 1; - base = base->tp_base; - } - } - base_vtables = (void**) malloc(sizeof(void*) * (size_t)(base_depth + 1)); - base_vtables[0] = unknown; - for (i = 1; i < PyTuple_GET_SIZE(bases); i++) { - void* base_vtable = __Pyx_GetVtable(((PyTypeObject*)PyTuple_GET_ITEM(bases, i))); - if (base_vtable != NULL) { - int j; - PyTypeObject* base = type->tp_base; - for (j = 0; j < base_depth; j++) { - if (base_vtables[j] == unknown) { - base_vtables[j] = __Pyx_GetVtable(base); - base_vtables[j + 1] = unknown; - } - if (base_vtables[j] == base_vtable) { - break; - } else if (base_vtables[j] == NULL) { - goto bad; - } - base = base->tp_base; - } - } - } - PyErr_Clear(); - free(base_vtables); - return 0; -bad: - tp_base_name = __Pyx_PyType_GetName(type->tp_base); - base_name = __Pyx_PyType_GetName((PyTypeObject*)PyTuple_GET_ITEM(bases, i)); - PyErr_Format(PyExc_TypeError, - "multiple bases have vtable conflict: '" __Pyx_FMT_TYPENAME "' and '" __Pyx_FMT_TYPENAME "'", tp_base_name, base_name); - __Pyx_DECREF_TypeName(tp_base_name); - __Pyx_DECREF_TypeName(base_name); - free(base_vtables); - return -1; -} -#endif - -/* SetupReduce */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { - int ret; - PyObject *name_attr; - name_attr = __Pyx_PyObject_GetAttrStrNoError(meth, __pyx_n_s_name); - if (likely(name_attr)) { - ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); - } else { - ret = -1; - } - if (unlikely(ret < 0)) { - PyErr_Clear(); - ret = 0; - } - Py_XDECREF(name_attr); - return ret; -} -static int __Pyx_setup_reduce(PyObject* type_obj) { - int ret = 0; - PyObject *object_reduce = NULL; - PyObject *object_getstate = NULL; - PyObject *object_reduce_ex = NULL; - PyObject *reduce = NULL; - PyObject *reduce_ex = NULL; - PyObject *reduce_cython = NULL; - PyObject *setstate = NULL; - PyObject *setstate_cython = NULL; - PyObject *getstate = NULL; -#if CYTHON_USE_PYTYPE_LOOKUP - getstate = _PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate); -#else - getstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_getstate); - if (!getstate && PyErr_Occurred()) { - goto __PYX_BAD; - } -#endif - if (getstate) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_getstate = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_getstate); -#else - object_getstate = __Pyx_PyObject_GetAttrStrNoError((PyObject*)&PyBaseObject_Type, __pyx_n_s_getstate); - if (!object_getstate && PyErr_Occurred()) { - goto __PYX_BAD; - } -#endif - if (object_getstate != getstate) { - goto __PYX_GOOD; - } - } -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#else - object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#endif - reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; - if (reduce_ex == object_reduce_ex) { -#if CYTHON_USE_PYTYPE_LOOKUP 
- object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#else - object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#endif - reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; - if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { - reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); - if (likely(reduce_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (reduce == object_reduce || PyErr_Occurred()) { - goto __PYX_BAD; - } - setstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate); - if (!setstate) PyErr_Clear(); - if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { - setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); - if (likely(setstate_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (!setstate || PyErr_Occurred()) { - goto __PYX_BAD; - } - } - PyType_Modified((PyTypeObject*)type_obj); - } - } - goto __PYX_GOOD; -__PYX_BAD: - if (!PyErr_Occurred()) { - __Pyx_TypeName type_obj_name = - __Pyx_PyType_GetName((PyTypeObject*)type_obj); - PyErr_Format(PyExc_RuntimeError, - "Unable to initialize pickling for " __Pyx_FMT_TYPENAME, type_obj_name); - __Pyx_DECREF_TypeName(type_obj_name); - } - ret = -1; -__PYX_GOOD: -#if !CYTHON_USE_PYTYPE_LOOKUP - Py_XDECREF(object_reduce); - Py_XDECREF(object_reduce_ex); - Py_XDECREF(object_getstate); - Py_XDECREF(getstate); -#endif - Py_XDECREF(reduce); - Py_XDECREF(reduce_ex); - Py_XDECREF(reduce_cython); - Py_XDECREF(setstate); - Py_XDECREF(setstate_cython); - return ret; -} -#endif - -/* FetchSharedCythonModule */ -static PyObject *__Pyx_FetchSharedCythonABIModule(void) { - return __Pyx_PyImport_AddModuleRef((char*) __PYX_ABI_MODULE_NAME); -} - -/* FetchCommonType */ -static int __Pyx_VerifyCachedType(PyObject *cached_type, - const char *name, - Py_ssize_t basicsize, - Py_ssize_t expected_basicsize) { - if (!PyType_Check(cached_type)) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s is not a type object", name); - return -1; - } - if (basicsize != expected_basicsize) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s has the wrong size, try recompiling", - name); - return -1; - } - return 0; -} -#if !CYTHON_USE_TYPE_SPECS -static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { - PyObject* abi_module; - const char* object_name; - PyTypeObject *cached_type = NULL; - abi_module = __Pyx_FetchSharedCythonABIModule(); - if (!abi_module) return NULL; - object_name = strrchr(type->tp_name, '.'); - object_name = object_name ? 
object_name+1 : type->tp_name; - cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); - if (cached_type) { - if (__Pyx_VerifyCachedType( - (PyObject *)cached_type, - object_name, - cached_type->tp_basicsize, - type->tp_basicsize) < 0) { - goto bad; - } - goto done; - } - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; - PyErr_Clear(); - if (PyType_Ready(type) < 0) goto bad; - if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) - goto bad; - Py_INCREF(type); - cached_type = type; -done: - Py_DECREF(abi_module); - return cached_type; -bad: - Py_XDECREF(cached_type); - cached_type = NULL; - goto done; -} -#else -static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { - PyObject *abi_module, *cached_type = NULL; - const char* object_name = strrchr(spec->name, '.'); - object_name = object_name ? object_name+1 : spec->name; - abi_module = __Pyx_FetchSharedCythonABIModule(); - if (!abi_module) return NULL; - cached_type = PyObject_GetAttrString(abi_module, object_name); - if (cached_type) { - Py_ssize_t basicsize; -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *py_basicsize; - py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); - if (unlikely(!py_basicsize)) goto bad; - basicsize = PyLong_AsSsize_t(py_basicsize); - Py_DECREF(py_basicsize); - py_basicsize = 0; - if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; -#else - basicsize = likely(PyType_Check(cached_type)) ? ((PyTypeObject*) cached_type)->tp_basicsize : -1; -#endif - if (__Pyx_VerifyCachedType( - cached_type, - object_name, - basicsize, - spec->basicsize) < 0) { - goto bad; - } - goto done; - } - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; - PyErr_Clear(); - CYTHON_UNUSED_VAR(module); - cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); - if (unlikely(!cached_type)) goto bad; - if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; - if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; -done: - Py_DECREF(abi_module); - assert(cached_type == NULL || PyType_Check(cached_type)); - return (PyTypeObject *) cached_type; -bad: - Py_XDECREF(cached_type); - cached_type = NULL; - goto done; -} -#endif - -/* PyVectorcallFastCallDict */ -#if CYTHON_METH_FASTCALL -static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) -{ - PyObject *res = NULL; - PyObject *kwnames; - PyObject **newargs; - PyObject **kwvalues; - Py_ssize_t i, pos; - size_t j; - PyObject *key, *value; - unsigned long keys_are_strings; - Py_ssize_t nkw = PyDict_GET_SIZE(kw); - newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); - if (unlikely(newargs == NULL)) { - PyErr_NoMemory(); - return NULL; - } - for (j = 0; j < nargs; j++) newargs[j] = args[j]; - kwnames = PyTuple_New(nkw); - if (unlikely(kwnames == NULL)) { - PyMem_Free(newargs); - return NULL; - } - kwvalues = newargs + nargs; - pos = i = 0; - keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; - while (PyDict_Next(kw, &pos, &key, &value)) { - keys_are_strings &= Py_TYPE(key)->tp_flags; - Py_INCREF(key); - Py_INCREF(value); - PyTuple_SET_ITEM(kwnames, i, key); - kwvalues[i] = value; - i++; - } - if (unlikely(!keys_are_strings)) { - PyErr_SetString(PyExc_TypeError, "keywords must be strings"); - goto cleanup; - } - res = vc(func, newargs, nargs, kwnames); 
-cleanup: - Py_DECREF(kwnames); - for (i = 0; i < nkw; i++) - Py_DECREF(kwvalues[i]); - PyMem_Free(newargs); - return res; -} -static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) -{ - if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { - return vc(func, args, nargs, NULL); - } - return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); -} -#endif - -/* CythonFunctionShared */ -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { - if (__Pyx_CyFunction_Check(func)) { - return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc; - } else if (PyCFunction_Check(func)) { - return PyCFunction_GetFunction(func) == (PyCFunction) cfunc; - } - return 0; -} -#else -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { - return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; -} -#endif -static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - __Pyx_Py_XDECREF_SET( - __Pyx_CyFunction_GetClassObj(f), - ((classobj) ? __Pyx_NewRef(classobj) : NULL)); -#else - __Pyx_Py_XDECREF_SET( - ((PyCMethodObject *) (f))->mm_class, - (PyTypeObject*)((classobj) ? __Pyx_NewRef(classobj) : NULL)); -#endif -} -static PyObject * -__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) -{ - CYTHON_UNUSED_VAR(closure); - if (unlikely(op->func_doc == NULL)) { -#if CYTHON_COMPILING_IN_LIMITED_API - op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); - if (unlikely(!op->func_doc)) return NULL; -#else - if (((PyCFunctionObject*)op)->m_ml->ml_doc) { -#if PY_MAJOR_VERSION >= 3 - op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); -#else - op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); -#endif - if (unlikely(op->func_doc == NULL)) - return NULL; - } else { - Py_INCREF(Py_None); - return Py_None; - } -#endif - } - Py_INCREF(op->func_doc); - return op->func_doc; -} -static int -__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (value == NULL) { - value = Py_None; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_doc, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(op->func_name == NULL)) { -#if CYTHON_COMPILING_IN_LIMITED_API - op->func_name = PyObject_GetAttrString(op->func, "__name__"); -#elif PY_MAJOR_VERSION >= 3 - op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); -#else - op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); -#endif - if (unlikely(op->func_name == NULL)) - return NULL; - } - Py_INCREF(op->func_name); - return op->func_name; -} -static int -__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__name__ must be set to a string object"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_name, value); - 
return 0; -} -static PyObject * -__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - Py_INCREF(op->func_qualname); - return op->func_qualname; -} -static int -__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__qualname__ must be set to a string object"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_qualname, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(op->func_dict == NULL)) { - op->func_dict = PyDict_New(); - if (unlikely(op->func_dict == NULL)) - return NULL; - } - Py_INCREF(op->func_dict); - return op->func_dict; -} -static int -__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(value == NULL)) { - PyErr_SetString(PyExc_TypeError, - "function's dictionary may not be deleted"); - return -1; - } - if (unlikely(!PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "setting function's dictionary to a non-dict"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_dict, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - Py_INCREF(op->func_globals); - return op->func_globals; -} -static PyObject * -__Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(op); - CYTHON_UNUSED_VAR(context); - Py_INCREF(Py_None); - return Py_None; -} -static PyObject * -__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) -{ - PyObject* result = (op->func_code) ? 
op->func_code : Py_None; - CYTHON_UNUSED_VAR(context); - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { - int result = 0; - PyObject *res = op->defaults_getter((PyObject *) op); - if (unlikely(!res)) - return -1; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - op->defaults_tuple = PyTuple_GET_ITEM(res, 0); - Py_INCREF(op->defaults_tuple); - op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); - Py_INCREF(op->defaults_kwdict); - #else - op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); - if (unlikely(!op->defaults_tuple)) result = -1; - else { - op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); - if (unlikely(!op->defaults_kwdict)) result = -1; - } - #endif - Py_DECREF(res); - return result; -} -static int -__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value) { - value = Py_None; - } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__defaults__ must be set to a tuple object"); - return -1; - } - PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " - "currently affect the values used in function calls", 1); - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->defaults_tuple; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - if (op->defaults_getter) { - if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; - result = op->defaults_tuple; - } else { - result = Py_None; - } - } - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value) { - value = Py_None; - } else if (unlikely(value != Py_None && !PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__kwdefaults__ must be set to a dict object"); - return -1; - } - PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " - "currently affect the values used in function calls", 1); - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->defaults_kwdict; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - if (op->defaults_getter) { - if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; - result = op->defaults_kwdict; - } else { - result = Py_None; - } - } - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value || value == Py_None) { - value = NULL; - } else if (unlikely(!PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__annotations__ must be set to a dict object"); - return -1; - } - Py_XINCREF(value); - __Pyx_Py_XDECREF_SET(op->func_annotations, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->func_annotations; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - result = PyDict_New(); - if (unlikely(!result)) return NULL; - op->func_annotations = result; - } - Py_INCREF(result); - return result; -} -static PyObject * 
-__Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { - int is_coroutine; - CYTHON_UNUSED_VAR(context); - if (op->func_is_coroutine) { - return __Pyx_NewRef(op->func_is_coroutine); - } - is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; -#if PY_VERSION_HEX >= 0x03050000 - if (is_coroutine) { - PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; - fromlist = PyList_New(1); - if (unlikely(!fromlist)) return NULL; - Py_INCREF(marker); -#if CYTHON_ASSUME_SAFE_MACROS - PyList_SET_ITEM(fromlist, 0, marker); -#else - if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { - Py_DECREF(marker); - Py_DECREF(fromlist); - return NULL; - } -#endif - module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); - Py_DECREF(fromlist); - if (unlikely(!module)) goto ignore; - op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); - Py_DECREF(module); - if (likely(op->func_is_coroutine)) { - return __Pyx_NewRef(op->func_is_coroutine); - } -ignore: - PyErr_Clear(); - } -#endif - op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); - return __Pyx_NewRef(op->func_is_coroutine); -} -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject * -__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { - CYTHON_UNUSED_VAR(context); - return PyObject_GetAttrString(op->func, "__module__"); -} -static int -__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - return PyObject_SetAttrString(op->func, "__module__", value); -} -#endif -static PyGetSetDef __pyx_CyFunction_getsets[] = { - {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, - {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, - {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, - {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, - {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, - {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, - {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, - {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, - {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, - {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, - {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, - {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, - {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, - {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, - {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, - {(char *) "__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, - {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, - {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, -#if CYTHON_COMPILING_IN_LIMITED_API - {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, -#endif - {0, 0, 0, 0, 0} -}; -static PyMemberDef 
__pyx_CyFunction_members[] = { -#if !CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, -#endif -#if CYTHON_USE_TYPE_SPECS - {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, -#if CYTHON_METH_FASTCALL -#if CYTHON_BACKPORT_VECTORCALL - {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, -#else -#if !CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, -#endif -#endif -#endif -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, -#else - {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, -#endif -#endif - {0, 0, 0, 0, 0} -}; -static PyObject * -__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) -{ - CYTHON_UNUSED_VAR(args); -#if PY_MAJOR_VERSION >= 3 - Py_INCREF(m->func_qualname); - return m->func_qualname; -#else - return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); -#endif -} -static PyMethodDef __pyx_CyFunction_methods[] = { - {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, - {0, 0, 0, 0} -}; -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API -#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) -#else -#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) -#endif -static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { -#if !CYTHON_COMPILING_IN_LIMITED_API - PyCFunctionObject *cf = (PyCFunctionObject*) op; -#endif - if (unlikely(op == NULL)) - return NULL; -#if CYTHON_COMPILING_IN_LIMITED_API - op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); - if (unlikely(!op->func)) return NULL; -#endif - op->flags = flags; - __Pyx_CyFunction_weakreflist(op) = NULL; -#if !CYTHON_COMPILING_IN_LIMITED_API - cf->m_ml = ml; - cf->m_self = (PyObject *) op; -#endif - Py_XINCREF(closure); - op->func_closure = closure; -#if !CYTHON_COMPILING_IN_LIMITED_API - Py_XINCREF(module); - cf->m_module = module; -#endif - op->func_dict = NULL; - op->func_name = NULL; - Py_INCREF(qualname); - op->func_qualname = qualname; - op->func_doc = NULL; -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - op->func_classobj = NULL; -#else - ((PyCMethodObject*)op)->mm_class = NULL; -#endif - op->func_globals = globals; - Py_INCREF(op->func_globals); - Py_XINCREF(code); - op->func_code = code; - op->defaults_pyobjects = 0; - op->defaults_size = 0; - op->defaults = NULL; - op->defaults_tuple = NULL; - op->defaults_kwdict = NULL; - op->defaults_getter = NULL; - op->func_annotations = NULL; - op->func_is_coroutine = NULL; -#if CYTHON_METH_FASTCALL - switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { - case METH_NOARGS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; - break; - case METH_O: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; - break; - case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; - break; - case METH_FASTCALL 
| METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS; - break; - case METH_VARARGS | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = NULL; - break; - default: - PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); - Py_DECREF(op); - return NULL; - } -#endif - return (PyObject *) op; -} -static int -__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m) -{ - Py_CLEAR(m->func_closure); -#if CYTHON_COMPILING_IN_LIMITED_API - Py_CLEAR(m->func); -#else - Py_CLEAR(((PyCFunctionObject*)m)->m_module); -#endif - Py_CLEAR(m->func_dict); - Py_CLEAR(m->func_name); - Py_CLEAR(m->func_qualname); - Py_CLEAR(m->func_doc); - Py_CLEAR(m->func_globals); - Py_CLEAR(m->func_code); -#if !CYTHON_COMPILING_IN_LIMITED_API -#if PY_VERSION_HEX < 0x030900B1 - Py_CLEAR(__Pyx_CyFunction_GetClassObj(m)); -#else - { - PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class; - ((PyCMethodObject *) (m))->mm_class = NULL; - Py_XDECREF(cls); - } -#endif -#endif - Py_CLEAR(m->defaults_tuple); - Py_CLEAR(m->defaults_kwdict); - Py_CLEAR(m->func_annotations); - Py_CLEAR(m->func_is_coroutine); - if (m->defaults) { - PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); - int i; - for (i = 0; i < m->defaults_pyobjects; i++) - Py_XDECREF(pydefaults[i]); - PyObject_Free(m->defaults); - m->defaults = NULL; - } - return 0; -} -static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m) -{ - if (__Pyx_CyFunction_weakreflist(m) != NULL) - PyObject_ClearWeakRefs((PyObject *) m); - __Pyx_CyFunction_clear(m); - __Pyx_PyHeapTypeObject_GC_Del(m); -} -static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m) -{ - PyObject_GC_UnTrack(m); - __Pyx__CyFunction_dealloc(m); -} -static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg) -{ - Py_VISIT(m->func_closure); -#if CYTHON_COMPILING_IN_LIMITED_API - Py_VISIT(m->func); -#else - Py_VISIT(((PyCFunctionObject*)m)->m_module); -#endif - Py_VISIT(m->func_dict); - Py_VISIT(m->func_name); - Py_VISIT(m->func_qualname); - Py_VISIT(m->func_doc); - Py_VISIT(m->func_globals); - Py_VISIT(m->func_code); -#if !CYTHON_COMPILING_IN_LIMITED_API - Py_VISIT(__Pyx_CyFunction_GetClassObj(m)); -#endif - Py_VISIT(m->defaults_tuple); - Py_VISIT(m->defaults_kwdict); - Py_VISIT(m->func_is_coroutine); - if (m->defaults) { - PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); - int i; - for (i = 0; i < m->defaults_pyobjects; i++) - Py_VISIT(pydefaults[i]); - } - return 0; -} -static PyObject* -__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op) -{ -#if PY_MAJOR_VERSION >= 3 - return PyUnicode_FromFormat("", - op->func_qualname, (void *)op); -#else - return PyString_FromFormat("", - PyString_AsString(op->func_qualname), (void *)op); -#endif -} -static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) { -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *f = ((__pyx_CyFunctionObject*)func)->func; - PyObject *py_name = NULL; - PyCFunction meth; - int flags; - meth = PyCFunction_GetFunction(f); - if (unlikely(!meth)) return NULL; - flags = PyCFunction_GetFlags(f); - if (unlikely(flags < 0)) return NULL; -#else - PyCFunctionObject* f = (PyCFunctionObject*)func; - PyCFunction meth = f->m_ml->ml_meth; - int flags = f->m_ml->ml_flags; -#endif - Py_ssize_t size; - switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { - case METH_VARARGS: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) - return (*meth)(self, arg); - break; 
- case METH_VARARGS | METH_KEYWORDS: - return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); - case METH_NOARGS: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) { -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(arg); -#else - size = PyTuple_Size(arg); - if (unlikely(size < 0)) return NULL; -#endif - if (likely(size == 0)) - return (*meth)(self, NULL); -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, - "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - py_name, size); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, - "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - f->m_ml->ml_name, size); -#endif - return NULL; - } - break; - case METH_O: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) { -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(arg); -#else - size = PyTuple_Size(arg); - if (unlikely(size < 0)) return NULL; -#endif - if (likely(size == 1)) { - PyObject *result, *arg0; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - arg0 = PyTuple_GET_ITEM(arg, 0); - #else - arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; - #endif - result = (*meth)(self, arg0); - #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) - Py_DECREF(arg0); - #endif - return result; - } -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, - "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - py_name, size); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, - "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - f->m_ml->ml_name, size); -#endif - return NULL; - } - break; - default: - PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); - return NULL; - } -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", - py_name); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", - f->m_ml->ml_name); -#endif - return NULL; -} -static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *self, *result; -#if CYTHON_COMPILING_IN_LIMITED_API - self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); - if (unlikely(!self) && PyErr_Occurred()) return NULL; -#else - self = ((PyCFunctionObject*)func)->m_self; -#endif - result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); - return result; -} -static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { - PyObject *result; - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; -#if CYTHON_METH_FASTCALL - __pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); - if (vc) { -#if CYTHON_ASSUME_SAFE_MACROS - return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); -#else - (void) &__Pyx_PyVectorcall_FastCallDict; - return PyVectorcall_Call(func, args, kw); -#endif - } -#endif - if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { - Py_ssize_t argc; - PyObject *new_args; - PyObject *self; -#if CYTHON_ASSUME_SAFE_MACROS - 
argc = PyTuple_GET_SIZE(args); -#else - argc = PyTuple_Size(args); - if (unlikely(!argc) < 0) return NULL; -#endif - new_args = PyTuple_GetSlice(args, 1, argc); - if (unlikely(!new_args)) - return NULL; - self = PyTuple_GetItem(args, 0); - if (unlikely(!self)) { - Py_DECREF(new_args); -#if PY_MAJOR_VERSION > 2 - PyErr_Format(PyExc_TypeError, - "unbound method %.200S() needs an argument", - cyfunc->func_qualname); -#else - PyErr_SetString(PyExc_TypeError, - "unbound method needs an argument"); -#endif - return NULL; - } - result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); - Py_DECREF(new_args); - } else { - result = __Pyx_CyFunction_Call(func, args, kw); - } - return result; -} -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames) -{ - int ret = 0; - if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { - if (unlikely(nargs < 1)) { - PyErr_Format(PyExc_TypeError, "%.200s() needs an argument", - ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); - return -1; - } - ret = 1; - } - if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); - return -1; - } - return ret; -} -static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - if (unlikely(nargs != 0)) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - def->ml_name, nargs); - return NULL; - } - return def->ml_meth(self, NULL); -} -static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - if (unlikely(nargs != 1)) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - def->ml_name, nargs); - return NULL; - } - return def->ml_meth(self, args[0]); -} -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch 
(__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); -} -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; - PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); -} -#endif -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_CyFunctionType_slots[] = { - {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, - {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, - {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, - {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, - {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, - {Py_tp_methods, (void *)__pyx_CyFunction_methods}, - {Py_tp_members, (void *)__pyx_CyFunction_members}, - {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, - {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, - {0, 0}, -}; -static PyType_Spec __pyx_CyFunctionType_spec = { - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - __pyx_CyFunctionType_slots -}; -#else -static PyTypeObject __pyx_CyFunctionType_type = { - PyVarObject_HEAD_INIT(0, 0) - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, - (destructor) __Pyx_CyFunction_dealloc, -#if !CYTHON_METH_FASTCALL - 0, -#elif CYTHON_BACKPORT_VECTORCALL - (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), -#else - offsetof(PyCFunctionObject, vectorcall), -#endif - 0, - 0, -#if PY_MAJOR_VERSION < 3 - 0, -#else - 0, -#endif - (reprfunc) __Pyx_CyFunction_repr, - 0, - 0, - 0, - 0, - __Pyx_CyFunction_CallAsMethod, - 0, - 0, - 0, - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - 0, - (traverseproc) __Pyx_CyFunction_traverse, - (inquiry) __Pyx_CyFunction_clear, - 0, -#if PY_VERSION_HEX < 0x030500A0 - offsetof(__pyx_CyFunctionObject, func_weakreflist), -#else - offsetof(PyCFunctionObject, m_weakreflist), -#endif - 0, - 0, - __pyx_CyFunction_methods, - __pyx_CyFunction_members, - __pyx_CyFunction_getsets, - 0, - 0, - __Pyx_PyMethod_New, - 0, - offsetof(__pyx_CyFunctionObject, func_dict), - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, -#if PY_VERSION_HEX >= 0x030400a1 - 0, -#endif -#if PY_VERSION_HEX >= 0x030800b1 && 
(!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, -#endif -#if __PYX_NEED_TP_PRINT_SLOT - 0, -#endif -#if PY_VERSION_HEX >= 0x030C0000 - 0, -#endif -#if PY_VERSION_HEX >= 0x030d00A4 - 0, -#endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, -#endif -}; -#endif -static int __pyx_CyFunction_init(PyObject *module) { -#if CYTHON_USE_TYPE_SPECS - __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); -#else - CYTHON_UNUSED_VAR(module); - __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); -#endif - if (unlikely(__pyx_CyFunctionType == NULL)) { - return -1; - } - return 0; -} -static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults = PyObject_Malloc(size); - if (unlikely(!m->defaults)) - return PyErr_NoMemory(); - memset(m->defaults, 0, size); - m->defaults_pyobjects = pyobjects; - m->defaults_size = size; - return m->defaults; -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_tuple = tuple; - Py_INCREF(tuple); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_kwdict = dict; - Py_INCREF(dict); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->func_annotations = dict; - Py_INCREF(dict); -} - -/* CythonFunction */ -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { - PyObject *op = __Pyx_CyFunction_Init( - PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), - ml, flags, qualname, closure, module, globals, code - ); - if (likely(op)) { - PyObject_GC_Track(op); - } - return op; -} - -/* ClassMethod */ -static PyObject* __Pyx_Method_ClassMethod(PyObject *method) { -#if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM <= 0x05080000 - if (PyObject_TypeCheck(method, &PyWrapperDescr_Type)) { - return PyClassMethod_New(method); - } -#else -#if CYTHON_COMPILING_IN_PYPY - if (PyMethodDescr_Check(method)) -#else - #if PY_MAJOR_VERSION == 2 - static PyTypeObject *methoddescr_type = NULL; - if (unlikely(methoddescr_type == NULL)) { - PyObject *meth = PyObject_GetAttrString((PyObject*)&PyList_Type, "append"); - if (unlikely(!meth)) return NULL; - methoddescr_type = Py_TYPE(meth); - Py_DECREF(meth); - } - #else - PyTypeObject *methoddescr_type = &PyMethodDescr_Type; - #endif - if (__Pyx_TypeCheck(method, methoddescr_type)) -#endif - { - PyMethodDescrObject *descr = (PyMethodDescrObject *)method; - #if PY_VERSION_HEX < 0x03020000 - PyTypeObject *d_type = descr->d_type; - #else - PyTypeObject *d_type = descr->d_common.d_type; - #endif - return PyDescr_NewClassMethod(d_type, descr->d_method); - } -#endif - else if (PyMethod_Check(method)) { - return PyClassMethod_New(PyMethod_GET_FUNCTION(method)); - } - else { - return PyClassMethod_New(method); - } -} - -/* GetNameInClass */ -static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name) { - PyObject *result; - PyObject *dict; - assert(PyType_Check(nmspace)); -#if CYTHON_USE_TYPE_SLOTS - dict = 
((PyTypeObject*)nmspace)->tp_dict; - Py_XINCREF(dict); -#else - dict = PyObject_GetAttr(nmspace, __pyx_n_s_dict); -#endif - if (likely(dict)) { - result = PyObject_GetItem(dict, name); - Py_DECREF(dict); - if (result) { - return result; - } - } - PyErr_Clear(); - __Pyx_GetModuleGlobalNameUncached(result, name); - return result; -} - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - CYTHON_MAYBE_UNUSED_VAR(tstate); - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && 
unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} -#endif - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API - #ifndef Py_BUILD_CORE - #define Py_BUILD_CORE 1 - #endif - #include "internal/pycore_frame.h" -#endif -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, - PyObject *firstlineno, PyObject *name) { - PyObject *replace = NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; - replace = PyObject_GetAttrString(code, "replace"); - if (likely(replace)) { - PyObject *result; - result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); - Py_DECREF(replace); - return result; - } - PyErr_Clear(); - #if __PYX_LIMITED_VERSION_HEX < 0x030780000 - { - PyObject *compiled = NULL, *result = NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; - compiled = Py_CompileString( - "out = type(code)(\n" - " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" - " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" - " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" - " code.co_lnotab)\n", "", Py_file_input); - if (!compiled) return NULL; - result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); - Py_DECREF(compiled); - if (!result) PyErr_Print(); - Py_DECREF(result); - result = PyDict_GetItemString(scratch_dict, "out"); - if (result) Py_INCREF(result); - return result; - } - #else - return NULL; - #endif -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; - PyObject *replace = NULL, *getframe = NULL, *frame = NULL; - PyObject *exc_type, *exc_value, *exc_traceback; - int success = 0; - if (c_line) { - (void) __pyx_cfilenm; - (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); - } - PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); - code_object = Py_CompileString("_getframe()", filename, Py_eval_input); - if (unlikely(!code_object)) goto bad; - py_py_line = PyLong_FromLong(py_line); - if (unlikely(!py_py_line)) goto bad; - py_funcname = PyUnicode_FromString(funcname); - if (unlikely(!py_funcname)) goto bad; - dict = PyDict_New(); - if (unlikely(!dict)) goto bad; - { - PyObject *old_code_object = code_object; - code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); - 
Py_DECREF(old_code_object); - } - if (unlikely(!code_object)) goto bad; - getframe = PySys_GetObject("_getframe"); - if (unlikely(!getframe)) goto bad; - if (unlikely(PyDict_SetItemString(dict, "_getframe", getframe))) goto bad; - frame = PyEval_EvalCode(code_object, dict, dict); - if (unlikely(!frame) || frame == Py_None) goto bad; - success = 1; - bad: - PyErr_Restore(exc_type, exc_value, exc_traceback); - Py_XDECREF(code_object); - Py_XDECREF(py_py_line); - Py_XDECREF(py_funcname); - Py_XDECREF(dict); - Py_XDECREF(replace); - if (success) { - PyTraceBack_Here( - (struct _frame*)frame); - } - Py_XDECREF(frame); -} -#else -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = NULL; - PyObject *py_funcname = NULL; - #if PY_MAJOR_VERSION < 3 - PyObject *py_srcfile = NULL; - py_srcfile = PyString_FromString(filename); - if (!py_srcfile) goto bad; - #endif - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto bad; - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto bad; - funcname = PyUnicode_AsUTF8(py_funcname); - if (!funcname) goto bad; - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - if (!py_funcname) goto bad; - #endif - } - #if PY_MAJOR_VERSION < 3 - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - #else - py_code = PyCode_NewEmpty(filename, funcname, py_line); - #endif - Py_XDECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_funcname); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_srcfile); - #endif - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject *ptype, *pvalue, *ptraceback; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) { - /* If the code object creation fails, then we should clear the - fetched exception references and propagate the new exception */ - Py_XDECREF(ptype); - Py_XDECREF(pvalue); - Py_XDECREF(ptraceback); - goto bad; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} -#endif - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(long) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned 
long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(long) <= sizeof(unsigned long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | 
(long)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } - } -#endif - if ((sizeof(long) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - long val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (long) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (long) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (long) -1; - } else { - stepval = v; - } - v = NULL; - val = (long) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((long) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 
0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((long) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (long) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_PY_LONG_LONG(unsigned PY_LONG_LONG value) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG) -1, const_zero = (unsigned PY_LONG_LONG) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(unsigned PY_LONG_LONG) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(unsigned PY_LONG_LONG) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - unsigned char *bytes = (unsigned char *)&value; -#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 - if (is_unsigned) { - return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); - } else { - return PyLong_FromNativeBytes(bytes, sizeof(value), -1); - } -#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 - int one = 1; int little = (int)*(unsigned char *)&one; - return _PyLong_FromByteArray(bytes, sizeof(unsigned PY_LONG_LONG), - little, !is_unsigned); -#else - int one = 1; int little = (int)*(unsigned char *)&one; - PyObject *from_bytes, *result = NULL; - PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; - from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); - if (!from_bytes) return NULL; - py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(unsigned PY_LONG_LONG)); - if (!py_bytes) goto limited_bad; - order_str = PyUnicode_FromString(little ? 
"little" : "big"); - if (!order_str) goto limited_bad; - arg_tuple = PyTuple_Pack(2, py_bytes, order_str); - if (!arg_tuple) goto limited_bad; - if (!is_unsigned) { - kwds = PyDict_New(); - if (!kwds) goto limited_bad; - if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; - } - result = PyObject_Call(from_bytes, arg_tuple, kwds); - limited_bad: - Py_XDECREF(kwds); - Py_XDECREF(arg_tuple); - Py_XDECREF(order_str); - Py_XDECREF(py_bytes); - Py_XDECREF(from_bytes); - return result; -#endif - } -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - unsigned char *bytes = (unsigned char *)&value; -#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 - if (is_unsigned) { - return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); - } else { - return PyLong_FromNativeBytes(bytes, sizeof(value), -1); - } -#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 - int one = 1; int little = (int)*(unsigned char *)&one; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); -#else - int one = 1; int little = (int)*(unsigned char *)&one; - PyObject *from_bytes, *result = NULL; - PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; - from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); - if (!from_bytes) return NULL; - py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); - if (!py_bytes) goto limited_bad; - order_str = PyUnicode_FromString(little ? 
"little" : "big"); - if (!order_str) goto limited_bad; - arg_tuple = PyTuple_Pack(2, py_bytes, order_str); - if (!arg_tuple) goto limited_bad; - if (!is_unsigned) { - kwds = PyDict_New(); - if (!kwds) goto limited_bad; - if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; - } - result = PyObject_Call(from_bytes, arg_tuple, kwds); - limited_bad: - Py_XDECREF(kwds); - Py_XDECREF(arg_tuple); - Py_XDECREF(order_str); - Py_XDECREF(py_bytes); - Py_XDECREF(from_bytes); - return result; -#endif - } -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const int neg_one = (int) -1, const_zero = (int) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(int) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(int) <= sizeof(unsigned long))) { 
- __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } - } -#endif - if ((sizeof(int) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - 
} else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - int val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (int) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (int) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (int) -1; - } else { - stepval = v; - } - v = NULL; - val = (int) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((int) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 
0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((int) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (int) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (cls == a || cls == b) return 1; - mro = cls->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - PyObject *base = PyTuple_GET_ITEM(mro, i); - if (base == (PyObject *)a || base == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - if (exc_type1) { - return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); - } else { - return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i= 0x030B00A4 - return Py_Version & ~0xFFUL; -#else - const char* rt_version = Py_GetVersion(); - unsigned long version = 0; - unsigned long factor = 0x01000000UL; - unsigned int digit = 0; - int i = 0; - while (factor) { - while ('0' <= rt_version[i] && rt_version[i] <= '9') { - digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); - ++i; - } - version += factor * digit; - if (rt_version[i] != '.') - break; - digit = 0; - factor >>= 8; - ++i; - } - return version; -#endif -} -static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { - const unsigned long MAJOR_MINOR = 0xFFFF0000UL; - if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) - return 0; - if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) - return 1; - { - char message[200]; - PyOS_snprintf(message, sizeof(message), - "compile time Python version %d.%d " - "of module '%.100s' " - "%s " - "runtime version %d.%d", - (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), - __Pyx_MODULE_NAME, - (allow_newer) ? 
"was newer than" : "does not match", - (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) - ); - return PyErr_WarnEx(NULL, message, 1); - } -} - -/* FunctionExport */ -static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig) { - PyObject *d = 0; - PyObject *cobj = 0; - union { - void (*fp)(void); - void *p; - } tmp; - d = PyObject_GetAttrString(__pyx_m, (char *)"__pyx_capi__"); - if (!d) { - PyErr_Clear(); - d = PyDict_New(); - if (!d) - goto bad; - Py_INCREF(d); - if (PyModule_AddObject(__pyx_m, (char *)"__pyx_capi__", d) < 0) - goto bad; - } - tmp.fp = f; - cobj = PyCapsule_New(tmp.p, sig, 0); - if (!cobj) - goto bad; - if (PyDict_SetItemString(d, name, cobj) < 0) - goto bad; - Py_DECREF(cobj); - Py_DECREF(d); - return 0; -bad: - Py_XDECREF(cobj); - Py_XDECREF(d); - return -1; -} - -/* InitStrings */ -#if PY_MAJOR_VERSION >= 3 -static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { - if (t.is_unicode | t.is_str) { - if (t.intern) { - *str = PyUnicode_InternFromString(t.s); - } else if (t.encoding) { - *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); - } else { - *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); - } - } else { - *str = PyBytes_FromStringAndSize(t.s, t.n - 1); - } - if (!*str) - return -1; - if (PyObject_Hash(*str) == -1) - return -1; - return 0; -} -#endif -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { - while (t->p) { - #if PY_MAJOR_VERSION >= 3 - __Pyx_InitString(*t, t->p); - #else - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - #endif - ++t; - } - return 0; -} - -#include -static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { - size_t len = strlen(s); - if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { - PyErr_SetString(PyExc_OverflowError, "byte string is too long"); - return -1; - } - return (Py_ssize_t) len; -} -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return __Pyx_PyUnicode_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return PyByteArray_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if 
__PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if (!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { - __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). 
" - "The ability to return an instance of a strict subclass of int is deprecated, " - "and may be removed in a future version of Python.", - result_type_name)) { - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; - } - __Pyx_DECREF_TypeName(result_type_name); - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", - type_name, type_name, result_type_name); - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - if (likely(__Pyx_PyLong_IsCompact(b))) { - return __Pyx_PyLong_CompactValue(b); - } else { - const digit* digits = __Pyx_PyLong_Digits(b); - const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - 
return ival; -} -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { - if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { - return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); -#if PY_MAJOR_VERSION < 3 - } else if (likely(PyInt_CheckExact(o))) { - return PyInt_AS_LONG(o); -#endif - } else { - Py_ssize_t ival; - PyObject *x; - x = PyNumber_Index(o); - if (!x) return -1; - ival = PyInt_AsLong(x); - Py_DECREF(x); - return ival; - } -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -/* #### Code section: utility_code_pragmas_end ### */ -#ifdef _MSC_VER -#pragma warning( pop ) -#endif - - - -/* #### Code section: end ### */ -#endif /* Py_PYTHON_H */ diff --git a/lib/sqlalchemy/util/_immutabledict_cy.c b/lib/sqlalchemy/util/_immutabledict_cy.c deleted file mode 100644 index 1290b9ec844..00000000000 --- a/lib/sqlalchemy/util/_immutabledict_cy.c +++ /dev/null @@ -1,15840 +0,0 @@ -/* Generated by Cython 3.0.11 */ - -/* BEGIN: Cython Metadata -{ - "distutils": { - "name": "sqlalchemy.util._immutabledict_cy", - "sources": [ - "lib/sqlalchemy/util/_immutabledict_cy.py" - ] - }, - "module_name": "sqlalchemy.util._immutabledict_cy" -} -END: Cython Metadata */ - -#ifndef PY_SSIZE_T_CLEAN -#define PY_SSIZE_T_CLEAN -#endif /* PY_SSIZE_T_CLEAN */ -#if defined(CYTHON_LIMITED_API) && 0 - #ifndef Py_LIMITED_API - #if CYTHON_LIMITED_API+0 > 0x03030000 - #define Py_LIMITED_API CYTHON_LIMITED_API - #else - #define Py_LIMITED_API 0x03030000 - #endif - #endif -#endif - -#include "Python.h" - - #if PY_MAJOR_VERSION <= 2 - #define PyDict_GetItemWithError _PyDict_GetItemWithError - #endif - -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#elif PY_VERSION_HEX < 0x02070000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.7+ or Python 3.3+. -#else -#if defined(CYTHON_LIMITED_API) && CYTHON_LIMITED_API -#define __PYX_EXTRA_ABI_MODULE_NAME "limited" -#else -#define __PYX_EXTRA_ABI_MODULE_NAME "" -#endif -#define CYTHON_ABI "3_0_11" __PYX_EXTRA_ABI_MODULE_NAME -#define __PYX_ABI_MODULE_NAME "_cython_" CYTHON_ABI -#define __PYX_TYPE_MODULE_PREFIX __PYX_ABI_MODULE_NAME "." -#define CYTHON_HEX_VERSION 0x03000BF0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(_WIN32) && !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #define HAVE_LONG_LONG -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#define __PYX_LIMITED_VERSION_HEX PY_VERSION_HEX -#if defined(GRAALVM_PYTHON) - /* For very preliminary testing purposes. Most variables are set the same as PyPy. 
- The existence of this section does not imply that anything works or is even tested */ - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 1 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(PYPY_VERSION) - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS (PY_MAJOR_VERSION >= 3) - 
#endif - #if PY_VERSION_HEX < 0x03090000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1 && PYPY_VERSION_NUM >= 0x07030C00) - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(CYTHON_LIMITED_API) - #ifdef Py_LIMITED_API - #undef __PYX_LIMITED_VERSION_HEX - #define __PYX_LIMITED_VERSION_HEX Py_LIMITED_API - #endif - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 1 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #undef CYTHON_CLINE_IN_TRACEBACK - #define CYTHON_CLINE_IN_TRACEBACK 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 1 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #endif - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #undef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 1 - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 0 - #endif - #undef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 -#elif defined(Py_GIL_DISABLED) || defined(Py_NOGIL) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #ifndef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef 
CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL 0 - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL 1 - #endif - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 0 - #endif -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #define CYTHON_COMPILING_IN_LIMITED_API 0 - #define CYTHON_COMPILING_IN_GRAAL 0 - #define CYTHON_COMPILING_IN_NOGIL 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #ifndef CYTHON_USE_TYPE_SPECS - #define CYTHON_USE_TYPE_SPECS 0 - #endif - #ifndef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #ifndef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_GIL - #define CYTHON_FAST_GIL (PY_MAJOR_VERSION < 3 || PY_VERSION_HEX >= 0x03060000 && PY_VERSION_HEX < 0x030C00A6) - #endif - #ifndef CYTHON_METH_FASTCALL - #define CYTHON_METH_FASTCALL (PY_VERSION_HEX >= 0x030700A1) - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP487_INIT_SUBCLASS - #define CYTHON_PEP487_INIT_SUBCLASS 1 - #endif - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #elif !defined(CYTHON_PEP489_MULTI_PHASE_INIT) - #define CYTHON_PEP489_MULTI_PHASE_INIT 1 - #endif - #ifndef CYTHON_USE_MODULE_STATE - #define CYTHON_USE_MODULE_STATE 0 - #endif - #if PY_VERSION_HEX < 0x030400a1 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #elif 
!defined(CYTHON_USE_TP_FINALIZE) - #define CYTHON_USE_TP_FINALIZE 1 - #endif - #if PY_VERSION_HEX < 0x030600B1 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #elif !defined(CYTHON_USE_DICT_VERSIONS) - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX < 0x030C00A5) - #endif - #if PY_VERSION_HEX < 0x030700A3 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 - #elif !defined(CYTHON_USE_EXC_INFO_STACK) - #define CYTHON_USE_EXC_INFO_STACK 1 - #endif - #ifndef CYTHON_UPDATE_DESCRIPTOR_DOC - #define CYTHON_UPDATE_DESCRIPTOR_DOC 1 - #endif - #ifndef CYTHON_USE_FREELISTS - #define CYTHON_USE_FREELISTS 1 - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if !defined(CYTHON_VECTORCALL) -#define CYTHON_VECTORCALL (CYTHON_FAST_PYCCALL && PY_VERSION_HEX >= 0x030800B1) -#endif -#define CYTHON_BACKPORT_VECTORCALL (CYTHON_METH_FASTCALL && PY_VERSION_HEX < 0x030800B1) -#if CYTHON_USE_PYLONG_INTERNALS - #if PY_MAJOR_VERSION < 3 - #include "longintrepr.h" - #endif - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif -#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(maybe_unused) - #define CYTHON_UNUSED [[maybe_unused]] - #endif - #endif - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef CYTHON_MAYBE_UNUSED_VAR - #define CYTHON_MAYBE_UNUSED_VAR(x) CYTHON_UNUSED_VAR(x) -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_USE_CPP_STD_MOVE - #if defined(__cplusplus) && (\ - __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)) - #define CYTHON_USE_CPP_STD_MOVE 1 - #else - #define CYTHON_USE_CPP_STD_MOVE 0 - #endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned short uint16_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 
uint32_t; - #endif - #endif - #if _MSC_VER < 1300 - #ifdef _WIN64 - typedef unsigned long long __pyx_uintptr_t; - #else - typedef unsigned int __pyx_uintptr_t; - #endif - #else - #ifdef _WIN64 - typedef unsigned __int64 __pyx_uintptr_t; - #else - typedef unsigned __int32 __pyx_uintptr_t; - #endif - #endif -#else - #include - typedef uintptr_t __pyx_uintptr_t; -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) - /* for clang __has_cpp_attribute(fallthrough) is true even before C++17 - * but leads to warnings with -pedantic, since it is a C++17 feature */ - #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L) - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if defined(__clang__) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif -#ifdef __cplusplus - template - struct __PYX_IS_UNSIGNED_IMPL {static const bool value = T(0) < T(-1);}; - #define __PYX_IS_UNSIGNED(type) (__PYX_IS_UNSIGNED_IMPL::value) -#else - #define __PYX_IS_UNSIGNED(type) (((type)-1) > 0) -#endif -#if CYTHON_COMPILING_IN_PYPY == 1 - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x030A0000) -#else - #define __PYX_NEED_TP_PRINT_SLOT (PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000) -#endif -#define __PYX_REINTERPRET_FUNCION(func_pointer, other_pointer) ((func_pointer)(void(*)(void))(other_pointer)) - -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #elif defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_DefaultClassType PyClass_Type - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" - #define __Pyx_DefaultClassType PyType_Type -#if CYTHON_COMPILING_IN_LIMITED_API - static CYTHON_INLINE PyObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, - PyObject *code, PyObject *c, PyObject* n, PyObject *v, - PyObject *fv, PyObject *cell, PyObject* fn, - PyObject *name, int fline, PyObject *lnos) { - PyObject *exception_table = NULL; - PyObject *types_module=NULL, *code_type=NULL, *result=NULL; - #if __PYX_LIMITED_VERSION_HEX < 0x030B0000 - PyObject *version_info; - PyObject *py_minor_version = NULL; - #endif - long minor_version = 0; - PyObject *type, *value, *traceback; - PyErr_Fetch(&type, &value, &traceback); - #if __PYX_LIMITED_VERSION_HEX >= 0x030B0000 - minor_version = 11; - #else - if (!(version_info = PySys_GetObject("version_info"))) goto end; - if 
(!(py_minor_version = PySequence_GetItem(version_info, 1))) goto end; - minor_version = PyLong_AsLong(py_minor_version); - Py_DECREF(py_minor_version); - if (minor_version == -1 && PyErr_Occurred()) goto end; - #endif - if (!(types_module = PyImport_ImportModule("types"))) goto end; - if (!(code_type = PyObject_GetAttrString(types_module, "CodeType"))) goto end; - if (minor_version <= 7) { - (void)p; - result = PyObject_CallFunction(code_type, "iiiiiOOOOOOiOO", a, k, l, s, f, code, - c, n, v, fn, name, fline, lnos, fv, cell); - } else if (minor_version <= 10) { - result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOiOO", a,p, k, l, s, f, code, - c, n, v, fn, name, fline, lnos, fv, cell); - } else { - if (!(exception_table = PyBytes_FromStringAndSize(NULL, 0))) goto end; - result = PyObject_CallFunction(code_type, "iiiiiiOOOOOOOiOO", a,p, k, l, s, f, code, - c, n, v, fn, name, name, fline, lnos, exception_table, fv, cell); - } - end: - Py_XDECREF(code_type); - Py_XDECREF(exception_table); - Py_XDECREF(types_module); - if (type) { - PyErr_Restore(type, value, traceback); - } - return result; - } - #ifndef CO_OPTIMIZED - #define CO_OPTIMIZED 0x0001 - #endif - #ifndef CO_NEWLOCALS - #define CO_NEWLOCALS 0x0002 - #endif - #ifndef CO_VARARGS - #define CO_VARARGS 0x0004 - #endif - #ifndef CO_VARKEYWORDS - #define CO_VARKEYWORDS 0x0008 - #endif - #ifndef CO_ASYNC_GENERATOR - #define CO_ASYNC_GENERATOR 0x0200 - #endif - #ifndef CO_GENERATOR - #define CO_GENERATOR 0x0020 - #endif - #ifndef CO_COROUTINE - #define CO_COROUTINE 0x0080 - #endif -#elif PY_VERSION_HEX >= 0x030B0000 - static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int p, int k, int l, int s, int f, - PyObject *code, PyObject *c, PyObject* n, PyObject *v, - PyObject *fv, PyObject *cell, PyObject* fn, - PyObject *name, int fline, PyObject *lnos) { - PyCodeObject *result; - PyObject *empty_bytes = PyBytes_FromStringAndSize("", 0); - if (!empty_bytes) return NULL; - result = - #if PY_VERSION_HEX >= 0x030C0000 - PyUnstable_Code_NewWithPosOnlyArgs - #else - PyCode_NewWithPosOnlyArgs - #endif - (a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, name, fline, lnos, empty_bytes); - Py_DECREF(empty_bytes); - return result; - } -#elif PY_VERSION_HEX >= 0x030800B2 && !CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_NewWithPosOnlyArgs(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_PyCode_New(a, p, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#endif -#endif -#if PY_VERSION_HEX >= 0x030900A4 || defined(Py_IS_TYPE) - #define __Pyx_IS_TYPE(ob, type) Py_IS_TYPE(ob, type) -#else - #define __Pyx_IS_TYPE(ob, type) (((const PyObject*)ob)->ob_type == (type)) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_Is) - #define __Pyx_Py_Is(x, y) Py_Is(x, y) -#else - #define __Pyx_Py_Is(x, y) ((x) == (y)) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsNone) - #define __Pyx_Py_IsNone(ob) Py_IsNone(ob) -#else - #define __Pyx_Py_IsNone(ob) __Pyx_Py_Is((ob), Py_None) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsTrue) - #define __Pyx_Py_IsTrue(ob) Py_IsTrue(ob) -#else - #define __Pyx_Py_IsTrue(ob) __Pyx_Py_Is((ob), Py_True) -#endif -#if PY_VERSION_HEX >= 0x030A00B1 || defined(Py_IsFalse) - #define __Pyx_Py_IsFalse(ob) Py_IsFalse(ob) -#else - #define __Pyx_Py_IsFalse(ob) __Pyx_Py_Is((ob), Py_False) -#endif -#define 
__Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj)) -#if PY_VERSION_HEX >= 0x030900F0 && !CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyObject_GC_IsFinalized(o) PyObject_GC_IsFinalized(o) -#else - #define __Pyx_PyObject_GC_IsFinalized(o) _PyGC_FINALIZED(o) -#endif -#ifndef CO_COROUTINE - #define CO_COROUTINE 0x80 -#endif -#ifndef CO_ASYNC_GENERATOR - #define CO_ASYNC_GENERATOR 0x200 -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef Py_TPFLAGS_SEQUENCE - #define Py_TPFLAGS_SEQUENCE 0 -#endif -#ifndef Py_TPFLAGS_MAPPING - #define Py_TPFLAGS_MAPPING 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #if PY_VERSION_HEX >= 0x030d00A4 - # define __Pyx_PyCFunctionFast PyCFunctionFast - # define __Pyx_PyCFunctionFastWithKeywords PyCFunctionFastWithKeywords - #else - # define __Pyx_PyCFunctionFast _PyCFunctionFast - # define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords - #endif -#endif -#if CYTHON_METH_FASTCALL - #define __Pyx_METH_FASTCALL METH_FASTCALL - #define __Pyx_PyCFunction_FastCall __Pyx_PyCFunctionFast - #define __Pyx_PyCFunction_FastCallWithKeywords __Pyx_PyCFunctionFastWithKeywords -#else - #define __Pyx_METH_FASTCALL METH_VARARGS - #define __Pyx_PyCFunction_FastCall PyCFunction - #define __Pyx_PyCFunction_FastCallWithKeywords PyCFunctionWithKeywords -#endif -#if CYTHON_VECTORCALL - #define __pyx_vectorcallfunc vectorcallfunc - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET - #define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n)) -#elif CYTHON_BACKPORT_VECTORCALL - typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args, - size_t nargsf, PyObject *kwnames); - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1)) - #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)) -#else - #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0 - #define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n)) -#endif -#if PY_MAJOR_VERSION >= 0x030900B1 -#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_CheckExact(func) -#else -#define __Pyx_PyCFunction_CheckExact(func) PyCFunction_Check(func) -#endif -#define __Pyx_CyOrPyCFunction_Check(func) PyCFunction_Check(func) -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) (((PyCFunctionObject*)(func))->m_ml->ml_meth) -#elif !CYTHON_COMPILING_IN_LIMITED_API -#define __Pyx_CyOrPyCFunction_GET_FUNCTION(func) PyCFunction_GET_FUNCTION(func) -#endif -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_CyOrPyCFunction_GET_FLAGS(func) (((PyCFunctionObject*)(func))->m_ml->ml_flags) -static CYTHON_INLINE PyObject* __Pyx_CyOrPyCFunction_GET_SELF(PyObject *func) { - return (__Pyx_CyOrPyCFunction_GET_FLAGS(func) & METH_STATIC) ? 
NULL : ((PyCFunctionObject*)func)->m_self; -} -#endif -static CYTHON_INLINE int __Pyx__IsSameCFunction(PyObject *func, void *cfunc) { -#if CYTHON_COMPILING_IN_LIMITED_API - return PyCFunction_Check(func) && PyCFunction_GetFunction(func) == (PyCFunction) cfunc; -#else - return PyCFunction_Check(func) && PyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; -#endif -} -#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCFunction(func, cfunc) -#if __PYX_LIMITED_VERSION_HEX < 0x030900B1 - #define __Pyx_PyType_FromModuleAndSpec(m, s, b) ((void)m, PyType_FromSpecWithBases(s, b)) - typedef PyObject *(*__Pyx_PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, size_t, PyObject *); -#else - #define __Pyx_PyType_FromModuleAndSpec(m, s, b) PyType_FromModuleAndSpec(m, s, b) - #define __Pyx_PyCMethod PyCMethod -#endif -#ifndef METH_METHOD - #define METH_METHOD 0x200 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyThreadState_Current PyThreadState_Get() -#elif !CYTHON_FAST_THREAD_STATE - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif PY_VERSION_HEX >= 0x030d00A1 - #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked() -#elif PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE void *__Pyx_PyModule_GetState(PyObject *op) -{ - void *result; - result = PyModule_GetState(op); - if (!result) - Py_FatalError("Couldn't find the module state"); - return result; -} -#endif -#define __Pyx_PyObject_GetSlot(obj, name, func_ctype) __Pyx_PyType_GetSlot(Py_TYPE(obj), name, func_ctype) -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((func_ctype) PyType_GetSlot((type), Py_##name)) -#else - #define __Pyx_PyType_GetSlot(type, name, func_ctype) ((type)->name) -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} 
-#endif -#if PY_MAJOR_VERSION < 3 - #if CYTHON_COMPILING_IN_PYPY - #if PYPY_VERSION_NUM < 0x07030600 - #if defined(__cplusplus) && __cplusplus >= 201402L - [[deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")]] - #elif defined(__GNUC__) || defined(__clang__) - __attribute__ ((__deprecated__("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6"))) - #elif defined(_MSC_VER) - __declspec(deprecated("`with nogil:` inside a nogil function will not release the GIL in PyPy2 < 7.3.6")) - #endif - static CYTHON_INLINE int PyGILState_Check(void) { - return 0; - } - #else // PYPY_VERSION_NUM < 0x07030600 - #endif // PYPY_VERSION_NUM < 0x07030600 - #else - static CYTHON_INLINE int PyGILState_Check(void) { - PyThreadState * tstate = _PyThreadState_Current; - return tstate && (tstate == PyGILState_GetThisThreadState()); - } - #endif -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B4 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStrWithError(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStr(PyObject *dict, PyObject *name) { - PyObject *res = __Pyx_PyDict_GetItemStrWithError(dict, name); - if (res == NULL) PyErr_Clear(); - return res; -} -#elif PY_MAJOR_VERSION >= 3 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07020000) -#define __Pyx_PyDict_GetItemStrWithError PyDict_GetItemWithError -#define __Pyx_PyDict_GetItemStr PyDict_GetItem -#else -static CYTHON_INLINE PyObject * __Pyx_PyDict_GetItemStrWithError(PyObject *dict, PyObject *name) { -#if CYTHON_COMPILING_IN_PYPY - return PyDict_GetItem(dict, name); -#else - PyDictEntry *ep; - PyDictObject *mp = (PyDictObject*) dict; - long hash = ((PyStringObject *) name)->ob_shash; - assert(hash != -1); - ep = (mp->ma_lookup)(mp, name, hash); - if (ep == NULL) { - return NULL; - } - return ep->me_value; -#endif -} -#define __Pyx_PyDict_GetItemStr PyDict_GetItem -#endif -#if CYTHON_USE_TYPE_SLOTS - #define __Pyx_PyType_GetFlags(tp) (((PyTypeObject *)tp)->tp_flags) - #define __Pyx_PyType_HasFeature(type, feature) ((__Pyx_PyType_GetFlags(type) & (feature)) != 0) - #define __Pyx_PyObject_GetIterNextFunc(obj) (Py_TYPE(obj)->tp_iternext) -#else - #define __Pyx_PyType_GetFlags(tp) (PyType_GetFlags((PyTypeObject *)tp)) - #define __Pyx_PyType_HasFeature(type, feature) PyType_HasFeature(type, feature) - #define __Pyx_PyObject_GetIterNextFunc(obj) PyIter_Next -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_SetItemOnTypeDict(tp, k, v) PyObject_GenericSetAttr((PyObject*)tp, k, v) -#else - #define __Pyx_SetItemOnTypeDict(tp, k, v) PyDict_SetItem(tp->tp_dict, k, v) -#endif -#if CYTHON_USE_TYPE_SPECS && PY_VERSION_HEX >= 0x03080000 -#define __Pyx_PyHeapTypeObject_GC_Del(obj) {\ - PyTypeObject *type = Py_TYPE((PyObject*)obj);\ - assert(__Pyx_PyType_HasFeature(type, 
Py_TPFLAGS_HEAPTYPE));\ - PyObject_GC_Del(obj);\ - Py_DECREF(type);\ -} -#else -#define __Pyx_PyHeapTypeObject_GC_Del(obj) PyObject_GC_Del(obj) -#endif -#if CYTHON_COMPILING_IN_LIMITED_API - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GetLength(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_ReadChar(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((void)u, 1114111U) - #define __Pyx_PyUnicode_KIND(u) ((void)u, (0)) - #define __Pyx_PyUnicode_DATA(u) ((void*)u) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)k, PyUnicode_ReadChar((PyObject*)(d), i)) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GetLength(u)) -#elif PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #if PY_VERSION_HEX >= 0x030C0000 - #define __Pyx_PyUnicode_READY(op) (0) - #else - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #endif - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) ((int)PyUnicode_KIND(u)) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, (Py_UCS4) ch) - #if PY_VERSION_HEX >= 0x030C0000 - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #else - #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #endif - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535U : 1114111U) - #define __Pyx_PyUnicode_KIND(u) ((int)sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = (Py_UNICODE) ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #if !defined(PyUnicode_DecodeUnicodeEscape) - #define PyUnicode_DecodeUnicodeEscape(s, size, errors) PyUnicode_Decode(s, size, "unicode_escape", errors) - #endif - #if !defined(PyUnicode_Contains) || (PY_MAJOR_VERSION == 2 && PYPY_VERSION_NUM < 0x07030500) - #undef PyUnicode_Contains - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) - #endif - #if !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) - #endif - #if !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) - #endif -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#if CYTHON_COMPILING_IN_CPYTHON - #define __Pyx_PySequence_ListKeepNew(obj)\ - (likely(PyList_CheckExact(obj) && Py_REFCNT(obj) == 1) ? 
__Pyx_NewRef(obj) : PySequence_List(obj)) -#else - #define __Pyx_PySequence_ListKeepNew(obj) PySequence_List(obj) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) __Pyx_IS_TYPE(obj, &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_ITEM(o, i) PySequence_ITEM(o, i) - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) - #define __Pyx_PyTuple_SET_ITEM(o, i, v) (PyTuple_SET_ITEM(o, i, v), (0)) - #define __Pyx_PyList_SET_ITEM(o, i, v) (PyList_SET_ITEM(o, i, v), (0)) - #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_GET_SIZE(o) - #define __Pyx_PyList_GET_SIZE(o) PyList_GET_SIZE(o) - #define __Pyx_PySet_GET_SIZE(o) PySet_GET_SIZE(o) - #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) - #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_GET_SIZE(o) -#else - #define __Pyx_PySequence_ITEM(o, i) PySequence_GetItem(o, i) - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) - #define __Pyx_PyTuple_SET_ITEM(o, i, v) PyTuple_SetItem(o, i, v) - #define __Pyx_PyList_SET_ITEM(o, i, v) PyList_SetItem(o, i, v) - #define __Pyx_PyTuple_GET_SIZE(o) PyTuple_Size(o) - #define __Pyx_PyList_GET_SIZE(o) PyList_Size(o) - #define __Pyx_PySet_GET_SIZE(o) PySet_Size(o) - #define __Pyx_PyBytes_GET_SIZE(o) PyBytes_Size(o) - #define __Pyx_PyByteArray_GET_SIZE(o) PyByteArray_Size(o) -#endif -#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1 - #define __Pyx_PyImport_AddModuleRef(name) PyImport_AddModuleRef(name) -#else - static CYTHON_INLINE PyObject *__Pyx_PyImport_AddModuleRef(const char *name) { - PyObject *module = PyImport_AddModule(name); - Py_XINCREF(module); - return module; - } -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define __Pyx_Py3Int_Check(op) PyLong_Check(op) - #define __Pyx_Py3Int_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#else - #define __Pyx_Py3Int_Check(op) (PyLong_Check(op) || PyInt_Check(op)) - #define __Pyx_Py3Int_CheckExact(op) (PyLong_CheckExact(op) || PyInt_CheckExact(op)) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - 
#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) - #if !defined(_USE_MATH_DEFINES) - #define _USE_MATH_DEFINES - #endif -#endif -#include -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - -#define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } -#define __PYX_ERR(f_index, lineno, Ln_error) \ - { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } - -#ifdef CYTHON_EXTERN_C - #undef __PYX_EXTERN_C - #define __PYX_EXTERN_C CYTHON_EXTERN_C -#elif defined(__PYX_EXTERN_C) - #ifdef _MSC_VER - #pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.") - #else - #warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead. - #endif -#else - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__sqlalchemy__util___immutabledict_cy -#define __PYX_HAVE_API__sqlalchemy__util___immutabledict_cy -/* Early includes */ -#include -#include -#ifdef _OPENMP -#include -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined 
(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define __Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define __Pyx_sst_abs(value) __builtin_llabs(value) -#else - #define __Pyx_sst_abs(value) ((value<0) ? -value : value) -#endif -static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s); -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char*); -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*)(__pyx_uintptr_t) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -#define __Pyx_PyUnicode_FromOrdinal(o) PyUnicode_FromOrdinal((int)o) -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #if PY_VERSION_HEX >= 0x030C00A7 - #ifndef _PyLong_SIGN_MASK - #define _PyLong_SIGN_MASK 3 - #endif - #ifndef _PyLong_NON_SIZE_BITS - #define _PyLong_NON_SIZE_BITS 3 - #endif - #define __Pyx_PyLong_Sign(x) (((PyLongObject*)x)->long_value.lv_tag & _PyLong_SIGN_MASK) - #define __Pyx_PyLong_IsNeg(x) ((__Pyx_PyLong_Sign(x) & 2) != 0) - #define __Pyx_PyLong_IsNonNeg(x) (!__Pyx_PyLong_IsNeg(x)) - #define __Pyx_PyLong_IsZero(x) (__Pyx_PyLong_Sign(x) & 1) - #define __Pyx_PyLong_IsPos(x) (__Pyx_PyLong_Sign(x) == 0) - #define __Pyx_PyLong_CompactValueUnsigned(x) (__Pyx_PyLong_Digits(x)[0]) - #define __Pyx_PyLong_DigitCount(x) ((Py_ssize_t) (((PyLongObject*)x)->long_value.lv_tag >> _PyLong_NON_SIZE_BITS)) - #define __Pyx_PyLong_SignedDigitCount(x)\ - ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * __Pyx_PyLong_DigitCount(x)) - #if defined(PyUnstable_Long_IsCompact) && defined(PyUnstable_Long_CompactValue) - #define __Pyx_PyLong_IsCompact(x) PyUnstable_Long_IsCompact((PyLongObject*) x) - #define __Pyx_PyLong_CompactValue(x) PyUnstable_Long_CompactValue((PyLongObject*) x) - #else - #define __Pyx_PyLong_IsCompact(x) (((PyLongObject*)x)->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS)) - #define __Pyx_PyLong_CompactValue(x) ((1 - (Py_ssize_t) __Pyx_PyLong_Sign(x)) * (Py_ssize_t) __Pyx_PyLong_Digits(x)[0]) - #endif - typedef Py_ssize_t __Pyx_compact_pylong; - typedef size_t __Pyx_compact_upylong; - #else - #define __Pyx_PyLong_IsNeg(x) (Py_SIZE(x) < 0) - #define __Pyx_PyLong_IsNonNeg(x) (Py_SIZE(x) >= 0) - #define __Pyx_PyLong_IsZero(x) (Py_SIZE(x) == 0) - #define __Pyx_PyLong_IsPos(x) (Py_SIZE(x) > 0) - #define __Pyx_PyLong_CompactValueUnsigned(x) ((Py_SIZE(x) == 0) ? 0 : __Pyx_PyLong_Digits(x)[0]) - #define __Pyx_PyLong_DigitCount(x) __Pyx_sst_abs(Py_SIZE(x)) - #define __Pyx_PyLong_SignedDigitCount(x) Py_SIZE(x) - #define __Pyx_PyLong_IsCompact(x) (Py_SIZE(x) == 0 || Py_SIZE(x) == 1 || Py_SIZE(x) == -1) - #define __Pyx_PyLong_CompactValue(x)\ - ((Py_SIZE(x) == 0) ? (sdigit) 0 : ((Py_SIZE(x) < 0) ? 
-(sdigit)__Pyx_PyLong_Digits(x)[0] : (sdigit)__Pyx_PyLong_Digits(x)[0])) - typedef sdigit __Pyx_compact_pylong; - typedef digit __Pyx_compact_upylong; - #endif - #if PY_VERSION_HEX >= 0x030C00A5 - #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->long_value.ob_digit) - #else - #define __Pyx_PyLong_Digits(x) (((PyLongObject*)x)->ob_digit) - #endif -#endif -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -#include -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = (char) c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#include -static char* __PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ -static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -#if !CYTHON_USE_MODULE_STATE 
-static PyObject *__pyx_m = NULL; -#endif -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm = __FILE__; -static const char *__pyx_filename; - -/* #### Code section: filename_table ### */ - -static const char *__pyx_f[] = { - "lib/sqlalchemy/util/_immutabledict_cy.py", - "", - "type.pxd", -}; -/* #### Code section: utility_code_proto_before_types ### */ -/* ForceInitThreads.proto */ -#ifndef __PYX_FORCE_INIT_THREADS - #define __PYX_FORCE_INIT_THREADS 0 -#endif - -/* #### Code section: numeric_typedefs ### */ -/* #### Code section: complex_type_declarations ### */ -/* #### Code section: type_declarations ### */ - -/*--- Type declarations ---*/ -struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase; -struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict; - -/* "sqlalchemy/util/_immutabledict_cy.py":71 - * - * @cython.cclass - * class ImmutableDictBase(Dict[_KT, _VT]): # <<<<<<<<<<<<<< - * # NOTE: this method is required in 3.9 and speeds up the use case - * # ImmutableDictBase[str,int](a_dict) significantly - */ -struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase { - PyDictObject __pyx_base; -}; - - -/* "sqlalchemy/util/_immutabledict_cy.py":109 - * # a type checking section and other workaround for the crash - * @cython.cclass - * class immutabledict(Dict[_KT, _VT]): # <<<<<<<<<<<<<< - * """An immutable version of a dict.""" - * - */ -struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict { - PyDictObject __pyx_base; -}; - -/* #### Code section: utility_code_proto ### */ - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, Py_ssize_t); - void (*DECREF)(void*, PyObject*, Py_ssize_t); - void (*GOTREF)(void*, PyObject*, Py_ssize_t); - void (*GIVEREF)(void*, PyObject*, Py_ssize_t); - void* (*SetupContext)(const char*, Py_ssize_t, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__));\ - } - #define __Pyx_RefNannyFinishContextNogil() {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __Pyx_RefNannyFinishContext();\ - PyGILState_Release(__pyx_gilstate_save);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), (__LINE__), (__FILE__)) - #define __Pyx_RefNannyFinishContextNogil() __Pyx_RefNannyFinishContext() -#endif - #define __Pyx_RefNannyFinishContextNogil() {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __Pyx_RefNannyFinishContext();\ - PyGILState_Release(__pyx_gilstate_save);\ - } - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, 
(PyObject *)(r), (__LINE__)) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), (__LINE__)) - #define __Pyx_XINCREF(r) do { if((r) == NULL); else {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) == NULL); else {__Pyx_DECREF(r); }} while(0) - #define __Pyx_XGOTREF(r) do { if((r) == NULL); else {__Pyx_GOTREF(r); }} while(0) - #define __Pyx_XGIVEREF(r) do { if((r) == NULL); else {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContextNogil() - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - #define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_Py_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; Py_XDECREF(tmp);\ - } while (0) -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while (0) -#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* PyErrExceptionMatches.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); -#else -#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) -#endif - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#if PY_VERSION_HEX >= 0x030C00A6 -#define __Pyx_PyErr_Occurred() (__pyx_tstate->current_exception != NULL) -#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->current_exception ? 
(PyObject*) Py_TYPE(__pyx_tstate->current_exception) : (PyObject*) NULL) -#else -#define __Pyx_PyErr_Occurred() (__pyx_tstate->curexc_type != NULL) -#define __Pyx_PyErr_CurrentExceptionType() (__pyx_tstate->curexc_type) -#endif -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() (PyErr_Occurred() != NULL) -#define __Pyx_PyErr_CurrentExceptionType() PyErr_Occurred() -#endif - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A6 -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* PyObjectGetAttrStrNoError.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* TupleAndListFromArray.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyList_FromArray(PyObject *const *src, Py_ssize_t n); -static CYTHON_INLINE PyObject* __Pyx_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n); -#endif - -/* IncludeStringH.proto */ -#include - -/* BytesEquals.proto */ -static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); - -/* UnicodeEquals.proto */ -static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); - -/* fastcall.proto */ -#if CYTHON_AVOID_BORROWED_REFS - #define __Pyx_Arg_VARARGS(args, i) PySequence_GetItem(args, i) -#elif CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i) -#else - #define __Pyx_Arg_VARARGS(args, i) PyTuple_GetItem(args, i) -#endif -#if CYTHON_AVOID_BORROWED_REFS - #define __Pyx_Arg_NewRef_VARARGS(arg) __Pyx_NewRef(arg) - #define __Pyx_Arg_XDECREF_VARARGS(arg) Py_XDECREF(arg) -#else - #define __Pyx_Arg_NewRef_VARARGS(arg) arg - #define 
__Pyx_Arg_XDECREF_VARARGS(arg) -#endif -#define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds) -#define __Pyx_KwValues_VARARGS(args, nargs) NULL -#define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s) -#define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw) -#if CYTHON_METH_FASTCALL - #define __Pyx_Arg_FASTCALL(args, i) args[i] - #define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds) - #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs)) - static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s); -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000 - CYTHON_UNUSED static PyObject *__Pyx_KwargsAsDict_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues); - #else - #define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw) - #endif - #define __Pyx_Arg_NewRef_FASTCALL(arg) arg /* no-op, __Pyx_Arg_FASTCALL is direct and this needs - to have the same reference counting */ - #define __Pyx_Arg_XDECREF_FASTCALL(arg) -#else - #define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS - #define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS - #define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS - #define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS - #define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS - #define __Pyx_Arg_NewRef_FASTCALL(arg) __Pyx_Arg_NewRef_VARARGS(arg) - #define __Pyx_Arg_XDECREF_FASTCALL(arg) __Pyx_Arg_XDECREF_VARARGS(arg) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS -#define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start) -#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start) -#else -#define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop) -#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop) -#endif - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues, - PyObject **argnames[], - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, - const char* function_name); - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* PyObjectFormatSimple.proto */ -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ - PyObject_Format(s, f)) -#elif PY_MAJOR_VERSION < 3 - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ - likely(PyString_CheckExact(s)) ? PyUnicode_FromEncodedObject(s, NULL, "strict") :\ - PyObject_Format(s, f)) -#elif CYTHON_USE_TYPE_SLOTS - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? (Py_INCREF(s), s) :\ - likely(PyLong_CheckExact(s)) ? PyLong_Type.tp_repr(s) :\ - likely(PyFloat_CheckExact(s)) ? PyFloat_Type.tp_repr(s) :\ - PyObject_Format(s, f)) -#else - #define __Pyx_PyObject_FormatSimple(s, f) (\ - likely(PyUnicode_CheckExact(s)) ? 
(Py_INCREF(s), s) :\ - PyObject_Format(s, f)) -#endif - -/* UnicodeConcatInPlace.proto */ -# if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 - #if CYTHON_REFNANNY - #define __Pyx_PyUnicode_ConcatInPlace(left, right) __Pyx_PyUnicode_ConcatInPlaceImpl(&left, right, __pyx_refnanny) - #else - #define __Pyx_PyUnicode_ConcatInPlace(left, right) __Pyx_PyUnicode_ConcatInPlaceImpl(&left, right) - #endif - static CYTHON_INLINE PyObject *__Pyx_PyUnicode_ConcatInPlaceImpl(PyObject **p_left, PyObject *right - #if CYTHON_REFNANNY - , void* __pyx_refnanny - #endif - ); -#else -#define __Pyx_PyUnicode_ConcatInPlace __Pyx_PyUnicode_Concat -#endif -#define __Pyx_PyUnicode_ConcatInPlaceSafe(left, right) ((unlikely((left) == Py_None) || unlikely((right) == Py_None)) ?\ - PyNumber_InPlaceAdd(left, right) : __Pyx_PyUnicode_ConcatInPlace(left, right)) - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#if !CYTHON_VECTORCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); -#endif -#define __Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif -#if !CYTHON_VECTORCALL -#if PY_VERSION_HEX >= 0x03080000 - #include "frameobject.h" -#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API - #ifndef Py_BUILD_CORE - #define Py_BUILD_CORE 1 - #endif - #include "internal/pycore_frame.h" -#endif - #define __Pxy_PyFrame_Initialize_Offsets() - #define __Pyx_PyFrame_GetLocalsplus(frame) ((frame)->f_localsplus) -#else - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif -#endif -#endif - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectFastCall.proto */ -#define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, (size_t)(nargs), NULL) -static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* RaiseException.proto */ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = 
__PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* GetModuleGlobalName.proto */ -#if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) do {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ - (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ - __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} while(0) -#define __Pyx_GetModuleGlobalNameUncached(var, name) do {\ - PY_UINT64_T __pyx_dict_version;\ - PyObject *__pyx_dict_cached_value;\ - (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} while(0) -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); -#else -#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) -#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); -#endif - -/* KeywordStringCheck.proto */ -static int __Pyx_CheckKeywordStrings(PyObject *kw, const char* function_name, int kw_allowed); - -/* GetAttr3.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); - -/* RaiseUnexpectedTypeError.proto */ -static int __Pyx_RaiseUnexpectedTypeError(const char *expected, PyObject *obj); - -/* JoinPyUnicode.proto */ -static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_count, Py_ssize_t result_ulength, - Py_UCS4 max_char); - -/* PyObjectCallNoArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); - -/* PySequenceContains.proto */ -static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { - int result = PySequence_Contains(seq, item); - return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); -} - -/* Import.proto */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); - -/* ImportFrom.proto */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); - -/* GetAttr.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); - -/* HasAttr.proto */ -static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); - -/* GetItemInt.proto */ -#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ - (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ - __Pyx_GetItemInt_Generic(o, to_py_func(i)))) -#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ - (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ - __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ - (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, - int wraparound, int boundscheck); -static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, - int is_list, int wraparound, int boundscheck); - -/* IncludeStructmemberH.proto */ -#include - -/* FixUpExtensionType.proto */ -#if CYTHON_USE_TYPE_SPECS -static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type); -#endif - -/* FormatTypeName.proto */ -#if CYTHON_COMPILING_IN_LIMITED_API -typedef PyObject *__Pyx_TypeName; -#define __Pyx_FMT_TYPENAME "%U" -static __Pyx_TypeName __Pyx_PyType_GetName(PyTypeObject* tp); -#define __Pyx_DECREF_TypeName(obj) Py_XDECREF(obj) -#else -typedef const char *__Pyx_TypeName; -#define __Pyx_FMT_TYPENAME "%.200s" -#define __Pyx_PyType_GetName(tp) ((tp)->tp_name) -#define __Pyx_DECREF_TypeName(obj) -#endif - -/* ValidateExternBase.proto */ -static int __Pyx_validate_extern_base(PyTypeObject *base); - -/* PyObjectGetMethod.proto */ -static int __Pyx_PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); - -/* PyObjectCallMethod0.proto */ -static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); - -/* ValidateBasesTuple.proto */ -#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_LIMITED_API || CYTHON_USE_TYPE_SPECS -static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffset, PyObject *bases); -#endif - -/* PyType_Ready.proto */ -CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t); - -/* PyObject_GenericGetAttrNoDict.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr -#endif - -/* PyObject_GenericGetAttr.proto */ -#if CYTHON_USE_TYPE_SLOTS && 
CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr -#endif - -/* SetupReduce.proto */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_setup_reduce(PyObject* type_obj); -#endif - -/* TypeImport.proto */ -#ifndef __PYX_HAVE_RT_ImportType_proto_3_0_11 -#define __PYX_HAVE_RT_ImportType_proto_3_0_11 -#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L -#include -#endif -#if (defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L) || __cplusplus >= 201103L -#define __PYX_GET_STRUCT_ALIGNMENT_3_0_11(s) alignof(s) -#else -#define __PYX_GET_STRUCT_ALIGNMENT_3_0_11(s) sizeof(void*) -#endif -enum __Pyx_ImportType_CheckSize_3_0_11 { - __Pyx_ImportType_CheckSize_Error_3_0_11 = 0, - __Pyx_ImportType_CheckSize_Warn_3_0_11 = 1, - __Pyx_ImportType_CheckSize_Ignore_3_0_11 = 2 -}; -static PyTypeObject *__Pyx_ImportType_3_0_11(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_11 check_size); -#endif - -/* FetchSharedCythonModule.proto */ -static PyObject *__Pyx_FetchSharedCythonABIModule(void); - -/* FetchCommonType.proto */ -#if !CYTHON_USE_TYPE_SPECS -static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); -#else -static PyTypeObject* __Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases); -#endif - -/* PyMethodNew.proto */ -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { - PyObject *typesModule=NULL, *methodType=NULL, *result=NULL; - CYTHON_UNUSED_VAR(typ); - if (!self) - return __Pyx_NewRef(func); - typesModule = PyImport_ImportModule("types"); - if (!typesModule) return NULL; - methodType = PyObject_GetAttrString(typesModule, "MethodType"); - Py_DECREF(typesModule); - if (!methodType) return NULL; - result = PyObject_CallFunctionObjArgs(methodType, func, self, NULL); - Py_DECREF(methodType); - return result; -} -#elif PY_MAJOR_VERSION >= 3 -static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *typ) { - CYTHON_UNUSED_VAR(typ); - if (!self) - return __Pyx_NewRef(func); - return PyMethod_New(func, self); -} -#else - #define __Pyx_PyMethod_New PyMethod_New -#endif - -/* PyVectorcallFastCallDict.proto */ -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw); -#endif - -/* CythonFunctionShared.proto */ -#define __Pyx_CyFunction_USED -#define __Pyx_CYFUNCTION_STATICMETHOD 0x01 -#define __Pyx_CYFUNCTION_CLASSMETHOD 0x02 -#define __Pyx_CYFUNCTION_CCLASS 0x04 -#define __Pyx_CYFUNCTION_COROUTINE 0x08 -#define __Pyx_CyFunction_GetClosure(f)\ - (((__pyx_CyFunctionObject *) (f))->func_closure) -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - #define __Pyx_CyFunction_GetClassObj(f)\ - (((__pyx_CyFunctionObject *) (f))->func_classobj) -#else - #define __Pyx_CyFunction_GetClassObj(f)\ - ((PyObject*) ((PyCMethodObject *) (f))->mm_class) -#endif -#define __Pyx_CyFunction_SetClassObj(f, classobj)\ - __Pyx__CyFunction_SetClassObj((__pyx_CyFunctionObject *) (f), (classobj)) -#define __Pyx_CyFunction_Defaults(type, f)\ - ((type *)(((__pyx_CyFunctionObject *) (f))->defaults)) -#define __Pyx_CyFunction_SetDefaultsGetter(f, g)\ - ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g) 
-typedef struct { -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject_HEAD - PyObject *func; -#elif PY_VERSION_HEX < 0x030900B1 - PyCFunctionObject func; -#else - PyCMethodObject func; -#endif -#if CYTHON_BACKPORT_VECTORCALL - __pyx_vectorcallfunc func_vectorcall; -#endif -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API - PyObject *func_weakreflist; -#endif - PyObject *func_dict; - PyObject *func_name; - PyObject *func_qualname; - PyObject *func_doc; - PyObject *func_globals; - PyObject *func_code; - PyObject *func_closure; -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - PyObject *func_classobj; -#endif - void *defaults; - int defaults_pyobjects; - size_t defaults_size; - int flags; - PyObject *defaults_tuple; - PyObject *defaults_kwdict; - PyObject *(*defaults_getter)(PyObject *); - PyObject *func_annotations; - PyObject *func_is_coroutine; -} __pyx_CyFunctionObject; -#undef __Pyx_CyOrPyCFunction_Check -#define __Pyx_CyFunction_Check(obj) __Pyx_TypeCheck(obj, __pyx_CyFunctionType) -#define __Pyx_CyOrPyCFunction_Check(obj) __Pyx_TypeCheck2(obj, __pyx_CyFunctionType, &PyCFunction_Type) -#define __Pyx_CyFunction_CheckExact(obj) __Pyx_IS_TYPE(obj, __pyx_CyFunctionType) -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc); -#undef __Pyx_IsSameCFunction -#define __Pyx_IsSameCFunction(func, cfunc) __Pyx__IsSameCyOrCFunction(func, cfunc) -static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, - int flags, PyObject* qualname, - PyObject *closure, - PyObject *module, PyObject *globals, - PyObject* code); -static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj); -static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m, - size_t size, - int pyobjects); -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m, - PyObject *tuple); -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m, - PyObject *dict); -static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, - PyObject *dict); -static int __pyx_CyFunction_init(PyObject *module); -#if CYTHON_METH_FASTCALL -static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); -#if CYTHON_BACKPORT_VECTORCALL -#define __Pyx_CyFunction_func_vectorcall(f) (((__pyx_CyFunctionObject*)f)->func_vectorcall) -#else -#define __Pyx_CyFunction_func_vectorcall(f) (((PyCFunctionObject*)f)->vectorcall) -#endif -#endif - -/* CythonFunction.proto */ -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, - int flags, PyObject* qualname, - PyObject *closure, - PyObject *module, PyObject *globals, - PyObject* code); - -/* SetNameInClass.proto */ -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 -#define __Pyx_SetNameInClass(ns, name, value)\ - (likely(PyDict_CheckExact(ns)) ? 
_PyDict_SetItem_KnownHash(ns, name, value, ((PyASCIIObject *) name)->hash) : PyObject_SetItem(ns, name, value)) -#elif CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_SetNameInClass(ns, name, value)\ - (likely(PyDict_CheckExact(ns)) ? PyDict_SetItem(ns, name, value) : PyObject_SetItem(ns, name, value)) -#else -#define __Pyx_SetNameInClass(ns, name, value) PyObject_SetItem(ns, name, value) -#endif - -/* CalculateMetaclass.proto */ -static PyObject *__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases); - -/* PyObjectCall2Args.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); - -/* PyObjectLookupSpecial.proto */ -#if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS -#define __Pyx_PyObject_LookupSpecialNoError(obj, attr_name) __Pyx__PyObject_LookupSpecial(obj, attr_name, 0) -#define __Pyx_PyObject_LookupSpecial(obj, attr_name) __Pyx__PyObject_LookupSpecial(obj, attr_name, 1) -static CYTHON_INLINE PyObject* __Pyx__PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name, int with_error); -#else -#define __Pyx_PyObject_LookupSpecialNoError(o,n) __Pyx_PyObject_GetAttrStrNoError(o,n) -#define __Pyx_PyObject_LookupSpecial(o,n) __Pyx_PyObject_GetAttrStr(o,n) -#endif - -/* Py3ClassCreate.proto */ -static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, PyObject *qualname, - PyObject *mkw, PyObject *modname, PyObject *doc); -static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, PyObject *dict, - PyObject *mkw, int calculate_metaclass, int allow_py2_metaclass); - -/* ClassMethod.proto */ -#include "descrobject.h" -CYTHON_UNUSED static PyObject* __Pyx_Method_ClassMethod(PyObject *method); - -/* GetNameInClass.proto */ -#define __Pyx_GetNameInClass(var, nmspace, name) (var) = __Pyx__GetNameInClass(nmspace, name) -static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name); - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -#if !CYTHON_COMPILING_IN_LIMITED_API -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); -#endif - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* GCCDiagnostics.proto */ -#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) -#define __Pyx_HAS_GCC_DIAGNOSTIC -#endif - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -#define __Pyx_TypeCheck2(obj, type1, type2) __Pyx_IsAnySubtype2(Py_TYPE(obj), (PyTypeObject *)type1, (PyTypeObject *)type2) -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_TypeCheck2(obj, type1, type2) (PyObject_TypeCheck(obj, (PyTypeObject *)type1) || PyObject_TypeCheck(obj, (PyTypeObject *)type2)) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyErr_ExceptionMatches2(err1, err2) __Pyx_PyErr_GivenExceptionMatches2(__Pyx_PyErr_CurrentExceptionType(), err1, err2) -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* CheckBinaryVersion.proto */ -static unsigned long __Pyx_get_runtime_version(void); -static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - -/* #### Code section: module_declarations ### */ - -/* Module declarations from "cython" */ - -/* Module declarations from "libc.string" */ - -/* Module declarations from "libc.stdio" */ - -/* Module declarations from "__builtin__" */ - -/* Module declarations from "cpython.type" */ - -/* Module declarations from "cpython" */ - -/* Module declarations from "cpython.object" */ - -/* Module declarations from "cpython.pyport" */ - -/* Module declarations from "cpython.dict" */ - -/* Module declarations from "sqlalchemy.util._immutabledict_cy" */ -static PyObject 
*__pyx_f_10sqlalchemy_4util_17_immutabledict_cy___pyx_unpickle_ImmutableDictBase__set_state(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *, PyObject *); /*proto*/ -/* #### Code section: typeinfo ### */ -/* #### Code section: before_global_var ### */ -#define __Pyx_MODULE_NAME "sqlalchemy.util._immutabledict_cy" -extern int __pyx_module_is_main_sqlalchemy__util___immutabledict_cy; -int __pyx_module_is_main_sqlalchemy__util___immutabledict_cy = 0; - -/* Implementation of "sqlalchemy.util._immutabledict_cy" */ -/* #### Code section: global_var ### */ -static PyObject *__pyx_builtin_TypeError; -/* #### Code section: string_decls ### */ -static const char __pyx_k_[] = ")"; -static const char __pyx_k_d[] = "d"; -static const char __pyx_k_KT[] = "_KT"; -static const char __pyx_k_VT[] = "_VT"; -static const char __pyx_k__3[] = "."; -static const char __pyx_k__4[] = "?"; -static const char __pyx_k_gc[] = "gc"; -static const char __pyx_k_kw[] = "kw"; -static const char __pyx_k_or[] = "__or__"; -static const char __pyx_k_Any[] = "Any"; -static const char __pyx_k_arg[] = "arg"; -static const char __pyx_k_cls[] = "cls"; -static const char __pyx_k_doc[] = "__doc__"; -static const char __pyx_k_key[] = "key"; -static const char __pyx_k_new[] = "__new__"; -static const char __pyx_k_obj[] = "obj"; -static const char __pyx_k_pop[] = "pop"; -static const char __pyx_k_ror[] = "__ror__"; -static const char __pyx_k_Dict[] = "Dict"; -static const char __pyx_k_Self[] = "Self"; -static const char __pyx_k_bool[] = "bool"; -static const char __pyx_k_copy[] = "copy"; -static const char __pyx_k_dict[] = "__dict__"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_repr[] = "__repr__"; -static const char __pyx_k_self[] = "self"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_bound[] = "bound"; -static const char __pyx_k_class[] = "__class__"; -static const char __pyx_k_clear[] = "clear"; -static const char __pyx_k_dicts[] = "dicts"; -static const char __pyx_k_other[] = "other"; -static const char __pyx_k_slots[] = "__slots__"; -static const char __pyx_k_state[] = "state"; -static const char __pyx_k_super[] = "super"; -static const char __pyx_k_union[] = "union"; -static const char __pyx_k_value[] = "value"; -static const char __pyx_k_dict_2[] = "_dict"; -static const char __pyx_k_enable[] = "enable"; -static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_module[] = "__module__"; -static const char __pyx_k_object[] = "object"; -static const char __pyx_k_pickle[] = "pickle"; -static const char __pyx_k_reduce[] = "__reduce__"; -static const char __pyx_k_result[] = "result"; -static const char __pyx_k_return[] = "return"; -static const char __pyx_k_typing[] = "typing"; -static const char __pyx_k_update[] = "update"; -static const char __pyx_k_Mapping[] = "Mapping"; -static const char __pyx_k_TypeVar[] = "TypeVar"; -static const char __pyx_k_default[] = "default"; -static const char __pyx_k_delitem[] = "__delitem__"; -static const char __pyx_k_disable[] = "disable"; -static const char __pyx_k_popitem[] = "popitem"; -static const char __pyx_k_prepare[] = "__prepare__"; -static const char __pyx_k_setattr[] = "__setattr__"; -static const char __pyx_k_setitem[] = "__setitem__"; -static const char __pyx_k_Hashable[] = "Hashable"; -static const char __pyx_k_NoReturn[] = "NoReturn"; -static const char __pyx_k_Optional[] = "Optional"; -static const char __pyx_k_getstate[] = 
"__getstate__"; -static const char __pyx_k_pyx_type[] = "__pyx_type"; -static const char __pyx_k_qualname[] = "__qualname__"; -static const char __pyx_k_readonly[] = "_readonly"; -static const char __pyx_k_set_name[] = "__set_name__"; -static const char __pyx_k_setstate[] = "__setstate__"; -static const char __pyx_k_TypeError[] = "TypeError"; -static const char __pyx_k_isenabled[] = "isenabled"; -static const char __pyx_k_metaclass[] = "__metaclass__"; -static const char __pyx_k_pyx_state[] = "__pyx_state"; -static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; -static const char __pyx_k_type_Self[] = "type[Self]"; -static const char __pyx_k_merge_with[] = "merge_with"; -static const char __pyx_k_pyx_result[] = "__pyx_result"; -static const char __pyx_k_setdefault[] = "setdefault"; -static const char __pyx_k_PickleError[] = "PickleError"; -static const char __pyx_k_is_compiled[] = "_is_compiled"; -static const char __pyx_k_Optional_Any[] = "Optional[Any]"; -static const char __pyx_k_immutable_fn[] = "_immutable_fn"; -static const char __pyx_k_is_coroutine[] = "_is_coroutine"; -static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; -static const char __pyx_k_stringsource[] = ""; -static const char __pyx_k_use_setstate[] = "use_setstate"; -static const char __pyx_k_class_getitem[] = "__class_getitem__"; -static const char __pyx_k_immutabledict[] = "immutabledict("; -static const char __pyx_k_init_subclass[] = "__init_subclass__"; -static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; -static const char __pyx_k_immutabledict_2[] = "immutabledict"; -static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; -static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; -static const char __pyx_k_ImmutableDictBase[] = "ImmutableDictBase"; -static const char __pyx_k_ReadOnlyContainer[] = "ReadOnlyContainer"; -static const char __pyx_k_immutabledict_pop[] = "immutabledict.pop"; -static const char __pyx_k_asyncio_coroutines[] = "asyncio.coroutines"; -static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; -static const char __pyx_k_immutabledict_copy[] = "immutabledict.copy"; -static const char __pyx_k_immutabledict_clear[] = "immutabledict.clear"; -static const char __pyx_k_immutabledict_union[] = "immutabledict.union"; -static const char __pyx_k_object_is_immutable[] = " object is immutable"; -static const char __pyx_k_immutabledict_update[] = "immutabledict.update"; -static const char __pyx_k_ImmutableDictBase_pop[] = "ImmutableDictBase.pop"; -static const char __pyx_k_immutabledict__KT__VT[] = "immutabledict[_KT, _VT]"; -static const char __pyx_k_immutabledict_popitem[] = "immutabledict.popitem"; -static const char __pyx_k_immutabledict___reduce[] = "immutabledict.__reduce__"; -static const char __pyx_k_ImmutableDictBase_clear[] = "ImmutableDictBase.clear"; -static const char __pyx_k_ImmutableDictBase_update[] = "ImmutableDictBase.update"; -static const char __pyx_k_Optional_Mapping__KT__VT[] = "Optional[Mapping[_KT, _VT]]"; -static const char __pyx_k_immutabledict_merge_with[] = "immutabledict.merge_with"; -static const char __pyx_k_immutabledict_setdefault[] = "immutabledict.setdefault"; -static const char __pyx_k_ImmutableDictBase_popitem[] = "ImmutableDictBase.popitem"; -static const char __pyx_k_ReadOnlyContainer___delitem[] = "ReadOnlyContainer.__delitem__"; -static const char __pyx_k_ReadOnlyContainer___setattr[] = "ReadOnlyContainer.__setattr__"; -static const char __pyx_k_ReadOnlyContainer___setitem[] = "ReadOnlyContainer.__setitem__"; 
-static const char __pyx_k_ReadOnlyContainer__readonly[] = "ReadOnlyContainer._readonly"; -static const char __pyx_k_ImmutableDictBase_setdefault[] = "ImmutableDictBase.setdefault"; -static const char __pyx_k_immutabledict___class_getitem[] = "immutabledict.__class_getitem__"; -static const char __pyx_k_pyx_unpickle_ImmutableDictBase[] = "__pyx_unpickle_ImmutableDictBase"; -static const char __pyx_k_object_is_immutable_and_or_read[] = " object is immutable and/or readonly"; -static const char __pyx_k_ImmutableDictBase___class_getite[] = "ImmutableDictBase.__class_getitem__"; -static const char __pyx_k_ImmutableDictBase___reduce_cytho[] = "ImmutableDictBase.__reduce_cython__"; -static const char __pyx_k_ImmutableDictBase___setstate_cyt[] = "ImmutableDictBase.__setstate_cython__"; -static const char __pyx_k_Incompatible_checksums_0x_x_vs_0[] = "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())"; -static const char __pyx_k_lib_sqlalchemy_util__immutabledi[] = "lib/sqlalchemy/util/_immutabledict_cy.py"; -static const char __pyx_k_sqlalchemy_util__immutabledict_c[] = "sqlalchemy.util._immutabledict_cy"; -/* #### Code section: decls ### */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_2_immutable_fn(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_obj); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer__readonly(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_2__delitem__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_4__setitem__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_6__setattr__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase___class_getitem__(PyTypeObject *__pyx_v_cls, CYTHON_UNUSED PyObject *__pyx_v_key); /* proto */ -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_2__delitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key); /* proto */ -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_4__setitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_6__setattr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_8clear(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self); /* proto */ -static PyObject 
*__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_10pop(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_12popitem(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_14setdefault(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_16update(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_arg, CYTHON_UNUSED PyObject *__pyx_v_kw); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_18__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_20__setstate_cython__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict___class_getitem__(PyTypeObject *__pyx_v_cls, CYTHON_UNUSED PyObject *__pyx_v_key); /* proto */ -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_2__delitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key); /* proto */ -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_4__setitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_6__setattr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_8clear(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_10pop(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_12popitem(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_14setdefault(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_16update(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_arg, CYTHON_UNUSED PyObject *__pyx_v_kw); /* proto */ 
-static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_18__repr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_20union(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_22merge_with(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v_dicts); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_24copy(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_26__reduce__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_28__ior__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v__immutabledict__value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_30__or__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v__immutabledict__value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_32__ror__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v__immutabledict__value); /* proto */ -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_4__pyx_unpickle_ImmutableDictBase(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ -/* #### Code section: late_includes ### */ -/* #### Code section: module_state ### */ -typedef struct { - PyObject *__pyx_d; - PyObject *__pyx_b; - PyObject *__pyx_cython_runtime; - PyObject *__pyx_empty_tuple; - PyObject *__pyx_empty_bytes; - PyObject *__pyx_empty_unicode; - #ifdef __Pyx_CyFunction_USED - PyTypeObject *__pyx_CyFunctionType; - #endif - #ifdef __Pyx_FusedFunction_USED - PyTypeObject *__pyx_FusedFunctionType; - #endif - #ifdef __Pyx_Generator_USED - PyTypeObject *__pyx_GeneratorType; - #endif - #ifdef __Pyx_IterableCoroutine_USED - PyTypeObject *__pyx_IterableCoroutineType; - #endif - #ifdef __Pyx_Coroutine_USED - PyTypeObject *__pyx_CoroutineAwaitType; - #endif - #ifdef __Pyx_Coroutine_USED - PyTypeObject *__pyx_CoroutineType; - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - #endif - PyTypeObject *__pyx_ptype_7cpython_4type_type; - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - #endif - #if CYTHON_USE_MODULE_STATE - PyObject *__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase; - PyObject *__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict; - #endif - PyTypeObject *__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase; - PyTypeObject *__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict; - PyObject *__pyx_kp_u_; - PyObject *__pyx_n_s_Any; - PyObject *__pyx_n_s_Dict; 
- PyObject *__pyx_n_s_Hashable; - PyObject *__pyx_n_s_ImmutableDictBase; - PyObject *__pyx_n_s_ImmutableDictBase___class_getite; - PyObject *__pyx_n_s_ImmutableDictBase___reduce_cytho; - PyObject *__pyx_n_s_ImmutableDictBase___setstate_cyt; - PyObject *__pyx_n_s_ImmutableDictBase_clear; - PyObject *__pyx_n_s_ImmutableDictBase_pop; - PyObject *__pyx_n_s_ImmutableDictBase_popitem; - PyObject *__pyx_n_s_ImmutableDictBase_setdefault; - PyObject *__pyx_n_s_ImmutableDictBase_update; - PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0; - PyObject *__pyx_n_s_KT; - PyObject *__pyx_n_u_KT; - PyObject *__pyx_n_s_Mapping; - PyObject *__pyx_n_s_NoReturn; - PyObject *__pyx_n_s_Optional; - PyObject *__pyx_kp_s_Optional_Any; - PyObject *__pyx_kp_s_Optional_Mapping__KT__VT; - PyObject *__pyx_n_s_PickleError; - PyObject *__pyx_n_s_ReadOnlyContainer; - PyObject *__pyx_n_s_ReadOnlyContainer___delitem; - PyObject *__pyx_n_s_ReadOnlyContainer___setattr; - PyObject *__pyx_n_s_ReadOnlyContainer___setitem; - PyObject *__pyx_n_s_ReadOnlyContainer__readonly; - PyObject *__pyx_n_s_Self; - PyObject *__pyx_n_s_TypeError; - PyObject *__pyx_n_s_TypeVar; - PyObject *__pyx_n_s_VT; - PyObject *__pyx_n_u_VT; - PyObject *__pyx_kp_u__3; - PyObject *__pyx_n_s__4; - PyObject *__pyx_n_s_arg; - PyObject *__pyx_n_s_asyncio_coroutines; - PyObject *__pyx_n_s_bool; - PyObject *__pyx_n_s_bound; - PyObject *__pyx_n_s_class; - PyObject *__pyx_n_s_class_getitem; - PyObject *__pyx_n_s_clear; - PyObject *__pyx_n_s_cline_in_traceback; - PyObject *__pyx_n_s_cls; - PyObject *__pyx_n_s_copy; - PyObject *__pyx_n_s_d; - PyObject *__pyx_n_s_default; - PyObject *__pyx_n_s_delitem; - PyObject *__pyx_n_s_dict; - PyObject *__pyx_n_s_dict_2; - PyObject *__pyx_n_s_dicts; - PyObject *__pyx_kp_u_disable; - PyObject *__pyx_n_s_doc; - PyObject *__pyx_kp_u_enable; - PyObject *__pyx_kp_u_gc; - PyObject *__pyx_n_s_getstate; - PyObject *__pyx_n_s_immutable_fn; - PyObject *__pyx_kp_u_immutabledict; - PyObject *__pyx_n_s_immutabledict_2; - PyObject *__pyx_kp_s_immutabledict__KT__VT; - PyObject *__pyx_n_s_immutabledict___class_getitem; - PyObject *__pyx_n_s_immutabledict___reduce; - PyObject *__pyx_n_s_immutabledict_clear; - PyObject *__pyx_n_s_immutabledict_copy; - PyObject *__pyx_n_s_immutabledict_merge_with; - PyObject *__pyx_n_s_immutabledict_pop; - PyObject *__pyx_n_s_immutabledict_popitem; - PyObject *__pyx_n_s_immutabledict_setdefault; - PyObject *__pyx_n_s_immutabledict_union; - PyObject *__pyx_n_s_immutabledict_update; - PyObject *__pyx_n_s_import; - PyObject *__pyx_n_s_init_subclass; - PyObject *__pyx_n_s_is_compiled; - PyObject *__pyx_n_s_is_coroutine; - PyObject *__pyx_kp_u_isenabled; - PyObject *__pyx_n_s_key; - PyObject *__pyx_n_s_kw; - PyObject *__pyx_kp_s_lib_sqlalchemy_util__immutabledi; - PyObject *__pyx_n_s_main; - PyObject *__pyx_n_s_merge_with; - PyObject *__pyx_n_s_metaclass; - PyObject *__pyx_n_s_module; - PyObject *__pyx_n_s_name; - PyObject *__pyx_n_s_new; - PyObject *__pyx_n_s_obj; - PyObject *__pyx_n_s_object; - PyObject *__pyx_kp_u_object_is_immutable; - PyObject *__pyx_kp_u_object_is_immutable_and_or_read; - PyObject *__pyx_n_s_or; - PyObject *__pyx_n_s_other; - PyObject *__pyx_n_s_pickle; - PyObject *__pyx_n_s_pop; - PyObject *__pyx_n_s_popitem; - PyObject *__pyx_n_s_prepare; - PyObject *__pyx_n_s_pyx_PickleError; - PyObject *__pyx_n_s_pyx_checksum; - PyObject *__pyx_n_s_pyx_result; - PyObject *__pyx_n_s_pyx_state; - PyObject *__pyx_n_s_pyx_type; - PyObject *__pyx_n_s_pyx_unpickle_ImmutableDictBase; - PyObject *__pyx_n_s_qualname; 
- PyObject *__pyx_n_s_readonly; - PyObject *__pyx_n_s_reduce; - PyObject *__pyx_n_s_reduce_cython; - PyObject *__pyx_n_s_reduce_ex; - PyObject *__pyx_n_s_repr; - PyObject *__pyx_n_s_result; - PyObject *__pyx_n_s_return; - PyObject *__pyx_n_s_ror; - PyObject *__pyx_n_s_self; - PyObject *__pyx_n_s_set_name; - PyObject *__pyx_n_s_setattr; - PyObject *__pyx_n_s_setdefault; - PyObject *__pyx_n_s_setitem; - PyObject *__pyx_n_s_setstate; - PyObject *__pyx_n_s_setstate_cython; - PyObject *__pyx_n_s_slots; - PyObject *__pyx_n_s_sqlalchemy_util__immutabledict_c; - PyObject *__pyx_n_s_state; - PyObject *__pyx_kp_s_stringsource; - PyObject *__pyx_n_s_super; - PyObject *__pyx_n_s_test; - PyObject *__pyx_kp_s_type_Self; - PyObject *__pyx_n_s_typing; - PyObject *__pyx_n_s_union; - PyObject *__pyx_n_s_update; - PyObject *__pyx_n_s_use_setstate; - PyObject *__pyx_n_s_value; - PyObject *__pyx_int_222419149; - PyObject *__pyx_int_228825662; - PyObject *__pyx_int_238750788; - PyObject *__pyx_tuple__2; - PyObject *__pyx_tuple__6; - PyObject *__pyx_tuple__8; - PyObject *__pyx_tuple__10; - PyObject *__pyx_tuple__12; - PyObject *__pyx_tuple__15; - PyObject *__pyx_tuple__16; - PyObject *__pyx_tuple__17; - PyObject *__pyx_tuple__20; - PyObject *__pyx_tuple__22; - PyObject *__pyx_tuple__25; - PyObject *__pyx_tuple__27; - PyObject *__pyx_tuple__29; - PyObject *__pyx_tuple__37; - PyObject *__pyx_tuple__39; - PyObject *__pyx_tuple__43; - PyObject *__pyx_codeobj__5; - PyObject *__pyx_codeobj__7; - PyObject *__pyx_codeobj__9; - PyObject *__pyx_codeobj__11; - PyObject *__pyx_codeobj__13; - PyObject *__pyx_codeobj__14; - PyObject *__pyx_codeobj__18; - PyObject *__pyx_codeobj__19; - PyObject *__pyx_codeobj__21; - PyObject *__pyx_codeobj__23; - PyObject *__pyx_codeobj__24; - PyObject *__pyx_codeobj__26; - PyObject *__pyx_codeobj__28; - PyObject *__pyx_codeobj__30; - PyObject *__pyx_codeobj__31; - PyObject *__pyx_codeobj__32; - PyObject *__pyx_codeobj__33; - PyObject *__pyx_codeobj__34; - PyObject *__pyx_codeobj__35; - PyObject *__pyx_codeobj__36; - PyObject *__pyx_codeobj__38; - PyObject *__pyx_codeobj__40; - PyObject *__pyx_codeobj__41; - PyObject *__pyx_codeobj__42; - PyObject *__pyx_codeobj__44; -} __pyx_mstate; - -#if CYTHON_USE_MODULE_STATE -#ifdef __cplusplus -namespace { - extern struct PyModuleDef __pyx_moduledef; -} /* anonymous namespace */ -#else -static struct PyModuleDef __pyx_moduledef; -#endif - -#define __pyx_mstate(o) ((__pyx_mstate *)__Pyx_PyModule_GetState(o)) - -#define __pyx_mstate_global (__pyx_mstate(PyState_FindModule(&__pyx_moduledef))) - -#define __pyx_m (PyState_FindModule(&__pyx_moduledef)) -#else -static __pyx_mstate __pyx_mstate_global_static = -#ifdef __cplusplus - {}; -#else - {0}; -#endif -static __pyx_mstate *__pyx_mstate_global = &__pyx_mstate_global_static; -#endif -/* #### Code section: module_state_clear ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_clear(PyObject *m) { - __pyx_mstate *clear_module_state = __pyx_mstate(m); - if (!clear_module_state) return 0; - Py_CLEAR(clear_module_state->__pyx_d); - Py_CLEAR(clear_module_state->__pyx_b); - Py_CLEAR(clear_module_state->__pyx_cython_runtime); - Py_CLEAR(clear_module_state->__pyx_empty_tuple); - Py_CLEAR(clear_module_state->__pyx_empty_bytes); - Py_CLEAR(clear_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_CLEAR(clear_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_CLEAR(clear_module_state->__pyx_FusedFunctionType); - #endif - 
Py_CLEAR(clear_module_state->__pyx_ptype_7cpython_4type_type); - Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); - Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); - Py_CLEAR(clear_module_state->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); - Py_CLEAR(clear_module_state->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); - Py_CLEAR(clear_module_state->__pyx_kp_u_); - Py_CLEAR(clear_module_state->__pyx_n_s_Any); - Py_CLEAR(clear_module_state->__pyx_n_s_Dict); - Py_CLEAR(clear_module_state->__pyx_n_s_Hashable); - Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase); - Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase___class_getite); - Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase___reduce_cytho); - Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase___setstate_cyt); - Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase_clear); - Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase_pop); - Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase_popitem); - Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase_setdefault); - Py_CLEAR(clear_module_state->__pyx_n_s_ImmutableDictBase_update); - Py_CLEAR(clear_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); - Py_CLEAR(clear_module_state->__pyx_n_s_KT); - Py_CLEAR(clear_module_state->__pyx_n_u_KT); - Py_CLEAR(clear_module_state->__pyx_n_s_Mapping); - Py_CLEAR(clear_module_state->__pyx_n_s_NoReturn); - Py_CLEAR(clear_module_state->__pyx_n_s_Optional); - Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_Any); - Py_CLEAR(clear_module_state->__pyx_kp_s_Optional_Mapping__KT__VT); - Py_CLEAR(clear_module_state->__pyx_n_s_PickleError); - Py_CLEAR(clear_module_state->__pyx_n_s_ReadOnlyContainer); - Py_CLEAR(clear_module_state->__pyx_n_s_ReadOnlyContainer___delitem); - Py_CLEAR(clear_module_state->__pyx_n_s_ReadOnlyContainer___setattr); - Py_CLEAR(clear_module_state->__pyx_n_s_ReadOnlyContainer___setitem); - Py_CLEAR(clear_module_state->__pyx_n_s_ReadOnlyContainer__readonly); - Py_CLEAR(clear_module_state->__pyx_n_s_Self); - Py_CLEAR(clear_module_state->__pyx_n_s_TypeError); - Py_CLEAR(clear_module_state->__pyx_n_s_TypeVar); - Py_CLEAR(clear_module_state->__pyx_n_s_VT); - Py_CLEAR(clear_module_state->__pyx_n_u_VT); - Py_CLEAR(clear_module_state->__pyx_kp_u__3); - Py_CLEAR(clear_module_state->__pyx_n_s__4); - Py_CLEAR(clear_module_state->__pyx_n_s_arg); - Py_CLEAR(clear_module_state->__pyx_n_s_asyncio_coroutines); - Py_CLEAR(clear_module_state->__pyx_n_s_bool); - Py_CLEAR(clear_module_state->__pyx_n_s_bound); - Py_CLEAR(clear_module_state->__pyx_n_s_class); - Py_CLEAR(clear_module_state->__pyx_n_s_class_getitem); - Py_CLEAR(clear_module_state->__pyx_n_s_clear); - Py_CLEAR(clear_module_state->__pyx_n_s_cline_in_traceback); - Py_CLEAR(clear_module_state->__pyx_n_s_cls); - Py_CLEAR(clear_module_state->__pyx_n_s_copy); - Py_CLEAR(clear_module_state->__pyx_n_s_d); - Py_CLEAR(clear_module_state->__pyx_n_s_default); - Py_CLEAR(clear_module_state->__pyx_n_s_delitem); - Py_CLEAR(clear_module_state->__pyx_n_s_dict); - Py_CLEAR(clear_module_state->__pyx_n_s_dict_2); - Py_CLEAR(clear_module_state->__pyx_n_s_dicts); - Py_CLEAR(clear_module_state->__pyx_kp_u_disable); - Py_CLEAR(clear_module_state->__pyx_n_s_doc); - Py_CLEAR(clear_module_state->__pyx_kp_u_enable); - Py_CLEAR(clear_module_state->__pyx_kp_u_gc); - Py_CLEAR(clear_module_state->__pyx_n_s_getstate); - 
Py_CLEAR(clear_module_state->__pyx_n_s_immutable_fn); - Py_CLEAR(clear_module_state->__pyx_kp_u_immutabledict); - Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_2); - Py_CLEAR(clear_module_state->__pyx_kp_s_immutabledict__KT__VT); - Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict___class_getitem); - Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict___reduce); - Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_clear); - Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_copy); - Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_merge_with); - Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_pop); - Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_popitem); - Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_setdefault); - Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_union); - Py_CLEAR(clear_module_state->__pyx_n_s_immutabledict_update); - Py_CLEAR(clear_module_state->__pyx_n_s_import); - Py_CLEAR(clear_module_state->__pyx_n_s_init_subclass); - Py_CLEAR(clear_module_state->__pyx_n_s_is_compiled); - Py_CLEAR(clear_module_state->__pyx_n_s_is_coroutine); - Py_CLEAR(clear_module_state->__pyx_kp_u_isenabled); - Py_CLEAR(clear_module_state->__pyx_n_s_key); - Py_CLEAR(clear_module_state->__pyx_n_s_kw); - Py_CLEAR(clear_module_state->__pyx_kp_s_lib_sqlalchemy_util__immutabledi); - Py_CLEAR(clear_module_state->__pyx_n_s_main); - Py_CLEAR(clear_module_state->__pyx_n_s_merge_with); - Py_CLEAR(clear_module_state->__pyx_n_s_metaclass); - Py_CLEAR(clear_module_state->__pyx_n_s_module); - Py_CLEAR(clear_module_state->__pyx_n_s_name); - Py_CLEAR(clear_module_state->__pyx_n_s_new); - Py_CLEAR(clear_module_state->__pyx_n_s_obj); - Py_CLEAR(clear_module_state->__pyx_n_s_object); - Py_CLEAR(clear_module_state->__pyx_kp_u_object_is_immutable); - Py_CLEAR(clear_module_state->__pyx_kp_u_object_is_immutable_and_or_read); - Py_CLEAR(clear_module_state->__pyx_n_s_or); - Py_CLEAR(clear_module_state->__pyx_n_s_other); - Py_CLEAR(clear_module_state->__pyx_n_s_pickle); - Py_CLEAR(clear_module_state->__pyx_n_s_pop); - Py_CLEAR(clear_module_state->__pyx_n_s_popitem); - Py_CLEAR(clear_module_state->__pyx_n_s_prepare); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_PickleError); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_checksum); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_result); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_state); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_type); - Py_CLEAR(clear_module_state->__pyx_n_s_pyx_unpickle_ImmutableDictBase); - Py_CLEAR(clear_module_state->__pyx_n_s_qualname); - Py_CLEAR(clear_module_state->__pyx_n_s_readonly); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_reduce_ex); - Py_CLEAR(clear_module_state->__pyx_n_s_repr); - Py_CLEAR(clear_module_state->__pyx_n_s_result); - Py_CLEAR(clear_module_state->__pyx_n_s_return); - Py_CLEAR(clear_module_state->__pyx_n_s_ror); - Py_CLEAR(clear_module_state->__pyx_n_s_self); - Py_CLEAR(clear_module_state->__pyx_n_s_set_name); - Py_CLEAR(clear_module_state->__pyx_n_s_setattr); - Py_CLEAR(clear_module_state->__pyx_n_s_setdefault); - Py_CLEAR(clear_module_state->__pyx_n_s_setitem); - Py_CLEAR(clear_module_state->__pyx_n_s_setstate); - Py_CLEAR(clear_module_state->__pyx_n_s_setstate_cython); - Py_CLEAR(clear_module_state->__pyx_n_s_slots); - Py_CLEAR(clear_module_state->__pyx_n_s_sqlalchemy_util__immutabledict_c); - Py_CLEAR(clear_module_state->__pyx_n_s_state); - 
Py_CLEAR(clear_module_state->__pyx_kp_s_stringsource); - Py_CLEAR(clear_module_state->__pyx_n_s_super); - Py_CLEAR(clear_module_state->__pyx_n_s_test); - Py_CLEAR(clear_module_state->__pyx_kp_s_type_Self); - Py_CLEAR(clear_module_state->__pyx_n_s_typing); - Py_CLEAR(clear_module_state->__pyx_n_s_union); - Py_CLEAR(clear_module_state->__pyx_n_s_update); - Py_CLEAR(clear_module_state->__pyx_n_s_use_setstate); - Py_CLEAR(clear_module_state->__pyx_n_s_value); - Py_CLEAR(clear_module_state->__pyx_int_222419149); - Py_CLEAR(clear_module_state->__pyx_int_228825662); - Py_CLEAR(clear_module_state->__pyx_int_238750788); - Py_CLEAR(clear_module_state->__pyx_tuple__2); - Py_CLEAR(clear_module_state->__pyx_tuple__6); - Py_CLEAR(clear_module_state->__pyx_tuple__8); - Py_CLEAR(clear_module_state->__pyx_tuple__10); - Py_CLEAR(clear_module_state->__pyx_tuple__12); - Py_CLEAR(clear_module_state->__pyx_tuple__15); - Py_CLEAR(clear_module_state->__pyx_tuple__16); - Py_CLEAR(clear_module_state->__pyx_tuple__17); - Py_CLEAR(clear_module_state->__pyx_tuple__20); - Py_CLEAR(clear_module_state->__pyx_tuple__22); - Py_CLEAR(clear_module_state->__pyx_tuple__25); - Py_CLEAR(clear_module_state->__pyx_tuple__27); - Py_CLEAR(clear_module_state->__pyx_tuple__29); - Py_CLEAR(clear_module_state->__pyx_tuple__37); - Py_CLEAR(clear_module_state->__pyx_tuple__39); - Py_CLEAR(clear_module_state->__pyx_tuple__43); - Py_CLEAR(clear_module_state->__pyx_codeobj__5); - Py_CLEAR(clear_module_state->__pyx_codeobj__7); - Py_CLEAR(clear_module_state->__pyx_codeobj__9); - Py_CLEAR(clear_module_state->__pyx_codeobj__11); - Py_CLEAR(clear_module_state->__pyx_codeobj__13); - Py_CLEAR(clear_module_state->__pyx_codeobj__14); - Py_CLEAR(clear_module_state->__pyx_codeobj__18); - Py_CLEAR(clear_module_state->__pyx_codeobj__19); - Py_CLEAR(clear_module_state->__pyx_codeobj__21); - Py_CLEAR(clear_module_state->__pyx_codeobj__23); - Py_CLEAR(clear_module_state->__pyx_codeobj__24); - Py_CLEAR(clear_module_state->__pyx_codeobj__26); - Py_CLEAR(clear_module_state->__pyx_codeobj__28); - Py_CLEAR(clear_module_state->__pyx_codeobj__30); - Py_CLEAR(clear_module_state->__pyx_codeobj__31); - Py_CLEAR(clear_module_state->__pyx_codeobj__32); - Py_CLEAR(clear_module_state->__pyx_codeobj__33); - Py_CLEAR(clear_module_state->__pyx_codeobj__34); - Py_CLEAR(clear_module_state->__pyx_codeobj__35); - Py_CLEAR(clear_module_state->__pyx_codeobj__36); - Py_CLEAR(clear_module_state->__pyx_codeobj__38); - Py_CLEAR(clear_module_state->__pyx_codeobj__40); - Py_CLEAR(clear_module_state->__pyx_codeobj__41); - Py_CLEAR(clear_module_state->__pyx_codeobj__42); - Py_CLEAR(clear_module_state->__pyx_codeobj__44); - return 0; -} -#endif -/* #### Code section: module_state_traverse ### */ -#if CYTHON_USE_MODULE_STATE -static int __pyx_m_traverse(PyObject *m, visitproc visit, void *arg) { - __pyx_mstate *traverse_module_state = __pyx_mstate(m); - if (!traverse_module_state) return 0; - Py_VISIT(traverse_module_state->__pyx_d); - Py_VISIT(traverse_module_state->__pyx_b); - Py_VISIT(traverse_module_state->__pyx_cython_runtime); - Py_VISIT(traverse_module_state->__pyx_empty_tuple); - Py_VISIT(traverse_module_state->__pyx_empty_bytes); - Py_VISIT(traverse_module_state->__pyx_empty_unicode); - #ifdef __Pyx_CyFunction_USED - Py_VISIT(traverse_module_state->__pyx_CyFunctionType); - #endif - #ifdef __Pyx_FusedFunction_USED - Py_VISIT(traverse_module_state->__pyx_FusedFunctionType); - #endif - Py_VISIT(traverse_module_state->__pyx_ptype_7cpython_4type_type); - 
Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); - Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); - Py_VISIT(traverse_module_state->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); - Py_VISIT(traverse_module_state->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); - Py_VISIT(traverse_module_state->__pyx_kp_u_); - Py_VISIT(traverse_module_state->__pyx_n_s_Any); - Py_VISIT(traverse_module_state->__pyx_n_s_Dict); - Py_VISIT(traverse_module_state->__pyx_n_s_Hashable); - Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase); - Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase___class_getite); - Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase___reduce_cytho); - Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase___setstate_cyt); - Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase_clear); - Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase_pop); - Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase_popitem); - Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase_setdefault); - Py_VISIT(traverse_module_state->__pyx_n_s_ImmutableDictBase_update); - Py_VISIT(traverse_module_state->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0); - Py_VISIT(traverse_module_state->__pyx_n_s_KT); - Py_VISIT(traverse_module_state->__pyx_n_u_KT); - Py_VISIT(traverse_module_state->__pyx_n_s_Mapping); - Py_VISIT(traverse_module_state->__pyx_n_s_NoReturn); - Py_VISIT(traverse_module_state->__pyx_n_s_Optional); - Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_Any); - Py_VISIT(traverse_module_state->__pyx_kp_s_Optional_Mapping__KT__VT); - Py_VISIT(traverse_module_state->__pyx_n_s_PickleError); - Py_VISIT(traverse_module_state->__pyx_n_s_ReadOnlyContainer); - Py_VISIT(traverse_module_state->__pyx_n_s_ReadOnlyContainer___delitem); - Py_VISIT(traverse_module_state->__pyx_n_s_ReadOnlyContainer___setattr); - Py_VISIT(traverse_module_state->__pyx_n_s_ReadOnlyContainer___setitem); - Py_VISIT(traverse_module_state->__pyx_n_s_ReadOnlyContainer__readonly); - Py_VISIT(traverse_module_state->__pyx_n_s_Self); - Py_VISIT(traverse_module_state->__pyx_n_s_TypeError); - Py_VISIT(traverse_module_state->__pyx_n_s_TypeVar); - Py_VISIT(traverse_module_state->__pyx_n_s_VT); - Py_VISIT(traverse_module_state->__pyx_n_u_VT); - Py_VISIT(traverse_module_state->__pyx_kp_u__3); - Py_VISIT(traverse_module_state->__pyx_n_s__4); - Py_VISIT(traverse_module_state->__pyx_n_s_arg); - Py_VISIT(traverse_module_state->__pyx_n_s_asyncio_coroutines); - Py_VISIT(traverse_module_state->__pyx_n_s_bool); - Py_VISIT(traverse_module_state->__pyx_n_s_bound); - Py_VISIT(traverse_module_state->__pyx_n_s_class); - Py_VISIT(traverse_module_state->__pyx_n_s_class_getitem); - Py_VISIT(traverse_module_state->__pyx_n_s_clear); - Py_VISIT(traverse_module_state->__pyx_n_s_cline_in_traceback); - Py_VISIT(traverse_module_state->__pyx_n_s_cls); - Py_VISIT(traverse_module_state->__pyx_n_s_copy); - Py_VISIT(traverse_module_state->__pyx_n_s_d); - Py_VISIT(traverse_module_state->__pyx_n_s_default); - Py_VISIT(traverse_module_state->__pyx_n_s_delitem); - Py_VISIT(traverse_module_state->__pyx_n_s_dict); - Py_VISIT(traverse_module_state->__pyx_n_s_dict_2); - Py_VISIT(traverse_module_state->__pyx_n_s_dicts); - Py_VISIT(traverse_module_state->__pyx_kp_u_disable); - Py_VISIT(traverse_module_state->__pyx_n_s_doc); - Py_VISIT(traverse_module_state->__pyx_kp_u_enable); - 
Py_VISIT(traverse_module_state->__pyx_kp_u_gc); - Py_VISIT(traverse_module_state->__pyx_n_s_getstate); - Py_VISIT(traverse_module_state->__pyx_n_s_immutable_fn); - Py_VISIT(traverse_module_state->__pyx_kp_u_immutabledict); - Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_2); - Py_VISIT(traverse_module_state->__pyx_kp_s_immutabledict__KT__VT); - Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict___class_getitem); - Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict___reduce); - Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_clear); - Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_copy); - Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_merge_with); - Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_pop); - Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_popitem); - Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_setdefault); - Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_union); - Py_VISIT(traverse_module_state->__pyx_n_s_immutabledict_update); - Py_VISIT(traverse_module_state->__pyx_n_s_import); - Py_VISIT(traverse_module_state->__pyx_n_s_init_subclass); - Py_VISIT(traverse_module_state->__pyx_n_s_is_compiled); - Py_VISIT(traverse_module_state->__pyx_n_s_is_coroutine); - Py_VISIT(traverse_module_state->__pyx_kp_u_isenabled); - Py_VISIT(traverse_module_state->__pyx_n_s_key); - Py_VISIT(traverse_module_state->__pyx_n_s_kw); - Py_VISIT(traverse_module_state->__pyx_kp_s_lib_sqlalchemy_util__immutabledi); - Py_VISIT(traverse_module_state->__pyx_n_s_main); - Py_VISIT(traverse_module_state->__pyx_n_s_merge_with); - Py_VISIT(traverse_module_state->__pyx_n_s_metaclass); - Py_VISIT(traverse_module_state->__pyx_n_s_module); - Py_VISIT(traverse_module_state->__pyx_n_s_name); - Py_VISIT(traverse_module_state->__pyx_n_s_new); - Py_VISIT(traverse_module_state->__pyx_n_s_obj); - Py_VISIT(traverse_module_state->__pyx_n_s_object); - Py_VISIT(traverse_module_state->__pyx_kp_u_object_is_immutable); - Py_VISIT(traverse_module_state->__pyx_kp_u_object_is_immutable_and_or_read); - Py_VISIT(traverse_module_state->__pyx_n_s_or); - Py_VISIT(traverse_module_state->__pyx_n_s_other); - Py_VISIT(traverse_module_state->__pyx_n_s_pickle); - Py_VISIT(traverse_module_state->__pyx_n_s_pop); - Py_VISIT(traverse_module_state->__pyx_n_s_popitem); - Py_VISIT(traverse_module_state->__pyx_n_s_prepare); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_PickleError); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_checksum); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_result); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_state); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_type); - Py_VISIT(traverse_module_state->__pyx_n_s_pyx_unpickle_ImmutableDictBase); - Py_VISIT(traverse_module_state->__pyx_n_s_qualname); - Py_VISIT(traverse_module_state->__pyx_n_s_readonly); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_reduce_ex); - Py_VISIT(traverse_module_state->__pyx_n_s_repr); - Py_VISIT(traverse_module_state->__pyx_n_s_result); - Py_VISIT(traverse_module_state->__pyx_n_s_return); - Py_VISIT(traverse_module_state->__pyx_n_s_ror); - Py_VISIT(traverse_module_state->__pyx_n_s_self); - Py_VISIT(traverse_module_state->__pyx_n_s_set_name); - Py_VISIT(traverse_module_state->__pyx_n_s_setattr); - Py_VISIT(traverse_module_state->__pyx_n_s_setdefault); - Py_VISIT(traverse_module_state->__pyx_n_s_setitem); - 
Py_VISIT(traverse_module_state->__pyx_n_s_setstate); - Py_VISIT(traverse_module_state->__pyx_n_s_setstate_cython); - Py_VISIT(traverse_module_state->__pyx_n_s_slots); - Py_VISIT(traverse_module_state->__pyx_n_s_sqlalchemy_util__immutabledict_c); - Py_VISIT(traverse_module_state->__pyx_n_s_state); - Py_VISIT(traverse_module_state->__pyx_kp_s_stringsource); - Py_VISIT(traverse_module_state->__pyx_n_s_super); - Py_VISIT(traverse_module_state->__pyx_n_s_test); - Py_VISIT(traverse_module_state->__pyx_kp_s_type_Self); - Py_VISIT(traverse_module_state->__pyx_n_s_typing); - Py_VISIT(traverse_module_state->__pyx_n_s_union); - Py_VISIT(traverse_module_state->__pyx_n_s_update); - Py_VISIT(traverse_module_state->__pyx_n_s_use_setstate); - Py_VISIT(traverse_module_state->__pyx_n_s_value); - Py_VISIT(traverse_module_state->__pyx_int_222419149); - Py_VISIT(traverse_module_state->__pyx_int_228825662); - Py_VISIT(traverse_module_state->__pyx_int_238750788); - Py_VISIT(traverse_module_state->__pyx_tuple__2); - Py_VISIT(traverse_module_state->__pyx_tuple__6); - Py_VISIT(traverse_module_state->__pyx_tuple__8); - Py_VISIT(traverse_module_state->__pyx_tuple__10); - Py_VISIT(traverse_module_state->__pyx_tuple__12); - Py_VISIT(traverse_module_state->__pyx_tuple__15); - Py_VISIT(traverse_module_state->__pyx_tuple__16); - Py_VISIT(traverse_module_state->__pyx_tuple__17); - Py_VISIT(traverse_module_state->__pyx_tuple__20); - Py_VISIT(traverse_module_state->__pyx_tuple__22); - Py_VISIT(traverse_module_state->__pyx_tuple__25); - Py_VISIT(traverse_module_state->__pyx_tuple__27); - Py_VISIT(traverse_module_state->__pyx_tuple__29); - Py_VISIT(traverse_module_state->__pyx_tuple__37); - Py_VISIT(traverse_module_state->__pyx_tuple__39); - Py_VISIT(traverse_module_state->__pyx_tuple__43); - Py_VISIT(traverse_module_state->__pyx_codeobj__5); - Py_VISIT(traverse_module_state->__pyx_codeobj__7); - Py_VISIT(traverse_module_state->__pyx_codeobj__9); - Py_VISIT(traverse_module_state->__pyx_codeobj__11); - Py_VISIT(traverse_module_state->__pyx_codeobj__13); - Py_VISIT(traverse_module_state->__pyx_codeobj__14); - Py_VISIT(traverse_module_state->__pyx_codeobj__18); - Py_VISIT(traverse_module_state->__pyx_codeobj__19); - Py_VISIT(traverse_module_state->__pyx_codeobj__21); - Py_VISIT(traverse_module_state->__pyx_codeobj__23); - Py_VISIT(traverse_module_state->__pyx_codeobj__24); - Py_VISIT(traverse_module_state->__pyx_codeobj__26); - Py_VISIT(traverse_module_state->__pyx_codeobj__28); - Py_VISIT(traverse_module_state->__pyx_codeobj__30); - Py_VISIT(traverse_module_state->__pyx_codeobj__31); - Py_VISIT(traverse_module_state->__pyx_codeobj__32); - Py_VISIT(traverse_module_state->__pyx_codeobj__33); - Py_VISIT(traverse_module_state->__pyx_codeobj__34); - Py_VISIT(traverse_module_state->__pyx_codeobj__35); - Py_VISIT(traverse_module_state->__pyx_codeobj__36); - Py_VISIT(traverse_module_state->__pyx_codeobj__38); - Py_VISIT(traverse_module_state->__pyx_codeobj__40); - Py_VISIT(traverse_module_state->__pyx_codeobj__41); - Py_VISIT(traverse_module_state->__pyx_codeobj__42); - Py_VISIT(traverse_module_state->__pyx_codeobj__44); - return 0; -} -#endif -/* #### Code section: module_state_defines ### */ -#define __pyx_d __pyx_mstate_global->__pyx_d -#define __pyx_b __pyx_mstate_global->__pyx_b -#define __pyx_cython_runtime __pyx_mstate_global->__pyx_cython_runtime -#define __pyx_empty_tuple __pyx_mstate_global->__pyx_empty_tuple -#define __pyx_empty_bytes __pyx_mstate_global->__pyx_empty_bytes -#define __pyx_empty_unicode 
__pyx_mstate_global->__pyx_empty_unicode -#ifdef __Pyx_CyFunction_USED -#define __pyx_CyFunctionType __pyx_mstate_global->__pyx_CyFunctionType -#endif -#ifdef __Pyx_FusedFunction_USED -#define __pyx_FusedFunctionType __pyx_mstate_global->__pyx_FusedFunctionType -#endif -#ifdef __Pyx_Generator_USED -#define __pyx_GeneratorType __pyx_mstate_global->__pyx_GeneratorType -#endif -#ifdef __Pyx_IterableCoroutine_USED -#define __pyx_IterableCoroutineType __pyx_mstate_global->__pyx_IterableCoroutineType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineAwaitType __pyx_mstate_global->__pyx_CoroutineAwaitType -#endif -#ifdef __Pyx_Coroutine_USED -#define __pyx_CoroutineType __pyx_mstate_global->__pyx_CoroutineType -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#define __pyx_ptype_7cpython_4type_type __pyx_mstate_global->__pyx_ptype_7cpython_4type_type -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#endif -#if CYTHON_USE_MODULE_STATE -#define __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase __pyx_mstate_global->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase -#define __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict __pyx_mstate_global->__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict -#endif -#define __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase __pyx_mstate_global->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase -#define __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict __pyx_mstate_global->__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict -#define __pyx_kp_u_ __pyx_mstate_global->__pyx_kp_u_ -#define __pyx_n_s_Any __pyx_mstate_global->__pyx_n_s_Any -#define __pyx_n_s_Dict __pyx_mstate_global->__pyx_n_s_Dict -#define __pyx_n_s_Hashable __pyx_mstate_global->__pyx_n_s_Hashable -#define __pyx_n_s_ImmutableDictBase __pyx_mstate_global->__pyx_n_s_ImmutableDictBase -#define __pyx_n_s_ImmutableDictBase___class_getite __pyx_mstate_global->__pyx_n_s_ImmutableDictBase___class_getite -#define __pyx_n_s_ImmutableDictBase___reduce_cytho __pyx_mstate_global->__pyx_n_s_ImmutableDictBase___reduce_cytho -#define __pyx_n_s_ImmutableDictBase___setstate_cyt __pyx_mstate_global->__pyx_n_s_ImmutableDictBase___setstate_cyt -#define __pyx_n_s_ImmutableDictBase_clear __pyx_mstate_global->__pyx_n_s_ImmutableDictBase_clear -#define __pyx_n_s_ImmutableDictBase_pop __pyx_mstate_global->__pyx_n_s_ImmutableDictBase_pop -#define __pyx_n_s_ImmutableDictBase_popitem __pyx_mstate_global->__pyx_n_s_ImmutableDictBase_popitem -#define __pyx_n_s_ImmutableDictBase_setdefault __pyx_mstate_global->__pyx_n_s_ImmutableDictBase_setdefault -#define __pyx_n_s_ImmutableDictBase_update __pyx_mstate_global->__pyx_n_s_ImmutableDictBase_update -#define __pyx_kp_s_Incompatible_checksums_0x_x_vs_0 __pyx_mstate_global->__pyx_kp_s_Incompatible_checksums_0x_x_vs_0 -#define __pyx_n_s_KT __pyx_mstate_global->__pyx_n_s_KT -#define __pyx_n_u_KT __pyx_mstate_global->__pyx_n_u_KT -#define __pyx_n_s_Mapping __pyx_mstate_global->__pyx_n_s_Mapping -#define __pyx_n_s_NoReturn __pyx_mstate_global->__pyx_n_s_NoReturn -#define __pyx_n_s_Optional __pyx_mstate_global->__pyx_n_s_Optional -#define __pyx_kp_s_Optional_Any __pyx_mstate_global->__pyx_kp_s_Optional_Any -#define 
__pyx_kp_s_Optional_Mapping__KT__VT __pyx_mstate_global->__pyx_kp_s_Optional_Mapping__KT__VT -#define __pyx_n_s_PickleError __pyx_mstate_global->__pyx_n_s_PickleError -#define __pyx_n_s_ReadOnlyContainer __pyx_mstate_global->__pyx_n_s_ReadOnlyContainer -#define __pyx_n_s_ReadOnlyContainer___delitem __pyx_mstate_global->__pyx_n_s_ReadOnlyContainer___delitem -#define __pyx_n_s_ReadOnlyContainer___setattr __pyx_mstate_global->__pyx_n_s_ReadOnlyContainer___setattr -#define __pyx_n_s_ReadOnlyContainer___setitem __pyx_mstate_global->__pyx_n_s_ReadOnlyContainer___setitem -#define __pyx_n_s_ReadOnlyContainer__readonly __pyx_mstate_global->__pyx_n_s_ReadOnlyContainer__readonly -#define __pyx_n_s_Self __pyx_mstate_global->__pyx_n_s_Self -#define __pyx_n_s_TypeError __pyx_mstate_global->__pyx_n_s_TypeError -#define __pyx_n_s_TypeVar __pyx_mstate_global->__pyx_n_s_TypeVar -#define __pyx_n_s_VT __pyx_mstate_global->__pyx_n_s_VT -#define __pyx_n_u_VT __pyx_mstate_global->__pyx_n_u_VT -#define __pyx_kp_u__3 __pyx_mstate_global->__pyx_kp_u__3 -#define __pyx_n_s__4 __pyx_mstate_global->__pyx_n_s__4 -#define __pyx_n_s_arg __pyx_mstate_global->__pyx_n_s_arg -#define __pyx_n_s_asyncio_coroutines __pyx_mstate_global->__pyx_n_s_asyncio_coroutines -#define __pyx_n_s_bool __pyx_mstate_global->__pyx_n_s_bool -#define __pyx_n_s_bound __pyx_mstate_global->__pyx_n_s_bound -#define __pyx_n_s_class __pyx_mstate_global->__pyx_n_s_class -#define __pyx_n_s_class_getitem __pyx_mstate_global->__pyx_n_s_class_getitem -#define __pyx_n_s_clear __pyx_mstate_global->__pyx_n_s_clear -#define __pyx_n_s_cline_in_traceback __pyx_mstate_global->__pyx_n_s_cline_in_traceback -#define __pyx_n_s_cls __pyx_mstate_global->__pyx_n_s_cls -#define __pyx_n_s_copy __pyx_mstate_global->__pyx_n_s_copy -#define __pyx_n_s_d __pyx_mstate_global->__pyx_n_s_d -#define __pyx_n_s_default __pyx_mstate_global->__pyx_n_s_default -#define __pyx_n_s_delitem __pyx_mstate_global->__pyx_n_s_delitem -#define __pyx_n_s_dict __pyx_mstate_global->__pyx_n_s_dict -#define __pyx_n_s_dict_2 __pyx_mstate_global->__pyx_n_s_dict_2 -#define __pyx_n_s_dicts __pyx_mstate_global->__pyx_n_s_dicts -#define __pyx_kp_u_disable __pyx_mstate_global->__pyx_kp_u_disable -#define __pyx_n_s_doc __pyx_mstate_global->__pyx_n_s_doc -#define __pyx_kp_u_enable __pyx_mstate_global->__pyx_kp_u_enable -#define __pyx_kp_u_gc __pyx_mstate_global->__pyx_kp_u_gc -#define __pyx_n_s_getstate __pyx_mstate_global->__pyx_n_s_getstate -#define __pyx_n_s_immutable_fn __pyx_mstate_global->__pyx_n_s_immutable_fn -#define __pyx_kp_u_immutabledict __pyx_mstate_global->__pyx_kp_u_immutabledict -#define __pyx_n_s_immutabledict_2 __pyx_mstate_global->__pyx_n_s_immutabledict_2 -#define __pyx_kp_s_immutabledict__KT__VT __pyx_mstate_global->__pyx_kp_s_immutabledict__KT__VT -#define __pyx_n_s_immutabledict___class_getitem __pyx_mstate_global->__pyx_n_s_immutabledict___class_getitem -#define __pyx_n_s_immutabledict___reduce __pyx_mstate_global->__pyx_n_s_immutabledict___reduce -#define __pyx_n_s_immutabledict_clear __pyx_mstate_global->__pyx_n_s_immutabledict_clear -#define __pyx_n_s_immutabledict_copy __pyx_mstate_global->__pyx_n_s_immutabledict_copy -#define __pyx_n_s_immutabledict_merge_with __pyx_mstate_global->__pyx_n_s_immutabledict_merge_with -#define __pyx_n_s_immutabledict_pop __pyx_mstate_global->__pyx_n_s_immutabledict_pop -#define __pyx_n_s_immutabledict_popitem __pyx_mstate_global->__pyx_n_s_immutabledict_popitem -#define __pyx_n_s_immutabledict_setdefault 
__pyx_mstate_global->__pyx_n_s_immutabledict_setdefault -#define __pyx_n_s_immutabledict_union __pyx_mstate_global->__pyx_n_s_immutabledict_union -#define __pyx_n_s_immutabledict_update __pyx_mstate_global->__pyx_n_s_immutabledict_update -#define __pyx_n_s_import __pyx_mstate_global->__pyx_n_s_import -#define __pyx_n_s_init_subclass __pyx_mstate_global->__pyx_n_s_init_subclass -#define __pyx_n_s_is_compiled __pyx_mstate_global->__pyx_n_s_is_compiled -#define __pyx_n_s_is_coroutine __pyx_mstate_global->__pyx_n_s_is_coroutine -#define __pyx_kp_u_isenabled __pyx_mstate_global->__pyx_kp_u_isenabled -#define __pyx_n_s_key __pyx_mstate_global->__pyx_n_s_key -#define __pyx_n_s_kw __pyx_mstate_global->__pyx_n_s_kw -#define __pyx_kp_s_lib_sqlalchemy_util__immutabledi __pyx_mstate_global->__pyx_kp_s_lib_sqlalchemy_util__immutabledi -#define __pyx_n_s_main __pyx_mstate_global->__pyx_n_s_main -#define __pyx_n_s_merge_with __pyx_mstate_global->__pyx_n_s_merge_with -#define __pyx_n_s_metaclass __pyx_mstate_global->__pyx_n_s_metaclass -#define __pyx_n_s_module __pyx_mstate_global->__pyx_n_s_module -#define __pyx_n_s_name __pyx_mstate_global->__pyx_n_s_name -#define __pyx_n_s_new __pyx_mstate_global->__pyx_n_s_new -#define __pyx_n_s_obj __pyx_mstate_global->__pyx_n_s_obj -#define __pyx_n_s_object __pyx_mstate_global->__pyx_n_s_object -#define __pyx_kp_u_object_is_immutable __pyx_mstate_global->__pyx_kp_u_object_is_immutable -#define __pyx_kp_u_object_is_immutable_and_or_read __pyx_mstate_global->__pyx_kp_u_object_is_immutable_and_or_read -#define __pyx_n_s_or __pyx_mstate_global->__pyx_n_s_or -#define __pyx_n_s_other __pyx_mstate_global->__pyx_n_s_other -#define __pyx_n_s_pickle __pyx_mstate_global->__pyx_n_s_pickle -#define __pyx_n_s_pop __pyx_mstate_global->__pyx_n_s_pop -#define __pyx_n_s_popitem __pyx_mstate_global->__pyx_n_s_popitem -#define __pyx_n_s_prepare __pyx_mstate_global->__pyx_n_s_prepare -#define __pyx_n_s_pyx_PickleError __pyx_mstate_global->__pyx_n_s_pyx_PickleError -#define __pyx_n_s_pyx_checksum __pyx_mstate_global->__pyx_n_s_pyx_checksum -#define __pyx_n_s_pyx_result __pyx_mstate_global->__pyx_n_s_pyx_result -#define __pyx_n_s_pyx_state __pyx_mstate_global->__pyx_n_s_pyx_state -#define __pyx_n_s_pyx_type __pyx_mstate_global->__pyx_n_s_pyx_type -#define __pyx_n_s_pyx_unpickle_ImmutableDictBase __pyx_mstate_global->__pyx_n_s_pyx_unpickle_ImmutableDictBase -#define __pyx_n_s_qualname __pyx_mstate_global->__pyx_n_s_qualname -#define __pyx_n_s_readonly __pyx_mstate_global->__pyx_n_s_readonly -#define __pyx_n_s_reduce __pyx_mstate_global->__pyx_n_s_reduce -#define __pyx_n_s_reduce_cython __pyx_mstate_global->__pyx_n_s_reduce_cython -#define __pyx_n_s_reduce_ex __pyx_mstate_global->__pyx_n_s_reduce_ex -#define __pyx_n_s_repr __pyx_mstate_global->__pyx_n_s_repr -#define __pyx_n_s_result __pyx_mstate_global->__pyx_n_s_result -#define __pyx_n_s_return __pyx_mstate_global->__pyx_n_s_return -#define __pyx_n_s_ror __pyx_mstate_global->__pyx_n_s_ror -#define __pyx_n_s_self __pyx_mstate_global->__pyx_n_s_self -#define __pyx_n_s_set_name __pyx_mstate_global->__pyx_n_s_set_name -#define __pyx_n_s_setattr __pyx_mstate_global->__pyx_n_s_setattr -#define __pyx_n_s_setdefault __pyx_mstate_global->__pyx_n_s_setdefault -#define __pyx_n_s_setitem __pyx_mstate_global->__pyx_n_s_setitem -#define __pyx_n_s_setstate __pyx_mstate_global->__pyx_n_s_setstate -#define __pyx_n_s_setstate_cython __pyx_mstate_global->__pyx_n_s_setstate_cython -#define __pyx_n_s_slots __pyx_mstate_global->__pyx_n_s_slots -#define 
__pyx_n_s_sqlalchemy_util__immutabledict_c __pyx_mstate_global->__pyx_n_s_sqlalchemy_util__immutabledict_c -#define __pyx_n_s_state __pyx_mstate_global->__pyx_n_s_state -#define __pyx_kp_s_stringsource __pyx_mstate_global->__pyx_kp_s_stringsource -#define __pyx_n_s_super __pyx_mstate_global->__pyx_n_s_super -#define __pyx_n_s_test __pyx_mstate_global->__pyx_n_s_test -#define __pyx_kp_s_type_Self __pyx_mstate_global->__pyx_kp_s_type_Self -#define __pyx_n_s_typing __pyx_mstate_global->__pyx_n_s_typing -#define __pyx_n_s_union __pyx_mstate_global->__pyx_n_s_union -#define __pyx_n_s_update __pyx_mstate_global->__pyx_n_s_update -#define __pyx_n_s_use_setstate __pyx_mstate_global->__pyx_n_s_use_setstate -#define __pyx_n_s_value __pyx_mstate_global->__pyx_n_s_value -#define __pyx_int_222419149 __pyx_mstate_global->__pyx_int_222419149 -#define __pyx_int_228825662 __pyx_mstate_global->__pyx_int_228825662 -#define __pyx_int_238750788 __pyx_mstate_global->__pyx_int_238750788 -#define __pyx_tuple__2 __pyx_mstate_global->__pyx_tuple__2 -#define __pyx_tuple__6 __pyx_mstate_global->__pyx_tuple__6 -#define __pyx_tuple__8 __pyx_mstate_global->__pyx_tuple__8 -#define __pyx_tuple__10 __pyx_mstate_global->__pyx_tuple__10 -#define __pyx_tuple__12 __pyx_mstate_global->__pyx_tuple__12 -#define __pyx_tuple__15 __pyx_mstate_global->__pyx_tuple__15 -#define __pyx_tuple__16 __pyx_mstate_global->__pyx_tuple__16 -#define __pyx_tuple__17 __pyx_mstate_global->__pyx_tuple__17 -#define __pyx_tuple__20 __pyx_mstate_global->__pyx_tuple__20 -#define __pyx_tuple__22 __pyx_mstate_global->__pyx_tuple__22 -#define __pyx_tuple__25 __pyx_mstate_global->__pyx_tuple__25 -#define __pyx_tuple__27 __pyx_mstate_global->__pyx_tuple__27 -#define __pyx_tuple__29 __pyx_mstate_global->__pyx_tuple__29 -#define __pyx_tuple__37 __pyx_mstate_global->__pyx_tuple__37 -#define __pyx_tuple__39 __pyx_mstate_global->__pyx_tuple__39 -#define __pyx_tuple__43 __pyx_mstate_global->__pyx_tuple__43 -#define __pyx_codeobj__5 __pyx_mstate_global->__pyx_codeobj__5 -#define __pyx_codeobj__7 __pyx_mstate_global->__pyx_codeobj__7 -#define __pyx_codeobj__9 __pyx_mstate_global->__pyx_codeobj__9 -#define __pyx_codeobj__11 __pyx_mstate_global->__pyx_codeobj__11 -#define __pyx_codeobj__13 __pyx_mstate_global->__pyx_codeobj__13 -#define __pyx_codeobj__14 __pyx_mstate_global->__pyx_codeobj__14 -#define __pyx_codeobj__18 __pyx_mstate_global->__pyx_codeobj__18 -#define __pyx_codeobj__19 __pyx_mstate_global->__pyx_codeobj__19 -#define __pyx_codeobj__21 __pyx_mstate_global->__pyx_codeobj__21 -#define __pyx_codeobj__23 __pyx_mstate_global->__pyx_codeobj__23 -#define __pyx_codeobj__24 __pyx_mstate_global->__pyx_codeobj__24 -#define __pyx_codeobj__26 __pyx_mstate_global->__pyx_codeobj__26 -#define __pyx_codeobj__28 __pyx_mstate_global->__pyx_codeobj__28 -#define __pyx_codeobj__30 __pyx_mstate_global->__pyx_codeobj__30 -#define __pyx_codeobj__31 __pyx_mstate_global->__pyx_codeobj__31 -#define __pyx_codeobj__32 __pyx_mstate_global->__pyx_codeobj__32 -#define __pyx_codeobj__33 __pyx_mstate_global->__pyx_codeobj__33 -#define __pyx_codeobj__34 __pyx_mstate_global->__pyx_codeobj__34 -#define __pyx_codeobj__35 __pyx_mstate_global->__pyx_codeobj__35 -#define __pyx_codeobj__36 __pyx_mstate_global->__pyx_codeobj__36 -#define __pyx_codeobj__38 __pyx_mstate_global->__pyx_codeobj__38 -#define __pyx_codeobj__40 __pyx_mstate_global->__pyx_codeobj__40 -#define __pyx_codeobj__41 __pyx_mstate_global->__pyx_codeobj__41 -#define __pyx_codeobj__42 __pyx_mstate_global->__pyx_codeobj__42 -#define 
__pyx_codeobj__44 __pyx_mstate_global->__pyx_codeobj__44 -/* #### Code section: module_code ### */ - -/* "sqlalchemy/util/_immutabledict_cy.py":31 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -PyDoc_STRVAR(__pyx_doc_10sqlalchemy_4util_17_immutabledict_cy__is_compiled, "Utility function to indicate if this module is compiled or not."); -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_1_is_compiled = {"_is_compiled", (PyCFunction)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_1_is_compiled, METH_NOARGS, __pyx_doc_10sqlalchemy_4util_17_immutabledict_cy__is_compiled}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_1_is_compiled(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy__is_compiled(__pyx_self); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy__is_compiled(CYTHON_UNUSED PyObject *__pyx_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_is_compiled", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":33 - * def _is_compiled() -> bool: - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(Py_True); - __pyx_r = Py_True; - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":31 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":44 - * - * - * def _immutable_fn(obj: object) -> NoReturn: # <<<<<<<<<<<<<< - * raise TypeError(f"{obj.__class__.__name__} object is immutable") - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_3_immutable_fn(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_3_immutable_fn = {"_immutable_fn", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_3_immutable_fn, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_3_immutable_fn(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_obj = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const 
*__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_immutable_fn (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_obj,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_obj)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 44, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "_immutable_fn") < 0)) __PYX_ERR(0, 44, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_obj = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("_immutable_fn", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 44, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy._immutable_fn", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_2_immutable_fn(__pyx_self, __pyx_v_obj); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_2_immutable_fn(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_obj) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_immutable_fn", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":45 - * - * def _immutable_fn(obj: object) -> NoReturn: - * raise TypeError(f"{obj.__class__.__name__} object is immutable") # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_obj, __pyx_n_s_class); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = 
__Pyx_PyObject_FormatSimple(__pyx_t_2, __pyx_empty_unicode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_PyUnicode_ConcatInPlace(__pyx_t_1, __pyx_kp_u_object_is_immutable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_builtin_TypeError, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 45, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":44 - * - * - * def _immutable_fn(obj: object) -> NoReturn: # <<<<<<<<<<<<<< - * raise TypeError(f"{obj.__class__.__name__} object is immutable") - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy._immutable_fn", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":51 - * __slots__ = () - * - * def _readonly(self) -> NoReturn: # <<<<<<<<<<<<<< - * raise TypeError( - * f"{self.__class__.__name__} object is immutable and/or readonly" - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_1_readonly(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_1_readonly = {"_readonly", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_1_readonly, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_1_readonly(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_self = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("_readonly (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_self,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_self)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - 
else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 51, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "_readonly") < 0)) __PYX_ERR(0, 51, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_self = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("_readonly", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 51, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer._readonly", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer__readonly(__pyx_self, __pyx_v_self); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer__readonly(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("_readonly", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":53 - * def _readonly(self) -> NoReturn: - * raise TypeError( - * f"{self.__class__.__name__} object is immutable and/or readonly" # <<<<<<<<<<<<<< - * ) - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_self, __pyx_n_s_class); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 53, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyObject_FormatSimple(__pyx_t_2, __pyx_empty_unicode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_PyUnicode_ConcatInPlace(__pyx_t_1, __pyx_kp_u_object_is_immutable_and_or_read); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 53, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":52 - * - * def _readonly(self) -> NoReturn: - * raise TypeError( # <<<<<<<<<<<<<< - * f"{self.__class__.__name__} object is immutable and/or readonly" - * ) - */ - __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_builtin_TypeError, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 52, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 52, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":51 - * __slots__ = () - * - * def 
_readonly(self) -> NoReturn: # <<<<<<<<<<<<<< - * raise TypeError( - * f"{self.__class__.__name__} object is immutable and/or readonly" - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer._readonly", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":56 - * ) - * - * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_3__delitem__(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_3__delitem__ = {"__delitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_3__delitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_3__delitem__(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_self = 0; - CYTHON_UNUSED PyObject *__pyx_v_key = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[2] = {0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__delitem__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_self,&__pyx_n_s_key,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_self)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 56, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 56, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__delitem__", 1, 2, 2, 1); __PYX_ERR(0, 56, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, 
"__delitem__") < 0)) __PYX_ERR(0, 56, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 2)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - } - __pyx_v_self = values[0]; - __pyx_v_key = values[1]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__delitem__", 1, 2, 2, __pyx_nargs); __PYX_ERR(0, 56, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_2__delitem__(__pyx_self, __pyx_v_self, __pyx_v_key); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_2__delitem__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__delitem__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":57 - * - * def __delitem__(self, key: Any) -> NoReturn: - * self._readonly() # <<<<<<<<<<<<<< - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self, __pyx_n_s_readonly); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 57, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, NULL}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 0+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 57, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":56 - * ) - * - * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* 
"sqlalchemy/util/_immutabledict_cy.py":59 - * self._readonly() - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_5__setitem__(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_5__setitem__ = {"__setitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_5__setitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_5__setitem__(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_self = 0; - CYTHON_UNUSED PyObject *__pyx_v_key = 0; - CYTHON_UNUSED PyObject *__pyx_v_value = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[3] = {0,0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_self,&__pyx_n_s_key,&__pyx_n_s_value,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_self)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 59, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 59, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__setitem__", 1, 3, 3, 1); __PYX_ERR(0, 59, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 59, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__setitem__", 1, 3, 3, 2); __PYX_ERR(0, 59, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, 
__pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setitem__") < 0)) __PYX_ERR(0, 59, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 3)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - } - __pyx_v_self = values[0]; - __pyx_v_key = values[1]; - __pyx_v_value = values[2]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__setitem__", 1, 3, 3, __pyx_nargs); __PYX_ERR(0, 59, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_4__setitem__(__pyx_self, __pyx_v_self, __pyx_v_key, __pyx_v_value); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_4__setitem__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setitem__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":60 - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: - * self._readonly() # <<<<<<<<<<<<<< - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self, __pyx_n_s_readonly); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 60, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, NULL}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 0+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 60, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":59 - * self._readonly() - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - 
__Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":62 - * self._readonly() - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_7__setattr__(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_7__setattr__ = {"__setattr__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_7__setattr__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_7__setattr__(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_self = 0; - CYTHON_UNUSED PyObject *__pyx_v_key = 0; - CYTHON_UNUSED PyObject *__pyx_v_value = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[3] = {0,0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setattr__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_self,&__pyx_n_s_key,&__pyx_n_s_value,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_self)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 62, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 62, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__setattr__", 1, 3, 3, 1); __PYX_ERR(0, 62, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_value)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 62, 
__pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__setattr__", 1, 3, 3, 2); __PYX_ERR(0, 62, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setattr__") < 0)) __PYX_ERR(0, 62, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 3)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - } - __pyx_v_self = values[0]; - __pyx_v_key = values[1]; - __pyx_v_value = values[2]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__setattr__", 1, 3, 3, __pyx_nargs); __PYX_ERR(0, 62, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__setattr__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_6__setattr__(__pyx_self, __pyx_v_self, __pyx_v_key, __pyx_v_value); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_6__setattr__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setattr__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":63 - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: - * self._readonly() # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self, __pyx_n_s_readonly); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 63, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, NULL}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 0+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 63, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":62 - * self._readonly() - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - - /* function exit 
code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ReadOnlyContainer.__setattr__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":74 - * # NOTE: this method is required in 3.9 and speeds up the use case - * # ImmutableDictBase[str,int](a_dict) significantly - * @classmethod # <<<<<<<<<<<<<< - * def __class_getitem__( # type: ignore[override] - * cls, key: Any - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__(PyObject *__pyx_v_cls, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__ = {"__class_getitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__(PyObject *__pyx_v_cls, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - CYTHON_UNUSED PyObject *__pyx_v_key = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__class_getitem__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 74, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__class_getitem__") < 0)) __PYX_ERR(0, 74, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_key = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__class_getitem__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 74, __pyx_L3_error) - 
__pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__class_getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase___class_getitem__(((PyTypeObject*)__pyx_v_cls), __pyx_v_key); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase___class_getitem__(PyTypeObject *__pyx_v_cls, CYTHON_UNUSED PyObject *__pyx_v_key) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__class_getitem__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":78 - * cls, key: Any - * ) -> type[Self]: - * return cls # <<<<<<<<<<<<<< - * - * def __delitem__(self, key: Any) -> NoReturn: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_cls); - __pyx_r = ((PyObject *)__pyx_v_cls); - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":74 - * # NOTE: this method is required in 3.9 and speeds up the use case - * # ImmutableDictBase[str,int](a_dict) significantly - * @classmethod # <<<<<<<<<<<<<< - * def __class_getitem__( # type: ignore[override] - * cls, key: Any - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":80 - * return cls - * - * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_3__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_3__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__delitem__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_2__delitem__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), ((PyObject *)__pyx_v_key)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_2__delitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__delitem__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":81 - * - * def __delitem__(self, key: Any) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * 
def __setitem__(self, key: Any, value: Any) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 81, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 81, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":80 - * return cls - * - * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":83 - * _immutable_fn(self) - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_5__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /*proto*/ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_5__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_4__setitem__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), ((PyObject *)__pyx_v_key), ((PyObject *)__pyx_v_value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_4__setitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setitem__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":84 - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 84, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 
= NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 84, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":83 - * _immutable_fn(self) - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":86 - * _immutable_fn(self) - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_7__setattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /*proto*/ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_7__setattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setattr__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_6__setattr__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), ((PyObject *)__pyx_v_key), ((PyObject *)__pyx_v_value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_6__setattr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setattr__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":87 - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def clear(self) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 87, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - 
__Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 87, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":86 - * _immutable_fn(self) - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__setattr__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":89 - * _immutable_fn(self) - * - * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear = {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("clear (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("clear", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "clear", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_8clear(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_8clear(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const 
char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("clear", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":90 - * - * def clear(self) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 90, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 90, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":89 - * _immutable_fn(self) - * - * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":92 - * _immutable_fn(self) - * - * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop = {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - CYTHON_UNUSED PyObject *__pyx_v_key = 0; - CYTHON_UNUSED PyObject *__pyx_v_default = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[2] = {0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pop (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = 
__Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; - values[1] = __Pyx_Arg_NewRef_FASTCALL(((PyObject *)Py_None)); - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 92, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (kw_args > 0) { - PyObject* value = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_default); - if (value) { values[1] = __Pyx_Arg_NewRef_FASTCALL(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 92, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "pop") < 0)) __PYX_ERR(0, 92, __pyx_L3_error) - } - } else { - switch (__pyx_nargs) { - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_key = values[0]; - __pyx_v_default = values[1]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("pop", 0, 1, 2, __pyx_nargs); __PYX_ERR(0, 92, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_10pop(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), __pyx_v_key, __pyx_v_default); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_10pop(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("pop", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":93 - * - * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def popitem(self) -> NoReturn: - */ - 
__Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":92 - * _immutable_fn(self) - * - * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":95 - * _immutable_fn(self) - * - * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem = {"popitem", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("popitem (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("popitem", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "popitem", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_12popitem(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self)); - - 
/* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_12popitem(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("popitem", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":96 - * - * def popitem(self) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 96, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 96, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":95 - * _immutable_fn(self) - * - * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.popitem", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":98 - * _immutable_fn(self) - * - * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault = {"setdefault", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - CYTHON_UNUSED PyObject *__pyx_v_key = 0; - CYTHON_UNUSED PyObject *__pyx_v_default = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t 
__pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[2] = {0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("setdefault (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; - values[1] = __Pyx_Arg_NewRef_FASTCALL(((PyObject *)Py_None)); - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 98, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (kw_args > 0) { - PyObject* value = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_default); - if (value) { values[1] = __Pyx_Arg_NewRef_FASTCALL(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 98, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "setdefault") < 0)) __PYX_ERR(0, 98, __pyx_L3_error) - } - } else { - switch (__pyx_nargs) { - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_key = values[0]; - __pyx_v_default = values[1]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("setdefault", 0, 1, 2, __pyx_nargs); __PYX_ERR(0, 98, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.setdefault", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_14setdefault(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), __pyx_v_key, __pyx_v_default); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_14setdefault(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject 
*__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("setdefault", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":99 - * - * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def update(self, *arg: Any, **kw: Any) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 99, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 99, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":98 - * _immutable_fn(self) - * - * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.setdefault", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":101 - * _immutable_fn(self) - * - * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_17update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_17update = {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_17update, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_17update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - CYTHON_UNUSED PyObject *__pyx_v_arg = 0; - CYTHON_UNUSED PyObject *__pyx_v_kw = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("update (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_kwds) && 
unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "update", 1))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_arg = __pyx_args; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_16update(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), __pyx_v_arg, __pyx_v_kw); - - /* function exit code */ - __Pyx_DECREF(__pyx_v_arg); - __Pyx_XDECREF(__pyx_v_kw); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_16update(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_arg, CYTHON_UNUSED PyObject *__pyx_v_kw) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("update", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":102 - * - * def update(self, *arg: Any, **kw: Any) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 102, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 102, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":101 - * _immutable_fn(self) - * - * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_19__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_19__reduce_cython__ = {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_19__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject 
*__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_19__reduce_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("__reduce_cython__", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce_cython__", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_18__reduce_cython__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_18__reduce_cython__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self) { - PyObject *__pyx_v_state = 0; - PyObject *__pyx_v__dict = 0; - int __pyx_v_use_setstate; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 1); - - /* "(tree fragment)":5 - * cdef object _dict - * cdef bint use_setstate - * state = () # <<<<<<<<<<<<<< - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - */ - __Pyx_INCREF(__pyx_empty_tuple); - __pyx_v_state = __pyx_empty_tuple; - - /* "(tree fragment)":6 - * cdef bint use_setstate - * state = () - * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< - * if _dict is not None: - * state += (_dict,) - */ - __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_v__dict = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":7 - * state = () - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - __pyx_t_2 = (__pyx_v__dict != Py_None); - if (__pyx_t_2) { - - /* "(tree fragment)":8 - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: - * state += (_dict,) # <<<<<<<<<<<<<< - * use_setstate = True - * else: - */ - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v__dict); - __Pyx_GIVEREF(__pyx_v__dict); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict)) __PYX_ERR(1, 8, __pyx_L1_error); - __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 8, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3)); - __pyx_t_3 
= 0; - - /* "(tree fragment)":9 - * if _dict is not None: - * state += (_dict,) - * use_setstate = True # <<<<<<<<<<<<<< - * else: - * use_setstate = False - */ - __pyx_v_use_setstate = 1; - - /* "(tree fragment)":7 - * state = () - * _dict = getattr(self, '__dict__', None) - * if _dict is not None: # <<<<<<<<<<<<<< - * state += (_dict,) - * use_setstate = True - */ - goto __pyx_L3; - } - - /* "(tree fragment)":11 - * use_setstate = True - * else: - * use_setstate = False # <<<<<<<<<<<<<< - * if use_setstate: - * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, None), state - */ - /*else*/ { - __pyx_v_use_setstate = 0; - } - __pyx_L3:; - - /* "(tree fragment)":12 - * else: - * use_setstate = False - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, None), state - * else: - */ - if (__pyx_v_use_setstate) { - - /* "(tree fragment)":13 - * use_setstate = False - * if use_setstate: - * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, None), state # <<<<<<<<<<<<<< - * else: - * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_ImmutableDictBase); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(__pyx_int_238750788); - __Pyx_GIVEREF(__pyx_int_238750788); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_238750788)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None)) __PYX_ERR(1, 13, __pyx_L1_error); - __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_3); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_state)) __PYX_ERR(1, 13, __pyx_L1_error); - __pyx_t_3 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "(tree fragment)":12 - * else: - * use_setstate = False - * if use_setstate: # <<<<<<<<<<<<<< - * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, None), state - * else: - */ - } - - /* "(tree fragment)":15 - * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, None), state - * else: - * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_ImmutableDictBase__set_state(self, __pyx_state) - */ - /*else*/ { - __Pyx_XDECREF(__pyx_r); - __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_ImmutableDictBase); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject 
*)__pyx_v_self)))); - __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))))) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_INCREF(__pyx_int_238750788); - __Pyx_GIVEREF(__pyx_int_238750788); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_238750788)) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_INCREF(__pyx_v_state); - __Pyx_GIVEREF(__pyx_v_state); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state)) __PYX_ERR(1, 15, __pyx_L1_error); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_4); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error); - __pyx_t_4 = 0; - __pyx_t_1 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - } - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v_state); - __Pyx_XDECREF(__pyx_v__dict); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":16 - * else: - * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_ImmutableDictBase__set_state(self, __pyx_state) - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_21__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_21__setstate_cython__ = {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_21__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_21__setstate_cython__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v___pyx_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_state,0}; - if (__pyx_kwds) { - Py_ssize_t 
kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 16, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__setstate_cython__") < 0)) __PYX_ERR(1, 16, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v___pyx_state = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__setstate_cython__", 1, 1, 1, __pyx_nargs); __PYX_ERR(1, 16, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_20__setstate_cython__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v_self), __pyx_v___pyx_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_20__setstate_cython__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 1); - - /* "(tree fragment)":17 - * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) - * def __setstate_cython__(self, __pyx_state): - * __pyx_unpickle_ImmutableDictBase__set_state(self, __pyx_state) # <<<<<<<<<<<<<< - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 17, __pyx_L1_error) - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_17_immutabledict_cy___pyx_unpickle_ImmutableDictBase__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_ImmutableDictBase__set_state(self, __pyx_state) - */ - - /* 
function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.ImmutableDictBase.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":113 - * - * # ImmutableDictBase start - * @classmethod # <<<<<<<<<<<<<< - * def __class_getitem__( # type: ignore[override] - * cls, key: Any - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_1__class_getitem__(PyObject *__pyx_v_cls, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_1__class_getitem__ = {"__class_getitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_1__class_getitem__(PyObject *__pyx_v_cls, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - CYTHON_UNUSED PyObject *__pyx_v_key = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__class_getitem__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 113, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__class_getitem__") < 0)) __PYX_ERR(0, 113, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 1)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - } - __pyx_v_key = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__class_getitem__", 1, 1, 1, __pyx_nargs); __PYX_ERR(0, 113, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < 
(Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__class_getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict___class_getitem__(((PyTypeObject*)__pyx_v_cls), __pyx_v_key); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict___class_getitem__(PyTypeObject *__pyx_v_cls, CYTHON_UNUSED PyObject *__pyx_v_key) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__class_getitem__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":117 - * cls, key: Any - * ) -> type[Self]: - * return cls # <<<<<<<<<<<<<< - * - * def __delitem__(self, key: Any) -> NoReturn: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_cls); - __pyx_r = ((PyObject *)__pyx_v_cls); - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":113 - * - * # ImmutableDictBase start - * @classmethod # <<<<<<<<<<<<<< - * def __class_getitem__( # type: ignore[override] - * cls, key: Any - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":119 - * return cls - * - * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_3__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_3__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__delitem__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_2__delitem__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v_key)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_2__delitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__delitem__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":120 - * - * def __delitem__(self, key: Any) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 120, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if 
CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 120, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":119 - * return cls - * - * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":122 - * _immutable_fn(self) - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_5__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /*proto*/ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_5__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_4__setitem__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v_key), ((PyObject *)__pyx_v_value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_4__setitem__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setitem__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":123 - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 123, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - 
__Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 123, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":122 - * _immutable_fn(self) - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":125 - * _immutable_fn(self) - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_7__setattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /*proto*/ -static int __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_7__setattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - int __pyx_r; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setattr__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_6__setattr__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v_key), ((PyObject *)__pyx_v_value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_6__setattr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_value) { - int __pyx_r; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setattr__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":126 - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def clear(self) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 126, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 
1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 126, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":125 - * _immutable_fn(self) - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__setattr__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":128 - * _immutable_fn(self) - * - * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_9clear(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_9clear = {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_9clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_9clear(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("clear (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("clear", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "clear", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_8clear(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_8clear(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("clear", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":129 - * - * def clear(self) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: - */ - 
__Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 129, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 129, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":128 - * _immutable_fn(self) - * - * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":131 - * _immutable_fn(self) - * - * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_11pop(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_11pop = {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_11pop(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - CYTHON_UNUSED PyObject *__pyx_v_key = 0; - CYTHON_UNUSED PyObject *__pyx_v_default = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[2] = {0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pop (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; - values[1] = __Pyx_Arg_NewRef_FASTCALL(((PyObject *)Py_None)); - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - 
CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 131, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (kw_args > 0) { - PyObject* value = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_default); - if (value) { values[1] = __Pyx_Arg_NewRef_FASTCALL(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 131, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "pop") < 0)) __PYX_ERR(0, 131, __pyx_L3_error) - } - } else { - switch (__pyx_nargs) { - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_key = values[0]; - __pyx_v_default = values[1]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("pop", 0, 1, 2, __pyx_nargs); __PYX_ERR(0, 131, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_10pop(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), __pyx_v_key, __pyx_v_default); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_10pop(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("pop", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":132 - * - * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def popitem(self) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 132, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - 
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 132, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":131 - * _immutable_fn(self) - * - * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":134 - * _immutable_fn(self) - * - * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_13popitem(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_13popitem = {"popitem", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_13popitem, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_13popitem(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("popitem (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("popitem", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "popitem", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_12popitem(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_12popitem(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - 
PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("popitem", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":135 - * - * def popitem(self) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 135, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 135, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":134 - * _immutable_fn(self) - * - * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.popitem", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":137 - * _immutable_fn(self) - * - * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_15setdefault(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_15setdefault = {"setdefault", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_15setdefault, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_15setdefault(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - CYTHON_UNUSED PyObject *__pyx_v_key = 0; - CYTHON_UNUSED PyObject *__pyx_v_default = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[2] = {0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("setdefault (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = 
PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; - values[1] = __Pyx_Arg_NewRef_FASTCALL(((PyObject *)Py_None)); - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_key)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 137, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (kw_args > 0) { - PyObject* value = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_default); - if (value) { values[1] = __Pyx_Arg_NewRef_FASTCALL(value); kw_args--; } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 137, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "setdefault") < 0)) __PYX_ERR(0, 137, __pyx_L3_error) - } - } else { - switch (__pyx_nargs) { - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_key = values[0]; - __pyx_v_default = values[1]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("setdefault", 0, 1, 2, __pyx_nargs); __PYX_ERR(0, 137, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.setdefault", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_14setdefault(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), __pyx_v_key, __pyx_v_default); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_14setdefault(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_key, CYTHON_UNUSED PyObject *__pyx_v_default) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("setdefault", 1); - - /* 
"sqlalchemy/util/_immutabledict_cy.py":138 - * - * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def update(self, *arg: Any, **kw: Any) -> NoReturn: - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 138, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 138, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":137 - * _immutable_fn(self) - * - * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.setdefault", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":140 - * _immutable_fn(self) - * - * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_17update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_17update = {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_17update, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_17update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - CYTHON_UNUSED PyObject *__pyx_v_arg = 0; - CYTHON_UNUSED PyObject *__pyx_v_kw = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("update (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "update", 1))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_arg = __pyx_args; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_16update(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), __pyx_v_arg, __pyx_v_kw); - - /* function exit code */ - __Pyx_DECREF(__pyx_v_arg); - __Pyx_XDECREF(__pyx_v_kw); 
- __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_16update(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_arg, CYTHON_UNUSED PyObject *__pyx_v_kw) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("update", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":141 - * - * def update(self, *arg: Any, **kw: Any) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * # ImmutableDictBase end - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 141, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 141, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":140 - * _immutable_fn(self) - * - * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.update", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":145 - * # ImmutableDictBase end - * - * def __repr__(self) -> str: # <<<<<<<<<<<<<< - * return f"immutabledict({dict.__repr__(self)})" - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__(PyObject *__pyx_v_self) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_18__repr__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_18__repr__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - Py_ssize_t __pyx_t_2; 
- Py_UCS4 __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - unsigned int __pyx_t_7; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__repr__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":146 - * - * def __repr__(self) -> str: - * return f"immutabledict({dict.__repr__(self)})" # <<<<<<<<<<<<<< - * - * @cython.annotation_typing(False) # avoid cython crash from generic return - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 146, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = 0; - __pyx_t_3 = 127; - __Pyx_INCREF(__pyx_kp_u_immutabledict); - __pyx_t_2 += 14; - __Pyx_GIVEREF(__pyx_kp_u_immutabledict); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_kp_u_immutabledict); - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PyDict_Type)), __pyx_n_s_repr); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 146, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = NULL; - __pyx_t_7 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_5))) { - __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); - if (likely(__pyx_t_6)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); - __Pyx_INCREF(__pyx_t_6); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_5, function); - __pyx_t_7 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_6, ((PyObject *)__pyx_v_self)}; - __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_5, __pyx_callargs+1-__pyx_t_7, 1+__pyx_t_7); - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 146, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - } - __pyx_t_5 = __Pyx_PyObject_FormatSimple(__pyx_t_4, __pyx_empty_unicode); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 146, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_5) > __pyx_t_3) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_5) : __pyx_t_3; - __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_5); - __Pyx_GIVEREF(__pyx_t_5); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_5); - __pyx_t_5 = 0; - __Pyx_INCREF(__pyx_kp_u_); - __pyx_t_2 += 1; - __Pyx_GIVEREF(__pyx_kp_u_); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_kp_u_); - __pyx_t_5 = __Pyx_PyUnicode_Join(__pyx_t_1, 3, __pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 146, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_5; - __pyx_t_5 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":145 - * # ImmutableDictBase end - * - * def __repr__(self) -> str: # <<<<<<<<<<<<<< - * return f"immutabledict({dict.__repr__(self)})" - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":148 - * return f"immutabledict({dict.__repr__(self)})" - * - * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< - * def union( - * self, other: Optional[Mapping[_KT, _VT]] = None, / - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_21union(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_21union = {"union", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_21union, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_21union(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v_other = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[1] = {0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("union (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = {0}; - - /* "sqlalchemy/util/_immutabledict_cy.py":150 - * @cython.annotation_typing(False) # avoid cython crash from generic return - * def union( - * self, other: Optional[Mapping[_KT, _VT]] = None, / # <<<<<<<<<<<<<< - * ) -> immutabledict[_KT, _VT]: - * if not other: - */ - values[0] = __Pyx_Arg_NewRef_FASTCALL(((PyObject *)Py_None)); - if (__pyx_kwds && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) > 0) { - if (likely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values, 0, "union") < 0)) __PYX_ERR(0, 148, 
__pyx_L3_error) - } else { - switch (__pyx_nargs) { - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_other = values[0]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("union", 0, 0, 1, __pyx_nargs); __PYX_ERR(0, 148, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.union", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_20union(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), __pyx_v_other); - - /* "sqlalchemy/util/_immutabledict_cy.py":148 - * return f"immutabledict({dict.__repr__(self)})" - * - * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< - * def union( - * self, other: Optional[Mapping[_KT, _VT]] = None, / - */ - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_20union(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v_other) { - struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_result = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - unsigned int __pyx_t_7; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("union", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":152 - * self, other: Optional[Mapping[_KT, _VT]] = None, / - * ) -> immutabledict[_KT, _VT]: - * if not other: # <<<<<<<<<<<<<< - * return self - * # new + update is faster than immutabledict(self) - */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_other); if (unlikely((__pyx_t_1 < 0))) __PYX_ERR(0, 152, __pyx_L1_error) - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_immutabledict_cy.py":153 - * ) -> immutabledict[_KT, _VT]: - * if not other: - * return self # <<<<<<<<<<<<<< - * # new + update is faster than immutabledict(self) - * result: immutabledict = immutabledict() # type: ignore[type-arg] - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":152 - * self, other: Optional[Mapping[_KT, _VT]] = None, / - * ) -> immutabledict[_KT, _VT]: - * if not other: # <<<<<<<<<<<<<< - * return self - * # new + update is faster than immutabledict(self) - */ - } - - /* "sqlalchemy/util/_immutabledict_cy.py":155 - * return self - * # new + update is faster than immutabledict(self) - * result: immutabledict = immutabledict() # type: ignore[type-arg] # <<<<<<<<<<<<<< - * PyDict_Update(result, self) - * if isinstance(other, dict): 
- */ - __pyx_t_3 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 155, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_t_3); - __pyx_t_3 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":156 - * # new + update is faster than immutabledict(self) - * result: immutabledict = immutabledict() # type: ignore[type-arg] - * PyDict_Update(result, self) # <<<<<<<<<<<<<< - * if isinstance(other, dict): - * # c version of PyDict_Update supports only dicts - */ - __pyx_t_4 = PyDict_Update(((PyObject *)__pyx_v_result), ((PyObject *)__pyx_v_self)); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 156, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":157 - * result: immutabledict = immutabledict() # type: ignore[type-arg] - * PyDict_Update(result, self) - * if isinstance(other, dict): # <<<<<<<<<<<<<< - * # c version of PyDict_Update supports only dicts - * PyDict_Update(result, other) - */ - __pyx_t_2 = PyDict_Check(__pyx_v_other); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_immutabledict_cy.py":159 - * if isinstance(other, dict): - * # c version of PyDict_Update supports only dicts - * PyDict_Update(result, other) # <<<<<<<<<<<<<< - * else: - * dict.update(result, other) - */ - __pyx_t_4 = PyDict_Update(((PyObject *)__pyx_v_result), __pyx_v_other); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 159, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":157 - * result: immutabledict = immutabledict() # type: ignore[type-arg] - * PyDict_Update(result, self) - * if isinstance(other, dict): # <<<<<<<<<<<<<< - * # c version of PyDict_Update supports only dicts - * PyDict_Update(result, other) - */ - goto __pyx_L4; - } - - /* "sqlalchemy/util/_immutabledict_cy.py":161 - * PyDict_Update(result, other) - * else: - * dict.update(result, other) # <<<<<<<<<<<<<< - * return result - * - */ - /*else*/ { - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PyDict_Type)), __pyx_n_s_update); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 161, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = NULL; - __pyx_t_7 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_5))) { - __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); - if (likely(__pyx_t_6)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); - __Pyx_INCREF(__pyx_t_6); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_5, function); - __pyx_t_7 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[3] = {__pyx_t_6, ((PyObject *)__pyx_v_result), __pyx_v_other}; - __pyx_t_3 = __Pyx_PyObject_FastCall(__pyx_t_5, __pyx_callargs+1-__pyx_t_7, 2+__pyx_t_7); - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 161, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_L4:; - - /* "sqlalchemy/util/_immutabledict_cy.py":162 - * else: - * dict.update(result, other) - * return result # <<<<<<<<<<<<<< - * - * @cython.annotation_typing(False) # avoid cython crash from generic return - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_result); - __pyx_r = ((PyObject *)__pyx_v_result); - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":148 - * return f"immutabledict({dict.__repr__(self)})" - * - * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< - * def union( - * 
self, other: Optional[Mapping[_KT, _VT]] = None, / - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.union", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":164 - * return result - * - * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< - * def merge_with( - * self, *dicts: Optional[Mapping[_KT, _VT]] - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_23merge_with(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_23merge_with = {"merge_with", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_23merge_with, METH_VARARGS|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_23merge_with(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_dicts = 0; - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("merge_with (wrapper)", 0); - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_VARARGS(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "merge_with", 0))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_dicts = __pyx_args; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_22merge_with(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), __pyx_v_dicts); - - /* function exit code */ - __Pyx_DECREF(__pyx_v_dicts); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_22merge_with(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v_dicts) { - struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_result = NULL; - PyObject *__pyx_v_d = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - Py_ssize_t __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - Py_ssize_t __pyx_t_6; - int __pyx_t_7; - PyObject *__pyx_t_8 = NULL; - PyObject *__pyx_t_9 = NULL; - unsigned int __pyx_t_10; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("merge_with", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":168 - * self, *dicts: Optional[Mapping[_KT, _VT]] - * ) -> immutabledict[_KT, _VT]: - * result: Optional[immutabledict] = None # type: ignore[type-arg] # <<<<<<<<<<<<<< - * d: object - * if not dicts: - */ - __Pyx_INCREF(Py_None); - __pyx_v_result = ((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)Py_None); - - /* "sqlalchemy/util/_immutabledict_cy.py":170 - * result: 
Optional[immutabledict] = None # type: ignore[type-arg] - * d: object - * if not dicts: # <<<<<<<<<<<<<< - * return self - * for d in dicts: - */ - __pyx_t_1 = (PyTuple_GET_SIZE(__pyx_v_dicts) != 0); - __pyx_t_2 = (!__pyx_t_1); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_immutabledict_cy.py":171 - * d: object - * if not dicts: - * return self # <<<<<<<<<<<<<< - * for d in dicts: - * if d is not None and len(d) > 0: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":170 - * result: Optional[immutabledict] = None # type: ignore[type-arg] - * d: object - * if not dicts: # <<<<<<<<<<<<<< - * return self - * for d in dicts: - */ - } - - /* "sqlalchemy/util/_immutabledict_cy.py":172 - * if not dicts: - * return self - * for d in dicts: # <<<<<<<<<<<<<< - * if d is not None and len(d) > 0: - * if result is None: - */ - __pyx_t_3 = __pyx_v_dicts; __Pyx_INCREF(__pyx_t_3); - __pyx_t_4 = 0; - for (;;) { - { - Py_ssize_t __pyx_temp = __Pyx_PyTuple_GET_SIZE(__pyx_t_3); - #if !CYTHON_ASSUME_SAFE_MACROS - if (unlikely((__pyx_temp < 0))) __PYX_ERR(0, 172, __pyx_L1_error) - #endif - if (__pyx_t_4 >= __pyx_temp) break; - } - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_5); __pyx_t_4++; if (unlikely((0 < 0))) __PYX_ERR(0, 172, __pyx_L1_error) - #else - __pyx_t_5 = __Pyx_PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 172, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - #endif - __Pyx_XDECREF_SET(__pyx_v_d, __pyx_t_5); - __pyx_t_5 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":173 - * return self - * for d in dicts: - * if d is not None and len(d) > 0: # <<<<<<<<<<<<<< - * if result is None: - * # new + update is faster than immutabledict(self) - */ - __pyx_t_1 = (__pyx_v_d != Py_None); - if (__pyx_t_1) { - } else { - __pyx_t_2 = __pyx_t_1; - goto __pyx_L7_bool_binop_done; - } - __pyx_t_6 = PyObject_Length(__pyx_v_d); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 173, __pyx_L1_error) - __pyx_t_1 = (__pyx_t_6 > 0); - __pyx_t_2 = __pyx_t_1; - __pyx_L7_bool_binop_done:; - if (__pyx_t_2) { - - /* "sqlalchemy/util/_immutabledict_cy.py":174 - * for d in dicts: - * if d is not None and len(d) > 0: - * if result is None: # <<<<<<<<<<<<<< - * # new + update is faster than immutabledict(self) - * result = immutabledict() - */ - __pyx_t_2 = (((PyObject *)__pyx_v_result) == Py_None); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_immutabledict_cy.py":176 - * if result is None: - * # new + update is faster than immutabledict(self) - * result = immutabledict() # <<<<<<<<<<<<<< - * PyDict_Update(result, self) - * if isinstance(d, dict): - */ - __pyx_t_5 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 176, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF_SET(__pyx_v_result, ((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_t_5)); - __pyx_t_5 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":177 - * # new + update is faster than immutabledict(self) - * result = immutabledict() - * PyDict_Update(result, self) # <<<<<<<<<<<<<< - * if isinstance(d, dict): - * # c version of PyDict_Update supports only dicts - */ - __pyx_t_7 = PyDict_Update(((PyObject *)__pyx_v_result), ((PyObject *)__pyx_v_self)); if (unlikely(__pyx_t_7 == 
((int)-1))) __PYX_ERR(0, 177, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":174 - * for d in dicts: - * if d is not None and len(d) > 0: - * if result is None: # <<<<<<<<<<<<<< - * # new + update is faster than immutabledict(self) - * result = immutabledict() - */ - } - - /* "sqlalchemy/util/_immutabledict_cy.py":178 - * result = immutabledict() - * PyDict_Update(result, self) - * if isinstance(d, dict): # <<<<<<<<<<<<<< - * # c version of PyDict_Update supports only dicts - * PyDict_Update(result, d) - */ - __pyx_t_2 = PyDict_Check(__pyx_v_d); - if (__pyx_t_2) { - - /* "sqlalchemy/util/_immutabledict_cy.py":180 - * if isinstance(d, dict): - * # c version of PyDict_Update supports only dicts - * PyDict_Update(result, d) # <<<<<<<<<<<<<< - * else: - * dict.update(result, d) - */ - __pyx_t_7 = PyDict_Update(((PyObject *)__pyx_v_result), __pyx_v_d); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(0, 180, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":178 - * result = immutabledict() - * PyDict_Update(result, self) - * if isinstance(d, dict): # <<<<<<<<<<<<<< - * # c version of PyDict_Update supports only dicts - * PyDict_Update(result, d) - */ - goto __pyx_L10; - } - - /* "sqlalchemy/util/_immutabledict_cy.py":182 - * PyDict_Update(result, d) - * else: - * dict.update(result, d) # <<<<<<<<<<<<<< - * - * return self if result is None else result - */ - /*else*/ { - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PyDict_Type)), __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 182, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_9 = NULL; - __pyx_t_10 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_8))) { - __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); - if (likely(__pyx_t_9)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); - __Pyx_INCREF(__pyx_t_9); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_8, function); - __pyx_t_10 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[3] = {__pyx_t_9, ((PyObject *)__pyx_v_result), __pyx_v_d}; - __pyx_t_5 = __Pyx_PyObject_FastCall(__pyx_t_8, __pyx_callargs+1-__pyx_t_10, 2+__pyx_t_10); - __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; - if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 182, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - } - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - } - __pyx_L10:; - - /* "sqlalchemy/util/_immutabledict_cy.py":173 - * return self - * for d in dicts: - * if d is not None and len(d) > 0: # <<<<<<<<<<<<<< - * if result is None: - * # new + update is faster than immutabledict(self) - */ - } - - /* "sqlalchemy/util/_immutabledict_cy.py":172 - * if not dicts: - * return self - * for d in dicts: # <<<<<<<<<<<<<< - * if d is not None and len(d) > 0: - * if result is None: - */ - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":184 - * dict.update(result, d) - * - * return self if result is None else result # <<<<<<<<<<<<<< - * - * def copy(self) -> Self: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = (((PyObject *)__pyx_v_result) == Py_None); - if (__pyx_t_2) { - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_t_3 = ((PyObject *)__pyx_v_self); - } else { - __Pyx_INCREF((PyObject *)__pyx_v_result); - __pyx_t_3 = ((PyObject *)__pyx_v_result); - } - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":164 - * return result - * - * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< - * def merge_with( - * 
self, *dicts: Optional[Mapping[_KT, _VT]] - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.merge_with", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF((PyObject *)__pyx_v_result); - __Pyx_XDECREF(__pyx_v_d); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":186 - * return self if result is None else result - * - * def copy(self) -> Self: # <<<<<<<<<<<<<< - * return self - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_25copy(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_25copy = {"copy", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_25copy, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_25copy(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("copy (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("copy", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "copy", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_24copy(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_24copy(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("copy", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":187 - * - * def copy(self) -> Self: - * return self # <<<<<<<<<<<<<< - * - * def __reduce__(self) -> Any: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF((PyObject *)__pyx_v_self); - __pyx_r = ((PyObject *)__pyx_v_self); - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":186 - * return self if result is None else result - * - * def copy(self) -> Self: # <<<<<<<<<<<<<< - * return self - * - */ - - /* function exit code */ - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":189 - * return self - * - * def __reduce__(self) -> Any: # <<<<<<<<<<<<<< - * 
return immutabledict, (dict(self),) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_27__reduce__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_27__reduce__ = {"__reduce__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_27__reduce__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_27__reduce__(PyObject *__pyx_v_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce__ (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - if (unlikely(__pyx_nargs > 0)) { - __Pyx_RaiseArgtupleInvalid("__reduce__", 1, 0, 0, __pyx_nargs); return NULL;} - if (unlikely(__pyx_kwds) && __Pyx_NumKwargs_FASTCALL(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__reduce__", 0))) return NULL; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_26__reduce__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_26__reduce__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":190 - * - * def __reduce__(self) -> Any: - * return immutabledict, (dict(self),) # <<<<<<<<<<<<<< - * - * # PEP 584 - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyDict_Type)), ((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 190, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 190, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_1); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_1)) __PYX_ERR(0, 190, __pyx_L1_error); - __pyx_t_1 = 0; - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 190, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); - __Pyx_GIVEREF((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); - if (__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict))) __PYX_ERR(0, 190, __pyx_L1_error); - __Pyx_GIVEREF(__pyx_t_2); - if 
(__Pyx_PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_2)) __PYX_ERR(0, 190, __pyx_L1_error); - __pyx_t_2 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":189 - * return self - * - * def __reduce__(self) -> Any: # <<<<<<<<<<<<<< - * return immutabledict, (dict(self),) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__reduce__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":193 - * - * # PEP 584 - * def __ior__(self, __value: Any, /) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_29__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_29__ior__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__ior__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_28__ior__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v__immutabledict__value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_28__ior__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v__immutabledict__value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__ior__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":194 - * # PEP 584 - * def __ior__(self, __value: Any, /) -> NoReturn: - * _immutable_fn(self) # <<<<<<<<<<<<<< - * - * def __or__( # type: ignore[override] - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_immutable_fn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 194, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (unlikely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 1+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 194, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":193 - * - * # PEP 584 - * def __ior__(self, __value: Any, /) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - - /* 
function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__ior__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":196 - * _immutable_fn(self) - * - * def __or__( # type: ignore[override] # <<<<<<<<<<<<<< - * self, __value: Mapping[_KT, _VT], / - * ) -> immutabledict[_KT, _VT]: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_31__or__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_31__or__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__or__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_30__or__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v__immutabledict__value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_30__or__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v__immutabledict__value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__or__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":199 - * self, __value: Mapping[_KT, _VT], / - * ) -> immutabledict[_KT, _VT]: - * return immutabledict( # <<<<<<<<<<<<<< - * dict.__or__(self, __value), # type: ignore[call-overload] - * ) - */ - __Pyx_XDECREF(__pyx_r); - - /* "sqlalchemy/util/_immutabledict_cy.py":200 - * ) -> immutabledict[_KT, _VT]: - * return immutabledict( - * dict.__or__(self, __value), # type: ignore[call-overload] # <<<<<<<<<<<<<< - * ) - * - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PyDict_Type)), __pyx_n_s_or); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 200, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[3] = {__pyx_t_3, ((PyObject *)__pyx_v_self), __pyx_v__immutabledict__value}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 2+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 200, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - - /* "sqlalchemy/util/_immutabledict_cy.py":199 - * self, __value: Mapping[_KT, _VT], / - * ) -> immutabledict[_KT, _VT]: - * return 
immutabledict( # <<<<<<<<<<<<<< - * dict.__or__(self, __value), # type: ignore[call-overload] - * ) - */ - __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict), __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 199, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":196 - * _immutable_fn(self) - * - * def __or__( # type: ignore[override] # <<<<<<<<<<<<<< - * self, __value: Mapping[_KT, _VT], / - * ) -> immutabledict[_KT, _VT]: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__or__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sqlalchemy/util/_immutabledict_cy.py":203 - * ) - * - * def __ror__( # type: ignore[override] # <<<<<<<<<<<<<< - * self, __value: Mapping[_KT, _VT], / - * ) -> immutabledict[_KT, _VT]: - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_33__ror__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value); /*proto*/ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_33__ror__(PyObject *__pyx_v_self, PyObject *__pyx_v__immutabledict__value) { - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__ror__ (wrapper)", 0); - __pyx_kwvalues = __Pyx_KwValues_VARARGS(__pyx_args, __pyx_nargs); - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_32__ror__(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *)__pyx_v_self), ((PyObject *)__pyx_v__immutabledict__value)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_32__ror__(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict *__pyx_v_self, PyObject *__pyx_v__immutabledict__value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - unsigned int __pyx_t_4; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__ror__", 1); - - /* "sqlalchemy/util/_immutabledict_cy.py":206 - * self, __value: Mapping[_KT, _VT], / - * ) -> immutabledict[_KT, _VT]: - * return immutabledict( # <<<<<<<<<<<<<< - * dict.__ror__(self, __value), # type: ignore[call-overload] - * ) - */ - __Pyx_XDECREF(__pyx_r); - - /* "sqlalchemy/util/_immutabledict_cy.py":207 - * ) -> immutabledict[_KT, _VT]: - * return immutabledict( - * dict.__ror__(self, __value), # type: ignore[call-overload] # <<<<<<<<<<<<<< - * ) - */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PyDict_Type)), __pyx_n_s_ror); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 207, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = NULL; - __pyx_t_4 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_2))) { - __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); - if (likely(__pyx_t_3)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); - __Pyx_INCREF(__pyx_t_3); - __Pyx_INCREF(function); - 
__Pyx_DECREF_SET(__pyx_t_2, function); - __pyx_t_4 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[3] = {__pyx_t_3, ((PyObject *)__pyx_v_self), __pyx_v__immutabledict__value}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_2, __pyx_callargs+1-__pyx_t_4, 2+__pyx_t_4); - __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 207, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - - /* "sqlalchemy/util/_immutabledict_cy.py":206 - * self, __value: Mapping[_KT, _VT], / - * ) -> immutabledict[_KT, _VT]: - * return immutabledict( # <<<<<<<<<<<<<< - * dict.__ror__(self, __value), # type: ignore[call-overload] - * ) - */ - __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict), __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 206, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - /* "sqlalchemy/util/_immutabledict_cy.py":203 - * ) - * - * def __ror__( # type: ignore[override] # <<<<<<<<<<<<<< - * self, __value: Mapping[_KT, _VT], / - * ) -> immutabledict[_KT, _VT]: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.immutabledict.__ror__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __pyx_unpickle_ImmutableDictBase(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_5__pyx_unpickle_ImmutableDictBase(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -); /*proto*/ -static PyMethodDef __pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_5__pyx_unpickle_ImmutableDictBase = {"__pyx_unpickle_ImmutableDictBase", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_5__pyx_unpickle_ImmutableDictBase, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}; -static PyObject *__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_5__pyx_unpickle_ImmutableDictBase(PyObject *__pyx_self, -#if CYTHON_METH_FASTCALL -PyObject *const *__pyx_args, Py_ssize_t __pyx_nargs, PyObject *__pyx_kwds -#else -PyObject *__pyx_args, PyObject *__pyx_kwds -#endif -) { - PyObject *__pyx_v___pyx_type = 0; - long __pyx_v___pyx_checksum; - PyObject *__pyx_v___pyx_state = 0; - #if !CYTHON_METH_FASTCALL - CYTHON_UNUSED Py_ssize_t __pyx_nargs; - #endif - CYTHON_UNUSED PyObject *const *__pyx_kwvalues; - PyObject* values[3] = {0,0,0}; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__pyx_unpickle_ImmutableDictBase (wrapper)", 0); - #if !CYTHON_METH_FASTCALL - #if CYTHON_ASSUME_SAFE_MACROS - __pyx_nargs = PyTuple_GET_SIZE(__pyx_args); - #else - __pyx_nargs = PyTuple_Size(__pyx_args); if (unlikely(__pyx_nargs < 0)) return NULL; - #endif - #endif - __pyx_kwvalues = __Pyx_KwValues_FASTCALL(__pyx_args, __pyx_nargs); - { - PyObject **__pyx_pyargnames[] = 
{&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; - if (__pyx_kwds) { - Py_ssize_t kw_args; - switch (__pyx_nargs) { - case 3: values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = __Pyx_NumKwargs_FASTCALL(__pyx_kwds); - switch (__pyx_nargs) { - case 0: - if (likely((values[0] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_type)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[0]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_checksum)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[1]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ImmutableDictBase", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_GetKwValue_FASTCALL(__pyx_kwds, __pyx_kwvalues, __pyx_n_s_pyx_state)) != 0)) { - (void)__Pyx_Arg_NewRef_FASTCALL(values[2]); - kw_args--; - } - else if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - else { - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ImmutableDictBase", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - const Py_ssize_t kwd_pos_args = __pyx_nargs; - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_kwvalues, __pyx_pyargnames, 0, values + 0, kwd_pos_args, "__pyx_unpickle_ImmutableDictBase") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) - } - } else if (unlikely(__pyx_nargs != 3)) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = __Pyx_Arg_FASTCALL(__pyx_args, 0); - values[1] = __Pyx_Arg_FASTCALL(__pyx_args, 1); - values[2] = __Pyx_Arg_FASTCALL(__pyx_args, 2); - } - __pyx_v___pyx_type = values[0]; - __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_v___pyx_state = values[2]; - } - goto __pyx_L6_skip; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ImmutableDictBase", 1, 3, 3, __pyx_nargs); __PYX_ERR(1, 1, __pyx_L3_error) - __pyx_L6_skip:; - goto __pyx_L4_argument_unpacking_done; - __pyx_L3_error:; - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.__pyx_unpickle_ImmutableDictBase", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_4__pyx_unpickle_ImmutableDictBase(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); - - /* function exit code */ - { - Py_ssize_t __pyx_temp; - for (__pyx_temp=0; __pyx_temp < (Py_ssize_t)(sizeof(values)/sizeof(values[0])); ++__pyx_temp) { - __Pyx_Arg_XDECREF_FASTCALL(values[__pyx_temp]); - } - } - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject 
*__pyx_pf_10sqlalchemy_4util_17_immutabledict_cy_4__pyx_unpickle_ImmutableDictBase(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_v___pyx_PickleError = 0; - PyObject *__pyx_v___pyx_result = 0; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - unsigned int __pyx_t_5; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_ImmutableDictBase", 1); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - */ - __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__2, Py_NE)); if (unlikely((__pyx_t_2 < 0))) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_2) { - - /* "(tree fragment)":5 - * cdef object __pyx_result - * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): - * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - * __pyx_result = ImmutableDictBase.__new__(__pyx_type) - */ - __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_s_PickleError); - __Pyx_GIVEREF(__pyx_n_s_PickleError); - if (__Pyx_PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError)) __PYX_ERR(1, 5, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_t_1); - __pyx_v___pyx_PickleError = __pyx_t_1; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "(tree fragment)":6 - * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum # <<<<<<<<<<<<<< - * __pyx_result = ImmutableDictBase.__new__(__pyx_type) - * if __pyx_state is not None: - */ - __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_v___pyx_PickleError, __pyx_t_1, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(1, 6, __pyx_L1_error) - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< - * from 
pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - */ - } - - /* "(tree fragment)":7 - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - * __pyx_result = ImmutableDictBase.__new__(__pyx_type) # <<<<<<<<<<<<<< - * if __pyx_state is not None: - * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase), __pyx_n_s_new); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = NULL; - __pyx_t_5 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_4, __pyx_v___pyx_type}; - __pyx_t_1 = __Pyx_PyObject_FastCall(__pyx_t_3, __pyx_callargs+1-__pyx_t_5, 1+__pyx_t_5); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - } - __pyx_v___pyx_result = __pyx_t_1; - __pyx_t_1 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - * __pyx_result = ImmutableDictBase.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - __pyx_t_2 = (__pyx_v___pyx_state != Py_None); - if (__pyx_t_2) { - - /* "(tree fragment)":9 - * __pyx_result = ImmutableDictBase.__new__(__pyx_type) - * if __pyx_state is not None: - * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< - * return __pyx_result - * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): - */ - if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None) || __Pyx_RaiseUnexpectedTypeError("tuple", __pyx_v___pyx_state))) __PYX_ERR(1, 9, __pyx_L1_error) - __pyx_t_1 = __pyx_f_10sqlalchemy_4util_17_immutabledict_cy___pyx_unpickle_ImmutableDictBase__set_state(((struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "(tree fragment)":8 - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - * __pyx_result = ImmutableDictBase.__new__(__pyx_type) - * if __pyx_state is not None: # <<<<<<<<<<<<<< - * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) - * return __pyx_result - */ - } - - /* "(tree fragment)":10 - * if __pyx_state is not None: - * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) - * return __pyx_result # <<<<<<<<<<<<<< - * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): - * if len(__pyx_state) > 0 and 
hasattr(__pyx_result, '__dict__'): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v___pyx_result); - __pyx_r = __pyx_v___pyx_result; - goto __pyx_L0; - - /* "(tree fragment)":1 - * def __pyx_unpickle_ImmutableDictBase(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.__pyx_unpickle_ImmutableDictBase", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XDECREF(__pyx_v___pyx_PickleError); - __Pyx_XDECREF(__pyx_v___pyx_result); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":11 - * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[0]) - */ - -static PyObject *__pyx_f_10sqlalchemy_4util_17_immutabledict_cy___pyx_unpickle_ImmutableDictBase__set_state(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - Py_ssize_t __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - unsigned int __pyx_t_8; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_unpickle_ImmutableDictBase__set_state", 1); - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): - * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[0]) - */ - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); - __PYX_ERR(1, 12, __pyx_L1_error) - } - __pyx_t_2 = __Pyx_PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(1, 12, __pyx_L1_error) - __pyx_t_3 = (__pyx_t_2 > 0); - if (__pyx_t_3) { - } else { - __pyx_t_1 = __pyx_t_3; - goto __pyx_L4_bool_binop_done; - } - __pyx_t_3 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(1, 12, __pyx_L1_error) - __pyx_t_1 = __pyx_t_3; - __pyx_L4_bool_binop_done:; - if (__pyx_t_1) { - - /* "(tree fragment)":13 - * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): - * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[0]) # <<<<<<<<<<<<<< - */ - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(__pyx_v___pyx_state == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' 
object is not subscriptable"); - __PYX_ERR(1, 13, __pyx_L1_error) - } - __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_7 = NULL; - __pyx_t_8 = 0; - #if CYTHON_UNPACK_METHODS - if (likely(PyMethod_Check(__pyx_t_6))) { - __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); - if (likely(__pyx_t_7)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); - __Pyx_INCREF(__pyx_t_7); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_6, function); - __pyx_t_8 = 1; - } - } - #endif - { - PyObject *__pyx_callargs[2] = {__pyx_t_7, __pyx_t_5}; - __pyx_t_4 = __Pyx_PyObject_FastCall(__pyx_t_6, __pyx_callargs+1-__pyx_t_8, 1+__pyx_t_8); - __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - - /* "(tree fragment)":12 - * return __pyx_result - * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): - * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< - * __pyx_result.__dict__.update(__pyx_state[0]) - */ - } - - /* "(tree fragment)":11 - * __pyx_unpickle_ImmutableDictBase__set_state( __pyx_result, __pyx_state) - * return __pyx_result - * cdef __pyx_unpickle_ImmutableDictBase__set_state(ImmutableDictBase __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< - * if len(__pyx_state) > 0 and hasattr(__pyx_result, '__dict__'): - * __pyx_result.__dict__.update(__pyx_state[0]) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_AddTraceback("sqlalchemy.util._immutabledict_cy.__pyx_unpickle_ImmutableDictBase__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static int __pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase(PyObject *o, visitproc v, void *a) { - int e; - if (!(&PyDict_Type)->tp_traverse); else { e = (&PyDict_Type)->tp_traverse(o,v,a); if (e) return e; } - return 0; -} - -static int __pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase(PyObject *o) { - if (!(&PyDict_Type)->tp_clear); else (&PyDict_Type)->tp_clear(o); - return 0; -} - -static int __pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase(PyObject *o, PyObject *i, PyObject *v) { - if (v) { - return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_5__setitem__(o, i, v); - } - else { - return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_3__delitem__(o, i); - } -} - -static int __pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase(PyObject *o, PyObject *n, PyObject *v) { - if (v) { - return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_7__setattr__(o, n, v); - } - else { - if ((&PyDict_Type)->tp_setattro) - return (&PyDict_Type)->tp_setattro(o, n, v); - return PyObject_GenericSetAttr(o, n, 0); - } -} - -static PyMethodDef __pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase[] = { - {"__class_getitem__", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"popitem", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"setdefault", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_17update, METH_VARARGS|METH_KEYWORDS, 0}, - {"__reduce_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_19__reduce_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__setstate_cython__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_21__setstate_cython__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {0, 0, 0, 0} -}; -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase_slots[] = { - {Py_mp_ass_subscript, (void *)__pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase}, - {Py_tp_setattro, (void *)__pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase}, - {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase}, - {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase}, - {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase}, - {0, 0}, -}; -static PyType_Spec __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase_spec = { - "sqlalchemy.util._immutabledict_cy.ImmutableDictBase", - sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase), - 0, - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, - __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase_slots, -}; -#else - -static PyMappingMethods __pyx_tp_as_mapping_ImmutableDictBase = { - 0, /*mp_length*/ - 0, /*mp_subscript*/ - __pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, /*mp_ass_subscript*/ -}; - -static PyTypeObject __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase = { - PyVarObject_HEAD_INIT(0, 0) - "sqlalchemy.util._immutabledict_cy.""ImmutableDictBase", /*tp_name*/ - sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - 0, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ 
- #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - &__pyx_tp_as_mapping_ImmutableDictBase, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - __pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, /*tp_traverse*/ - __pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - #if !CYTHON_USE_TYPE_SPECS - 0, /*tp_dictoffset*/ - #endif - 0, /*tp_init*/ - 0, /*tp_alloc*/ - 0, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - #if CYTHON_USE_TP_FINALIZE - 0, /*tp_finalize*/ - #else - NULL, /*tp_finalize*/ - #endif - #endif - #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, /*tp_vectorcall*/ - #endif - #if __PYX_NEED_TP_PRINT_SLOT == 1 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030C0000 - 0, /*tp_watched*/ - #endif - #if PY_VERSION_HEX >= 0x030d00A4 - 0, /*tp_versions_used*/ - #endif - #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, /*tp_pypy_flags*/ - #endif -}; -#endif - -static int __pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_immutabledict(PyObject *o, visitproc v, void *a) { - int e; - if (!(&PyDict_Type)->tp_traverse); else { e = (&PyDict_Type)->tp_traverse(o,v,a); if (e) return e; } - return 0; -} - -static int __pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_immutabledict(PyObject *o) { - if (!(&PyDict_Type)->tp_clear); else (&PyDict_Type)->tp_clear(o); - return 0; -} - -static int __pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_immutabledict(PyObject *o, PyObject *i, PyObject *v) { - if (v) { - return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_5__setitem__(o, i, v); - } - else { - return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_3__delitem__(o, i); - } -} - -static int __pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_immutabledict(PyObject *o, PyObject *n, PyObject *v) { - if (v) { - return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_7__setattr__(o, n, v); - } - else { - if ((&PyDict_Type)->tp_setattro) - return (&PyDict_Type)->tp_setattro(o, n, v); - return PyObject_GenericSetAttr(o, n, 0); - } -} - -static CYTHON_INLINE PyObject *__pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_maybe_call_slot(PyTypeObject* type, PyObject *left, PyObject *right ) { - binaryfunc slot; -#if CYTHON_USE_TYPE_SLOTS || PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY - slot = type->tp_as_number ? type->tp_as_number->nb_or : NULL; -#else - slot = (binaryfunc) PyType_GetSlot(type, Py_nb_or); -#endif - return slot ? 
slot(left, right ) : __Pyx_NewRef(Py_NotImplemented); -} -static PyObject *__pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict(PyObject *left, PyObject *right ) { - int maybe_self_is_left, maybe_self_is_right = 0; - maybe_self_is_left = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(left)->tp_as_number && Py_TYPE(left)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) -#endif - || __Pyx_TypeCheck(left, __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); - if (maybe_self_is_left) { - PyObject *res; - res = __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_31__or__(left, right); - if (res != Py_NotImplemented) return res; - Py_DECREF(res); - } - maybe_self_is_right = Py_TYPE(left) == Py_TYPE(right) -#if CYTHON_USE_TYPE_SLOTS - || (Py_TYPE(right)->tp_as_number && Py_TYPE(right)->tp_as_number->nb_or == &__pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) -#endif - || PyType_IsSubtype(Py_TYPE(right), __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict); - if (maybe_self_is_right) { - return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_33__ror__(right, left); - } - return __Pyx_NewRef(Py_NotImplemented); -} - - - -static PyObject *__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__(PyObject *self, CYTHON_UNUSED PyObject *arg) { - return __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__(self); -} - -static PyMethodDef __pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_immutabledict[] = { - {"__class_getitem__", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_1__class_getitem__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"clear", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_9clear, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"pop", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_11pop, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"popitem", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_13popitem, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"setdefault", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_15setdefault, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"update", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_17update, METH_VARARGS|METH_KEYWORDS, 0}, - {"__repr__", (PyCFunction)__pyx_specialmethod___pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__, METH_NOARGS|METH_COEXIST, 0}, - {"union", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_21union, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"merge_with", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_23merge_with, METH_VARARGS|METH_KEYWORDS, 0}, - {"copy", (PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_25copy, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__reduce__", 
(PyCFunction)(void*)(__Pyx_PyCFunction_FastCallWithKeywords)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_27__reduce__, __Pyx_METH_FASTCALL|METH_KEYWORDS, 0}, - {"__ror__", (PyCFunction)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_33__ror__, METH_O|METH_COEXIST, 0}, - {0, 0, 0, 0} -}; -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_slots[] = { - {Py_tp_repr, (void *)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__}, - {Py_nb_or, (void *)__pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, - {Py_nb_inplace_or, (void *)__pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_29__ior__}, - {Py_mp_ass_subscript, (void *)__pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, - {Py_tp_setattro, (void *)__pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, - {Py_tp_doc, (void *)PyDoc_STR("An immutable version of a dict.")}, - {Py_tp_traverse, (void *)__pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, - {Py_tp_clear, (void *)__pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, - {Py_tp_methods, (void *)__pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_immutabledict}, - {0, 0}, -}; -static PyType_Spec __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_spec = { - "sqlalchemy.util._immutabledict_cy.immutabledict", - sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict), - 0, - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, - __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_slots, -}; -#else - -static PyNumberMethods __pyx_tp_as_number_immutabledict = { - 0, /*nb_add*/ - 0, /*nb_subtract*/ - 0, /*nb_multiply*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_divide*/ - #endif - 0, /*nb_remainder*/ - 0, /*nb_divmod*/ - 0, /*nb_power*/ - 0, /*nb_negative*/ - 0, /*nb_positive*/ - 0, /*nb_absolute*/ - 0, /*nb_bool*/ - 0, /*nb_invert*/ - 0, /*nb_lshift*/ - 0, /*nb_rshift*/ - 0, /*nb_and*/ - 0, /*nb_xor*/ - __pyx_nb_or_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*nb_or*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_coerce*/ - #endif - 0, /*nb_int*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_long*/ - #else - 0, /*reserved*/ - #endif - 0, /*nb_float*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_oct*/ - #endif - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_hex*/ - #endif - 0, /*nb_inplace_add*/ - 0, /*nb_inplace_subtract*/ - 0, /*nb_inplace_multiply*/ - #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) - 0, /*nb_inplace_divide*/ - #endif - 0, /*nb_inplace_remainder*/ - 0, /*nb_inplace_power*/ - 0, /*nb_inplace_lshift*/ - 0, /*nb_inplace_rshift*/ - 0, /*nb_inplace_and*/ - 0, /*nb_inplace_xor*/ - __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_29__ior__, /*nb_inplace_or*/ - 0, /*nb_floor_divide*/ - 0, /*nb_true_divide*/ - 0, /*nb_inplace_floor_divide*/ - 0, /*nb_inplace_true_divide*/ - 0, /*nb_index*/ - #if PY_VERSION_HEX >= 0x03050000 - 0, /*nb_matrix_multiply*/ - #endif - #if PY_VERSION_HEX >= 0x03050000 - 0, /*nb_inplace_matrix_multiply*/ - #endif -}; - -static 
PyMappingMethods __pyx_tp_as_mapping_immutabledict = { - 0, /*mp_length*/ - 0, /*mp_subscript*/ - __pyx_mp_ass_subscript_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*mp_ass_subscript*/ -}; - -static PyTypeObject __pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict = { - PyVarObject_HEAD_INIT(0, 0) - "sqlalchemy.util._immutabledict_cy.""immutabledict", /*tp_name*/ - sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - 0, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - __pyx_pw_10sqlalchemy_4util_17_immutabledict_cy_13immutabledict_19__repr__, /*tp_repr*/ - &__pyx_tp_as_number_immutabledict, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - &__pyx_tp_as_mapping_immutabledict, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - __pyx_tp_setattro_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/ - PyDoc_STR("An immutable version of a dict."), /*tp_doc*/ - __pyx_tp_traverse_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*tp_traverse*/ - __pyx_tp_clear_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_10sqlalchemy_4util_17_immutabledict_cy_immutabledict, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - #if !CYTHON_USE_TYPE_SPECS - 0, /*tp_dictoffset*/ - #endif - 0, /*tp_init*/ - 0, /*tp_alloc*/ - 0, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - #if CYTHON_USE_TP_FINALIZE - 0, /*tp_finalize*/ - #else - NULL, /*tp_finalize*/ - #endif - #endif - #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, /*tp_vectorcall*/ - #endif - #if __PYX_NEED_TP_PRINT_SLOT == 1 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030C0000 - 0, /*tp_watched*/ - #endif - #if PY_VERSION_HEX >= 0x030d00A4 - 0, /*tp_versions_used*/ - #endif - #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, /*tp_pypy_flags*/ - #endif -}; -#endif - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif -/* #### Code section: pystring_table ### */ - -static int __Pyx_CreateStringTabAndInitStrings(void) { - __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, - {&__pyx_n_s_Any, __pyx_k_Any, sizeof(__pyx_k_Any), 0, 0, 1, 1}, - {&__pyx_n_s_Dict, __pyx_k_Dict, sizeof(__pyx_k_Dict), 0, 0, 1, 1}, - {&__pyx_n_s_Hashable, __pyx_k_Hashable, sizeof(__pyx_k_Hashable), 0, 0, 1, 
1}, - {&__pyx_n_s_ImmutableDictBase, __pyx_k_ImmutableDictBase, sizeof(__pyx_k_ImmutableDictBase), 0, 0, 1, 1}, - {&__pyx_n_s_ImmutableDictBase___class_getite, __pyx_k_ImmutableDictBase___class_getite, sizeof(__pyx_k_ImmutableDictBase___class_getite), 0, 0, 1, 1}, - {&__pyx_n_s_ImmutableDictBase___reduce_cytho, __pyx_k_ImmutableDictBase___reduce_cytho, sizeof(__pyx_k_ImmutableDictBase___reduce_cytho), 0, 0, 1, 1}, - {&__pyx_n_s_ImmutableDictBase___setstate_cyt, __pyx_k_ImmutableDictBase___setstate_cyt, sizeof(__pyx_k_ImmutableDictBase___setstate_cyt), 0, 0, 1, 1}, - {&__pyx_n_s_ImmutableDictBase_clear, __pyx_k_ImmutableDictBase_clear, sizeof(__pyx_k_ImmutableDictBase_clear), 0, 0, 1, 1}, - {&__pyx_n_s_ImmutableDictBase_pop, __pyx_k_ImmutableDictBase_pop, sizeof(__pyx_k_ImmutableDictBase_pop), 0, 0, 1, 1}, - {&__pyx_n_s_ImmutableDictBase_popitem, __pyx_k_ImmutableDictBase_popitem, sizeof(__pyx_k_ImmutableDictBase_popitem), 0, 0, 1, 1}, - {&__pyx_n_s_ImmutableDictBase_setdefault, __pyx_k_ImmutableDictBase_setdefault, sizeof(__pyx_k_ImmutableDictBase_setdefault), 0, 0, 1, 1}, - {&__pyx_n_s_ImmutableDictBase_update, __pyx_k_ImmutableDictBase_update, sizeof(__pyx_k_ImmutableDictBase_update), 0, 0, 1, 1}, - {&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0}, - {&__pyx_n_s_KT, __pyx_k_KT, sizeof(__pyx_k_KT), 0, 0, 1, 1}, - {&__pyx_n_u_KT, __pyx_k_KT, sizeof(__pyx_k_KT), 0, 1, 0, 1}, - {&__pyx_n_s_Mapping, __pyx_k_Mapping, sizeof(__pyx_k_Mapping), 0, 0, 1, 1}, - {&__pyx_n_s_NoReturn, __pyx_k_NoReturn, sizeof(__pyx_k_NoReturn), 0, 0, 1, 1}, - {&__pyx_n_s_Optional, __pyx_k_Optional, sizeof(__pyx_k_Optional), 0, 0, 1, 1}, - {&__pyx_kp_s_Optional_Any, __pyx_k_Optional_Any, sizeof(__pyx_k_Optional_Any), 0, 0, 1, 0}, - {&__pyx_kp_s_Optional_Mapping__KT__VT, __pyx_k_Optional_Mapping__KT__VT, sizeof(__pyx_k_Optional_Mapping__KT__VT), 0, 0, 1, 0}, - {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_ReadOnlyContainer, __pyx_k_ReadOnlyContainer, sizeof(__pyx_k_ReadOnlyContainer), 0, 0, 1, 1}, - {&__pyx_n_s_ReadOnlyContainer___delitem, __pyx_k_ReadOnlyContainer___delitem, sizeof(__pyx_k_ReadOnlyContainer___delitem), 0, 0, 1, 1}, - {&__pyx_n_s_ReadOnlyContainer___setattr, __pyx_k_ReadOnlyContainer___setattr, sizeof(__pyx_k_ReadOnlyContainer___setattr), 0, 0, 1, 1}, - {&__pyx_n_s_ReadOnlyContainer___setitem, __pyx_k_ReadOnlyContainer___setitem, sizeof(__pyx_k_ReadOnlyContainer___setitem), 0, 0, 1, 1}, - {&__pyx_n_s_ReadOnlyContainer__readonly, __pyx_k_ReadOnlyContainer__readonly, sizeof(__pyx_k_ReadOnlyContainer__readonly), 0, 0, 1, 1}, - {&__pyx_n_s_Self, __pyx_k_Self, sizeof(__pyx_k_Self), 0, 0, 1, 1}, - {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, - {&__pyx_n_s_TypeVar, __pyx_k_TypeVar, sizeof(__pyx_k_TypeVar), 0, 0, 1, 1}, - {&__pyx_n_s_VT, __pyx_k_VT, sizeof(__pyx_k_VT), 0, 0, 1, 1}, - {&__pyx_n_u_VT, __pyx_k_VT, sizeof(__pyx_k_VT), 0, 1, 0, 1}, - {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0}, - {&__pyx_n_s__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 0, 1, 1}, - {&__pyx_n_s_arg, __pyx_k_arg, sizeof(__pyx_k_arg), 0, 0, 1, 1}, - {&__pyx_n_s_asyncio_coroutines, __pyx_k_asyncio_coroutines, sizeof(__pyx_k_asyncio_coroutines), 0, 0, 1, 1}, - {&__pyx_n_s_bool, __pyx_k_bool, sizeof(__pyx_k_bool), 0, 0, 1, 1}, - {&__pyx_n_s_bound, __pyx_k_bound, sizeof(__pyx_k_bound), 0, 0, 1, 1}, - {&__pyx_n_s_class, __pyx_k_class, 
sizeof(__pyx_k_class), 0, 0, 1, 1}, - {&__pyx_n_s_class_getitem, __pyx_k_class_getitem, sizeof(__pyx_k_class_getitem), 0, 0, 1, 1}, - {&__pyx_n_s_clear, __pyx_k_clear, sizeof(__pyx_k_clear), 0, 0, 1, 1}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_cls, __pyx_k_cls, sizeof(__pyx_k_cls), 0, 0, 1, 1}, - {&__pyx_n_s_copy, __pyx_k_copy, sizeof(__pyx_k_copy), 0, 0, 1, 1}, - {&__pyx_n_s_d, __pyx_k_d, sizeof(__pyx_k_d), 0, 0, 1, 1}, - {&__pyx_n_s_default, __pyx_k_default, sizeof(__pyx_k_default), 0, 0, 1, 1}, - {&__pyx_n_s_delitem, __pyx_k_delitem, sizeof(__pyx_k_delitem), 0, 0, 1, 1}, - {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, - {&__pyx_n_s_dict_2, __pyx_k_dict_2, sizeof(__pyx_k_dict_2), 0, 0, 1, 1}, - {&__pyx_n_s_dicts, __pyx_k_dicts, sizeof(__pyx_k_dicts), 0, 0, 1, 1}, - {&__pyx_kp_u_disable, __pyx_k_disable, sizeof(__pyx_k_disable), 0, 1, 0, 0}, - {&__pyx_n_s_doc, __pyx_k_doc, sizeof(__pyx_k_doc), 0, 0, 1, 1}, - {&__pyx_kp_u_enable, __pyx_k_enable, sizeof(__pyx_k_enable), 0, 1, 0, 0}, - {&__pyx_kp_u_gc, __pyx_k_gc, sizeof(__pyx_k_gc), 0, 1, 0, 0}, - {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, - {&__pyx_n_s_immutable_fn, __pyx_k_immutable_fn, sizeof(__pyx_k_immutable_fn), 0, 0, 1, 1}, - {&__pyx_kp_u_immutabledict, __pyx_k_immutabledict, sizeof(__pyx_k_immutabledict), 0, 1, 0, 0}, - {&__pyx_n_s_immutabledict_2, __pyx_k_immutabledict_2, sizeof(__pyx_k_immutabledict_2), 0, 0, 1, 1}, - {&__pyx_kp_s_immutabledict__KT__VT, __pyx_k_immutabledict__KT__VT, sizeof(__pyx_k_immutabledict__KT__VT), 0, 0, 1, 0}, - {&__pyx_n_s_immutabledict___class_getitem, __pyx_k_immutabledict___class_getitem, sizeof(__pyx_k_immutabledict___class_getitem), 0, 0, 1, 1}, - {&__pyx_n_s_immutabledict___reduce, __pyx_k_immutabledict___reduce, sizeof(__pyx_k_immutabledict___reduce), 0, 0, 1, 1}, - {&__pyx_n_s_immutabledict_clear, __pyx_k_immutabledict_clear, sizeof(__pyx_k_immutabledict_clear), 0, 0, 1, 1}, - {&__pyx_n_s_immutabledict_copy, __pyx_k_immutabledict_copy, sizeof(__pyx_k_immutabledict_copy), 0, 0, 1, 1}, - {&__pyx_n_s_immutabledict_merge_with, __pyx_k_immutabledict_merge_with, sizeof(__pyx_k_immutabledict_merge_with), 0, 0, 1, 1}, - {&__pyx_n_s_immutabledict_pop, __pyx_k_immutabledict_pop, sizeof(__pyx_k_immutabledict_pop), 0, 0, 1, 1}, - {&__pyx_n_s_immutabledict_popitem, __pyx_k_immutabledict_popitem, sizeof(__pyx_k_immutabledict_popitem), 0, 0, 1, 1}, - {&__pyx_n_s_immutabledict_setdefault, __pyx_k_immutabledict_setdefault, sizeof(__pyx_k_immutabledict_setdefault), 0, 0, 1, 1}, - {&__pyx_n_s_immutabledict_union, __pyx_k_immutabledict_union, sizeof(__pyx_k_immutabledict_union), 0, 0, 1, 1}, - {&__pyx_n_s_immutabledict_update, __pyx_k_immutabledict_update, sizeof(__pyx_k_immutabledict_update), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_n_s_init_subclass, __pyx_k_init_subclass, sizeof(__pyx_k_init_subclass), 0, 0, 1, 1}, - {&__pyx_n_s_is_compiled, __pyx_k_is_compiled, sizeof(__pyx_k_is_compiled), 0, 0, 1, 1}, - {&__pyx_n_s_is_coroutine, __pyx_k_is_coroutine, sizeof(__pyx_k_is_coroutine), 0, 0, 1, 1}, - {&__pyx_kp_u_isenabled, __pyx_k_isenabled, sizeof(__pyx_k_isenabled), 0, 1, 0, 0}, - {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, - {&__pyx_n_s_kw, __pyx_k_kw, sizeof(__pyx_k_kw), 0, 0, 1, 1}, - {&__pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_k_lib_sqlalchemy_util__immutabledi, 
sizeof(__pyx_k_lib_sqlalchemy_util__immutabledi), 0, 0, 1, 0}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_s_merge_with, __pyx_k_merge_with, sizeof(__pyx_k_merge_with), 0, 0, 1, 1}, - {&__pyx_n_s_metaclass, __pyx_k_metaclass, sizeof(__pyx_k_metaclass), 0, 0, 1, 1}, - {&__pyx_n_s_module, __pyx_k_module, sizeof(__pyx_k_module), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, - {&__pyx_n_s_obj, __pyx_k_obj, sizeof(__pyx_k_obj), 0, 0, 1, 1}, - {&__pyx_n_s_object, __pyx_k_object, sizeof(__pyx_k_object), 0, 0, 1, 1}, - {&__pyx_kp_u_object_is_immutable, __pyx_k_object_is_immutable, sizeof(__pyx_k_object_is_immutable), 0, 1, 0, 0}, - {&__pyx_kp_u_object_is_immutable_and_or_read, __pyx_k_object_is_immutable_and_or_read, sizeof(__pyx_k_object_is_immutable_and_or_read), 0, 1, 0, 0}, - {&__pyx_n_s_or, __pyx_k_or, sizeof(__pyx_k_or), 0, 0, 1, 1}, - {&__pyx_n_s_other, __pyx_k_other, sizeof(__pyx_k_other), 0, 0, 1, 1}, - {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, - {&__pyx_n_s_pop, __pyx_k_pop, sizeof(__pyx_k_pop), 0, 0, 1, 1}, - {&__pyx_n_s_popitem, __pyx_k_popitem, sizeof(__pyx_k_popitem), 0, 0, 1, 1}, - {&__pyx_n_s_prepare, __pyx_k_prepare, sizeof(__pyx_k_prepare), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_unpickle_ImmutableDictBase, __pyx_k_pyx_unpickle_ImmutableDictBase, sizeof(__pyx_k_pyx_unpickle_ImmutableDictBase), 0, 0, 1, 1}, - {&__pyx_n_s_qualname, __pyx_k_qualname, sizeof(__pyx_k_qualname), 0, 0, 1, 1}, - {&__pyx_n_s_readonly, __pyx_k_readonly, sizeof(__pyx_k_readonly), 0, 0, 1, 1}, - {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, - {&__pyx_n_s_repr, __pyx_k_repr, sizeof(__pyx_k_repr), 0, 0, 1, 1}, - {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, - {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, - {&__pyx_n_s_ror, __pyx_k_ror, sizeof(__pyx_k_ror), 0, 0, 1, 1}, - {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, - {&__pyx_n_s_set_name, __pyx_k_set_name, sizeof(__pyx_k_set_name), 0, 0, 1, 1}, - {&__pyx_n_s_setattr, __pyx_k_setattr, sizeof(__pyx_k_setattr), 0, 0, 1, 1}, - {&__pyx_n_s_setdefault, __pyx_k_setdefault, sizeof(__pyx_k_setdefault), 0, 0, 1, 1}, - {&__pyx_n_s_setitem, __pyx_k_setitem, sizeof(__pyx_k_setitem), 0, 0, 1, 1}, - {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, - {&__pyx_n_s_slots, __pyx_k_slots, sizeof(__pyx_k_slots), 0, 0, 1, 1}, - {&__pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_k_sqlalchemy_util__immutabledict_c, sizeof(__pyx_k_sqlalchemy_util__immutabledict_c), 0, 0, 1, 1}, - {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, - {&__pyx_kp_s_stringsource, 
__pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, - {&__pyx_n_s_super, __pyx_k_super, sizeof(__pyx_k_super), 0, 0, 1, 1}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_kp_s_type_Self, __pyx_k_type_Self, sizeof(__pyx_k_type_Self), 0, 0, 1, 0}, - {&__pyx_n_s_typing, __pyx_k_typing, sizeof(__pyx_k_typing), 0, 0, 1, 1}, - {&__pyx_n_s_union, __pyx_k_union, sizeof(__pyx_k_union), 0, 0, 1, 1}, - {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, - {&__pyx_n_s_use_setstate, __pyx_k_use_setstate, sizeof(__pyx_k_use_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} - }; - return __Pyx_InitStrings(__pyx_string_tab); -} -/* #### Code section: cached_builtins ### */ -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 45, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: cached_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "(tree fragment)":4 - * cdef object __pyx_PickleError - * cdef object __pyx_result - * if __pyx_checksum not in (0xe3b0c44, 0xda39a3e, 0xd41d8cd): # <<<<<<<<<<<<<< - * from pickle import PickleError as __pyx_PickleError - * raise __pyx_PickleError, "Incompatible checksums (0x%x vs (0xe3b0c44, 0xda39a3e, 0xd41d8cd) = ())" % __pyx_checksum - */ - __pyx_tuple__2 = PyTuple_Pack(3, __pyx_int_238750788, __pyx_int_228825662, __pyx_int_222419149); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__2); - __Pyx_GIVEREF(__pyx_tuple__2); - - /* "sqlalchemy/util/_immutabledict_cy.py":31 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled # type: ignore[no-any-return] - */ - __pyx_codeobj__5 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_is_compiled, 31, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__5)) __PYX_ERR(0, 31, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":44 - * - * - * def _immutable_fn(obj: object) -> NoReturn: # <<<<<<<<<<<<<< - * raise TypeError(f"{obj.__class__.__name__} object is immutable") - * - */ - __pyx_tuple__6 = PyTuple_Pack(1, __pyx_n_s_obj); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(0, 44, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__6); - __Pyx_GIVEREF(__pyx_tuple__6); - __pyx_codeobj__7 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__6, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_immutable_fn, 44, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__7)) __PYX_ERR(0, 44, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":51 - * __slots__ = () - * - * def _readonly(self) -> NoReturn: # <<<<<<<<<<<<<< - * raise TypeError( - * f"{self.__class__.__name__} object is immutable and/or readonly" - */ - __pyx_tuple__8 = PyTuple_Pack(1, __pyx_n_s_self); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(0, 51, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__8); - 
__Pyx_GIVEREF(__pyx_tuple__8); - __pyx_codeobj__9 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_readonly, 51, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__9)) __PYX_ERR(0, 51, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":56 - * ) - * - * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - __pyx_tuple__10 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_key); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(0, 56, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__10); - __Pyx_GIVEREF(__pyx_tuple__10); - __pyx_codeobj__11 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__10, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_delitem, 56, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__11)) __PYX_ERR(0, 56, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":59 - * self._readonly() - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - __pyx_tuple__12 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_key, __pyx_n_s_value); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 59, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__12); - __Pyx_GIVEREF(__pyx_tuple__12); - __pyx_codeobj__13 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__12, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_setitem, 59, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__13)) __PYX_ERR(0, 59, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":62 - * self._readonly() - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__12, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_setattr, 62, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(0, 62, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":66 - * - * - * _KT = TypeVar("_KT", bound=Hashable) # <<<<<<<<<<<<<< - * _VT = TypeVar("_VT", bound=Any) - * - */ - __pyx_tuple__15 = PyTuple_Pack(1, __pyx_n_u_KT); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__15); - __Pyx_GIVEREF(__pyx_tuple__15); - - /* "sqlalchemy/util/_immutabledict_cy.py":67 - * - * _KT = TypeVar("_KT", bound=Hashable) - * _VT = TypeVar("_VT", bound=Any) # <<<<<<<<<<<<<< - * - * - */ - __pyx_tuple__16 = PyTuple_Pack(1, __pyx_n_u_VT); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__16); - __Pyx_GIVEREF(__pyx_tuple__16); - - /* "sqlalchemy/util/_immutabledict_cy.py":74 - * # NOTE: this method is required in 3.9 and speeds up the use case - * # ImmutableDictBase[str,int](a_dict) significantly - * @classmethod # <<<<<<<<<<<<<< - * def __class_getitem__( # type: ignore[override] - * cls, key: Any - */ - __pyx_tuple__17 = PyTuple_Pack(2, __pyx_n_s_cls, __pyx_n_s_key); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__17); - 
__Pyx_GIVEREF(__pyx_tuple__17); - __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_class_getitem, 74, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(0, 74, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":89 - * _immutable_fn(self) - * - * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_codeobj__19 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_clear, 89, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__19)) __PYX_ERR(0, 89, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":92 - * _immutable_fn(self) - * - * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_tuple__20 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_key, __pyx_n_s_default); if (unlikely(!__pyx_tuple__20)) __PYX_ERR(0, 92, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__20); - __Pyx_GIVEREF(__pyx_tuple__20); - __pyx_codeobj__21 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_pop, 92, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__21)) __PYX_ERR(0, 92, __pyx_L1_error) - __pyx_tuple__22 = PyTuple_Pack(1, Py_None); if (unlikely(!__pyx_tuple__22)) __PYX_ERR(0, 92, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__22); - __Pyx_GIVEREF(__pyx_tuple__22); - - /* "sqlalchemy/util/_immutabledict_cy.py":95 - * _immutable_fn(self) - * - * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_codeobj__23 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_popitem, 95, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__23)) __PYX_ERR(0, 95, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":98 - * _immutable_fn(self) - * - * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_codeobj__24 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_setdefault, 98, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__24)) __PYX_ERR(0, 98, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":101 - * _immutable_fn(self) - * - * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_tuple__25 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_arg, __pyx_n_s_kw); if (unlikely(!__pyx_tuple__25)) __PYX_ERR(0, 101, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__25); - __Pyx_GIVEREF(__pyx_tuple__25); - __pyx_codeobj__26 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS|CO_VARKEYWORDS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__25, 
__pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_update, 101, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__26)) __PYX_ERR(0, 101, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * cdef tuple state - * cdef object _dict - */ - __pyx_tuple__27 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_state, __pyx_n_s_dict_2, __pyx_n_s_use_setstate); if (unlikely(!__pyx_tuple__27)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__27); - __Pyx_GIVEREF(__pyx_tuple__27); - __pyx_codeobj__28 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__27, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_reduce_cython, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__28)) __PYX_ERR(1, 1, __pyx_L1_error) - - /* "(tree fragment)":16 - * else: - * return __pyx_unpickle_ImmutableDictBase, (type(self), 0xe3b0c44, state) - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * __pyx_unpickle_ImmutableDictBase__set_state(self, __pyx_state) - */ - __pyx_tuple__29 = PyTuple_Pack(2, __pyx_n_s_self, __pyx_n_s_pyx_state); if (unlikely(!__pyx_tuple__29)) __PYX_ERR(1, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__29); - __Pyx_GIVEREF(__pyx_tuple__29); - __pyx_codeobj__30 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__29, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_setstate_cython, 16, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__30)) __PYX_ERR(1, 16, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":113 - * - * # ImmutableDictBase start - * @classmethod # <<<<<<<<<<<<<< - * def __class_getitem__( # type: ignore[override] - * cls, key: Any - */ - __pyx_codeobj__31 = (PyObject*)__Pyx_PyCode_New(2, 0, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_class_getitem, 113, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__31)) __PYX_ERR(0, 113, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":128 - * _immutable_fn(self) - * - * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_codeobj__32 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_clear, 128, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__32)) __PYX_ERR(0, 128, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":131 - * _immutable_fn(self) - * - * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_codeobj__33 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_pop, 131, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__33)) __PYX_ERR(0, 131, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":134 - * _immutable_fn(self) - * - * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_codeobj__34 = 
(PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_popitem, 134, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__34)) __PYX_ERR(0, 134, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":137 - * _immutable_fn(self) - * - * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_codeobj__35 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_setdefault, 137, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__35)) __PYX_ERR(0, 137, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":140 - * _immutable_fn(self) - * - * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_codeobj__36 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS|CO_VARKEYWORDS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__25, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_update, 140, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__36)) __PYX_ERR(0, 140, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":148 - * return f"immutabledict({dict.__repr__(self)})" - * - * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< - * def union( - * self, other: Optional[Mapping[_KT, _VT]] = None, / - */ - __pyx_tuple__37 = PyTuple_Pack(3, __pyx_n_s_self, __pyx_n_s_other, __pyx_n_s_result); if (unlikely(!__pyx_tuple__37)) __PYX_ERR(0, 148, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__37); - __Pyx_GIVEREF(__pyx_tuple__37); - __pyx_codeobj__38 = (PyObject*)__Pyx_PyCode_New(2, 2, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__37, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_union, 148, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__38)) __PYX_ERR(0, 148, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":164 - * return result - * - * @cython.annotation_typing(False) # avoid cython crash from generic return # <<<<<<<<<<<<<< - * def merge_with( - * self, *dicts: Optional[Mapping[_KT, _VT]] - */ - __pyx_tuple__39 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_dicts, __pyx_n_s_result, __pyx_n_s_d); if (unlikely(!__pyx_tuple__39)) __PYX_ERR(0, 164, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__39); - __Pyx_GIVEREF(__pyx_tuple__39); - __pyx_codeobj__40 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS|CO_VARARGS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__39, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_merge_with, 164, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__40)) __PYX_ERR(0, 164, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":186 - * return self if result is None else result - * - * def copy(self) -> Self: # <<<<<<<<<<<<<< - * return self - * - */ - __pyx_codeobj__41 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, 
__pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_copy, 186, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__41)) __PYX_ERR(0, 186, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":189 - * return self - * - * def __reduce__(self) -> Any: # <<<<<<<<<<<<<< - * return immutabledict, (dict(self),) - * - */ - __pyx_codeobj__42 = (PyObject*)__Pyx_PyCode_New(1, 0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_lib_sqlalchemy_util__immutabledi, __pyx_n_s_reduce, 189, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__42)) __PYX_ERR(0, 189, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __pyx_unpickle_ImmutableDictBase(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< - * cdef object __pyx_PickleError - * cdef object __pyx_result - */ - __pyx_tuple__43 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__43)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__43); - __Pyx_GIVEREF(__pyx_tuple__43); - __pyx_codeobj__44 = (PyObject*)__Pyx_PyCode_New(3, 0, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__43, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_ImmutableDictBase, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__44)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} -/* #### Code section: init_constants ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitConstants(void) { - if (__Pyx_CreateStringTabAndInitStrings() < 0) __PYX_ERR(0, 1, __pyx_L1_error); - __pyx_int_222419149 = PyInt_FromLong(222419149L); if (unlikely(!__pyx_int_222419149)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_228825662 = PyInt_FromLong(228825662L); if (unlikely(!__pyx_int_228825662)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_int_238750788 = PyInt_FromLong(238750788L); if (unlikely(!__pyx_int_238750788)) __PYX_ERR(0, 1, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} -/* #### Code section: init_globals ### */ - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - return 0; -} -/* #### Code section: init_module ### */ - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - 
__Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - #if CYTHON_USE_TYPE_SPECS - __pyx_t_1 = PyTuple_Pack(1, (PyObject *)(&PyDict_Type)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 71, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase_spec, __pyx_t_1); - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - if (unlikely(!__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase)) __PYX_ERR(0, 71, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase_spec, __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase) < 0) __PYX_ERR(0, 71, __pyx_L1_error) - #else - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase = &__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase; - #endif - if (sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase) != sizeof(PyDictObject)) { - if (__Pyx_validate_extern_base((&PyDict_Type)) < 0) __PYX_ERR(0, 71, __pyx_L1_error) - } - #if !CYTHON_COMPILING_IN_LIMITED_API - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_dealloc = (&PyDict_Type)->tp_dealloc; - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_base = (&PyDict_Type); - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_new = (&PyDict_Type)->tp_new; - #endif - #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase) < 0) __PYX_ERR(0, 71, __pyx_L1_error) - #endif - #if PY_MAJOR_VERSION < 3 - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_print = 0; - #endif - #if !CYTHON_COMPILING_IN_LIMITED_API - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_dictoffset && __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_getattro == PyObject_GenericGetAttr)) { - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase->tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_ImmutableDictBase, (PyObject *) __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase) < 0) __PYX_ERR(0, 71, __pyx_L1_error) - #if !CYTHON_COMPILING_IN_LIMITED_API - if (__Pyx_setup_reduce((PyObject *) __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase) < 0) __PYX_ERR(0, 71, __pyx_L1_error) - #endif - #if CYTHON_USE_TYPE_SPECS - __pyx_t_1 = PyTuple_Pack(1, (PyObject *)(&PyDict_Type)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 109, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict = (PyTypeObject *) __Pyx_PyType_FromModuleAndSpec(__pyx_m, &__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_spec, __pyx_t_1); - __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; - if 
(unlikely(!__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict)) __PYX_ERR(0, 109, __pyx_L1_error) - if (__Pyx_fix_up_extension_type_from_spec(&__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict_spec, __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) < 0) __PYX_ERR(0, 109, __pyx_L1_error) - #else - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict = &__pyx_type_10sqlalchemy_4util_17_immutabledict_cy_immutabledict; - #endif - if (sizeof(struct __pyx_obj_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) != sizeof(PyDictObject)) { - if (__Pyx_validate_extern_base((&PyDict_Type)) < 0) __PYX_ERR(0, 109, __pyx_L1_error) - } - #if !CYTHON_COMPILING_IN_LIMITED_API - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_dealloc = (&PyDict_Type)->tp_dealloc; - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_base = (&PyDict_Type); - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_new = (&PyDict_Type)->tp_new; - #endif - #if !CYTHON_USE_TYPE_SPECS - if (__Pyx_PyType_Ready(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) < 0) __PYX_ERR(0, 109, __pyx_L1_error) - #endif - #if PY_MAJOR_VERSION < 3 - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_print = 0; - #endif - #if !CYTHON_COMPILING_IN_LIMITED_API - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_dictoffset && __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_getattro == PyObject_GenericGetAttr)) { - __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict->tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - #endif - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_immutabledict_2, (PyObject *) __pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_immutabledict) < 0) __PYX_ERR(0, 109, __pyx_L1_error) - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_type_import_code(void) { - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); - /*--- Type import code ---*/ - __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 9, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_ptype_7cpython_4type_type = __Pyx_ImportType_3_0_11(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", - #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 - sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_11(PyTypeObject), - #elif CYTHON_COMPILING_IN_LIMITED_API - sizeof(PyTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_11(PyTypeObject), - #else - sizeof(PyHeapTypeObject), __PYX_GET_STRUCT_ALIGNMENT_3_0_11(PyHeapTypeObject), - #endif - __Pyx_ImportType_CheckSize_Warn_3_0_11); if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(2, 9, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_variable_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); - /*--- Variable import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int 
__Pyx_modinit_function_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); - /*--- Function import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ -static int __pyx_pymod_exec__immutabledict_cy(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec__immutabledict_cy}, - {0, NULL} -}; -#endif - -#ifdef __cplusplus -namespace { - struct PyModuleDef __pyx_moduledef = - #else - static struct PyModuleDef __pyx_moduledef = - #endif - { - PyModuleDef_HEAD_INIT, - "_immutabledict_cy", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #elif CYTHON_USE_MODULE_STATE - sizeof(__pyx_mstate), /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - #if CYTHON_USE_MODULE_STATE - __pyx_m_traverse, /* m_traverse */ - __pyx_m_clear, /* m_clear */ - NULL /* m_free */ - #else - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ - #endif - }; - #ifdef __cplusplus -} /* anonymous namespace */ -#endif -#endif - -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else -#define __Pyx_PyMODINIT_FUNC void -#endif -#else -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else -#define __Pyx_PyMODINIT_FUNC PyObject * -#endif -#endif - - -#if PY_MAJOR_VERSION < 3 -__Pyx_PyMODINIT_FUNC init_immutabledict_cy(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC init_immutabledict_cy(void) -#else -__Pyx_PyMODINIT_FUNC PyInit__immutabledict_cy(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC PyInit__immutabledict_cy(void) -#if CYTHON_PEP489_MULTI_PHASE_INIT -{ - return PyModuleDef_Init(&__pyx_moduledef); -} -static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { - #if PY_VERSION_HEX >= 0x030700A1 - static PY_INT64_T main_interpreter_id = -1; - PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); - if (main_interpreter_id == -1) { - main_interpreter_id = current_id; - return (unlikely(current_id == -1)) ? 
-1 : 0; - } else if (unlikely(main_interpreter_id != current_id)) - #else - static PyInterpreterState *main_interpreter = NULL; - PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; - if (!main_interpreter) { - main_interpreter = current_interpreter; - } else if (unlikely(main_interpreter != current_interpreter)) - #endif - { - PyErr_SetString( - PyExc_ImportError, - "Interpreter change detected - this module can only be loaded into one interpreter per process."); - return -1; - } - return 0; -} -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *module, const char* from_name, const char* to_name, int allow_none) -#else -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) -#endif -{ - PyObject *value = PyObject_GetAttrString(spec, from_name); - int result = 0; - if (likely(value)) { - if (allow_none || value != Py_None) { -#if CYTHON_COMPILING_IN_LIMITED_API - result = PyModule_AddObject(module, to_name, value); -#else - result = PyDict_SetItemString(moddict, to_name, value); -#endif - } - Py_DECREF(value); - } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - } else { - result = -1; - } - return result; -} -static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def) { - PyObject *module = NULL, *moddict, *modname; - CYTHON_UNUSED_VAR(def); - if (__Pyx_check_single_interpreter()) - return NULL; - if (__pyx_m) - return __Pyx_NewRef(__pyx_m); - modname = PyObject_GetAttrString(spec, "name"); - if (unlikely(!modname)) goto bad; - module = PyModule_NewObject(modname); - Py_DECREF(modname); - if (unlikely(!module)) goto bad; -#if CYTHON_COMPILING_IN_LIMITED_API - moddict = module; -#else - moddict = PyModule_GetDict(module); - if (unlikely(!moddict)) goto bad; -#endif - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; - return module; -bad: - Py_XDECREF(module); - return NULL; -} - - -static CYTHON_SMALL_CODE int __pyx_pymod_exec__immutabledict_cy(PyObject *__pyx_pyinit_module) -#endif -#endif -{ - int stringtab_initialized = 0; - #if CYTHON_USE_MODULE_STATE - int pystate_addmodule_run = 0; - #endif - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - #if CYTHON_PEP489_MULTI_PHASE_INIT - if (__pyx_m) { - if (__pyx_m == __pyx_pyinit_module) return 0; - PyErr_SetString(PyExc_RuntimeError, "Module '_immutabledict_cy' has already been imported. 
Re-initialisation is not supported."); - return -1; - } - #elif PY_MAJOR_VERSION >= 3 - if (__pyx_m) return __Pyx_NewRef(__pyx_m); - #endif - /*--- Module creation code ---*/ - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_m = __pyx_pyinit_module; - Py_INCREF(__pyx_m); - #else - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4("_immutabledict_cy", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #elif CYTHON_USE_MODULE_STATE - __pyx_t_1 = PyModule_Create(&__pyx_moduledef); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) - { - int add_module_result = PyState_AddModule(__pyx_t_1, &__pyx_moduledef); - __pyx_t_1 = 0; /* transfer ownership from __pyx_t_1 to "_immutabledict_cy" pseudovariable */ - if (unlikely((add_module_result < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - pystate_addmodule_run = 1; - } - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #endif - CYTHON_UNUSED_VAR(__pyx_t_1); - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) - Py_INCREF(__pyx_d); - __pyx_b = __Pyx_PyImport_AddModuleRef(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_cython_runtime = __Pyx_PyImport_AddModuleRef((const char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if CYTHON_REFNANNY -__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); -if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); -} -#endif - __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__immutabledict_cy(void)", 0); - if (__Pyx_check_binary_version(__PYX_LIMITED_VERSION_HEX, __Pyx_get_runtime_version(), CYTHON_COMPILING_IN_LIMITED_API) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pxy_PyFrame_Initialize_Offsets - __Pxy_PyFrame_Initialize_Offsets(); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) - #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Generator_USED - if (__pyx_Generator_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init(__pyx_m) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - PyEval_InitThreads(); - #endif - /*--- Initialize various global constants etc. 
---*/ - if (__Pyx_InitConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - stringtab_initialized = 1; - if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) - if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - if (__pyx_module_is_main_sqlalchemy__util___immutabledict_cy) { - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) - } - #if PY_MAJOR_VERSION >= 3 - { - PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) - if (!PyDict_GetItemString(modules, "sqlalchemy.util._immutabledict_cy")) { - if (unlikely((PyDict_SetItemString(modules, "sqlalchemy.util._immutabledict_cy", __pyx_m) < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - } - } - #endif - /*--- Builtin init code ---*/ - if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Constants init code ---*/ - if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - /*--- Global type/function init code ---*/ - (void)__Pyx_modinit_global_init_code(); - (void)__Pyx_modinit_variable_export_code(); - (void)__Pyx_modinit_function_export_code(); - if (unlikely((__Pyx_modinit_type_init_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - if (unlikely((__Pyx_modinit_type_import_code() < 0))) __PYX_ERR(0, 1, __pyx_L1_error) - (void)__Pyx_modinit_variable_import_code(); - (void)__Pyx_modinit_function_import_code(); - /*--- Execution code ---*/ - #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) - #endif - - /* "sqlalchemy/util/_immutabledict_cy.py":10 - * from __future__ import annotations - * - * from typing import Any # <<<<<<<<<<<<<< - * from typing import Dict - * from typing import Hashable - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Any); - __Pyx_GIVEREF(__pyx_n_s_Any); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Any)) __PYX_ERR(0, 10, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Any); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Any, __pyx_t_2) < 0) __PYX_ERR(0, 10, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":11 - * - * from typing import Any - * from typing import Dict # <<<<<<<<<<<<<< - * from typing import Hashable - * from typing import Mapping - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Dict); - __Pyx_GIVEREF(__pyx_n_s_Dict); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Dict)) __PYX_ERR(0, 11, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Dict); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Dict, 
__pyx_t_3) < 0) __PYX_ERR(0, 11, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":12 - * from typing import Any - * from typing import Dict - * from typing import Hashable # <<<<<<<<<<<<<< - * from typing import Mapping - * from typing import NoReturn - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_Hashable); - __Pyx_GIVEREF(__pyx_n_s_Hashable); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Hashable)) __PYX_ERR(0, 12, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_Hashable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Hashable, __pyx_t_2) < 0) __PYX_ERR(0, 12, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":13 - * from typing import Dict - * from typing import Hashable - * from typing import Mapping # <<<<<<<<<<<<<< - * from typing import NoReturn - * from typing import Optional - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Mapping); - __Pyx_GIVEREF(__pyx_n_s_Mapping); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Mapping)) __PYX_ERR(0, 13, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Mapping, __pyx_t_3) < 0) __PYX_ERR(0, 13, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":14 - * from typing import Hashable - * from typing import Mapping - * from typing import NoReturn # <<<<<<<<<<<<<< - * from typing import Optional - * from typing import TypeVar - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_NoReturn); - __Pyx_GIVEREF(__pyx_n_s_NoReturn); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_NoReturn)) __PYX_ERR(0, 14, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_NoReturn); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_NoReturn, __pyx_t_2) < 0) __PYX_ERR(0, 14, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":15 - * from typing import Mapping - * from typing import NoReturn - * from typing import Optional # <<<<<<<<<<<<<< - * from typing import TypeVar - * - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - 
__Pyx_INCREF(__pyx_n_s_Optional); - __Pyx_GIVEREF(__pyx_n_s_Optional); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Optional)) __PYX_ERR(0, 15, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Optional); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Optional, __pyx_t_3) < 0) __PYX_ERR(0, 15, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":16 - * from typing import NoReturn - * from typing import Optional - * from typing import TypeVar # <<<<<<<<<<<<<< - * - * from .typing import Self - */ - __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_INCREF(__pyx_n_s_TypeVar); - __Pyx_GIVEREF(__pyx_n_s_TypeVar); - if (__Pyx_PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_TypeVar)) __PYX_ERR(0, 16, __pyx_L1_error); - __pyx_t_3 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_TypeVar); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_TypeVar, __pyx_t_2) < 0) __PYX_ERR(0, 16, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":18 - * from typing import TypeVar - * - * from .typing import Self # <<<<<<<<<<<<<< - * - * # START GENERATED CYTHON IMPORT - */ - __pyx_t_3 = PyList_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_INCREF(__pyx_n_s_Self); - __Pyx_GIVEREF(__pyx_n_s_Self); - if (__Pyx_PyList_SET_ITEM(__pyx_t_3, 0, __pyx_n_s_Self)) __PYX_ERR(0, 18, __pyx_L1_error); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_typing, __pyx_t_3, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_Self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_Self, __pyx_t_3) < 0) __PYX_ERR(0, 18, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":22 - * # START GENERATED CYTHON IMPORT - * # This section is automatically generated by the script tools/cython_imports.py - * try: # <<<<<<<<<<<<<< - * # NOTE: the cython compiler needs this "import cython" in the file, it - * # can't be only "from sqlalchemy.util import cython" with the fallback - */ - { - (void)__pyx_t_1; (void)__pyx_t_4; (void)__pyx_t_5; /* mark used */ - /*try:*/ { - - /* "sqlalchemy/util/_immutabledict_cy.py":26 - * # can't be only "from sqlalchemy.util import cython" with the fallback - * # in that module - * import cython # <<<<<<<<<<<<<< - * except ModuleNotFoundError: - * from sqlalchemy.util import cython - */ - } - } - - /* "sqlalchemy/util/_immutabledict_cy.py":31 - * - * - * def _is_compiled() -> bool: # <<<<<<<<<<<<<< - * """Utility function to indicate if this module is compiled or not.""" - * return cython.compiled 
# type: ignore[no-any-return] - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 31, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_bool) < 0) __PYX_ERR(0, 31, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_1_is_compiled, 0, __pyx_n_s_is_compiled, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__5)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 31, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_compiled, __pyx_t_3) < 0) __PYX_ERR(0, 31, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":44 - * - * - * def _immutable_fn(obj: object) -> NoReturn: # <<<<<<<<<<<<<< - * raise TypeError(f"{obj.__class__.__name__} object is immutable") - * - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 44, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_obj, __pyx_n_s_object) < 0) __PYX_ERR(0, 44, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 44, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_3_immutable_fn, 0, __pyx_n_s_immutable_fn, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__7)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 44, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_immutable_fn, __pyx_t_2) < 0) __PYX_ERR(0, 44, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":48 - * - * - * class ReadOnlyContainer: # <<<<<<<<<<<<<< - * __slots__ = () - * - */ - __pyx_t_2 = __Pyx_Py3MetaclassPrepare((PyObject *) NULL, __pyx_empty_tuple, __pyx_n_s_ReadOnlyContainer, __pyx_n_s_ReadOnlyContainer, (PyObject *) NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, (PyObject *) NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 48, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - - /* "sqlalchemy/util/_immutabledict_cy.py":49 - * - * class ReadOnlyContainer: - * __slots__ = () # <<<<<<<<<<<<<< - * - * def _readonly(self) -> NoReturn: - */ - if (__Pyx_SetNameInClass(__pyx_t_2, __pyx_n_s_slots, __pyx_empty_tuple) < 0) __PYX_ERR(0, 49, __pyx_L1_error) - - /* "sqlalchemy/util/_immutabledict_cy.py":51 - * __slots__ = () - * - * def _readonly(self) -> NoReturn: # <<<<<<<<<<<<<< - * raise TypeError( - * f"{self.__class__.__name__} object is immutable and/or readonly" - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 51, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 51, __pyx_L1_error) - __pyx_t_6 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_1_readonly, 0, __pyx_n_s_ReadOnlyContainer__readonly, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__9)); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 51, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_6, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if 
(__Pyx_SetNameInClass(__pyx_t_2, __pyx_n_s_readonly, __pyx_t_6) < 0) __PYX_ERR(0, 51, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":56 - * ) - * - * def __delitem__(self, key: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - __pyx_t_6 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 56, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 56, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 56, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_3__delitem__, 0, __pyx_n_s_ReadOnlyContainer___delitem, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__11)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 56, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_6); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (__Pyx_SetNameInClass(__pyx_t_2, __pyx_n_s_delitem, __pyx_t_3) < 0) __PYX_ERR(0, 56, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":59 - * self._readonly() - * - * def __setitem__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 59, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 59, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 59, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 59, __pyx_L1_error) - __pyx_t_6 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_5__setitem__, 0, __pyx_n_s_ReadOnlyContainer___setitem, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__13)); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 59, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_6, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetNameInClass(__pyx_t_2, __pyx_n_s_setitem, __pyx_t_6) < 0) __PYX_ERR(0, 59, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":62 - * self._readonly() - * - * def __setattr__(self, key: Any, value: Any) -> NoReturn: # <<<<<<<<<<<<<< - * self._readonly() - * - */ - __pyx_t_6 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 62, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 62, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_value, __pyx_n_s_Any) < 0) __PYX_ERR(0, 62, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 62, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ReadOnlyContainer_7__setattr__, 0, __pyx_n_s_ReadOnlyContainer___setattr, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__14)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 62, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_6); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (__Pyx_SetNameInClass(__pyx_t_2, 
__pyx_n_s_setattr, __pyx_t_3) < 0) __PYX_ERR(0, 62, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":48 - * - * - * class ReadOnlyContainer: # <<<<<<<<<<<<<< - * __slots__ = () - * - */ - __pyx_t_3 = __Pyx_Py3ClassCreate(((PyObject*)&PyType_Type), __pyx_n_s_ReadOnlyContainer, __pyx_empty_tuple, __pyx_t_2, NULL, 0, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 48, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_ReadOnlyContainer, __pyx_t_3) < 0) __PYX_ERR(0, 48, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":66 - * - * - * _KT = TypeVar("_KT", bound=Hashable) # <<<<<<<<<<<<<< - * _VT = TypeVar("_VT", bound=Any) - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_TypeVar); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_Hashable); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_bound, __pyx_t_6) < 0) __PYX_ERR(0, 66, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_tuple__15, __pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_KT, __pyx_t_6) < 0) __PYX_ERR(0, 66, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":67 - * - * _KT = TypeVar("_KT", bound=Hashable) - * _VT = TypeVar("_VT", bound=Any) # <<<<<<<<<<<<<< - * - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_TypeVar); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_Any); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_bound, __pyx_t_2) < 0) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_tuple__16, __pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_VT, __pyx_t_2) < 0) __PYX_ERR(0, 67, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sqlalchemy/util/_immutabledict_cy.py":74 - * # NOTE: this method is required in 3.9 and speeds up the use case - * # ImmutableDictBase[str,int](a_dict) significantly - * @classmethod # <<<<<<<<<<<<<< - * def __class_getitem__( # type: ignore[override] - * cls, key: Any - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 74, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_kp_s_type_Self) < 0) __PYX_ERR(0, 74, __pyx_L1_error) - __pyx_t_3 = 
__Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_1__class_getitem__, __Pyx_CYFUNCTION_CLASSMETHOD | __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase___class_getite, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__18)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_class_getitem, __pyx_t_3) < 0) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); - __Pyx_GetNameInClass(__pyx_t_3, (PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_class_getitem); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_Method_ClassMethod(__pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_class_getitem, __pyx_t_2) < 0) __PYX_ERR(0, 74, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); - - /* "sqlalchemy/util/_immutabledict_cy.py":89 - * _immutable_fn(self) - * - * def clear(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 89, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_9clear, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase_clear, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__19)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_clear, __pyx_t_3) < 0) __PYX_ERR(0, 89, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); - - /* "sqlalchemy/util/_immutabledict_cy.py":92 - * _immutable_fn(self) - * - * def pop(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 92, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 92, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_default, __pyx_kp_s_Optional_Any) < 0) __PYX_ERR(0, 92, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 92, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_11pop, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase_pop, NULL, 
__pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__21)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 92, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetDefaultsTuple(__pyx_t_2, __pyx_tuple__22); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_pop, __pyx_t_2) < 0) __PYX_ERR(0, 92, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); - - /* "sqlalchemy/util/_immutabledict_cy.py":95 - * _immutable_fn(self) - * - * def popitem(self) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 95, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 95, __pyx_L1_error) - __pyx_t_3 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_13popitem, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase_popitem, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__23)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 95, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_3, __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_popitem, __pyx_t_3) < 0) __PYX_ERR(0, 95, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); - - /* "sqlalchemy/util/_immutabledict_cy.py":98 - * _immutable_fn(self) - * - * def setdefault(self, key: Any, default: Optional[Any] = None) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 98, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_key, __pyx_n_s_Any) < 0) __PYX_ERR(0, 98, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_default, __pyx_kp_s_Optional_Any) < 0) __PYX_ERR(0, 98, __pyx_L1_error) - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_return, __pyx_n_s_NoReturn) < 0) __PYX_ERR(0, 98, __pyx_L1_error) - __pyx_t_2 = __Pyx_CyFunction_New(&__pyx_mdef_10sqlalchemy_4util_17_immutabledict_cy_17ImmutableDictBase_15setdefault, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_ImmutableDictBase_setdefault, NULL, __pyx_n_s_sqlalchemy_util__immutabledict_c, __pyx_d, ((PyObject *)__pyx_codeobj__24)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 98, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_CyFunction_SetDefaultsTuple(__pyx_t_2, __pyx_tuple__22); - __Pyx_CyFunction_SetAnnotationsDict(__pyx_t_2, __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__Pyx_SetItemOnTypeDict((PyObject *)__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase, __pyx_n_s_setdefault, __pyx_t_2) < 0) __PYX_ERR(0, 98, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - PyType_Modified(__pyx_ptype_10sqlalchemy_4util_17_immutabledict_cy_ImmutableDictBase); - - /* "sqlalchemy/util/_immutabledict_cy.py":101 - * _immutable_fn(self) - * - * def update(self, *arg: Any, **kw: Any) -> NoReturn: # <<<<<<<<<<<<<< - * _immutable_fn(self) - * - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(3); if 
[Deleted generated source: the remaining removed lines of the Cython-generated C module for sqlalchemy.util._immutabledict_cy. They cover the tail of the module initialization code, which attaches the annotated method objects to ImmutableDictBase and immutabledict (update, __class_getitem__, clear, pop, popitem, setdefault, union, merge_with, copy, __reduce__, and the __pyx_unpickle/__reduce_cython__/__setstate_cython__ pickle helpers), followed by Cython's standard runtime support utilities (refnanny, exception fetch/restore, attribute and keyword-argument handling, unicode/bytes comparison helpers, fast-call wrappers, import helpers, and PyType_Ready fixups).]
- { - int gc_was_enabled; - #if PY_VERSION_HEX >= 0x030A00b1 - gc_was_enabled = PyGC_Disable(); - (void)__Pyx_PyObject_CallMethod0; - #else - PyObject *ret, *py_status; - PyObject *gc = NULL; - #if PY_VERSION_HEX >= 0x030700a1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM+0 >= 0x07030400) - gc = PyImport_GetModule(__pyx_kp_u_gc); - #endif - if (unlikely(!gc)) gc = PyImport_Import(__pyx_kp_u_gc); - if (unlikely(!gc)) return -1; - py_status = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_isenabled); - if (unlikely(!py_status)) { - Py_DECREF(gc); - return -1; - } - gc_was_enabled = __Pyx_PyObject_IsTrue(py_status); - Py_DECREF(py_status); - if (gc_was_enabled > 0) { - ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_disable); - if (unlikely(!ret)) { - Py_DECREF(gc); - return -1; - } - Py_DECREF(ret); - } else if (unlikely(gc_was_enabled == -1)) { - Py_DECREF(gc); - return -1; - } - #endif - t->tp_flags |= Py_TPFLAGS_HEAPTYPE; -#if PY_VERSION_HEX >= 0x030A0000 - t->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; -#endif -#else - (void)__Pyx_PyObject_CallMethod0; -#endif - r = PyType_Ready(t); -#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) - t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; - #if PY_VERSION_HEX >= 0x030A00b1 - if (gc_was_enabled) - PyGC_Enable(); - #else - if (gc_was_enabled) { - PyObject *tp, *v, *tb; - PyErr_Fetch(&tp, &v, &tb); - ret = __Pyx_PyObject_CallMethod0(gc, __pyx_kp_u_enable); - if (likely(ret || r == -1)) { - Py_XDECREF(ret); - PyErr_Restore(tp, v, tb); - } else { - Py_XDECREF(tp); - Py_XDECREF(v); - Py_XDECREF(tb); - r = -1; - } - } - Py_DECREF(gc); - #endif - } -#endif - return r; -#endif -} - -/* PyObject_GenericGetAttrNoDict */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { - __Pyx_TypeName type_name = __Pyx_PyType_GetName(tp); - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%U'", - type_name, attr_name); -#else - "'" __Pyx_FMT_TYPENAME "' object has no attribute '%.400s'", - type_name, PyString_AS_STRING(attr_name)); -#endif - __Pyx_DECREF_TypeName(type_name); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { - PyObject *descr; - PyTypeObject *tp = Py_TYPE(obj); - if (unlikely(!PyString_Check(attr_name))) { - return PyObject_GenericGetAttr(obj, attr_name); - } - assert(!tp->tp_dictoffset); - descr = _PyType_Lookup(tp, attr_name); - if (unlikely(!descr)) { - return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); - } - Py_INCREF(descr); - #if PY_MAJOR_VERSION < 3 - if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) - #endif - { - descrgetfunc f = Py_TYPE(descr)->tp_descr_get; - if (unlikely(f)) { - PyObject *res = f(descr, obj, (PyObject *)tp); - Py_DECREF(descr); - return res; - } - } - return descr; -} -#endif - -/* PyObject_GenericGetAttr */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { - if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { - return PyObject_GenericGetAttr(obj, attr_name); - } - return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); -} -#endif - -/* SetupReduce */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { - int ret; - PyObject *name_attr; - name_attr = 
__Pyx_PyObject_GetAttrStrNoError(meth, __pyx_n_s_name); - if (likely(name_attr)) { - ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); - } else { - ret = -1; - } - if (unlikely(ret < 0)) { - PyErr_Clear(); - ret = 0; - } - Py_XDECREF(name_attr); - return ret; -} -static int __Pyx_setup_reduce(PyObject* type_obj) { - int ret = 0; - PyObject *object_reduce = NULL; - PyObject *object_getstate = NULL; - PyObject *object_reduce_ex = NULL; - PyObject *reduce = NULL; - PyObject *reduce_ex = NULL; - PyObject *reduce_cython = NULL; - PyObject *setstate = NULL; - PyObject *setstate_cython = NULL; - PyObject *getstate = NULL; -#if CYTHON_USE_PYTYPE_LOOKUP - getstate = _PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate); -#else - getstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_getstate); - if (!getstate && PyErr_Occurred()) { - goto __PYX_BAD; - } -#endif - if (getstate) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_getstate = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_getstate); -#else - object_getstate = __Pyx_PyObject_GetAttrStrNoError((PyObject*)&PyBaseObject_Type, __pyx_n_s_getstate); - if (!object_getstate && PyErr_Occurred()) { - goto __PYX_BAD; - } -#endif - if (object_getstate != getstate) { - goto __PYX_GOOD; - } - } -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#else - object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#endif - reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; - if (reduce_ex == object_reduce_ex) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#else - object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#endif - reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; - if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { - reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); - if (likely(reduce_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (reduce == object_reduce || PyErr_Occurred()) { - goto __PYX_BAD; - } - setstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate); - if (!setstate) PyErr_Clear(); - if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { - setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); - if (likely(setstate_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (!setstate || PyErr_Occurred()) { - goto __PYX_BAD; - } - } - PyType_Modified((PyTypeObject*)type_obj); - } - } - goto __PYX_GOOD; -__PYX_BAD: - if (!PyErr_Occurred()) { - __Pyx_TypeName type_obj_name = - __Pyx_PyType_GetName((PyTypeObject*)type_obj); - PyErr_Format(PyExc_RuntimeError, - "Unable to 
initialize pickling for " __Pyx_FMT_TYPENAME, type_obj_name); - __Pyx_DECREF_TypeName(type_obj_name); - } - ret = -1; -__PYX_GOOD: -#if !CYTHON_USE_PYTYPE_LOOKUP - Py_XDECREF(object_reduce); - Py_XDECREF(object_reduce_ex); - Py_XDECREF(object_getstate); - Py_XDECREF(getstate); -#endif - Py_XDECREF(reduce); - Py_XDECREF(reduce_ex); - Py_XDECREF(reduce_cython); - Py_XDECREF(setstate); - Py_XDECREF(setstate_cython); - return ret; -} -#endif - -/* TypeImport */ -#ifndef __PYX_HAVE_RT_ImportType_3_0_11 -#define __PYX_HAVE_RT_ImportType_3_0_11 -static PyTypeObject *__Pyx_ImportType_3_0_11(PyObject *module, const char *module_name, const char *class_name, - size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize_3_0_11 check_size) -{ - PyObject *result = 0; - char warning[200]; - Py_ssize_t basicsize; - Py_ssize_t itemsize; -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *py_basicsize; - PyObject *py_itemsize; -#endif - result = PyObject_GetAttrString(module, class_name); - if (!result) - goto bad; - if (!PyType_Check(result)) { - PyErr_Format(PyExc_TypeError, - "%.200s.%.200s is not a type object", - module_name, class_name); - goto bad; - } -#if !CYTHON_COMPILING_IN_LIMITED_API - basicsize = ((PyTypeObject *)result)->tp_basicsize; - itemsize = ((PyTypeObject *)result)->tp_itemsize; -#else - py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); - if (!py_basicsize) - goto bad; - basicsize = PyLong_AsSsize_t(py_basicsize); - Py_DECREF(py_basicsize); - py_basicsize = 0; - if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) - goto bad; - py_itemsize = PyObject_GetAttrString(result, "__itemsize__"); - if (!py_itemsize) - goto bad; - itemsize = PyLong_AsSsize_t(py_itemsize); - Py_DECREF(py_itemsize); - py_itemsize = 0; - if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred()) - goto bad; -#endif - if (itemsize) { - if (size % alignment) { - alignment = size % alignment; - } - if (itemsize < (Py_ssize_t)alignment) - itemsize = (Py_ssize_t)alignment; - } - if ((size_t)(basicsize + itemsize) < size) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize+itemsize); - goto bad; - } - if (check_size == __Pyx_ImportType_CheckSize_Error_3_0_11 && - ((size_t)basicsize > size || (size_t)(basicsize + itemsize) < size)) { - PyErr_Format(PyExc_ValueError, - "%.200s.%.200s size changed, may indicate binary incompatibility. " - "Expected %zd from C header, got %zd-%zd from PyObject", - module_name, class_name, size, basicsize, basicsize+itemsize); - goto bad; - } - else if (check_size == __Pyx_ImportType_CheckSize_Warn_3_0_11 && (size_t)basicsize > size) { - PyOS_snprintf(warning, sizeof(warning), - "%s.%s size changed, may indicate binary incompatibility. 
" - "Expected %zd from C header, got %zd from PyObject", - module_name, class_name, size, basicsize); - if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; - } - return (PyTypeObject *)result; -bad: - Py_XDECREF(result); - return NULL; -} -#endif - -/* FetchSharedCythonModule */ -static PyObject *__Pyx_FetchSharedCythonABIModule(void) { - return __Pyx_PyImport_AddModuleRef((char*) __PYX_ABI_MODULE_NAME); -} - -/* FetchCommonType */ -static int __Pyx_VerifyCachedType(PyObject *cached_type, - const char *name, - Py_ssize_t basicsize, - Py_ssize_t expected_basicsize) { - if (!PyType_Check(cached_type)) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s is not a type object", name); - return -1; - } - if (basicsize != expected_basicsize) { - PyErr_Format(PyExc_TypeError, - "Shared Cython type %.200s has the wrong size, try recompiling", - name); - return -1; - } - return 0; -} -#if !CYTHON_USE_TYPE_SPECS -static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { - PyObject* abi_module; - const char* object_name; - PyTypeObject *cached_type = NULL; - abi_module = __Pyx_FetchSharedCythonABIModule(); - if (!abi_module) return NULL; - object_name = strrchr(type->tp_name, '.'); - object_name = object_name ? object_name+1 : type->tp_name; - cached_type = (PyTypeObject*) PyObject_GetAttrString(abi_module, object_name); - if (cached_type) { - if (__Pyx_VerifyCachedType( - (PyObject *)cached_type, - object_name, - cached_type->tp_basicsize, - type->tp_basicsize) < 0) { - goto bad; - } - goto done; - } - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; - PyErr_Clear(); - if (PyType_Ready(type) < 0) goto bad; - if (PyObject_SetAttrString(abi_module, object_name, (PyObject *)type) < 0) - goto bad; - Py_INCREF(type); - cached_type = type; -done: - Py_DECREF(abi_module); - return cached_type; -bad: - Py_XDECREF(cached_type); - cached_type = NULL; - goto done; -} -#else -static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { - PyObject *abi_module, *cached_type = NULL; - const char* object_name = strrchr(spec->name, '.'); - object_name = object_name ? object_name+1 : spec->name; - abi_module = __Pyx_FetchSharedCythonABIModule(); - if (!abi_module) return NULL; - cached_type = PyObject_GetAttrString(abi_module, object_name); - if (cached_type) { - Py_ssize_t basicsize; -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *py_basicsize; - py_basicsize = PyObject_GetAttrString(cached_type, "__basicsize__"); - if (unlikely(!py_basicsize)) goto bad; - basicsize = PyLong_AsSsize_t(py_basicsize); - Py_DECREF(py_basicsize); - py_basicsize = 0; - if (unlikely(basicsize == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; -#else - basicsize = likely(PyType_Check(cached_type)) ? 
((PyTypeObject*) cached_type)->tp_basicsize : -1; -#endif - if (__Pyx_VerifyCachedType( - cached_type, - object_name, - basicsize, - spec->basicsize) < 0) { - goto bad; - } - goto done; - } - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; - PyErr_Clear(); - CYTHON_UNUSED_VAR(module); - cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases); - if (unlikely(!cached_type)) goto bad; - if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad; - if (PyObject_SetAttrString(abi_module, object_name, cached_type) < 0) goto bad; -done: - Py_DECREF(abi_module); - assert(cached_type == NULL || PyType_Check(cached_type)); - return (PyTypeObject *) cached_type; -bad: - Py_XDECREF(cached_type); - cached_type = NULL; - goto done; -} -#endif - -/* PyVectorcallFastCallDict */ -#if CYTHON_METH_FASTCALL -static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) -{ - PyObject *res = NULL; - PyObject *kwnames; - PyObject **newargs; - PyObject **kwvalues; - Py_ssize_t i, pos; - size_t j; - PyObject *key, *value; - unsigned long keys_are_strings; - Py_ssize_t nkw = PyDict_GET_SIZE(kw); - newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0])); - if (unlikely(newargs == NULL)) { - PyErr_NoMemory(); - return NULL; - } - for (j = 0; j < nargs; j++) newargs[j] = args[j]; - kwnames = PyTuple_New(nkw); - if (unlikely(kwnames == NULL)) { - PyMem_Free(newargs); - return NULL; - } - kwvalues = newargs + nargs; - pos = i = 0; - keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; - while (PyDict_Next(kw, &pos, &key, &value)) { - keys_are_strings &= Py_TYPE(key)->tp_flags; - Py_INCREF(key); - Py_INCREF(value); - PyTuple_SET_ITEM(kwnames, i, key); - kwvalues[i] = value; - i++; - } - if (unlikely(!keys_are_strings)) { - PyErr_SetString(PyExc_TypeError, "keywords must be strings"); - goto cleanup; - } - res = vc(func, newargs, nargs, kwnames); -cleanup: - Py_DECREF(kwnames); - for (i = 0; i < nkw; i++) - Py_DECREF(kwvalues[i]); - PyMem_Free(newargs); - return res; -} -static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw) -{ - if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) { - return vc(func, args, nargs, NULL); - } - return __Pyx_PyVectorcall_FastCallDict_kw(func, vc, args, nargs, kw); -} -#endif - -/* CythonFunctionShared */ -#if CYTHON_COMPILING_IN_LIMITED_API -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { - if (__Pyx_CyFunction_Check(func)) { - return PyCFunction_GetFunction(((__pyx_CyFunctionObject*)func)->func) == (PyCFunction) cfunc; - } else if (PyCFunction_Check(func)) { - return PyCFunction_GetFunction(func) == (PyCFunction) cfunc; - } - return 0; -} -#else -static CYTHON_INLINE int __Pyx__IsSameCyOrCFunction(PyObject *func, void *cfunc) { - return __Pyx_CyOrPyCFunction_Check(func) && __Pyx_CyOrPyCFunction_GET_FUNCTION(func) == (PyCFunction) cfunc; -} -#endif -static CYTHON_INLINE void __Pyx__CyFunction_SetClassObj(__pyx_CyFunctionObject* f, PyObject* classobj) { -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - __Pyx_Py_XDECREF_SET( - __Pyx_CyFunction_GetClassObj(f), - ((classobj) ? __Pyx_NewRef(classobj) : NULL)); -#else - __Pyx_Py_XDECREF_SET( - ((PyCMethodObject *) (f))->mm_class, - (PyTypeObject*)((classobj) ? 
__Pyx_NewRef(classobj) : NULL)); -#endif -} -static PyObject * -__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, void *closure) -{ - CYTHON_UNUSED_VAR(closure); - if (unlikely(op->func_doc == NULL)) { -#if CYTHON_COMPILING_IN_LIMITED_API - op->func_doc = PyObject_GetAttrString(op->func, "__doc__"); - if (unlikely(!op->func_doc)) return NULL; -#else - if (((PyCFunctionObject*)op)->m_ml->ml_doc) { -#if PY_MAJOR_VERSION >= 3 - op->func_doc = PyUnicode_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); -#else - op->func_doc = PyString_FromString(((PyCFunctionObject*)op)->m_ml->ml_doc); -#endif - if (unlikely(op->func_doc == NULL)) - return NULL; - } else { - Py_INCREF(Py_None); - return Py_None; - } -#endif - } - Py_INCREF(op->func_doc); - return op->func_doc; -} -static int -__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (value == NULL) { - value = Py_None; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_doc, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(op->func_name == NULL)) { -#if CYTHON_COMPILING_IN_LIMITED_API - op->func_name = PyObject_GetAttrString(op->func, "__name__"); -#elif PY_MAJOR_VERSION >= 3 - op->func_name = PyUnicode_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); -#else - op->func_name = PyString_InternFromString(((PyCFunctionObject*)op)->m_ml->ml_name); -#endif - if (unlikely(op->func_name == NULL)) - return NULL; - } - Py_INCREF(op->func_name); - return op->func_name; -} -static int -__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__name__ must be set to a string object"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_name, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - Py_INCREF(op->func_qualname); - return op->func_qualname; -} -static int -__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); -#if PY_MAJOR_VERSION >= 3 - if (unlikely(value == NULL || !PyUnicode_Check(value))) -#else - if (unlikely(value == NULL || !PyString_Check(value))) -#endif - { - PyErr_SetString(PyExc_TypeError, - "__qualname__ must be set to a string object"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_qualname, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(op->func_dict == NULL)) { - op->func_dict = PyDict_New(); - if (unlikely(op->func_dict == NULL)) - return NULL; - } - Py_INCREF(op->func_dict); - return op->func_dict; -} -static int -__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, void *context) -{ - CYTHON_UNUSED_VAR(context); - if (unlikely(value == NULL)) { - PyErr_SetString(PyExc_TypeError, - "function's dictionary may not be deleted"); - return -1; - } - if (unlikely(!PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "setting function's dictionary to a non-dict"); - return -1; - } - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->func_dict, value); - 
return 0; -} -static PyObject * -__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(context); - Py_INCREF(op->func_globals); - return op->func_globals; -} -static PyObject * -__Pyx_CyFunction_get_closure(__pyx_CyFunctionObject *op, void *context) -{ - CYTHON_UNUSED_VAR(op); - CYTHON_UNUSED_VAR(context); - Py_INCREF(Py_None); - return Py_None; -} -static PyObject * -__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, void *context) -{ - PyObject* result = (op->func_code) ? op->func_code : Py_None; - CYTHON_UNUSED_VAR(context); - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) { - int result = 0; - PyObject *res = op->defaults_getter((PyObject *) op); - if (unlikely(!res)) - return -1; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - op->defaults_tuple = PyTuple_GET_ITEM(res, 0); - Py_INCREF(op->defaults_tuple); - op->defaults_kwdict = PyTuple_GET_ITEM(res, 1); - Py_INCREF(op->defaults_kwdict); - #else - op->defaults_tuple = __Pyx_PySequence_ITEM(res, 0); - if (unlikely(!op->defaults_tuple)) result = -1; - else { - op->defaults_kwdict = __Pyx_PySequence_ITEM(res, 1); - if (unlikely(!op->defaults_kwdict)) result = -1; - } - #endif - Py_DECREF(res); - return result; -} -static int -__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value) { - value = Py_None; - } else if (unlikely(value != Py_None && !PyTuple_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__defaults__ must be set to a tuple object"); - return -1; - } - PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__defaults__ will not " - "currently affect the values used in function calls", 1); - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->defaults_tuple, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->defaults_tuple; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - if (op->defaults_getter) { - if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; - result = op->defaults_tuple; - } else { - result = Py_None; - } - } - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value) { - value = Py_None; - } else if (unlikely(value != Py_None && !PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__kwdefaults__ must be set to a dict object"); - return -1; - } - PyErr_WarnEx(PyExc_RuntimeWarning, "changes to cyfunction.__kwdefaults__ will not " - "currently affect the values used in function calls", 1); - Py_INCREF(value); - __Pyx_Py_XDECREF_SET(op->defaults_kwdict, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->defaults_kwdict; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - if (op->defaults_getter) { - if (unlikely(__Pyx_CyFunction_init_defaults(op) < 0)) return NULL; - result = op->defaults_kwdict; - } else { - result = Py_None; - } - } - Py_INCREF(result); - return result; -} -static int -__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - if (!value || value == Py_None) { - value = NULL; - } else if (unlikely(!PyDict_Check(value))) { - PyErr_SetString(PyExc_TypeError, - "__annotations__ 
must be set to a dict object"); - return -1; - } - Py_XINCREF(value); - __Pyx_Py_XDECREF_SET(op->func_annotations, value); - return 0; -} -static PyObject * -__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, void *context) { - PyObject* result = op->func_annotations; - CYTHON_UNUSED_VAR(context); - if (unlikely(!result)) { - result = PyDict_New(); - if (unlikely(!result)) return NULL; - op->func_annotations = result; - } - Py_INCREF(result); - return result; -} -static PyObject * -__Pyx_CyFunction_get_is_coroutine(__pyx_CyFunctionObject *op, void *context) { - int is_coroutine; - CYTHON_UNUSED_VAR(context); - if (op->func_is_coroutine) { - return __Pyx_NewRef(op->func_is_coroutine); - } - is_coroutine = op->flags & __Pyx_CYFUNCTION_COROUTINE; -#if PY_VERSION_HEX >= 0x03050000 - if (is_coroutine) { - PyObject *module, *fromlist, *marker = __pyx_n_s_is_coroutine; - fromlist = PyList_New(1); - if (unlikely(!fromlist)) return NULL; - Py_INCREF(marker); -#if CYTHON_ASSUME_SAFE_MACROS - PyList_SET_ITEM(fromlist, 0, marker); -#else - if (unlikely(PyList_SetItem(fromlist, 0, marker) < 0)) { - Py_DECREF(marker); - Py_DECREF(fromlist); - return NULL; - } -#endif - module = PyImport_ImportModuleLevelObject(__pyx_n_s_asyncio_coroutines, NULL, NULL, fromlist, 0); - Py_DECREF(fromlist); - if (unlikely(!module)) goto ignore; - op->func_is_coroutine = __Pyx_PyObject_GetAttrStr(module, marker); - Py_DECREF(module); - if (likely(op->func_is_coroutine)) { - return __Pyx_NewRef(op->func_is_coroutine); - } -ignore: - PyErr_Clear(); - } -#endif - op->func_is_coroutine = __Pyx_PyBool_FromLong(is_coroutine); - return __Pyx_NewRef(op->func_is_coroutine); -} -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject * -__Pyx_CyFunction_get_module(__pyx_CyFunctionObject *op, void *context) { - CYTHON_UNUSED_VAR(context); - return PyObject_GetAttrString(op->func, "__module__"); -} -static int -__Pyx_CyFunction_set_module(__pyx_CyFunctionObject *op, PyObject* value, void *context) { - CYTHON_UNUSED_VAR(context); - return PyObject_SetAttrString(op->func, "__module__", value); -} -#endif -static PyGetSetDef __pyx_CyFunction_getsets[] = { - {(char *) "func_doc", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, - {(char *) "__doc__", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0}, - {(char *) "func_name", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, - {(char *) "__name__", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0}, - {(char *) "__qualname__", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0}, - {(char *) "func_dict", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, - {(char *) "__dict__", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0}, - {(char *) "func_globals", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, - {(char *) "__globals__", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0}, - {(char *) "func_closure", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, - {(char *) "__closure__", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0}, - {(char *) "func_code", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, - {(char *) "__code__", (getter)__Pyx_CyFunction_get_code, 0, 0, 0}, - {(char *) "func_defaults", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, - {(char *) "__defaults__", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0}, - {(char *) 
"__kwdefaults__", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0}, - {(char *) "__annotations__", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0}, - {(char *) "_is_coroutine", (getter)__Pyx_CyFunction_get_is_coroutine, 0, 0, 0}, -#if CYTHON_COMPILING_IN_LIMITED_API - {"__module__", (getter)__Pyx_CyFunction_get_module, (setter)__Pyx_CyFunction_set_module, 0, 0}, -#endif - {0, 0, 0, 0, 0} -}; -static PyMemberDef __pyx_CyFunction_members[] = { -#if !CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), 0, 0}, -#endif -#if CYTHON_USE_TYPE_SPECS - {(char *) "__dictoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_dict), READONLY, 0}, -#if CYTHON_METH_FASTCALL -#if CYTHON_BACKPORT_VECTORCALL - {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_vectorcall), READONLY, 0}, -#else -#if !CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__vectorcalloffset__", T_PYSSIZET, offsetof(PyCFunctionObject, vectorcall), READONLY, 0}, -#endif -#endif -#endif -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API - {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(__pyx_CyFunctionObject, func_weakreflist), READONLY, 0}, -#else - {(char *) "__weaklistoffset__", T_PYSSIZET, offsetof(PyCFunctionObject, m_weakreflist), READONLY, 0}, -#endif -#endif - {0, 0, 0, 0, 0} -}; -static PyObject * -__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, PyObject *args) -{ - CYTHON_UNUSED_VAR(args); -#if PY_MAJOR_VERSION >= 3 - Py_INCREF(m->func_qualname); - return m->func_qualname; -#else - return PyString_FromString(((PyCFunctionObject*)m)->m_ml->ml_name); -#endif -} -static PyMethodDef __pyx_CyFunction_methods[] = { - {"__reduce__", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0}, - {0, 0, 0, 0} -}; -#if PY_VERSION_HEX < 0x030500A0 || CYTHON_COMPILING_IN_LIMITED_API -#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist) -#else -#define __Pyx_CyFunction_weakreflist(cyfunc) (((PyCFunctionObject*)cyfunc)->m_weakreflist) -#endif -static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { -#if !CYTHON_COMPILING_IN_LIMITED_API - PyCFunctionObject *cf = (PyCFunctionObject*) op; -#endif - if (unlikely(op == NULL)) - return NULL; -#if CYTHON_COMPILING_IN_LIMITED_API - op->func = PyCFunction_NewEx(ml, (PyObject*)op, module); - if (unlikely(!op->func)) return NULL; -#endif - op->flags = flags; - __Pyx_CyFunction_weakreflist(op) = NULL; -#if !CYTHON_COMPILING_IN_LIMITED_API - cf->m_ml = ml; - cf->m_self = (PyObject *) op; -#endif - Py_XINCREF(closure); - op->func_closure = closure; -#if !CYTHON_COMPILING_IN_LIMITED_API - Py_XINCREF(module); - cf->m_module = module; -#endif - op->func_dict = NULL; - op->func_name = NULL; - Py_INCREF(qualname); - op->func_qualname = qualname; - op->func_doc = NULL; -#if PY_VERSION_HEX < 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API - op->func_classobj = NULL; -#else - ((PyCMethodObject*)op)->mm_class = NULL; -#endif - op->func_globals = globals; - Py_INCREF(op->func_globals); - Py_XINCREF(code); - op->func_code = code; - op->defaults_pyobjects = 0; - op->defaults_size = 0; - op->defaults = NULL; - op->defaults_tuple = NULL; - op->defaults_kwdict = NULL; - op->defaults_getter = NULL; - op->func_annotations = NULL; - op->func_is_coroutine = NULL; -#if 
CYTHON_METH_FASTCALL - switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS | METH_METHOD)) { - case METH_NOARGS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_NOARGS; - break; - case METH_O: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_O; - break; - case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD; - break; - case METH_FASTCALL | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS; - break; - case METH_VARARGS | METH_KEYWORDS: - __Pyx_CyFunction_func_vectorcall(op) = NULL; - break; - default: - PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); - Py_DECREF(op); - return NULL; - } -#endif - return (PyObject *) op; -} -static int -__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m) -{ - Py_CLEAR(m->func_closure); -#if CYTHON_COMPILING_IN_LIMITED_API - Py_CLEAR(m->func); -#else - Py_CLEAR(((PyCFunctionObject*)m)->m_module); -#endif - Py_CLEAR(m->func_dict); - Py_CLEAR(m->func_name); - Py_CLEAR(m->func_qualname); - Py_CLEAR(m->func_doc); - Py_CLEAR(m->func_globals); - Py_CLEAR(m->func_code); -#if !CYTHON_COMPILING_IN_LIMITED_API -#if PY_VERSION_HEX < 0x030900B1 - Py_CLEAR(__Pyx_CyFunction_GetClassObj(m)); -#else - { - PyObject *cls = (PyObject*) ((PyCMethodObject *) (m))->mm_class; - ((PyCMethodObject *) (m))->mm_class = NULL; - Py_XDECREF(cls); - } -#endif -#endif - Py_CLEAR(m->defaults_tuple); - Py_CLEAR(m->defaults_kwdict); - Py_CLEAR(m->func_annotations); - Py_CLEAR(m->func_is_coroutine); - if (m->defaults) { - PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); - int i; - for (i = 0; i < m->defaults_pyobjects; i++) - Py_XDECREF(pydefaults[i]); - PyObject_Free(m->defaults); - m->defaults = NULL; - } - return 0; -} -static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m) -{ - if (__Pyx_CyFunction_weakreflist(m) != NULL) - PyObject_ClearWeakRefs((PyObject *) m); - __Pyx_CyFunction_clear(m); - __Pyx_PyHeapTypeObject_GC_Del(m); -} -static void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m) -{ - PyObject_GC_UnTrack(m); - __Pyx__CyFunction_dealloc(m); -} -static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg) -{ - Py_VISIT(m->func_closure); -#if CYTHON_COMPILING_IN_LIMITED_API - Py_VISIT(m->func); -#else - Py_VISIT(((PyCFunctionObject*)m)->m_module); -#endif - Py_VISIT(m->func_dict); - Py_VISIT(m->func_name); - Py_VISIT(m->func_qualname); - Py_VISIT(m->func_doc); - Py_VISIT(m->func_globals); - Py_VISIT(m->func_code); -#if !CYTHON_COMPILING_IN_LIMITED_API - Py_VISIT(__Pyx_CyFunction_GetClassObj(m)); -#endif - Py_VISIT(m->defaults_tuple); - Py_VISIT(m->defaults_kwdict); - Py_VISIT(m->func_is_coroutine); - if (m->defaults) { - PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m); - int i; - for (i = 0; i < m->defaults_pyobjects; i++) - Py_VISIT(pydefaults[i]); - } - return 0; -} -static PyObject* -__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op) -{ -#if PY_MAJOR_VERSION >= 3 - return PyUnicode_FromFormat("<cyfunction %U at %p>", - op->func_qualname, (void *)op); -#else - return PyString_FromFormat("<cyfunction %s at %p>", - PyString_AsString(op->func_qualname), (void *)op); -#endif -} -static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) { -#if CYTHON_COMPILING_IN_LIMITED_API - PyObject *f = ((__pyx_CyFunctionObject*)func)->func; - PyObject *py_name = NULL; - 
PyCFunction meth; - int flags; - meth = PyCFunction_GetFunction(f); - if (unlikely(!meth)) return NULL; - flags = PyCFunction_GetFlags(f); - if (unlikely(flags < 0)) return NULL; -#else - PyCFunctionObject* f = (PyCFunctionObject*)func; - PyCFunction meth = f->m_ml->ml_meth; - int flags = f->m_ml->ml_flags; -#endif - Py_ssize_t size; - switch (flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) { - case METH_VARARGS: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) - return (*meth)(self, arg); - break; - case METH_VARARGS | METH_KEYWORDS: - return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw); - case METH_NOARGS: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) { -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(arg); -#else - size = PyTuple_Size(arg); - if (unlikely(size < 0)) return NULL; -#endif - if (likely(size == 0)) - return (*meth)(self, NULL); -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, - "%.200S() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - py_name, size); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, - "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - f->m_ml->ml_name, size); -#endif - return NULL; - } - break; - case METH_O: - if (likely(kw == NULL || PyDict_Size(kw) == 0)) { -#if CYTHON_ASSUME_SAFE_MACROS - size = PyTuple_GET_SIZE(arg); -#else - size = PyTuple_Size(arg); - if (unlikely(size < 0)) return NULL; -#endif - if (likely(size == 1)) { - PyObject *result, *arg0; - #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - arg0 = PyTuple_GET_ITEM(arg, 0); - #else - arg0 = __Pyx_PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL; - #endif - result = (*meth)(self, arg0); - #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) - Py_DECREF(arg0); - #endif - return result; - } -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, - "%.200S() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - py_name, size); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, - "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - f->m_ml->ml_name, size); -#endif - return NULL; - } - break; - default: - PyErr_SetString(PyExc_SystemError, "Bad call flags for CyFunction"); - return NULL; - } -#if CYTHON_COMPILING_IN_LIMITED_API - py_name = __Pyx_CyFunction_get_name((__pyx_CyFunctionObject*)func, NULL); - if (!py_name) return NULL; - PyErr_Format(PyExc_TypeError, "%.200S() takes no keyword arguments", - py_name); - Py_DECREF(py_name); -#else - PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments", - f->m_ml->ml_name); -#endif - return NULL; -} -static CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *self, *result; -#if CYTHON_COMPILING_IN_LIMITED_API - self = PyCFunction_GetSelf(((__pyx_CyFunctionObject*)func)->func); - if (unlikely(!self) && PyErr_Occurred()) return NULL; -#else - self = ((PyCFunctionObject*)func)->m_self; -#endif - result = __Pyx_CyFunction_CallMethod(func, self, arg, kw); - return result; -} -static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) { - PyObject *result; - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func; -#if CYTHON_METH_FASTCALL - 
__pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc); - if (vc) { -#if CYTHON_ASSUME_SAFE_MACROS - return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw); -#else - (void) &__Pyx_PyVectorcall_FastCallDict; - return PyVectorcall_Call(func, args, kw); -#endif - } -#endif - if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { - Py_ssize_t argc; - PyObject *new_args; - PyObject *self; -#if CYTHON_ASSUME_SAFE_MACROS - argc = PyTuple_GET_SIZE(args); -#else - argc = PyTuple_Size(args); - if (unlikely(!argc) < 0) return NULL; -#endif - new_args = PyTuple_GetSlice(args, 1, argc); - if (unlikely(!new_args)) - return NULL; - self = PyTuple_GetItem(args, 0); - if (unlikely(!self)) { - Py_DECREF(new_args); -#if PY_MAJOR_VERSION > 2 - PyErr_Format(PyExc_TypeError, - "unbound method %.200S() needs an argument", - cyfunc->func_qualname); -#else - PyErr_SetString(PyExc_TypeError, - "unbound method needs an argument"); -#endif - return NULL; - } - result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw); - Py_DECREF(new_args); - } else { - result = __Pyx_CyFunction_Call(func, args, kw); - } - return result; -} -#if CYTHON_METH_FASTCALL -static CYTHON_INLINE int __Pyx_CyFunction_Vectorcall_CheckArgs(__pyx_CyFunctionObject *cyfunc, Py_ssize_t nargs, PyObject *kwnames) -{ - int ret = 0; - if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) { - if (unlikely(nargs < 1)) { - PyErr_Format(PyExc_TypeError, "%.200s() needs an argument", - ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); - return -1; - } - ret = 1; - } - if (unlikely(kwnames) && unlikely(PyTuple_GET_SIZE(kwnames))) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes no keyword arguments", ((PyCFunctionObject*)cyfunc)->m_ml->ml_name); - return -1; - } - return ret; -} -static PyObject * __Pyx_CyFunction_Vectorcall_NOARGS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - if (unlikely(nargs != 0)) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes no arguments (%" CYTHON_FORMAT_SSIZE_T "d given)", - def->ml_name, nargs); - return NULL; - } - return def->ml_meth(self, NULL); -} -static PyObject * __Pyx_CyFunction_Vectorcall_O(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, kwnames)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - if (unlikely(nargs != 1)) { - PyErr_Format(PyExc_TypeError, - "%.200s() takes exactly one argument (%" CYTHON_FORMAT_SSIZE_T "d given)", - def->ml_name, 
nargs); - return NULL; - } - return def->ml_meth(self, args[0]); -} -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCFunctionFastWithKeywords)(void(*)(void))def->ml_meth)(self, args, nargs, kwnames); -} -static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) -{ - __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *)func; - PyMethodDef* def = ((PyCFunctionObject*)cyfunc)->m_ml; - PyTypeObject *cls = (PyTypeObject *) __Pyx_CyFunction_GetClassObj(cyfunc); -#if CYTHON_BACKPORT_VECTORCALL - Py_ssize_t nargs = (Py_ssize_t)nargsf; -#else - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); -#endif - PyObject *self; - switch (__Pyx_CyFunction_Vectorcall_CheckArgs(cyfunc, nargs, NULL)) { - case 1: - self = args[0]; - args += 1; - nargs -= 1; - break; - case 0: - self = ((PyCFunctionObject*)cyfunc)->m_self; - break; - default: - return NULL; - } - return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames); -} -#endif -#if CYTHON_USE_TYPE_SPECS -static PyType_Slot __pyx_CyFunctionType_slots[] = { - {Py_tp_dealloc, (void *)__Pyx_CyFunction_dealloc}, - {Py_tp_repr, (void *)__Pyx_CyFunction_repr}, - {Py_tp_call, (void *)__Pyx_CyFunction_CallAsMethod}, - {Py_tp_traverse, (void *)__Pyx_CyFunction_traverse}, - {Py_tp_clear, (void *)__Pyx_CyFunction_clear}, - {Py_tp_methods, (void *)__pyx_CyFunction_methods}, - {Py_tp_members, (void *)__pyx_CyFunction_members}, - {Py_tp_getset, (void *)__pyx_CyFunction_getsets}, - {Py_tp_descr_get, (void *)__Pyx_PyMethod_New}, - {0, 0}, -}; -static PyType_Spec __pyx_CyFunctionType_spec = { - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if (defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL) - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - __pyx_CyFunctionType_slots -}; -#else -static PyTypeObject __pyx_CyFunctionType_type = { - PyVarObject_HEAD_INIT(0, 0) - __PYX_TYPE_MODULE_PREFIX "cython_function_or_method", - sizeof(__pyx_CyFunctionObject), - 0, - (destructor) __Pyx_CyFunction_dealloc, -#if !CYTHON_METH_FASTCALL - 0, -#elif CYTHON_BACKPORT_VECTORCALL - (printfunc)offsetof(__pyx_CyFunctionObject, func_vectorcall), -#else - offsetof(PyCFunctionObject, vectorcall), -#endif - 0, - 0, -#if PY_MAJOR_VERSION < 3 - 0, -#else - 0, -#endif - (reprfunc) __Pyx_CyFunction_repr, - 0, - 0, - 0, - 0, - __Pyx_CyFunction_CallAsMethod, - 0, - 0, - 0, - 0, -#ifdef Py_TPFLAGS_METHOD_DESCRIPTOR - Py_TPFLAGS_METHOD_DESCRIPTOR | -#endif -#if defined(_Py_TPFLAGS_HAVE_VECTORCALL) && CYTHON_METH_FASTCALL - _Py_TPFLAGS_HAVE_VECTORCALL | -#endif - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, - 0, - (traverseproc) __Pyx_CyFunction_traverse, - (inquiry) 
__Pyx_CyFunction_clear, - 0, -#if PY_VERSION_HEX < 0x030500A0 - offsetof(__pyx_CyFunctionObject, func_weakreflist), -#else - offsetof(PyCFunctionObject, m_weakreflist), -#endif - 0, - 0, - __pyx_CyFunction_methods, - __pyx_CyFunction_members, - __pyx_CyFunction_getsets, - 0, - 0, - __Pyx_PyMethod_New, - 0, - offsetof(__pyx_CyFunctionObject, func_dict), - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, -#if PY_VERSION_HEX >= 0x030400a1 - 0, -#endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) - 0, -#endif -#if __PYX_NEED_TP_PRINT_SLOT - 0, -#endif -#if PY_VERSION_HEX >= 0x030C0000 - 0, -#endif -#if PY_VERSION_HEX >= 0x030d00A4 - 0, -#endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 && PY_VERSION_HEX < 0x030a0000 - 0, -#endif -}; -#endif -static int __pyx_CyFunction_init(PyObject *module) { -#if CYTHON_USE_TYPE_SPECS - __pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL); -#else - CYTHON_UNUSED_VAR(module); - __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type); -#endif - if (unlikely(__pyx_CyFunctionType == NULL)) { - return -1; - } - return 0; -} -static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults = PyObject_Malloc(size); - if (unlikely(!m->defaults)) - return PyErr_NoMemory(); - memset(m->defaults, 0, size); - m->defaults_pyobjects = pyobjects; - m->defaults_size = size; - return m->defaults; -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_tuple = tuple; - Py_INCREF(tuple); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->defaults_kwdict = dict; - Py_INCREF(dict); -} -static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) { - __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; - m->func_annotations = dict; - Py_INCREF(dict); -} - -/* CythonFunction */ -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { - PyObject *op = __Pyx_CyFunction_Init( - PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), - ml, flags, qualname, closure, module, globals, code - ); - if (likely(op)) { - PyObject_GC_Track(op); - } - return op; -} - -/* CalculateMetaclass */ -static PyObject *__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases) { - Py_ssize_t i, nbases; -#if CYTHON_ASSUME_SAFE_MACROS - nbases = PyTuple_GET_SIZE(bases); -#else - nbases = PyTuple_Size(bases); - if (nbases < 0) return NULL; -#endif - for (i=0; i < nbases; i++) { - PyTypeObject *tmptype; -#if CYTHON_ASSUME_SAFE_MACROS - PyObject *tmp = PyTuple_GET_ITEM(bases, i); -#else - PyObject *tmp = PyTuple_GetItem(bases, i); - if (!tmp) return NULL; -#endif - tmptype = Py_TYPE(tmp); -#if PY_MAJOR_VERSION < 3 - if (tmptype == &PyClass_Type) - continue; -#endif - if (!metaclass) { - metaclass = tmptype; - continue; - } - if (PyType_IsSubtype(metaclass, tmptype)) - continue; - if (PyType_IsSubtype(tmptype, metaclass)) { - metaclass = tmptype; - continue; - } - PyErr_SetString(PyExc_TypeError, - "metaclass conflict: " - "the metaclass of a derived class " - "must 
be a (non-strict) subclass " - "of the metaclasses of all its bases"); - return NULL; - } - if (!metaclass) { -#if PY_MAJOR_VERSION < 3 - metaclass = &PyClass_Type; -#else - metaclass = &PyType_Type; -#endif - } - Py_INCREF((PyObject*) metaclass); - return (PyObject*) metaclass; -} - -/* PyObjectCall2Args */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { - PyObject *args[3] = {NULL, arg1, arg2}; - return __Pyx_PyObject_FastCall(function, args+1, 2 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET); -} - -/* PyObjectLookupSpecial */ -#if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx__PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name, int with_error) { - PyObject *res; - PyTypeObject *tp = Py_TYPE(obj); -#if PY_MAJOR_VERSION < 3 - if (unlikely(PyInstance_Check(obj))) - return with_error ? __Pyx_PyObject_GetAttrStr(obj, attr_name) : __Pyx_PyObject_GetAttrStrNoError(obj, attr_name); -#endif - res = _PyType_Lookup(tp, attr_name); - if (likely(res)) { - descrgetfunc f = Py_TYPE(res)->tp_descr_get; - if (!f) { - Py_INCREF(res); - } else { - res = f(res, obj, (PyObject *)tp); - } - } else if (with_error) { - PyErr_SetObject(PyExc_AttributeError, attr_name); - } - return res; -} -#endif - -/* Py3ClassCreate */ -static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, - PyObject *qualname, PyObject *mkw, PyObject *modname, PyObject *doc) { - PyObject *ns; - if (metaclass) { - PyObject *prep = __Pyx_PyObject_GetAttrStrNoError(metaclass, __pyx_n_s_prepare); - if (prep) { - PyObject *pargs[3] = {NULL, name, bases}; - ns = __Pyx_PyObject_FastCallDict(prep, pargs+1, 2 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET, mkw); - Py_DECREF(prep); - } else { - if (unlikely(PyErr_Occurred())) - return NULL; - ns = PyDict_New(); - } - } else { - ns = PyDict_New(); - } - if (unlikely(!ns)) - return NULL; - if (unlikely(PyObject_SetItem(ns, __pyx_n_s_module, modname) < 0)) goto bad; -#if PY_VERSION_HEX >= 0x03030000 - if (unlikely(PyObject_SetItem(ns, __pyx_n_s_qualname, qualname) < 0)) goto bad; -#else - CYTHON_MAYBE_UNUSED_VAR(qualname); -#endif - if (unlikely(doc && PyObject_SetItem(ns, __pyx_n_s_doc, doc) < 0)) goto bad; - return ns; -bad: - Py_DECREF(ns); - return NULL; -} -#if PY_VERSION_HEX < 0x030600A4 && CYTHON_PEP487_INIT_SUBCLASS -static int __Pyx_SetNamesPEP487(PyObject *type_obj) { - PyTypeObject *type = (PyTypeObject*) type_obj; - PyObject *names_to_set, *key, *value, *set_name, *tmp; - Py_ssize_t i = 0; -#if CYTHON_USE_TYPE_SLOTS - names_to_set = PyDict_Copy(type->tp_dict); -#else - { - PyObject *d = PyObject_GetAttr(type_obj, __pyx_n_s_dict); - names_to_set = NULL; - if (likely(d)) { - PyObject *names_to_set = PyDict_New(); - int ret = likely(names_to_set) ? 
PyDict_Update(names_to_set, d) : -1; - Py_DECREF(d); - if (unlikely(ret < 0)) - Py_CLEAR(names_to_set); - } - } -#endif - if (unlikely(names_to_set == NULL)) - goto bad; - while (PyDict_Next(names_to_set, &i, &key, &value)) { - set_name = __Pyx_PyObject_LookupSpecialNoError(value, __pyx_n_s_set_name); - if (unlikely(set_name != NULL)) { - tmp = __Pyx_PyObject_Call2Args(set_name, type_obj, key); - Py_DECREF(set_name); - if (unlikely(tmp == NULL)) { - __Pyx_TypeName value_type_name = - __Pyx_PyType_GetName(Py_TYPE(value)); - __Pyx_TypeName type_name = __Pyx_PyType_GetName(type); - PyErr_Format(PyExc_RuntimeError, -#if PY_MAJOR_VERSION >= 3 - "Error calling __set_name__ on '" __Pyx_FMT_TYPENAME "' instance %R " "in '" __Pyx_FMT_TYPENAME "'", - value_type_name, key, type_name); -#else - "Error calling __set_name__ on '" __Pyx_FMT_TYPENAME "' instance %.100s in '" __Pyx_FMT_TYPENAME "'", - value_type_name, - PyString_Check(key) ? PyString_AS_STRING(key) : "?", - type_name); -#endif - goto bad; - } else { - Py_DECREF(tmp); - } - } - else if (unlikely(PyErr_Occurred())) { - goto bad; - } - } - Py_DECREF(names_to_set); - return 0; -bad: - Py_XDECREF(names_to_set); - return -1; -} -static PyObject *__Pyx_InitSubclassPEP487(PyObject *type_obj, PyObject *mkw) { -#if CYTHON_USE_TYPE_SLOTS && CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - PyTypeObject *type = (PyTypeObject*) type_obj; - PyObject *mro = type->tp_mro; - Py_ssize_t i, nbases; - if (unlikely(!mro)) goto done; - (void) &__Pyx_GetBuiltinName; - Py_INCREF(mro); - nbases = PyTuple_GET_SIZE(mro); - assert(PyTuple_GET_ITEM(mro, 0) == type_obj); - for (i = 1; i < nbases-1; i++) { - PyObject *base, *dict, *meth; - base = PyTuple_GET_ITEM(mro, i); - dict = ((PyTypeObject *)base)->tp_dict; - meth = __Pyx_PyDict_GetItemStrWithError(dict, __pyx_n_s_init_subclass); - if (unlikely(meth)) { - descrgetfunc f = Py_TYPE(meth)->tp_descr_get; - PyObject *res; - Py_INCREF(meth); - if (likely(f)) { - res = f(meth, NULL, type_obj); - Py_DECREF(meth); - if (unlikely(!res)) goto bad; - meth = res; - } - res = __Pyx_PyObject_FastCallDict(meth, NULL, 0, mkw); - Py_DECREF(meth); - if (unlikely(!res)) goto bad; - Py_DECREF(res); - goto done; - } else if (unlikely(PyErr_Occurred())) { - goto bad; - } - } -done: - Py_XDECREF(mro); - return type_obj; -bad: - Py_XDECREF(mro); - Py_DECREF(type_obj); - return NULL; -#else - PyObject *super_type, *super, *func, *res; -#if CYTHON_COMPILING_IN_PYPY && !defined(PySuper_Type) - super_type = __Pyx_GetBuiltinName(__pyx_n_s_super); -#else - super_type = (PyObject*) &PySuper_Type; - (void) &__Pyx_GetBuiltinName; -#endif - super = likely(super_type) ? 
__Pyx_PyObject_Call2Args(super_type, type_obj, type_obj) : NULL; -#if CYTHON_COMPILING_IN_PYPY && !defined(PySuper_Type) - Py_XDECREF(super_type); -#endif - if (unlikely(!super)) { - Py_CLEAR(type_obj); - goto done; - } - func = __Pyx_PyObject_GetAttrStrNoError(super, __pyx_n_s_init_subclass); - Py_DECREF(super); - if (likely(!func)) { - if (unlikely(PyErr_Occurred())) - Py_CLEAR(type_obj); - goto done; - } - res = __Pyx_PyObject_FastCallDict(func, NULL, 0, mkw); - Py_DECREF(func); - if (unlikely(!res)) - Py_CLEAR(type_obj); - Py_XDECREF(res); -done: - return type_obj; -#endif -} -#endif -static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, - PyObject *dict, PyObject *mkw, - int calculate_metaclass, int allow_py2_metaclass) { - PyObject *result; - PyObject *owned_metaclass = NULL; - PyObject *margs[4] = {NULL, name, bases, dict}; - if (allow_py2_metaclass) { - owned_metaclass = PyObject_GetItem(dict, __pyx_n_s_metaclass); - if (owned_metaclass) { - metaclass = owned_metaclass; - } else if (likely(PyErr_ExceptionMatches(PyExc_KeyError))) { - PyErr_Clear(); - } else { - return NULL; - } - } - if (calculate_metaclass && (!metaclass || PyType_Check(metaclass))) { - metaclass = __Pyx_CalculateMetaclass((PyTypeObject*) metaclass, bases); - Py_XDECREF(owned_metaclass); - if (unlikely(!metaclass)) - return NULL; - owned_metaclass = metaclass; - } - result = __Pyx_PyObject_FastCallDict(metaclass, margs+1, 3 | __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET, -#if PY_VERSION_HEX < 0x030600A4 - (metaclass == (PyObject*)&PyType_Type) ? NULL : mkw -#else - mkw -#endif - ); - Py_XDECREF(owned_metaclass); -#if PY_VERSION_HEX < 0x030600A4 && CYTHON_PEP487_INIT_SUBCLASS - if (likely(result) && likely(PyType_Check(result))) { - if (unlikely(__Pyx_SetNamesPEP487(result) < 0)) { - Py_CLEAR(result); - } else { - result = __Pyx_InitSubclassPEP487(result, mkw); - } - } -#else - (void) &__Pyx_GetBuiltinName; -#endif - return result; -} - -/* ClassMethod */ -static PyObject* __Pyx_Method_ClassMethod(PyObject *method) { -#if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM <= 0x05080000 - if (PyObject_TypeCheck(method, &PyWrapperDescr_Type)) { - return PyClassMethod_New(method); - } -#else -#if CYTHON_COMPILING_IN_PYPY - if (PyMethodDescr_Check(method)) -#else - #if PY_MAJOR_VERSION == 2 - static PyTypeObject *methoddescr_type = NULL; - if (unlikely(methoddescr_type == NULL)) { - PyObject *meth = PyObject_GetAttrString((PyObject*)&PyList_Type, "append"); - if (unlikely(!meth)) return NULL; - methoddescr_type = Py_TYPE(meth); - Py_DECREF(meth); - } - #else - PyTypeObject *methoddescr_type = &PyMethodDescr_Type; - #endif - if (__Pyx_TypeCheck(method, methoddescr_type)) -#endif - { - PyMethodDescrObject *descr = (PyMethodDescrObject *)method; - #if PY_VERSION_HEX < 0x03020000 - PyTypeObject *d_type = descr->d_type; - #else - PyTypeObject *d_type = descr->d_common.d_type; - #endif - return PyDescr_NewClassMethod(d_type, descr->d_method); - } -#endif - else if (PyMethod_Check(method)) { - return PyClassMethod_New(PyMethod_GET_FUNCTION(method)); - } - else { - return PyClassMethod_New(method); - } -} - -/* GetNameInClass */ -static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name) { - PyObject *result; - PyObject *dict; - assert(PyType_Check(nmspace)); -#if CYTHON_USE_TYPE_SLOTS - dict = ((PyTypeObject*)nmspace)->tp_dict; - Py_XINCREF(dict); -#else - dict = PyObject_GetAttr(nmspace, __pyx_n_s_dict); -#endif - if (likely(dict)) { - result = PyObject_GetItem(dict, name); - 
Py_DECREF(dict); - if (result) { - return result; - } - } - PyErr_Clear(); - __Pyx_GetModuleGlobalNameUncached(result, name); - return result; -} - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - CYTHON_MAYBE_UNUSED_VAR(tstate); - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStrNoError(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -#if !CYTHON_COMPILING_IN_LIMITED_API -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if 
(__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} -#endif - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -#if PY_VERSION_HEX >= 0x030b00a6 && !CYTHON_COMPILING_IN_LIMITED_API - #ifndef Py_BUILD_CORE - #define Py_BUILD_CORE 1 - #endif - #include "internal/pycore_frame.h" -#endif -#if CYTHON_COMPILING_IN_LIMITED_API -static PyObject *__Pyx_PyCode_Replace_For_AddTraceback(PyObject *code, PyObject *scratch_dict, - PyObject *firstlineno, PyObject *name) { - PyObject *replace = NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "co_firstlineno", firstlineno))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "co_name", name))) return NULL; - replace = PyObject_GetAttrString(code, "replace"); - if (likely(replace)) { - PyObject *result; - result = PyObject_Call(replace, __pyx_empty_tuple, scratch_dict); - Py_DECREF(replace); - return result; - } - PyErr_Clear(); - #if __PYX_LIMITED_VERSION_HEX < 0x030780000 - { - PyObject *compiled = NULL, *result = NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "code", code))) return NULL; - if (unlikely(PyDict_SetItemString(scratch_dict, "type", (PyObject*)(&PyType_Type)))) return NULL; - compiled = Py_CompileString( - "out = type(code)(\n" - " code.co_argcount, code.co_kwonlyargcount, code.co_nlocals, code.co_stacksize,\n" - " code.co_flags, code.co_code, code.co_consts, code.co_names,\n" - " code.co_varnames, code.co_filename, co_name, co_firstlineno,\n" - " code.co_lnotab)\n", "", Py_file_input); - if (!compiled) return NULL; - result = PyEval_EvalCode(compiled, scratch_dict, scratch_dict); - Py_DECREF(compiled); - if (!result) PyErr_Print(); - Py_DECREF(result); - result = PyDict_GetItemString(scratch_dict, "out"); - if (result) Py_INCREF(result); - return result; - } - #else - return NULL; - #endif -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyObject *code_object = NULL, *py_py_line = NULL, *py_funcname = NULL, *dict = NULL; - PyObject *replace = NULL, *getframe = NULL, *frame = NULL; - PyObject *exc_type, *exc_value, *exc_traceback; - int success = 0; - if (c_line) { - (void) __pyx_cfilenm; - (void) __Pyx_CLineForTraceback(__Pyx_PyThreadState_Current, c_line); - } - PyErr_Fetch(&exc_type, &exc_value, &exc_traceback); - code_object = Py_CompileString("_getframe()", filename, Py_eval_input); - if (unlikely(!code_object)) goto bad; - py_py_line = PyLong_FromLong(py_line); - if (unlikely(!py_py_line)) goto bad; - py_funcname = PyUnicode_FromString(funcname); - if (unlikely(!py_funcname)) goto bad; - dict = PyDict_New(); - if (unlikely(!dict)) goto bad; - { - PyObject *old_code_object = code_object; - code_object = __Pyx_PyCode_Replace_For_AddTraceback(code_object, dict, py_py_line, py_funcname); - Py_DECREF(old_code_object); - } - if (unlikely(!code_object)) goto bad; - getframe = PySys_GetObject("_getframe"); - if (unlikely(!getframe)) goto bad; - if (unlikely(PyDict_SetItemString(dict, 
"_getframe", getframe))) goto bad; - frame = PyEval_EvalCode(code_object, dict, dict); - if (unlikely(!frame) || frame == Py_None) goto bad; - success = 1; - bad: - PyErr_Restore(exc_type, exc_value, exc_traceback); - Py_XDECREF(code_object); - Py_XDECREF(py_py_line); - Py_XDECREF(py_funcname); - Py_XDECREF(dict); - Py_XDECREF(replace); - if (success) { - PyTraceBack_Here( - (struct _frame*)frame); - } - Py_XDECREF(frame); -} -#else -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = NULL; - PyObject *py_funcname = NULL; - #if PY_MAJOR_VERSION < 3 - PyObject *py_srcfile = NULL; - py_srcfile = PyString_FromString(filename); - if (!py_srcfile) goto bad; - #endif - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto bad; - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - if (!py_funcname) goto bad; - funcname = PyUnicode_AsUTF8(py_funcname); - if (!funcname) goto bad; - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - if (!py_funcname) goto bad; - #endif - } - #if PY_MAJOR_VERSION < 3 - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - #else - py_code = PyCode_NewEmpty(filename, funcname, py_line); - #endif - Py_XDECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_funcname); - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_srcfile); - #endif - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject *ptype, *pvalue, *ptraceback; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) { - /* If the code object creation fails, then we should clear the - fetched exception references and propagate the new exception */ - Py_XDECREF(ptype); - Py_XDECREF(pvalue); - Py_XDECREF(ptraceback); - goto bad; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} -#endif - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(long) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 2 * PyLong_SHIFT)) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 3 * PyLong_SHIFT)) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned 
long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) >= 4 * PyLong_SHIFT)) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(long) <= sizeof(unsigned long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(long, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(long) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(long) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(long) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(long) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if ((8 * sizeof(long) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | 
(long)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(long) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(long) - 1 > 4 * PyLong_SHIFT)) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } - } -#endif - if ((sizeof(long) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(long) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - long val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (long) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (long) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (long) -1; - } else { - stepval = v; - } - v = NULL; - val = (long) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(long) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((long) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(long) * 8) - bits - (is_unsigned ? 
0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((long) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((long) 1) << (sizeof(long) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (long) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const long neg_one = (long) -1, const_zero = (long) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - unsigned char *bytes = (unsigned char *)&value; -#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX >= 0x030d00A4 - if (is_unsigned) { - return PyLong_FromUnsignedNativeBytes(bytes, sizeof(value), -1); - } else { - return PyLong_FromNativeBytes(bytes, sizeof(value), -1); - } -#elif !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000 - int one = 1; int little = (int)*(unsigned char *)&one; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); -#else - int one = 1; int little = (int)*(unsigned char *)&one; - PyObject *from_bytes, *result = NULL; - PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL; - from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes"); - if (!from_bytes) return NULL; - py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof(long)); - if (!py_bytes) goto limited_bad; - order_str = PyUnicode_FromString(little ? 
"little" : "big"); - if (!order_str) goto limited_bad; - arg_tuple = PyTuple_Pack(2, py_bytes, order_str); - if (!arg_tuple) goto limited_bad; - if (!is_unsigned) { - kwds = PyDict_New(); - if (!kwds) goto limited_bad; - if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad; - } - result = PyObject_Call(from_bytes, arg_tuple, kwds); - limited_bad: - Py_XDECREF(kwds); - Py_XDECREF(arg_tuple); - Py_XDECREF(order_str); - Py_XDECREF(py_bytes); - Py_XDECREF(from_bytes); - return result; -#endif - } -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wconversion" -#endif - const int neg_one = (int) -1, const_zero = (int) 0; -#ifdef __Pyx_HAS_GCC_DIAGNOSTIC -#pragma GCC diagnostic pop -#endif - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if ((sizeof(int) < sizeof(long))) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } -#endif - if (unlikely(!PyLong_Check(x))) { - int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - if (unlikely(__Pyx_PyLong_IsNeg(x))) { - goto raise_neg_overflow; - } else if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_upylong, __Pyx_PyLong_CompactValueUnsigned(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_DigitCount(x)) { - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 2 * PyLong_SHIFT)) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 3 * PyLong_SHIFT)) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) >= 4 * PyLong_SHIFT)) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030C00A7 - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if ((sizeof(int) <= sizeof(unsigned long))) { 
- __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if ((sizeof(int) <= sizeof(unsigned PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - if (__Pyx_PyLong_IsCompact(x)) { - __PYX_VERIFY_RETURN_INT(int, __Pyx_compact_pylong, __Pyx_PyLong_CompactValue(x)) - } else { - const digit* digits = __Pyx_PyLong_Digits(x); - assert(__Pyx_PyLong_DigitCount(x) > 1); - switch (__Pyx_PyLong_SignedDigitCount(x)) { - case -2: - if ((8 * sizeof(int) - 1 > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if ((8 * sizeof(int) > 1 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 2 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -3: - if ((8 * sizeof(int) - 1 > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if ((8 * sizeof(int) > 2 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 3 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if ((8 * sizeof(int) - 1 > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if ((8 * sizeof(int) > 3 * PyLong_SHIFT)) { - if ((8 * sizeof(unsigned long) > 4 * PyLong_SHIFT)) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if ((8 * sizeof(int) - 1 > 4 * PyLong_SHIFT)) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } - } -#endif - if ((sizeof(int) <= sizeof(long))) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - 
} else if ((sizeof(int) <= sizeof(PY_LONG_LONG))) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { - int val; - int ret = -1; -#if PY_VERSION_HEX >= 0x030d00A6 && !CYTHON_COMPILING_IN_LIMITED_API - Py_ssize_t bytes_copied = PyLong_AsNativeBytes( - x, &val, sizeof(val), Py_ASNATIVEBYTES_NATIVE_ENDIAN | (is_unsigned ? Py_ASNATIVEBYTES_UNSIGNED_BUFFER | Py_ASNATIVEBYTES_REJECT_NEGATIVE : 0)); - if (unlikely(bytes_copied == -1)) { - } else if (unlikely(bytes_copied > (Py_ssize_t) sizeof(val))) { - goto raise_overflow; - } else { - ret = 0; - } -#elif PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray) - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - ret = _PyLong_AsByteArray((PyLongObject *)x, - bytes, sizeof(val), - is_little, !is_unsigned); -#else - PyObject *v; - PyObject *stepval = NULL, *mask = NULL, *shift = NULL; - int bits, remaining_bits, is_negative = 0; - int chunk_size = (sizeof(long) < 8) ? 30 : 62; - if (likely(PyLong_CheckExact(x))) { - v = __Pyx_NewRef(x); - } else { - v = PyNumber_Long(x); - if (unlikely(!v)) return (int) -1; - assert(PyLong_CheckExact(v)); - } - { - int result = PyObject_RichCompareBool(v, Py_False, Py_LT); - if (unlikely(result < 0)) { - Py_DECREF(v); - return (int) -1; - } - is_negative = result == 1; - } - if (is_unsigned && unlikely(is_negative)) { - Py_DECREF(v); - goto raise_neg_overflow; - } else if (is_negative) { - stepval = PyNumber_Invert(v); - Py_DECREF(v); - if (unlikely(!stepval)) - return (int) -1; - } else { - stepval = v; - } - v = NULL; - val = (int) 0; - mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done; - shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done; - for (bits = 0; bits < (int) sizeof(int) * 8 - chunk_size; bits += chunk_size) { - PyObject *tmp, *digit; - long idigit; - digit = PyNumber_And(stepval, mask); - if (unlikely(!digit)) goto done; - idigit = PyLong_AsLong(digit); - Py_DECREF(digit); - if (unlikely(idigit < 0)) goto done; - val |= ((int) idigit) << bits; - tmp = PyNumber_Rshift(stepval, shift); - if (unlikely(!tmp)) goto done; - Py_DECREF(stepval); stepval = tmp; - } - Py_DECREF(shift); shift = NULL; - Py_DECREF(mask); mask = NULL; - { - long idigit = PyLong_AsLong(stepval); - if (unlikely(idigit < 0)) goto done; - remaining_bits = ((int) sizeof(int) * 8) - bits - (is_unsigned ? 
0 : 1); - if (unlikely(idigit >= (1L << remaining_bits))) - goto raise_overflow; - val |= ((int) idigit) << bits; - } - if (!is_unsigned) { - if (unlikely(val & (((int) 1) << (sizeof(int) * 8 - 1)))) - goto raise_overflow; - if (is_negative) - val = ~val; - } - ret = 0; - done: - Py_XDECREF(shift); - Py_XDECREF(mask); - Py_XDECREF(stepval); -#endif - if (unlikely(ret)) - return (int) -1; - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = __Pyx_PyType_GetSlot(a, tp_base, PyTypeObject*); - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} -static CYTHON_INLINE int __Pyx_IsAnySubtype2(PyTypeObject *cls, PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (cls == a || cls == b) return 1; - mro = cls->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - PyObject *base = PyTuple_GET_ITEM(mro, i); - if (base == (PyObject *)a || base == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(cls, a) || __Pyx_InBases(cls, b); -} -#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - if (exc_type1) { - return __Pyx_IsAnySubtype2((PyTypeObject*)err, (PyTypeObject*)exc_type1, (PyTypeObject*)exc_type2); - } else { - return __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; i= 0x030B00A4 - return Py_Version & ~0xFFUL; -#else - const char* rt_version = Py_GetVersion(); - unsigned long version = 0; - unsigned long factor = 0x01000000UL; - unsigned int digit = 0; - int i = 0; - while (factor) { - while ('0' <= rt_version[i] && rt_version[i] <= '9') { - digit = digit * 10 + (unsigned int) (rt_version[i] - '0'); - ++i; - } - version += factor * digit; - if (rt_version[i] != '.') - break; - digit = 0; - factor >>= 8; - ++i; - } - return version; -#endif -} -static int __Pyx_check_binary_version(unsigned long ct_version, unsigned long rt_version, int allow_newer) { - const unsigned long MAJOR_MINOR = 0xFFFF0000UL; - if ((rt_version & MAJOR_MINOR) == (ct_version & MAJOR_MINOR)) - return 0; - if (likely(allow_newer && (rt_version & MAJOR_MINOR) > (ct_version & MAJOR_MINOR))) - return 1; - { - char message[200]; - PyOS_snprintf(message, sizeof(message), - "compile time Python version %d.%d " - "of module '%.100s' " - "%s " - "runtime version %d.%d", - (int) (ct_version >> 24), (int) ((ct_version >> 16) & 0xFF), - __Pyx_MODULE_NAME, - (allow_newer) ? 
"was newer than" : "does not match", - (int) (rt_version >> 24), (int) ((rt_version >> 16) & 0xFF) - ); - return PyErr_WarnEx(NULL, message, 1); - } -} - -/* InitStrings */ -#if PY_MAJOR_VERSION >= 3 -static int __Pyx_InitString(__Pyx_StringTabEntry t, PyObject **str) { - if (t.is_unicode | t.is_str) { - if (t.intern) { - *str = PyUnicode_InternFromString(t.s); - } else if (t.encoding) { - *str = PyUnicode_Decode(t.s, t.n - 1, t.encoding, NULL); - } else { - *str = PyUnicode_FromStringAndSize(t.s, t.n - 1); - } - } else { - *str = PyBytes_FromStringAndSize(t.s, t.n - 1); - } - if (!*str) - return -1; - if (PyObject_Hash(*str) == -1) - return -1; - return 0; -} -#endif -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { - while (t->p) { - #if PY_MAJOR_VERSION >= 3 - __Pyx_InitString(*t, t->p); - #else - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - #endif - ++t; - } - return 0; -} - -#include -static CYTHON_INLINE Py_ssize_t __Pyx_ssize_strlen(const char *s) { - size_t len = strlen(s); - if (unlikely(len > (size_t) PY_SSIZE_T_MAX)) { - PyErr_SetString(PyExc_OverflowError, "byte string is too long"); - return -1; - } - return (Py_ssize_t) len; -} -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return __Pyx_PyUnicode_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE PyObject* __Pyx_PyByteArray_FromString(const char* c_str) { - Py_ssize_t len = __Pyx_ssize_strlen(c_str); - if (unlikely(len < 0)) return NULL; - return PyByteArray_FromStringAndSize(c_str, len); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if 
(!CYTHON_COMPILING_IN_PYPY && !CYTHON_COMPILING_IN_LIMITED_API) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { - __Pyx_TypeName result_type_name = __Pyx_PyType_GetName(Py_TYPE(result)); -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type " __Pyx_FMT_TYPENAME "). " - "The ability to return an instance of a strict subclass of int is deprecated, " - "and may be removed in a future version of Python.", - result_type_name)) { - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; - } - __Pyx_DECREF_TypeName(result_type_name); - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type " __Pyx_FMT_TYPENAME ")", - type_name, type_name, result_type_name); - __Pyx_DECREF_TypeName(result_type_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - if (likely(__Pyx_PyLong_IsCompact(b))) { - return __Pyx_PyLong_CompactValue(b); - } else { - const digit* digits = __Pyx_PyLong_Digits(b); - const Py_ssize_t size = __Pyx_PyLong_SignedDigitCount(b); - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * 
PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { - if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { - return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); -#if PY_MAJOR_VERSION < 3 - } else if (likely(PyInt_CheckExact(o))) { - return PyInt_AS_LONG(o); -#endif - } else { - Py_ssize_t ival; - PyObject *x; - x = PyNumber_Index(o); - if (!x) return -1; - ival = PyInt_AsLong(x); - Py_DECREF(x); - return ival; - } -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -/* #### Code section: utility_code_pragmas_end ### */ -#ifdef _MSC_VER -#pragma warning( pop ) -#endif - - - -/* #### Code section: end ### */ -#endif /* Py_PYTHON_H */ diff --git a/test/ext/test_hybrid.py b/test/ext/test_hybrid.py index f6ad0de8d4d..8e3d7e9cd57 100644 --- a/test/ext/test_hybrid.py +++ b/test/ext/test_hybrid.py @@ -22,7 +22,6 @@ from sqlalchemy.orm import relationship from sqlalchemy.orm import Session from sqlalchemy.orm import synonym -from sqlalchemy.orm.context import ORMSelectCompileState from sqlalchemy.sql import coercions from sqlalchemy.sql import operators from sqlalchemy.sql import roles @@ -532,9 +531,7 @@ def test_labeling_for_unnamed_matches_col( "SELECT a.id, a.foo FROM a", ) - compile_state = ORMSelectCompileState._create_orm_context( - stmt, toplevel=True, compiler=None - ) + compile_state = stmt._compile_state_factory(stmt, None) eq_( compile_state._column_naming_convention( LABEL_STYLE_DISAMBIGUATE_ONLY, legacy=False diff --git a/test/orm/test_froms.py b/test/orm/test_froms.py index e0d75db7e16..51c86a5f1da 100644 --- a/test/orm/test_froms.py +++ b/test/orm/test_froms.py @@ -1893,9 +1893,7 @@ def test_no_uniquing_cols(self, with_entities): .order_by(User.id) ) - compile_state = ORMSelectCompileState._create_orm_context( - stmt, toplevel=True, compiler=None - ) + compile_state = ORMSelectCompileState.create_for_statement(stmt, None) is_(compile_state._primary_entity, None) def test_column_queries_one(self): From f998ae83d2c3dcd7f625e3d6a611cf2f7c56907c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 3 Mar 2025 17:01:15 -0500 Subject: [PATCH 457/544] ensure compiler is not optional in create_for_statement() this involved moving some methods around and changing 
the target of legacy orm/query.py calling upon this method to use an ORM-specific method instead (cherry picked from commit d9b4d8ff3aae504402d324f3ebf0b8faff78f5dc) Change-Id: I6f83a5b0e8f43a3eb633216c2f2fe2d28345e9bd --- lib/sqlalchemy/orm/context.py | 55 ++++++++++++++++++++------------ lib/sqlalchemy/orm/query.py | 4 ++- lib/sqlalchemy/sql/base.py | 5 ++- lib/sqlalchemy/sql/elements.py | 11 +++++-- lib/sqlalchemy/sql/selectable.py | 5 +-- test/ext/test_hybrid.py | 5 ++- test/orm/test_froms.py | 4 ++- 7 files changed, 61 insertions(+), 28 deletions(-) diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index 5e91cdf9e14..b04d6d48c28 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -265,10 +265,10 @@ def _init_global_attributes( @classmethod def create_for_statement( cls, - statement: Union[Select, FromStatement], - compiler: Optional[SQLCompiler], + statement: Executable, + compiler: SQLCompiler, **kw: Any, - ) -> AbstractORMCompileState: + ) -> CompileState: """Create a context for a statement given a :class:`.Compiler`. This method is always invoked in the context of SQLCompiler.process(). @@ -437,15 +437,30 @@ class default_compile_options(CacheableOptions): def __init__(self, *arg, **kw): raise NotImplementedError() - if TYPE_CHECKING: + @classmethod + def create_for_statement( + cls, + statement: Executable, + compiler: SQLCompiler, + **kw: Any, + ) -> ORMCompileState: + return cls._create_orm_context( + cast("Union[Select, FromStatement]", statement), + toplevel=not compiler.stack, + compiler=compiler, + **kw, + ) - @classmethod - def create_for_statement( - cls, - statement: Union[Select, FromStatement], - compiler: Optional[SQLCompiler], - **kw: Any, - ) -> ORMCompileState: ... + @classmethod + def _create_orm_context( + cls, + statement: Union[Select, FromStatement], + *, + toplevel: bool, + compiler: Optional[SQLCompiler], + **kw: Any, + ) -> ORMCompileState: + raise NotImplementedError() def _append_dedupe_col_collection(self, obj, col_collection): dedupe = self.dedupe_columns @@ -755,12 +770,16 @@ class ORMFromStatementCompileState(ORMCompileState): eager_joins = _EMPTY_DICT @classmethod - def create_for_statement( + def _create_orm_context( cls, - statement_container: Union[Select, FromStatement], + statement: Union[Select, FromStatement], + *, + toplevel: bool, compiler: Optional[SQLCompiler], **kw: Any, ) -> ORMFromStatementCompileState: + statement_container = statement + assert isinstance(statement_container, FromStatement) if compiler is not None and compiler.stack: @@ -1067,21 +1086,17 @@ class ORMSelectCompileState(ORMCompileState, SelectState): _having_criteria = () @classmethod - def create_for_statement( + def _create_orm_context( cls, statement: Union[Select, FromStatement], + *, + toplevel: bool, compiler: Optional[SQLCompiler], **kw: Any, ) -> ORMSelectCompileState: - """compiler hook, we arrive here from compiler.visit_select() only.""" self = cls.__new__(cls) - if compiler is not None: - toplevel = not compiler.stack - else: - toplevel = True - select_statement = statement # if we are a select() that was never a legacy Query, we won't diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 4dbb3009b39..af496b245f4 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -3340,7 +3340,9 @@ def _compile_state( ORMCompileState._get_plugin_class_for_plugin(stmt, "orm"), ) - return compile_state_cls.create_for_statement(stmt, None) + return 
compile_state_cls._create_orm_context( + stmt, toplevel=True, compiler=None + ) def _compile_context(self, for_statement: bool = False) -> QueryContext: compile_state = self._compile_state(for_statement=for_statement) diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 6d409a9fb7e..7ccef84e0d5 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -68,6 +68,7 @@ from ._orm_types import DMLStrategyArgument from ._orm_types import SynchronizeSessionArgument from ._typing import _CLE + from .compiler import SQLCompiler from .elements import BindParameter from .elements import ClauseList from .elements import ColumnClause # noqa @@ -657,7 +658,9 @@ class CompileState: _ambiguous_table_name_map: Optional[_AmbiguousTableNameMap] @classmethod - def create_for_statement(cls, statement, compiler, **kw): + def create_for_statement( + cls, statement: Executable, compiler: SQLCompiler, **kw: Any + ) -> CompileState: # factory construction. if statement._propagate_attrs: diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index fde503aaf9b..cd1dc34e0a1 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -298,8 +298,7 @@ def compile( if bind: dialect = bind.dialect elif self.stringify_dialect == "default": - default = util.preloaded.engine_default - dialect = default.StrCompileDialect() + dialect = self._default_dialect() else: url = util.preloaded.engine_url dialect = url.URL.create( @@ -308,6 +307,10 @@ def compile( return self._compiler(dialect, **kw) + def _default_dialect(self): + default = util.preloaded.engine_default + return default.StrCompileDialect() + def _compiler(self, dialect: Dialect, **kw: Any) -> Compiled: """Return a compiler appropriate for this ClauseElement, given a Dialect.""" @@ -404,6 +407,10 @@ def _set_propagate_attrs(self, values: Mapping[str, Any]) -> Self: self._propagate_attrs = util.immutabledict(values) return self + def _default_compiler(self) -> SQLCompiler: + dialect = self._default_dialect() + return dialect.statement_compiler(dialect, self) # type: ignore + def _clone(self, **kw: Any) -> Self: """Create a shallow copy of this ClauseElement. 
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 5db1e729e7a..d137ab504ea 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -4694,7 +4694,7 @@ def get_plugin_class( def __init__( self, statement: Select[Any], - compiler: Optional[SQLCompiler], + compiler: SQLCompiler, **kw: Any, ): self.statement = statement @@ -5742,8 +5742,9 @@ def get_final_froms(self) -> Sequence[FromClause]: :attr:`_sql.Select.columns_clause_froms` """ + compiler = self._default_compiler() - return self._compile_state_factory(self, None)._get_display_froms() + return self._compile_state_factory(self, compiler)._get_display_froms() @property @util.deprecated( diff --git a/test/ext/test_hybrid.py b/test/ext/test_hybrid.py index 8e3d7e9cd57..f6ad0de8d4d 100644 --- a/test/ext/test_hybrid.py +++ b/test/ext/test_hybrid.py @@ -22,6 +22,7 @@ from sqlalchemy.orm import relationship from sqlalchemy.orm import Session from sqlalchemy.orm import synonym +from sqlalchemy.orm.context import ORMSelectCompileState from sqlalchemy.sql import coercions from sqlalchemy.sql import operators from sqlalchemy.sql import roles @@ -531,7 +532,9 @@ def test_labeling_for_unnamed_matches_col( "SELECT a.id, a.foo FROM a", ) - compile_state = stmt._compile_state_factory(stmt, None) + compile_state = ORMSelectCompileState._create_orm_context( + stmt, toplevel=True, compiler=None + ) eq_( compile_state._column_naming_convention( LABEL_STYLE_DISAMBIGUATE_ONLY, legacy=False diff --git a/test/orm/test_froms.py b/test/orm/test_froms.py index 51c86a5f1da..e0d75db7e16 100644 --- a/test/orm/test_froms.py +++ b/test/orm/test_froms.py @@ -1893,7 +1893,9 @@ def test_no_uniquing_cols(self, with_entities): .order_by(User.id) ) - compile_state = ORMSelectCompileState.create_for_statement(stmt, None) + compile_state = ORMSelectCompileState._create_orm_context( + stmt, toplevel=True, compiler=None + ) is_(compile_state._primary_entity, None) def test_column_queries_one(self): From f3cdad3597921ac8aecd3c67e564cf999c1e1373 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 5 Mar 2025 19:28:30 +0100 Subject: [PATCH 458/544] test related fixes improve test error logging remove obsolete emulated pipeline fix test in python 3.8 add order to test Change-Id: I2003f256a2690ee5673c72e2f1cb1340af750f83 --- .github/workflows/run-test.yaml | 41 ------------------- test/ext/asyncio/test_engine_py3k.py | 5 ++- test/sql/test_types.py | 3 +- .../plain_files/sql/common_sql_element.py | 40 +++++++++--------- 4 files changed, 25 insertions(+), 64 deletions(-) diff --git a/.github/workflows/run-test.yaml b/.github/workflows/run-test.yaml index b4dea776f05..6c93ef1b4f7 100644 --- a/.github/workflows/run-test.yaml +++ b/.github/workflows/run-test.yaml @@ -129,47 +129,6 @@ jobs: run: tox -e github-${{ matrix.build-type }} -- -q --nomemory --notimingintensive ${{ matrix.pytest-args }} continue-on-error: ${{ matrix.python-version == 'pypy-3.10' }} - run-test-arm64: - # Hopefully something native can be used at some point https://github.blog/changelog/2023-10-30-accelerate-your-ci-cd-with-arm-based-hosted-runners-in-github-actions/ - name: test-arm64-${{ matrix.python-version }}-${{ matrix.build-type }}-${{ matrix.os }} - runs-on: ubuntu-22.04 - strategy: - matrix: - python-version: - - cp37-cp37m - - cp38-cp38 - - cp39-cp39 - - cp310-cp310 - - cp311-cp311 - - cp312-cp312 - - cp313-cp313 - build-type: - - "cext" - - "nocext" - - fail-fast: false - - steps: - - name: Checkout repo - uses: actions/checkout@v4 
- - - name: Set up emulation - run: | - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - - - name: Run tests - uses: docker://quay.io/pypa/manylinux2014_aarch64 - with: - args: | - bash -c " - export PATH=/opt/python/${{ matrix.python-version }}/bin:$PATH && - python --version && - python -m pip install --upgrade pip && - pip install --upgrade tox setuptools && - pip list && - tox -e github-${{ matrix.build-type }} -- -q --nomemory --notimingintensive ${{ matrix.pytest-args }} - " - run-tox: name: ${{ matrix.tox-env }}-${{ matrix.python-version }} runs-on: ${{ matrix.os }} diff --git a/test/ext/asyncio/test_engine_py3k.py b/test/ext/asyncio/test_engine_py3k.py index 6be408ecaea..231d32cbe18 100644 --- a/test/ext/asyncio/test_engine_py3k.py +++ b/test/ext/asyncio/test_engine_py3k.py @@ -1335,11 +1335,12 @@ async def test_one_multi_result(self, async_engine): @async_test async def test_scalars(self, async_engine, case): users = self.tables.users + stmt = select(users).order_by(users.c.user_id) async with async_engine.connect() as conn: if case == "scalars": - result = (await conn.scalars(select(users))).all() + result = (await conn.scalars(stmt)).all() elif case == "stream_scalars": - result = await (await conn.stream_scalars(select(users))).all() + result = await (await conn.stream_scalars(stmt)).all() eq_(result, list(range(1, 20))) diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 88c3b3a2540..5693ba70260 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -629,8 +629,9 @@ def test_pickle_types_other_process(self, name, type_, use_adapt): proc = subprocess.run( [sys.executable, "-c", code], env={**os.environ, "PYTHONPATH": pythonpath}, + stderr=subprocess.PIPE, ) - eq_(proc.returncode, 0) + eq_(proc.returncode, 0, proc.stderr.decode(errors="replace")) os.unlink(name) diff --git a/test/typing/plain_files/sql/common_sql_element.py b/test/typing/plain_files/sql/common_sql_element.py index d5b8f883400..5ce0793ac69 100644 --- a/test/typing/plain_files/sql/common_sql_element.py +++ b/test/typing/plain_files/sql/common_sql_element.py @@ -193,35 +193,35 @@ def core_expr(email: str) -> SQLColumnExpression[bool]: second_stmt = select(str_col, int_col) third_stmt = select(int_col, str_col) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(union(first_stmt, second_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(union_all(first_stmt, second_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(except_(first_stmt, second_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(except_all(first_stmt, second_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(intersect(first_stmt, second_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(intersect_all(first_stmt, second_stmt)) -# EXPECTED_TYPE: Result[tuple[str, int]] +# EXPECTED_TYPE: Result[Tuple[str, int]] reveal_type(Session().execute(union(first_stmt, second_stmt))) -# EXPECTED_TYPE: Result[tuple[str, int]] +# EXPECTED_TYPE: Result[Tuple[str, int]] reveal_type(Session().execute(union_all(first_stmt, second_stmt))) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, 
int]] reveal_type(first_stmt.union(second_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(first_stmt.union_all(second_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(first_stmt.except_(second_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(first_stmt.except_all(second_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(first_stmt.intersect(second_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(first_stmt.intersect_all(second_stmt)) # TODO: the following do not error because _SelectStatementForCompoundArgument @@ -243,15 +243,15 @@ def core_expr(email: str) -> SQLColumnExpression[bool]: # EXPECTED_TYPE: CompoundSelect[Never] reveal_type(intersect_all(first_stmt, third_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(first_stmt.union(third_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(first_stmt.union_all(third_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(first_stmt.except_(third_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(first_stmt.except_all(third_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(first_stmt.intersect(third_stmt)) -# EXPECTED_TYPE: CompoundSelect[tuple[str, int]] +# EXPECTED_TYPE: CompoundSelect[Tuple[str, int]] reveal_type(first_stmt.intersect_all(third_stmt)) From 1db6ee03c91cdcb618bac3c5119861656ba16521 Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Tue, 4 Mar 2025 15:28:47 -0500 Subject: [PATCH 459/544] Add type annotations to `postgresql.json` (Same as https://github.com/sqlalchemy/sqlalchemy/pull/12384, but for `json`.) ### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [x] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. 
Related to #6810 **Have a nice day!** Closes: #12391 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12391 Pull-request-sha: 0a43724f1737a4519629a13e2d6bf33f7aecb9ac Change-Id: I2a0e88effccf351de7fa72389ee646532ce9cf69 (cherry picked from commit c7f4e8b9370487135777677eaf4d8992825c24aa) --- lib/sqlalchemy/dialects/postgresql/json.py | 75 +++++++++++++++------- lib/sqlalchemy/sql/sqltypes.py | 21 ++++-- lib/sqlalchemy/sql/type_api.py | 17 ++++- 3 files changed, 83 insertions(+), 30 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index 2f26b39e31e..663be8b7a2b 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -4,8 +4,15 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import List +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union from .array import ARRAY from .array import array as _pg_array @@ -21,13 +28,23 @@ from .operators import PATH_MATCH from ... import types as sqltypes from ...sql import cast +from ...sql._typing import _T + +if TYPE_CHECKING: + from ...engine.interfaces import Dialect + from ...sql.elements import ColumnElement + from ...sql.type_api import _BindProcessorType + from ...sql.type_api import _LiteralProcessorType + from ...sql.type_api import TypeEngine __all__ = ("JSON", "JSONB") class JSONPathType(sqltypes.JSON.JSONPathType): - def _processor(self, dialect, super_proc): - def process(value): + def _processor( + self, dialect: Dialect, super_proc: Optional[Callable[[Any], Any]] + ) -> Callable[[Any], Any]: + def process(value: Any) -> Any: if isinstance(value, str): # If it's already a string assume that it's in json path # format. This allows using cast with json paths literals @@ -44,11 +61,13 @@ def process(value): return process - def bind_processor(self, dialect): - return self._processor(dialect, self.string_bind_processor(dialect)) + def bind_processor(self, dialect: Dialect) -> _BindProcessorType[Any]: + return self._processor(dialect, self.string_bind_processor(dialect)) # type: ignore[return-value] # noqa: E501 - def literal_processor(self, dialect): - return self._processor(dialect, self.string_literal_processor(dialect)) + def literal_processor( + self, dialect: Dialect + ) -> _LiteralProcessorType[Any]: + return self._processor(dialect, self.string_literal_processor(dialect)) # type: ignore[return-value] # noqa: E501 class JSONPATH(JSONPathType): @@ -148,9 +167,13 @@ class JSON(sqltypes.JSON): """ # noqa render_bind_cast = True - astext_type = sqltypes.Text() + astext_type: TypeEngine[str] = sqltypes.Text() - def __init__(self, none_as_null=False, astext_type=None): + def __init__( + self, + none_as_null: bool = False, + astext_type: Optional[TypeEngine[str]] = None, + ): """Construct a :class:`_types.JSON` type. 
:param none_as_null: if True, persist the value ``None`` as a @@ -175,11 +198,13 @@ def __init__(self, none_as_null=False, astext_type=None): if astext_type is not None: self.astext_type = astext_type - class Comparator(sqltypes.JSON.Comparator): + class Comparator(sqltypes.JSON.Comparator[_T]): """Define comparison operations for :class:`_types.JSON`.""" + type: JSON + @property - def astext(self): + def astext(self) -> ColumnElement[str]: """On an indexed expression, use the "astext" (e.g. "->>") conversion when rendered in SQL. @@ -193,13 +218,13 @@ def astext(self): """ if isinstance(self.expr.right.type, sqltypes.JSON.JSONPathType): - return self.expr.left.operate( + return self.expr.left.operate( # type: ignore[no-any-return] JSONPATH_ASTEXT, self.expr.right, result_type=self.type.astext_type, ) else: - return self.expr.left.operate( + return self.expr.left.operate( # type: ignore[no-any-return] ASTEXT, self.expr.right, result_type=self.type.astext_type ) @@ -258,28 +283,30 @@ class JSONB(JSON): __visit_name__ = "JSONB" - class Comparator(JSON.Comparator): + class Comparator(JSON.Comparator[_T]): """Define comparison operations for :class:`_types.JSON`.""" - def has_key(self, other): + type: JSONB + + def has_key(self, other: Any) -> ColumnElement[bool]: """Boolean expression. Test for presence of a key (equivalent of the ``?`` operator). Note that the key may be a SQLA expression. """ return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean) - def has_all(self, other): + def has_all(self, other: Any) -> ColumnElement[bool]: """Boolean expression. Test for presence of all keys in jsonb (equivalent of the ``?&`` operator) """ return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean) - def has_any(self, other): + def has_any(self, other: Any) -> ColumnElement[bool]: """Boolean expression. Test for presence of any key in jsonb (equivalent of the ``?|`` operator) """ return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean) - def contains(self, other, **kwargs): + def contains(self, other: Any, **kwargs: Any) -> ColumnElement[bool]: """Boolean expression. Test if keys (or array) are a superset of/contained the keys of the argument jsonb expression (equivalent of the ``@>`` operator). @@ -289,7 +316,7 @@ def contains(self, other, **kwargs): """ return self.operate(CONTAINS, other, result_type=sqltypes.Boolean) - def contained_by(self, other): + def contained_by(self, other: Any) -> ColumnElement[bool]: """Boolean expression. Test if keys are a proper subset of the keys of the argument jsonb expression (equivalent of the ``<@`` operator). @@ -298,7 +325,9 @@ def contained_by(self, other): CONTAINED_BY, other, result_type=sqltypes.Boolean ) - def delete_path(self, array): + def delete_path( + self, array: Union[List[str], _pg_array[str]] + ) -> ColumnElement[JSONB]: """JSONB expression. Deletes field or array element specified in the argument array (equivalent of the ``#-`` operator). @@ -308,11 +337,11 @@ def delete_path(self, array): .. versionadded:: 2.0 """ if not isinstance(array, _pg_array): - array = _pg_array(array) + array = _pg_array(array) # type: ignore[no-untyped-call] right_side = cast(array, ARRAY(sqltypes.TEXT)) return self.operate(DELETE_PATH, right_side, result_type=JSONB) - def path_exists(self, other): + def path_exists(self, other: Any) -> ColumnElement[bool]: """Boolean expression. Test for presence of item given by the argument JSONPath expression (equivalent of the ``@?`` operator). 
@@ -322,7 +351,7 @@ def path_exists(self, other): PATH_EXISTS, other, result_type=sqltypes.Boolean ) - def path_match(self, other): + def path_match(self, other: Any) -> ColumnElement[bool]: """Boolean expression. Test if JSONPath predicate given by the argument JSONPath expression matches (equivalent of the ``@@`` operator). diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index ee471a6c4ec..ad220356f04 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -72,6 +72,7 @@ from .schema import MetaData from .type_api import _BindProcessorType from .type_api import _ComparatorFactory + from .type_api import _LiteralProcessorType from .type_api import _MatchedOnType from .type_api import _ResultProcessorType from ..engine.interfaces import Dialect @@ -2465,17 +2466,21 @@ class JSONElementType(TypeEngine[Any]): _integer = Integer() _string = String() - def string_bind_processor(self, dialect): + def string_bind_processor( + self, dialect: Dialect + ) -> Optional[_BindProcessorType[str]]: return self._string._cached_bind_processor(dialect) - def string_literal_processor(self, dialect): + def string_literal_processor( + self, dialect: Dialect + ) -> Optional[_LiteralProcessorType[str]]: return self._string._cached_literal_processor(dialect) - def bind_processor(self, dialect): + def bind_processor(self, dialect: Dialect) -> _BindProcessorType[Any]: int_processor = self._integer._cached_bind_processor(dialect) string_processor = self.string_bind_processor(dialect) - def process(value): + def process(value: Optional[Any]) -> Any: if int_processor and isinstance(value, int): value = int_processor(value) elif string_processor and isinstance(value, str): @@ -2484,11 +2489,13 @@ def process(value): return process - def literal_processor(self, dialect): + def literal_processor( + self, dialect: Dialect + ) -> _LiteralProcessorType[Any]: int_processor = self._integer._cached_literal_processor(dialect) string_processor = self.string_literal_processor(dialect) - def process(value): + def process(value: Optional[Any]) -> Any: if int_processor and isinstance(value, int): value = int_processor(value) elif string_processor and isinstance(value, str): @@ -2539,6 +2546,8 @@ class Comparator(Indexable.Comparator[_T], Concatenable.Comparator[_T]): __slots__ = () + type: JSON + def _setup_getitem(self, index): if not isinstance(index, str) and isinstance( index, collections_abc.Sequence diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index aeb804d3f9b..8cdb323b2a6 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -67,6 +67,7 @@ _O = TypeVar("_O", bound=object) _TE = TypeVar("_TE", bound="TypeEngine[Any]") _CT = TypeVar("_CT", bound=Any) +_RT = TypeVar("_RT", bound=Any) _MatchedOnType = Union[ "GenericProtocol[Any]", TypeAliasType, NewType, Type[Any] @@ -186,10 +187,24 @@ def __init__(self, expr: ColumnElement[_CT]): def __reduce__(self) -> Any: return self.__class__, (self.expr,) + @overload + def operate( + self, + op: OperatorType, + *other: Any, + result_type: Type[TypeEngine[_RT]], + **kwargs: Any, + ) -> ColumnElement[_RT]: ... + + @overload + def operate( + self, op: OperatorType, *other: Any, **kwargs: Any + ) -> ColumnElement[_CT]: ... 
+ @util.preload_module("sqlalchemy.sql.default_comparator") def operate( self, op: OperatorType, *other: Any, **kwargs: Any - ) -> ColumnElement[_CT]: + ) -> ColumnElement[Any]: default_comparator = util.preloaded.sql_default_comparator op_fn, addtl_kw = default_comparator.operator_lookup[op.__name__] if kwargs: From 1658ce34fcf05d7ec25633431377a066a95b1cdf Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 3 Mar 2025 23:35:48 +0100 Subject: [PATCH 460/544] Improve identity column reflection Add SQL typing to reflection query used to retrieve a the structure of IDENTITY columns, adding explicit JSON typing to the query to suit unusual PostgreSQL driver configurations that don't support JSON natively. Fixed issue affecting PostgreSQL 17.3 and greater where reflection of domains with "NOT NULL" as part of their definition would include an invalid constraint entry in the data returned by :meth:`_postgresql.PGInspector.get_domains` corresponding to an additional "NOT NULL" constraint that isn't a CHECK constraint; the existing ``"nullable"`` entry in the dictionary already indicates if the domain includes a "not null" constraint. Note that such domains also cannot be reflected on PostgreSQL 17.0 through 17.2 due to a bug on the PostgreSQL side; if encountering errors in reflection of domains which include NOT NULL, upgrade to PostgreSQL server 17.3 or greater. Fixes: #11751 Change-Id: I8e69de51601dca3257186e38c6f699fbfd9014c6 (cherry picked from commit b23b6db14ac33a792520a5036af1ab02157b7df6) --- doc/build/changelog/unreleased_20/11751.rst | 21 +++++++++++++++++++++ lib/sqlalchemy/dialects/postgresql/base.py | 10 ++++++---- test/dialect/postgresql/test_reflection.py | 2 +- 3 files changed, 28 insertions(+), 5 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11751.rst diff --git a/doc/build/changelog/unreleased_20/11751.rst b/doc/build/changelog/unreleased_20/11751.rst new file mode 100644 index 00000000000..3686f4fbe90 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11751.rst @@ -0,0 +1,21 @@ +.. change:: + :tags: bug, postgresql + :tickets: 11751 + + Add SQL typing to reflection query used to retrieve a the structure + of IDENTITY columns, adding explicit JSON typing to the query to suit + unusual PostgreSQL driver configurations that don't support JSON natively. + +.. change:: + :tags: bug, postgresql + + Fixed issue affecting PostgreSQL 17.3 and greater where reflection of + domains with "NOT NULL" as part of their definition would include an + invalid constraint entry in the data returned by + :meth:`_postgresql.PGInspector.get_domains` corresponding to an additional + "NOT NULL" constraint that isn't a CHECK constraint; the existing + ``"nullable"`` entry in the dictionary already indicates if the domain + includes a "not null" constraint. Note that such domains also cannot be + reflected on PostgreSQL 17.0 through 17.2 due to a bug on the PostgreSQL + side; if encountering errors in reflection of domains which include NOT + NULL, upgrade to PostgreSQL server 17.3 or greater. 
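
Illustrative sketch of how the reflected domain data described in the changelog entry above is typically consumed; this is not part of the patch, and the connection URL is a placeholder:

    from sqlalchemy import create_engine, inspect

    # placeholder DSN; any PostgreSQL database that defines domains will do
    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    inspector = inspect(engine)
    for domain in inspector.get_domains(schema="public"):
        # "nullable" already reports a NOT NULL in the domain definition; with
        # this change, "constraints" lists only genuine CHECK constraints
        print(domain["name"], domain["nullable"], domain["constraints"])
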
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 9bfaa277c8c..318da6d67b2 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -3601,6 +3601,7 @@ def _columns_query(self, schema, has_filter_names, scope, kind): pg_catalog.pg_sequence.c.seqcache, "cycle", pg_catalog.pg_sequence.c.seqcycle, + type_=sqltypes.JSON(), ) ) .select_from(pg_catalog.pg_sequence) @@ -5010,11 +5011,12 @@ def _load_domains(self, connection, schema=None, **kw): key=lambda t: t[0], ) for name, def_ in sorted_constraints: - # constraint is in the form "CHECK (expression)". + # constraint is in the form "CHECK (expression)" + # or "NOT NULL". Ignore the "NOT NULL" and # remove "CHECK (" and the tailing ")". - check = def_[7:-1] - constraints.append({"name": name, "check": check}) - + if def_.casefold().startswith("check"): + check = def_[7:-1] + constraints.append({"name": name, "check": check}) domain_rec: ReflectedDomain = { "name": domain["name"], "schema": domain["schema"], diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 510c8aa33c5..4d889c6775f 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -432,7 +432,7 @@ def testdomain(self, connection, broken_nullable_domains): @testing.fixture def testtable(self, connection, testdomain): connection.exec_driver_sql( - "CREATE TABLE testtable (question integer, answer " "testdomain)" + "CREATE TABLE testtable (question integer, answer testdomain)" ) yield connection.exec_driver_sql("DROP TABLE testtable") From a055dbe964abd3ed1ccb55a55e510852f9dfee5f Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 11 Mar 2025 08:33:30 -0400 Subject: [PATCH 461/544] consolidate kwargs for "FOR UPDATE OF" Fixed compiler issue in the PostgreSQL dialect where incorrect keywords would be passed when using "FOR UPDATE OF" inside of a subquery. Fixes: #12417 Change-Id: I6255d165e8e719e1786e78aa60ee8e6a95af1dcb (cherry picked from commit 21630d2574328a0f01a1e994e264f56f1adf99db) --- doc/build/changelog/unreleased_20/12417.rst | 6 ++++++ lib/sqlalchemy/dialects/postgresql/base.py | 5 +++-- test/dialect/postgresql/test_compiler.py | 9 +++++++++ 3 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12417.rst diff --git a/doc/build/changelog/unreleased_20/12417.rst b/doc/build/changelog/unreleased_20/12417.rst new file mode 100644 index 00000000000..b9b22a82475 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12417.rst @@ -0,0 +1,6 @@ +.. change:: + :tags: bug, postgresql + :tickets: 12417 + + Fixed compiler issue in the PostgreSQL dialect where incorrect keywords + would be passed when using "FOR UPDATE OF" inside of a subquery. 
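
A standalone sketch of the construct addressed by this fix, equivalent to the compiler test added further below; it is not part of the patch, and the table and column names are illustrative only:

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.dialects import postgresql

    metadata = MetaData()
    mytable = Table("mytable", metadata, Column("myid", Integer, primary_key=True))

    # FOR UPDATE OF applied to a subquery that is then used as a LATERAL
    subq = select(mytable.c.myid).with_for_update(of=mytable).lateral()
    stmt = select(subq.c.myid)

    print(stmt.compile(dialect=postgresql.dialect()))
    # SELECT anon_1.myid FROM LATERAL (SELECT mytable.myid AS myid
    # FROM mytable FOR UPDATE OF mytable) AS anon_1
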
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 318da6d67b2..ca2a3fa59ef 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -2001,9 +2001,10 @@ def for_update_clause(self, select, **kw): for c in select._for_update_arg.of: tables.update(sql_util.surface_selectables_only(c)) + of_kw = dict(kw) + of_kw.update(ashint=True, use_schema=False) tmp += " OF " + ", ".join( - self.process(table, ashint=True, use_schema=False, **kw) - for table in tables + self.process(table, **of_kw) for table in tables ) if select._for_update_arg.nowait: diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index f02b42c0b21..55a43d20d1e 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -1731,6 +1731,15 @@ def test_for_update(self): "FOR UPDATE OF table1", ) + # test issue #12417 + subquery = select(table1.c.myid).with_for_update(of=table1).lateral() + statement = select(subquery.c.myid) + self.assert_compile( + statement, + "SELECT anon_1.myid FROM LATERAL (SELECT mytable.myid AS myid " + "FROM mytable FOR UPDATE OF mytable) AS anon_1", + ) + def test_for_update_with_schema(self): m = MetaData() table1 = Table( From f0de9dbbb0090aab056e9132fc84b0bfa17cd2bf Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Tue, 11 Mar 2025 09:27:13 -0400 Subject: [PATCH 462/544] Ensure PostgreSQL network address types are not cast as VARCHAR Fixed issue in PostgreSQL network types :class:`_postgresql.INET`, :class:`_postgresql.CIDR`, :class:`_postgresql.MACADDR`, :class:`_postgresql.MACADDR8` where sending string values to compare to these types would render an explicit CAST to VARCHAR, causing some SQL / driver combinations to fail. Pull request courtesy Denis Laxalde. Fixes: #12060 Closes: #12412 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12412 Pull-request-sha: 029fda7f2d182af71ebc48aef191aa9114927f28 Change-Id: Id4a502ebc119775567cacddbabef2ce9715c1a9f (cherry picked from commit f91e61e5c80004db6db47f4e13f37553ff22675a) --- doc/build/changelog/unreleased_20/12060.rst | 9 +++ lib/sqlalchemy/dialects/postgresql/types.py | 18 ++++-- test/dialect/postgresql/test_types.py | 67 +++++++++++++++++++++ 3 files changed, 90 insertions(+), 4 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12060.rst diff --git a/doc/build/changelog/unreleased_20/12060.rst b/doc/build/changelog/unreleased_20/12060.rst new file mode 100644 index 00000000000..c215d3799f3 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12060.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, postgresql + :tickets: 12060 + + Fixed issue in PostgreSQL network types :class:`_postgresql.INET`, + :class:`_postgresql.CIDR`, :class:`_postgresql.MACADDR`, + :class:`_postgresql.MACADDR8` where sending string values to compare to + these types would render an explicit CAST to VARCHAR, causing some SQL / + driver combinations to fail. Pull request courtesy Denis Laxalde. 
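
A minimal sketch of the comparison pattern covered by this change, mirroring the new compiler test below; it is not part of the patch, and the table and column names are illustrative only:

    from sqlalchemy import Column, MetaData, Table, select
    from sqlalchemy.dialects import postgresql

    metadata = MetaData()
    addresses = Table("addresses", metadata, Column("addr", postgresql.INET))

    # comparing an INET column to a plain string; the bound parameter now keeps
    # the column's type rather than being wrapped in a CAST to VARCHAR
    stmt = select(addresses.c.addr).where(addresses.c.addr == "127.0.0.1")

    print(stmt.compile(dialect=postgresql.dialect()))
    # SELECT addresses.addr FROM addresses WHERE addresses.addr = %(addr_1)s
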
diff --git a/lib/sqlalchemy/dialects/postgresql/types.py b/lib/sqlalchemy/dialects/postgresql/types.py index 6fe4f576ebd..1aed2bf4724 100644 --- a/lib/sqlalchemy/dialects/postgresql/types.py +++ b/lib/sqlalchemy/dialects/postgresql/types.py @@ -52,28 +52,38 @@ class BYTEA(sqltypes.LargeBinary): __visit_name__ = "BYTEA" -class INET(sqltypes.TypeEngine[str]): +class _NetworkAddressTypeMixin: + + def coerce_compared_value( + self, op: Optional[OperatorType], value: Any + ) -> TypeEngine[Any]: + if TYPE_CHECKING: + assert isinstance(self, TypeEngine) + return self + + +class INET(_NetworkAddressTypeMixin, sqltypes.TypeEngine[str]): __visit_name__ = "INET" PGInet = INET -class CIDR(sqltypes.TypeEngine[str]): +class CIDR(_NetworkAddressTypeMixin, sqltypes.TypeEngine[str]): __visit_name__ = "CIDR" PGCidr = CIDR -class MACADDR(sqltypes.TypeEngine[str]): +class MACADDR(_NetworkAddressTypeMixin, sqltypes.TypeEngine[str]): __visit_name__ = "MACADDR" PGMacAddr = MACADDR -class MACADDR8(sqltypes.TypeEngine[str]): +class MACADDR8(_NetworkAddressTypeMixin, sqltypes.TypeEngine[str]): __visit_name__ = "MACADDR8" diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index 5f39aa608c8..795a897699b 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -3447,6 +3447,49 @@ class SpecialTypesCompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_bit_compile(self, type_, expected): self.assert_compile(type_, expected) + @testing.combinations( + (psycopg.dialect(),), + (psycopg2.dialect(),), + (asyncpg.dialect(),), + (pg8000.dialect(),), + argnames="dialect", + id_="n", + ) + def test_network_address_cast(self, metadata, dialect): + t = Table( + "addresses", + metadata, + Column("id", Integer, primary_key=True), + Column("addr", postgresql.INET), + Column("addr2", postgresql.MACADDR), + Column("addr3", postgresql.CIDR), + Column("addr4", postgresql.MACADDR8), + ) + stmt = select(t.c.id).where( + t.c.addr == "127.0.0.1", + t.c.addr2 == "08:00:2b:01:02:03", + t.c.addr3 == "192.168.100.128/25", + t.c.addr4 == "08:00:2b:01:02:03:04:05", + ) + param, param2, param3, param4 = { + "format": ("%s", "%s", "%s", "%s"), + "numeric_dollar": ("$1", "$2", "$3", "$4"), + "pyformat": ( + "%(addr_1)s", + "%(addr2_1)s", + "%(addr3_1)s", + "%(addr4_1)s", + ), + }[dialect.paramstyle] + expected = ( + "SELECT addresses.id FROM addresses " + f"WHERE addresses.addr = {param} " + f"AND addresses.addr2 = {param2} " + f"AND addresses.addr3 = {param3} " + f"AND addresses.addr4 = {param4}" + ) + self.assert_compile(stmt, expected, dialect=dialect) + class SpecialTypesTest(fixtures.TablesTest, ComparesTables): """test DDL and reflection of PG-specific types""" @@ -3501,6 +3544,30 @@ def test_reflection(self, special_types_table, connection): assert t.c.precision_interval.type.precision == 3 assert t.c.bitstring.type.length == 4 + @testing.combinations( + (postgresql.INET, "127.0.0.1"), + (postgresql.CIDR, "192.168.100.128/25"), + (postgresql.MACADDR, "08:00:2b:01:02:03"), + (postgresql.MACADDR8, "08:00:2b:01:02:03:04:05"), + argnames="column_type, value", + id_="na", + ) + def test_network_address_round_trip( + self, connection, metadata, column_type, value + ): + t = Table( + "addresses", + metadata, + Column("name", String), + Column("value", column_type), + ) + t.create(connection) + connection.execute(t.insert(), {"name": "test", "value": value}) + eq_( + connection.scalar(select(t.c.name).where(t.c.value == value)), + "test", + ) + def 
test_tsvector_round_trip(self, connection, metadata): t = Table("t1", metadata, Column("data", postgresql.TSVECTOR)) t.create(connection) From 28429dc734052d942cd964cd236174b3519e919d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 11 Mar 2025 14:18:59 -0400 Subject: [PATCH 463/544] - 2.0.39 --- doc/build/changelog/changelog_20.rst | 115 +++++++++++++++++++- doc/build/changelog/unreleased_20/11751.rst | 21 ---- doc/build/changelog/unreleased_20/11922.rst | 8 -- doc/build/changelog/unreleased_20/12060.rst | 9 -- doc/build/changelog/unreleased_20/12326.rst | 7 -- doc/build/changelog/unreleased_20/12328.rst | 8 -- doc/build/changelog/unreleased_20/12338.rst | 8 -- doc/build/changelog/unreleased_20/12357.rst | 9 -- doc/build/changelog/unreleased_20/12364.rst | 7 -- doc/build/changelog/unreleased_20/12368.rst | 9 -- doc/build/changelog/unreleased_20/12382.rst | 10 -- doc/build/changelog/unreleased_20/12417.rst | 6 - doc/build/conf.py | 4 +- 13 files changed, 116 insertions(+), 105 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/11751.rst delete mode 100644 doc/build/changelog/unreleased_20/11922.rst delete mode 100644 doc/build/changelog/unreleased_20/12060.rst delete mode 100644 doc/build/changelog/unreleased_20/12326.rst delete mode 100644 doc/build/changelog/unreleased_20/12328.rst delete mode 100644 doc/build/changelog/unreleased_20/12338.rst delete mode 100644 doc/build/changelog/unreleased_20/12357.rst delete mode 100644 doc/build/changelog/unreleased_20/12364.rst delete mode 100644 doc/build/changelog/unreleased_20/12368.rst delete mode 100644 doc/build/changelog/unreleased_20/12382.rst delete mode 100644 doc/build/changelog/unreleased_20/12417.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index e5e9a87d9af..213e3b38029 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,120 @@ .. changelog:: :version: 2.0.39 - :include_notes_from: unreleased_20 + :released: March 11, 2025 + + .. change:: + :tags: bug, postgresql + :tickets: 11751 + + Add SQL typing to reflection query used to retrieve a the structure + of IDENTITY columns, adding explicit JSON typing to the query to suit + unusual PostgreSQL driver configurations that don't support JSON natively. + + .. change:: + :tags: bug, postgresql + + Fixed issue affecting PostgreSQL 17.3 and greater where reflection of + domains with "NOT NULL" as part of their definition would include an + invalid constraint entry in the data returned by + :meth:`_postgresql.PGInspector.get_domains` corresponding to an additional + "NOT NULL" constraint that isn't a CHECK constraint; the existing + ``"nullable"`` entry in the dictionary already indicates if the domain + includes a "not null" constraint. Note that such domains also cannot be + reflected on PostgreSQL 17.0 through 17.2 due to a bug on the PostgreSQL + side; if encountering errors in reflection of domains which include NOT + NULL, upgrade to PostgreSQL server 17.3 or greater. + + .. change:: + :tags: typing, usecase + :tickets: 11922 + + Support generic types for compound selects (:func:`_sql.union`, + :func:`_sql.union_all`, :meth:`_sql.Select.union`, + :meth:`_sql.Select.union_all`, etc) returning the type of the first select. + Pull request courtesy of Mingyu Park. + + .. 
change:: + :tags: bug, postgresql + :tickets: 12060 + + Fixed issue in PostgreSQL network types :class:`_postgresql.INET`, + :class:`_postgresql.CIDR`, :class:`_postgresql.MACADDR`, + :class:`_postgresql.MACADDR8` where sending string values to compare to + these types would render an explicit CAST to VARCHAR, causing some SQL / + driver combinations to fail. Pull request courtesy Denis Laxalde. + + .. change:: + :tags: bug, orm + :tickets: 12326 + + Fixed bug where using DML returning such as :meth:`.Insert.returning` with + an ORM model that has :func:`_orm.column_property` constructs that contain + subqueries would fail with an internal error. + + .. change:: + :tags: bug, orm + :tickets: 12328 + + Fixed bug in ORM enabled UPDATE (and theoretically DELETE) where using a + multi-table DML statement would not allow ORM mapped columns from mappers + other than the primary UPDATE mapper to be named in the RETURNING clause; + they would be omitted instead and cause a column not found exception. + + .. change:: + :tags: bug, asyncio + :tickets: 12338 + + Fixed bug where :meth:`_asyncio.AsyncResult.scalar`, + :meth:`_asyncio.AsyncResult.scalar_one_or_none`, and + :meth:`_asyncio.AsyncResult.scalar_one` would raise an ``AttributeError`` + due to a missing internal attribute. Pull request courtesy Allen Ho. + + .. change:: + :tags: bug, orm + :tickets: 12357 + + Fixed issue where the "is ORM" flag of a :func:`.select` or other ORM + statement would not be propagated to the ORM :class:`.Session` based on a + multi-part operator expression alone, e.g. such as ``Cls.attr + Cls.attr + + Cls.attr`` or similar, leading to ORM behaviors not taking place for such + statements. + + .. change:: + :tags: bug, orm + :tickets: 12364 + + Fixed issue where using :func:`_orm.aliased` around a :class:`.CTE` + construct could cause inappropriate "duplicate CTE" errors in cases where + that aliased construct appeared multiple times in a single statement. + + .. change:: + :tags: bug, sqlite + :tickets: 12368 + + Fixed issue that omitted the comma between multiple SQLite table extension + clauses, currently ``WITH ROWID`` and ``STRICT``, when both options + :paramref:`.Table.sqlite_with_rowid` and :paramref:`.Table.sqlite_strict` + were configured at their non-default settings at the same time. Pull + request courtesy david-fed. + + .. change:: + :tags: bug, sql + :tickets: 12382 + + Added new parameters :paramref:`.AddConstraint.isolate_from_table` and + :paramref:`.DropConstraint.isolate_from_table`, defaulting to True, which + both document and allow to be controllable the long-standing behavior of + these two constructs blocking the given constraint from being included + inline within the "CREATE TABLE" sequence, under the assumption that + separate add/drop directives were to be used. + + .. change:: + :tags: bug, postgresql + :tickets: 12417 + + Fixed compiler issue in the PostgreSQL dialect where incorrect keywords + would be passed when using "FOR UPDATE OF" inside of a subquery. .. changelog:: :version: 2.0.38 diff --git a/doc/build/changelog/unreleased_20/11751.rst b/doc/build/changelog/unreleased_20/11751.rst deleted file mode 100644 index 3686f4fbe90..00000000000 --- a/doc/build/changelog/unreleased_20/11751.rst +++ /dev/null @@ -1,21 +0,0 @@ -.. 
change:: - :tags: bug, postgresql - :tickets: 11751 - - Add SQL typing to reflection query used to retrieve a the structure - of IDENTITY columns, adding explicit JSON typing to the query to suit - unusual PostgreSQL driver configurations that don't support JSON natively. - -.. change:: - :tags: bug, postgresql - - Fixed issue affecting PostgreSQL 17.3 and greater where reflection of - domains with "NOT NULL" as part of their definition would include an - invalid constraint entry in the data returned by - :meth:`_postgresql.PGInspector.get_domains` corresponding to an additional - "NOT NULL" constraint that isn't a CHECK constraint; the existing - ``"nullable"`` entry in the dictionary already indicates if the domain - includes a "not null" constraint. Note that such domains also cannot be - reflected on PostgreSQL 17.0 through 17.2 due to a bug on the PostgreSQL - side; if encountering errors in reflection of domains which include NOT - NULL, upgrade to PostgreSQL server 17.3 or greater. diff --git a/doc/build/changelog/unreleased_20/11922.rst b/doc/build/changelog/unreleased_20/11922.rst deleted file mode 100644 index f0e7e3d9787..00000000000 --- a/doc/build/changelog/unreleased_20/11922.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: typing, usecase - :tickets: 11922 - - Support generic types for compound selects (:func:`_sql.union`, - :func:`_sql.union_all`, :meth:`_sql.Select.union`, - :meth:`_sql.Select.union_all`, etc) returning the type of the first select. - Pull request courtesy of Mingyu Park. diff --git a/doc/build/changelog/unreleased_20/12060.rst b/doc/build/changelog/unreleased_20/12060.rst deleted file mode 100644 index c215d3799f3..00000000000 --- a/doc/build/changelog/unreleased_20/12060.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, postgresql - :tickets: 12060 - - Fixed issue in PostgreSQL network types :class:`_postgresql.INET`, - :class:`_postgresql.CIDR`, :class:`_postgresql.MACADDR`, - :class:`_postgresql.MACADDR8` where sending string values to compare to - these types would render an explicit CAST to VARCHAR, causing some SQL / - driver combinations to fail. Pull request courtesy Denis Laxalde. diff --git a/doc/build/changelog/unreleased_20/12326.rst b/doc/build/changelog/unreleased_20/12326.rst deleted file mode 100644 index 88e5de2f751..00000000000 --- a/doc/build/changelog/unreleased_20/12326.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 12326 - - Fixed bug where using DML returning such as :meth:`.Insert.returning` with - an ORM model that has :func:`_orm.column_property` constructs that contain - subqueries would fail with an internal error. diff --git a/doc/build/changelog/unreleased_20/12328.rst b/doc/build/changelog/unreleased_20/12328.rst deleted file mode 100644 index 9d9b70965e8..00000000000 --- a/doc/build/changelog/unreleased_20/12328.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 12328 - - Fixed bug in ORM enabled UPDATE (and theoretically DELETE) where using a - multi-table DML statement would not allow ORM mapped columns from mappers - other than the primary UPDATE mapper to be named in the RETURNING clause; - they would be omitted instead and cause a column not found exception. diff --git a/doc/build/changelog/unreleased_20/12338.rst b/doc/build/changelog/unreleased_20/12338.rst deleted file mode 100644 index 6a71f08d736..00000000000 --- a/doc/build/changelog/unreleased_20/12338.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
change:: - :tags: bug, asyncio - :tickets: 12338 - - Fixed bug where :meth:`_asyncio.AsyncResult.scalar`, - :meth:`_asyncio.AsyncResult.scalar_one_or_none`, and - :meth:`_asyncio.AsyncResult.scalar_one` would raise an ``AttributeError`` - due to a missing internal attribute. Pull request courtesy Allen Ho. diff --git a/doc/build/changelog/unreleased_20/12357.rst b/doc/build/changelog/unreleased_20/12357.rst deleted file mode 100644 index 79fd888ba32..00000000000 --- a/doc/build/changelog/unreleased_20/12357.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 12357 - - Fixed issue where the "is ORM" flag of a :func:`.select` or other ORM - statement would not be propagated to the ORM :class:`.Session` based on a - multi-part operator expression alone, e.g. such as ``Cls.attr + Cls.attr + - Cls.attr`` or similar, leading to ORM behaviors not taking place for such - statements. diff --git a/doc/build/changelog/unreleased_20/12364.rst b/doc/build/changelog/unreleased_20/12364.rst deleted file mode 100644 index 59f5d24f067..00000000000 --- a/doc/build/changelog/unreleased_20/12364.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 12364 - - Fixed issue where using :func:`_orm.aliased` around a :class:`.CTE` - construct could cause inappropriate "duplicate CTE" errors in cases where - that aliased construct appeared multiple times in a single statement. diff --git a/doc/build/changelog/unreleased_20/12368.rst b/doc/build/changelog/unreleased_20/12368.rst deleted file mode 100644 index b02f0fb0a9d..00000000000 --- a/doc/build/changelog/unreleased_20/12368.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, sqlite - :tickets: 12368 - - Fixed issue that omitted the comma between multiple SQLite table extension - clauses, currently ``WITH ROWID`` and ``STRICT``, when both options - :paramref:`.Table.sqlite_with_rowid` and :paramref:`.Table.sqlite_strict` - were configured at their non-default settings at the same time. Pull - request courtesy david-fed. diff --git a/doc/build/changelog/unreleased_20/12382.rst b/doc/build/changelog/unreleased_20/12382.rst deleted file mode 100644 index 80f46309695..00000000000 --- a/doc/build/changelog/unreleased_20/12382.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 12382 - - Added new parameters :paramref:`.AddConstraint.isolate_from_table` and - :paramref:`.DropConstraint.isolate_from_table`, defaulting to True, which - both document and allow to be controllable the long-standing behavior of - these two constructs blocking the given constraint from being included - inline within the "CREATE TABLE" sequence, under the assumption that - separate add/drop directives were to be used. diff --git a/doc/build/changelog/unreleased_20/12417.rst b/doc/build/changelog/unreleased_20/12417.rst deleted file mode 100644 index b9b22a82475..00000000000 --- a/doc/build/changelog/unreleased_20/12417.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. change:: - :tags: bug, postgresql - :tickets: 12417 - - Fixed compiler issue in the PostgreSQL dialect where incorrect keywords - would be passed when using "FOR UPDATE OF" inside of a subquery. diff --git a/doc/build/conf.py b/doc/build/conf.py index 695f9104678..999ea7672f9 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. 
-release = "2.0.38" +release = "2.0.39" -release_date = "February 6, 2025" +release_date = "March 11, 2025" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 3f92784fbeebd8479c06250ab93f547515ec34fd Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 11 Mar 2025 14:27:27 -0400 Subject: [PATCH 464/544] Version 2.0.40 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 213e3b38029..38ed6399c9a 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.40 + :include_notes_from: unreleased_20 + .. changelog:: :version: 2.0.39 :released: March 11, 2025 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 5b9c095ce9c..f219838dcd4 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.39" +__version__ = "2.0.40" def __go(lcls: Any) -> None: From bdf4ef844caf622da96460d57b86f4cf41cdbd45 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 23 Feb 2025 11:20:18 -0500 Subject: [PATCH 465/544] re-support mysql-connector python Support has been re-added for the MySQL-Connector/Python DBAPI using the ``mysql+mysqlconnector://`` URL scheme. The DBAPI now works against modern MySQL versions as well as MariaDB versions (in the latter case it's required to pass charset/collation explicitly). Note however that server side cursor support is disabled due to unresolved issues with this driver. note the 2.0 backport here necessarily needs to also backport some of 49ce245998 to handle the mariadb database working under mysql connector. References: #12332 Change-Id: I81279478196e830d3c0d5f24ecb3fe2dc18d4ca6 (cherry picked from commit b056dd2c5ab71ce4143a95cd0fdd4a4190de19e6) --- doc/build/changelog/unreleased_20/12332.rst | 10 ++ lib/sqlalchemy/dialects/mysql/base.py | 14 ++- lib/sqlalchemy/dialects/mysql/mariadb.py | 28 +++-- .../dialects/mysql/mysqlconnector.py | 103 ++++++++++++++---- lib/sqlalchemy/dialects/mysql/provision.py | 4 + lib/sqlalchemy/dialects/mysql/types.py | 9 +- lib/sqlalchemy/testing/suite/test_results.py | 2 + setup.cfg | 1 + test/dialect/mysql/test_dialect.py | 22 ++-- test/dialect/mysql/test_for_update.py | 12 +- test/engine/test_execute.py | 2 +- test/requirements.py | 7 +- tox.ini | 5 +- 13 files changed, 165 insertions(+), 54 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12332.rst diff --git a/doc/build/changelog/unreleased_20/12332.rst b/doc/build/changelog/unreleased_20/12332.rst new file mode 100644 index 00000000000..a6c1d4e2fb1 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12332.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, mysql + :tickets: 12332 + + Support has been re-added for the MySQL-Connector/Python DBAPI using the + ``mysql+mysqlconnector://`` URL scheme. The DBAPI now works against + modern MySQL versions as well as MariaDB versions (in the latter case it's + required to pass charset/collation explicitly). Note however that + server side cursor support is disabled due to unresolved issues with this + driver. 
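
A minimal connection sketch using the URL scheme named above; it is not part of the patch, and the host, credentials and database name are placeholders:

    from sqlalchemy import create_engine, text

    # when the target server is MariaDB, append an explicit MariaDB-compatible
    # charset/collation, e.g. "?charset=utf8mb4&collation=utf8mb4_general_ci"
    engine = create_engine("mysql+mysqlconnector://scott:tiger@localhost:3306/test")

    with engine.connect() as conn:
        print(conn.scalar(text("SELECT VERSION()")))
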
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 4a52d1b67a7..8bae6193b51 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -2520,6 +2520,10 @@ class MySQLDialect(default.DefaultDialect): # allow for the "true" and "false" keywords, however supports_native_boolean = False + # support for BIT type; mysqlconnector coerces result values automatically, + # all other MySQL DBAPIs require a conversion routine + supports_native_bit = False + # identifiers are 64, however aliases can be 255... max_identifier_length = 255 max_index_name_length = 64 @@ -2721,10 +2725,12 @@ def _set_mariadb(self, is_mariadb, server_version_info): % (".".join(map(str, server_version_info)),) ) if is_mariadb: - self.preparer = MariaDBIdentifierPreparer - # this would have been set by the default dialect already, - # so set it again - self.identifier_preparer = self.preparer(self) + + if not issubclass(self.preparer, MariaDBIdentifierPreparer): + self.preparer = MariaDBIdentifierPreparer + # this would have been set by the default dialect already, + # so set it again + self.identifier_preparer = self.preparer(self) # this will be updated on first connect in initialize() # if using older mariadb version diff --git a/lib/sqlalchemy/dialects/mysql/mariadb.py b/lib/sqlalchemy/dialects/mysql/mariadb.py index ac2cfbd1b00..b84dee37a7b 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadb.py +++ b/lib/sqlalchemy/dialects/mysql/mariadb.py @@ -46,16 +46,22 @@ class MariaDBDialect(MySQLDialect): def loader(driver): - driver_mod = __import__( + dialect_mod = __import__( "sqlalchemy.dialects.mysql.%s" % driver ).dialects.mysql - driver_cls = getattr(driver_mod, driver).dialect - - return type( - "MariaDBDialect_%s" % driver, - ( - MariaDBDialect, - driver_cls, - ), - {"supports_statement_cache": True}, - ) + + driver_mod = getattr(dialect_mod, driver) + if hasattr(driver_mod, "mariadb_dialect"): + driver_cls = driver_mod.mariadb_dialect + return driver_cls + else: + driver_cls = driver_mod.dialect + + return type( + "MariaDBDialect_%s" % driver, + ( + MariaDBDialect, + driver_cls, + ), + {"supports_statement_cache": True}, + ) diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index e88f8fd71a6..71ac58601c1 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -14,24 +14,51 @@ :connectstring: mysql+mysqlconnector://:@[:]/ :url: https://pypi.org/project/mysql-connector-python/ -.. note:: +Driver Status +------------- + +MySQL Connector/Python is supported as of SQLAlchemy 2.0.39 to the +degree which the driver is functional. There are still ongoing issues +with features such as server side cursors which remain disabled until +upstream issues are repaired. + +.. versionchanged:: 2.0.39 + + The MySQL Connector/Python dialect has been updated to support the + latest version of this DBAPI. Previously, MySQL Connector/Python + was not fully supported. + +Connecting to MariaDB with MySQL Connector/Python +-------------------------------------------------- + +MySQL Connector/Python may attempt to pass an incompatible collation to the +database when connecting to MariaDB. Experimentation has shown that using +``?charset=utf8mb4&collation=utfmb4_general_ci`` or similar MariaDB-compatible +charset/collation will allow connectivity. 
- The MySQL Connector/Python DBAPI has had many issues since its release, - some of which may remain unresolved, and the mysqlconnector dialect is - **not tested as part of SQLAlchemy's continuous integration**. - The recommended MySQL dialects are mysqlclient and PyMySQL. """ # noqa import re from .base import BIT +from .base import MariaDBIdentifierPreparer from .base import MySQLCompiler from .base import MySQLDialect +from .base import MySQLExecutionContext from .base import MySQLIdentifierPreparer +from .mariadb import MariaDBDialect from ... import util +class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext): + def create_server_side_cursor(self): + return self._dbapi_connection.cursor(buffered=False) + + def create_default_cursor(self): + return self._dbapi_connection.cursor(buffered=True) + + class MySQLCompiler_mysqlconnector(MySQLCompiler): def visit_mod_binary(self, binary, operator, **kw): return ( @@ -41,7 +68,7 @@ def visit_mod_binary(self, binary, operator, **kw): ) -class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer): +class IdentifierPreparerCommon_mysqlconnector: @property def _double_percents(self): return False @@ -55,6 +82,18 @@ def _escape_identifier(self, value): return value +class MySQLIdentifierPreparer_mysqlconnector( + IdentifierPreparerCommon_mysqlconnector, MySQLIdentifierPreparer +): + pass + + +class MariaDBIdentifierPreparer_mysqlconnector( + IdentifierPreparerCommon_mysqlconnector, MariaDBIdentifierPreparer +): + pass + + class _myconnpyBIT(BIT): def result_processor(self, dialect, coltype): """MySQL-connector already converts mysql bits, so.""" @@ -71,9 +110,16 @@ class MySQLDialect_mysqlconnector(MySQLDialect): supports_native_decimal = True + supports_native_bit = True + + # not until https://bugs.mysql.com/bug.php?id=117548 + supports_server_side_cursors = False + default_paramstyle = "format" statement_compiler = MySQLCompiler_mysqlconnector + execution_ctx_cls = MySQLExecutionContext_mysqlconnector + preparer = MySQLIdentifierPreparer_mysqlconnector colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _myconnpyBIT}) @@ -111,9 +157,13 @@ def create_connect_args(self, url): util.coerce_kw_type(opts, "use_pure", bool) util.coerce_kw_type(opts, "use_unicode", bool) - # unfortunately, MySQL/connector python refuses to release a - # cursor without reading fully, so non-buffered isn't an option - opts.setdefault("buffered", True) + # note that "buffered" is set to False by default in MySQL/connector + # python. If you set it to True, then there is no way to get a server + # side cursor because the logic is written to disallow that. + + # leaving this at True until + # https://bugs.mysql.com/bug.php?id=117548 can be fixed + opts["buffered"] = True # FOUND_ROWS must be set in ClientFlag to enable # supports_sane_rowcount. 
@@ -128,6 +178,7 @@ def create_connect_args(self, url): opts["client_flags"] = client_flags except Exception: pass + return [[], opts] @util.memoized_property @@ -145,7 +196,11 @@ def _extract_error_code(self, exception): def is_disconnect(self, e, connection, cursor): errnos = (2006, 2013, 2014, 2045, 2055, 2048) - exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError) + exceptions = ( + self.dbapi.OperationalError, + self.dbapi.InterfaceError, + self.dbapi.ProgrammingError, + ) if isinstance(e, exceptions): return ( e.errno in errnos @@ -161,20 +216,30 @@ def _compat_fetchall(self, rp, charset=None): def _compat_fetchone(self, rp, charset=None): return rp.fetchone() - _isolation_lookup = { - "SERIALIZABLE", - "READ UNCOMMITTED", - "READ COMMITTED", - "REPEATABLE READ", - "AUTOCOMMIT", - } + def get_isolation_level_values(self, dbapi_connection): + return ( + "SERIALIZABLE", + "READ UNCOMMITTED", + "READ COMMITTED", + "REPEATABLE READ", + "AUTOCOMMIT", + ) - def _set_isolation_level(self, connection, level): + def set_isolation_level(self, connection, level): if level == "AUTOCOMMIT": connection.autocommit = True else: connection.autocommit = False - super()._set_isolation_level(connection, level) + super().set_isolation_level(connection, level) + + +class MariaDBDialect_mysqlconnector( + MariaDBDialect, MySQLDialect_mysqlconnector +): + supports_statement_cache = True + _allows_uuid_binds = False + preparer = MariaDBIdentifierPreparer_mysqlconnector dialect = MySQLDialect_mysqlconnector +mariadb_dialect = MariaDBDialect_mysqlconnector diff --git a/lib/sqlalchemy/dialects/mysql/provision.py b/lib/sqlalchemy/dialects/mysql/provision.py index 7807af40975..46070848cb1 100644 --- a/lib/sqlalchemy/dialects/mysql/provision.py +++ b/lib/sqlalchemy/dialects/mysql/provision.py @@ -42,6 +42,10 @@ def generate_driver_url(url, driver, query_str): if driver == "mariadbconnector": new_url = new_url.difference_update_query(["charset"]) + elif driver == "mysqlconnector": + new_url = new_url.update_query_pairs( + [("collation", "utf8mb4_general_ci")] + ) try: new_url.get_dialect() diff --git a/lib/sqlalchemy/dialects/mysql/types.py b/lib/sqlalchemy/dialects/mysql/types.py index 0c05aacb7cd..ace6824a740 100644 --- a/lib/sqlalchemy/dialects/mysql/types.py +++ b/lib/sqlalchemy/dialects/mysql/types.py @@ -374,12 +374,11 @@ def __init__(self, length=None): self.length = length def result_processor(self, dialect, coltype): - """Convert a MySQL's 64 bit, variable length binary string to a long. + """Convert a MySQL's 64 bit, variable length binary string to a + long.""" - TODO: this is MySQL-db, pyodbc specific. OurSQL and mysqlconnector - already do this, so this logic should be moved to those dialects. 
- - """ + if dialect.supports_native_bit: + return None def process(value): if value is not None: diff --git a/lib/sqlalchemy/testing/suite/test_results.py b/lib/sqlalchemy/testing/suite/test_results.py index a6179d85598..317195fd1e9 100644 --- a/lib/sqlalchemy/testing/suite/test_results.py +++ b/lib/sqlalchemy/testing/suite/test_results.py @@ -268,6 +268,8 @@ def _is_server_side(self, cursor): return isinstance(cursor, sscursor) elif self.engine.dialect.driver == "mariadbconnector": return not cursor.buffered + elif self.engine.dialect.driver == "mysqlconnector": + return "buffered" not in type(cursor).__name__.lower() elif self.engine.dialect.driver in ("asyncpg", "aiosqlite"): return cursor.server_side elif self.engine.dialect.driver == "pg8000": diff --git a/setup.cfg b/setup.cfg index bf9aedd8b6b..21ab1374257 100644 --- a/setup.cfg +++ b/setup.cfg @@ -179,6 +179,7 @@ asyncmy = mysql+asyncmy://scott:tiger@127.0.0.1:3306/test?charset=utf8mb4 asyncmy_fallback = mysql+asyncmy://scott:tiger@127.0.0.1:3306/test?charset=utf8mb4&async_fallback=true mariadb = mariadb+mysqldb://scott:tiger@127.0.0.1:3306/test mariadb_connector = mariadb+mariadbconnector://scott:tiger@127.0.0.1:3306/test +mysql_connector = mariadb+mysqlconnector://scott:tiger@127.0.0.1:3306/test mssql = mssql+pyodbc://scott:tiger^5HHH@mssql2022:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes&Encrypt=Optional mssql_async = mssql+aioodbc://scott:tiger^5HHH@mssql2022:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes&Encrypt=Optional pymssql = mssql+pymssql://scott:tiger^5HHH@mssql2022:1433/test diff --git a/test/dialect/mysql/test_dialect.py b/test/dialect/mysql/test_dialect.py index cf74f17ad66..7e31c666f3a 100644 --- a/test/dialect/mysql/test_dialect.py +++ b/test/dialect/mysql/test_dialect.py @@ -302,15 +302,19 @@ def test_mysqlconnector_buffered_arg(self): )[1] eq_(kw["buffered"], True) - kw = dialect.create_connect_args( - make_url("mysql+mysqlconnector://u:p@host/db?buffered=false") - )[1] - eq_(kw["buffered"], False) - - kw = dialect.create_connect_args( - make_url("mysql+mysqlconnector://u:p@host/db") - )[1] - eq_(kw["buffered"], True) + # this is turned off for now due to + # https://bugs.mysql.com/bug.php?id=117548 + if dialect.supports_server_side_cursors: + kw = dialect.create_connect_args( + make_url("mysql+mysqlconnector://u:p@host/db?buffered=false") + )[1] + eq_(kw["buffered"], False) + + kw = dialect.create_connect_args( + make_url("mysql+mysqlconnector://u:p@host/db") + )[1] + # defaults to False as of 2.0.39 + eq_(kw.get("buffered"), None) def test_mysqlconnector_raise_on_warnings_arg(self): from sqlalchemy.dialects.mysql import mysqlconnector diff --git a/test/dialect/mysql/test_for_update.py b/test/dialect/mysql/test_for_update.py index 0895a098d1f..5c26d8eb6d5 100644 --- a/test/dialect/mysql/test_for_update.py +++ b/test/dialect/mysql/test_for_update.py @@ -90,7 +90,11 @@ def _assert_a_is_locked(self, should_be_locked): # set x/y > 10 try: alt_trans.execute(update(A).values(x=15, y=19)) - except (exc.InternalError, exc.OperationalError) as err: + except ( + exc.InternalError, + exc.OperationalError, + exc.DatabaseError, + ) as err: assert "Lock wait timeout exceeded" in str(err) assert should_be_locked else: @@ -103,7 +107,11 @@ def _assert_b_is_locked(self, should_be_locked): # set x/y > 10 try: alt_trans.execute(update(B).values(x=15, y=19)) - except (exc.InternalError, exc.OperationalError) as err: + except ( + exc.InternalError, + exc.OperationalError, + 
exc.DatabaseError, + ) as err: assert "Lock wait timeout exceeded" in str(err) assert should_be_locked else: diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index 3291fa30478..28541ca33a1 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -555,7 +555,7 @@ def test_stmt_exception_pickleable_no_dbapi(self): "Older versions don't support cursor pickling, newer ones do", ) @testing.fails_on( - "mysql+mysqlconnector", + "+mysqlconnector", "Exception doesn't come back exactly the same from pickle", ) @testing.fails_on( diff --git a/test/requirements.py b/test/requirements.py index 98a98cd74e6..12c25ece1aa 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -1012,7 +1012,12 @@ def symbol_names_w_double_quote(self): @property def arraysize(self): - return skip_if("+pymssql", "DBAPI is missing this attribute") + return skip_if( + [ + no_support("+pymssql", "DBAPI is missing this attribute"), + no_support("+mysqlconnector", "DBAPI ignores this attribute"), + ] + ) @property def emulated_lastrowid(self): diff --git a/tox.ini b/tox.ini index ca7177b2ece..ae11373548a 100644 --- a/tox.ini +++ b/tox.ini @@ -38,6 +38,7 @@ extras= mysql: mysql mysql: pymysql mysql: mariadb_connector + mysql: mysql_connector oracle: oracle oracle: oracle_oracledb @@ -145,8 +146,8 @@ setenv= memusage: WORKERS={env:TOX_WORKERS:-n2} mysql: MYSQL={env:TOX_MYSQL:--db mysql} - mysql: EXTRA_MYSQL_DRIVERS={env:EXTRA_MYSQL_DRIVERS:--dbdriver mysqldb --dbdriver pymysql --dbdriver asyncmy --dbdriver aiomysql --dbdriver mariadbconnector} - mysql-nogreenlet: EXTRA_MYSQL_DRIVERS={env:EXTRA_MYSQL_DRIVERS:--dbdriver mysqldb --dbdriver pymysql --dbdriver mariadbconnector} + mysql: EXTRA_MYSQL_DRIVERS={env:EXTRA_MYSQL_DRIVERS:--dbdriver mysqldb --dbdriver pymysql --dbdriver asyncmy --dbdriver aiomysql --dbdriver mariadbconnector --dbdriver mysqlconnector} + mysql-nogreenlet: EXTRA_MYSQL_DRIVERS={env:EXTRA_MYSQL_DRIVERS:--dbdriver mysqldb --dbdriver pymysql --dbdriver mariadbconnector --dbdriver mysqlconnector} mssql: MSSQL={env:TOX_MSSQL:--db mssql} mssql: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver aioodbc --dbdriver pymssql} From 5188b39dd23b40c243dd96a618614c892264227e Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Wed, 5 Mar 2025 15:59:39 -0500 Subject: [PATCH 466/544] Complement type annotations for ARRAY This complements the type annotations of the `ARRAY` class, in preparation of #12384. This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [ ] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [x] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. 
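As an illustration only (not part of this patch), a minimal sketch of what the added annotations give a type checker; the column name used here is hypothetical::

    from sqlalchemy import ARRAY, Integer, column, select

    tags = column("tags", ARRAY(Integer))

    # with the annotations added in this change, ARRAY.Comparator.any() /
    # .all() are seen by type checkers as returning ColumnElement[bool],
    # so the .where() call below type-checks cleanly
    stmt = select(tags).where(tags.any(5))
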
Related to https://github.com/sqlalchemy/sqlalchemy/issues/6810 Closes: #12386 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12386 Pull-request-sha: c9513ce729fa1116b46b02336d4e2cda3d096fee Change-Id: If9df4708c8e597eedc79ee3990792fa6c72f1afe (cherry picked from commit 0bf7e02afbec557eb3a5607db407f27deb7aac77) --- lib/sqlalchemy/sql/elements.py | 8 +++-- lib/sqlalchemy/sql/sqltypes.py | 60 ++++++++++++++++++++++++++-------- 2 files changed, 52 insertions(+), 16 deletions(-) diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index cd1dc34e0a1..0a0d432e030 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -3788,7 +3788,9 @@ def _create_all( # operate and reverse_operate are hardwired to # dispatch onto the type comparator directly, so that we can # ensure "reversed" behavior. - def operate(self, op, *other, **kwargs): + def operate( + self, op: OperatorType, *other: Any, **kwargs: Any + ) -> ColumnElement[_T]: if not operators.is_comparison(op): raise exc.ArgumentError( "Only comparison operators may be used with ANY/ALL" @@ -3796,7 +3798,9 @@ def operate(self, op, *other, **kwargs): kwargs["reverse"] = True return self.comparator.operate(operators.mirror(op), *other, **kwargs) - def reverse_operate(self, op, other, **kwargs): + def reverse_operate( + self, op: OperatorType, other: Any, **kwargs: Any + ) -> ColumnElement[_T]: # comparison operators should never call reverse_operate assert not operators.is_comparison(op) raise exc.ArgumentError( diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index ad220356f04..cde52209470 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -21,6 +21,7 @@ from typing import Callable from typing import cast from typing import Dict +from typing import Iterable from typing import List from typing import Optional from typing import overload @@ -68,6 +69,7 @@ if TYPE_CHECKING: from ._typing import _ColumnExpressionArgument from ._typing import _TypeEngineArgument + from .elements import ColumnElement from .operators import OperatorType from .schema import MetaData from .type_api import _BindProcessorType @@ -80,6 +82,7 @@ _T = TypeVar("_T", bound="Any") _CT = TypeVar("_CT", bound=Any) _TE = TypeVar("_TE", bound="TypeEngine[Any]") +_P = TypeVar("_P") class HasExpressionLookup(TypeEngineMixin): @@ -2955,7 +2958,20 @@ class Comparator( type: ARRAY - def _setup_getitem(self, index): + @overload + def _setup_getitem( + self, index: int + ) -> Tuple[OperatorType, int, TypeEngine[Any]]: ... + + @overload + def _setup_getitem( + self, index: slice + ) -> Tuple[OperatorType, Slice, TypeEngine[Any]]: ... + + def _setup_getitem(self, index: Union[int, slice]) -> Union[ + Tuple[OperatorType, int, TypeEngine[Any]], + Tuple[OperatorType, Slice, TypeEngine[Any]], + ]: arr_type = self.type return_type: TypeEngine[Any] @@ -2981,7 +2997,7 @@ def _setup_getitem(self, index): return operators.getitem, index, return_type - def contains(self, *arg, **kw): + def contains(self, *arg: Any, **kw: Any) -> ColumnElement[bool]: """``ARRAY.contains()`` not implemented for the base ARRAY type. Use the dialect-specific ARRAY type. @@ -2995,7 +3011,9 @@ def contains(self, *arg, **kw): ) @util.preload_module("sqlalchemy.sql.elements") - def any(self, other, operator=None): + def any( + self, other: Any, operator: Optional[OperatorType] = None + ) -> ColumnElement[bool]: """Return ``other operator ANY (array)`` clause. .. 
legacy:: This method is an :class:`_types.ARRAY` - specific @@ -3042,7 +3060,9 @@ def any(self, other, operator=None): ) @util.preload_module("sqlalchemy.sql.elements") - def all(self, other, operator=None): + def all( + self, other: Any, operator: Optional[OperatorType] = None + ) -> ColumnElement[bool]: """Return ``other operator ALL (array)`` clause. .. legacy:: This method is an :class:`_types.ARRAY` - specific @@ -3091,23 +3111,27 @@ def all(self, other, operator=None): comparator_factory = Comparator @property - def hashable(self): + def hashable(self) -> bool: # type: ignore[override] return self.as_tuple @property - def python_type(self): + def python_type(self) -> Type[Any]: return list - def compare_values(self, x, y): - return x == y + def compare_values(self, x: Any, y: Any) -> bool: + return x == y # type: ignore[no-any-return] - def _set_parent(self, parent, outer=False, **kw): + def _set_parent( + self, parent: SchemaEventTarget, outer: bool = False, **kw: Any + ) -> None: """Support SchemaEventTarget""" if not outer and isinstance(self.item_type, SchemaEventTarget): self.item_type._set_parent(parent, **kw) - def _set_parent_with_dispatch(self, parent, **kw): + def _set_parent_with_dispatch( + self, parent: SchemaEventTarget, **kw: Any + ) -> None: """Support SchemaEventTarget""" super()._set_parent_with_dispatch(parent, outer=True) @@ -3115,17 +3139,19 @@ def _set_parent_with_dispatch(self, parent, **kw): if isinstance(self.item_type, SchemaEventTarget): self.item_type._set_parent_with_dispatch(parent) - def literal_processor(self, dialect): + def literal_processor( + self, dialect: Dialect + ) -> Optional[_LiteralProcessorType[_T]]: item_proc = self.item_type.dialect_impl(dialect).literal_processor( dialect ) if item_proc is None: return None - def to_str(elements): + def to_str(elements: Iterable[Any]) -> str: return f"[{', '.join(elements)}]" - def process(value): + def process(value: Sequence[Any]) -> str: inner = self._apply_item_processor( value, item_proc, self.dimensions, to_str ) @@ -3133,7 +3159,13 @@ def process(value): return process - def _apply_item_processor(self, arr, itemproc, dim, collection_callable): + def _apply_item_processor( + self, + arr: Sequence[Any], + itemproc: Optional[Callable[[Any], Any]], + dim: Optional[int], + collection_callable: Callable[[Iterable[Any]], _P], + ) -> _P: """Helper method that can be used by bind_processor(), literal_processor(), etc. to apply an item processor to elements of an array value, taking into account the 'dimensions' for this From 13b704b8919d80b0bc81bc2bfcd19998b27a11a6 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 14 Mar 2025 10:33:22 -0400 Subject: [PATCH 467/544] anonymize CRUD params if visiting_cte is present Fixed issue in :class:`.CTE` constructs involving multiple DDL :class:`.Insert` statements with multiple VALUES parameter sets where the bound parameter names generated for these parameter sets would conflict, generating a compile time error. 
Fixes: #12363 Change-Id: If8344ff725d4e0ec58d3ff61f38a0edcfc5bdebd (cherry picked from commit ec20f346a6ed6e5c6de3ee6b6972cf13dba4752d) --- doc/build/changelog/unreleased_20/12363.rst | 9 ++++++ lib/sqlalchemy/sql/crud.py | 15 ++++++++-- test/sql/test_cte.py | 31 +++++++++++++++++++++ 3 files changed, 52 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12363.rst diff --git a/doc/build/changelog/unreleased_20/12363.rst b/doc/build/changelog/unreleased_20/12363.rst new file mode 100644 index 00000000000..e04e51fe0de --- /dev/null +++ b/doc/build/changelog/unreleased_20/12363.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, sql + :tickets: 12363 + + Fixed issue in :class:`.CTE` constructs involving multiple DDL + :class:`.Insert` statements with multiple VALUES parameter sets where the + bound parameter names generated for these parameter sets would conflict, + generating a compile time error. + diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py index 19af40ff080..c0c0c86bb9c 100644 --- a/lib/sqlalchemy/sql/crud.py +++ b/lib/sqlalchemy/sql/crud.py @@ -393,6 +393,7 @@ def _create_bind_param( process: Literal[True] = ..., required: bool = False, name: Optional[str] = None, + force_anonymous: bool = False, **kw: Any, ) -> str: ... @@ -413,10 +414,14 @@ def _create_bind_param( process: bool = True, required: bool = False, name: Optional[str] = None, + force_anonymous: bool = False, **kw: Any, ) -> Union[str, elements.BindParameter[Any]]: - if name is None: + if force_anonymous: + name = None + elif name is None: name = col.key + bindparam = elements.BindParameter( name, value, type_=col.type, required=required ) @@ -486,7 +491,7 @@ def _key_getters_for_crud_column( ) def _column_as_key( - key: Union[ColumnClause[Any], str] + key: Union[ColumnClause[Any], str], ) -> Union[str, Tuple[str, str]]: str_key = c_key_role(key) if hasattr(key, "table") and key.table in _et: @@ -832,6 +837,7 @@ def _append_param_parameter( ): value = parameters.pop(col_key) + has_visiting_cte = kw.get("visiting_cte") is not None col_value = compiler.preparer.format_column( c, use_table=compile_state.include_table_with_column_exprs ) @@ -864,6 +870,7 @@ def _append_param_parameter( else "%s_m0" % _col_bind_name(c) ), accumulate_bind_names=accumulated_bind_names, + force_anonymous=has_visiting_cte, **kw, ) elif value._is_bind_parameter: @@ -1435,6 +1442,7 @@ def _extend_values_for_multiparams( values_0 = initial_values values = [initial_values] + has_visiting_cte = kw.get("visiting_cte") is not None mp = compile_state._multi_parameters assert mp is not None for i, row in enumerate(mp[1:]): @@ -1451,7 +1459,8 @@ def _extend_values_for_multiparams( compiler, col, row[key], - name="%s_m%d" % (col.key, i + 1), + name=("%s_m%d" % (col.key, i + 1)), + force_anonymous=has_visiting_cte, **kw, ) else: diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py index d0ecc38c86f..92b83b7fe35 100644 --- a/test/sql/test_cte.py +++ b/test/sql/test_cte.py @@ -1900,6 +1900,37 @@ def test_insert_uses_independent_cte(self): checkparams={"id": 1, "price": 20, "param_1": 10, "price_1": 50}, ) + @testing.variation("num_ctes", ["one", "two"]) + def test_multiple_multivalues_inserts(self, num_ctes): + """test #12363""" + + t1 = table("table1", column("id"), column("a"), column("b")) + + t2 = table("table2", column("id"), column("a"), column("b")) + + if num_ctes.one: + self.assert_compile( + insert(t1) + .values([{"a": 1}, {"a": 2}]) + .add_cte(insert(t2).values([{"a": 5}, {"a": 6}]).cte()), + "WITH 
anon_1 AS " + "(INSERT INTO table2 (a) VALUES (:param_1), (:param_2)) " + "INSERT INTO table1 (a) VALUES (:a_m0), (:a_m1)", + ) + + elif num_ctes.two: + self.assert_compile( + insert(t1) + .values([{"a": 1}, {"a": 2}]) + .add_cte(insert(t1).values([{"b": 5}, {"b": 6}]).cte()) + .add_cte(insert(t2).values([{"a": 5}, {"a": 6}]).cte()), + "WITH anon_1 AS " + "(INSERT INTO table1 (b) VALUES (:param_1), (:param_2)), " + "anon_2 AS " + "(INSERT INTO table2 (a) VALUES (:param_3), (:param_4)) " + "INSERT INTO table1 (a) VALUES (:a_m0), (:a_m1)", + ) + def test_insert_from_select_uses_independent_cte(self): """test #7036""" From e279b737c34bc0a1b335b272862dc42be79522c3 Mon Sep 17 00:00:00 2001 From: Pablo Estevez Date: Sat, 8 Feb 2025 10:46:24 -0500 Subject: [PATCH 468/544] miscellaneous to type dialects Type of certain methods that are called by dialect, so typing dialects is easier. Related to https://github.com/sqlalchemy/sqlalchemy/pull/12164 breaking changes: - Change modifiers from TextClause to InmutableDict, from Mapping, as is in the other classes Closes: #12231 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12231 Pull-request-sha: 514fe4751c7b1ceefffed2a4ef9c8df339bd9c25 Change-Id: I29314045b2c7eb5428f8d6fec8911c4b6d5ae73e (cherry picked from commit 4418ef79104a0e4591ff8268d75f1deb59bfcec3) --- lib/sqlalchemy/connectors/pyodbc.py | 6 +- lib/sqlalchemy/dialects/postgresql/base.py | 5 +- lib/sqlalchemy/engine/cursor.py | 7 +- lib/sqlalchemy/engine/default.py | 39 ++- lib/sqlalchemy/engine/interfaces.py | 15 +- lib/sqlalchemy/pool/base.py | 2 +- lib/sqlalchemy/sql/coercions.py | 2 +- lib/sqlalchemy/sql/compiler.py | 330 +++++++++++++-------- lib/sqlalchemy/sql/ddl.py | 95 +++--- lib/sqlalchemy/sql/elements.py | 6 +- lib/sqlalchemy/sql/sqltypes.py | 62 +++- lib/sqlalchemy/sql/type_api.py | 4 + lib/sqlalchemy/sql/util.py | 2 +- lib/sqlalchemy/util/_collections.py | 4 +- lib/sqlalchemy/util/typing.py | 1 + pyproject.toml | 2 + test/dialect/oracle/test_dialect.py | 1 - 17 files changed, 369 insertions(+), 214 deletions(-) diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py index 92b7c3a4138..091ff2042f1 100644 --- a/lib/sqlalchemy/connectors/pyodbc.py +++ b/lib/sqlalchemy/connectors/pyodbc.py @@ -227,11 +227,9 @@ def do_set_input_sizes( ) def get_isolation_level_values( - self, dbapi_connection: interfaces.DBAPIConnection + self, dbapi_conn: interfaces.DBAPIConnection ) -> List[IsolationLevel]: - return super().get_isolation_level_values(dbapi_connection) + [ - "AUTOCOMMIT" - ] + return [*super().get_isolation_level_values(dbapi_conn), "AUTOCOMMIT"] def set_isolation_level( self, diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index ca2a3fa59ef..c8f15d14471 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1482,6 +1482,7 @@ def update(): import re from typing import Any from typing import cast +from typing import Dict from typing import List from typing import Optional from typing import Tuple @@ -3744,8 +3745,8 @@ def get_multi_columns( def _reflect_type( self, format_type: Optional[str], - domains: dict[str, ReflectedDomain], - enums: dict[str, ReflectedEnum], + domains: Dict[str, ReflectedDomain], + enums: Dict[str, ReflectedEnum], type_description: str, ) -> sqltypes.TypeEngine[Any]: """ diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index ff14ad8eed4..54e9784e0c4 100644 --- 
a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ -20,6 +20,7 @@ from typing import cast from typing import ClassVar from typing import Dict +from typing import Iterable from typing import Iterator from typing import List from typing import Mapping @@ -1289,12 +1290,16 @@ class FullyBufferedCursorFetchStrategy(CursorFetchStrategy): __slots__ = ("_rowbuffer", "alternate_cursor_description") def __init__( - self, dbapi_cursor, alternate_description=None, initial_buffer=None + self, + dbapi_cursor: Optional[DBAPICursor], + alternate_description: Optional[_DBAPICursorDescription] = None, + initial_buffer: Optional[Iterable[Any]] = None, ): self.alternate_cursor_description = alternate_description if initial_buffer is not None: self._rowbuffer = collections.deque(initial_buffer) else: + assert dbapi_cursor is not None self._rowbuffer = collections.deque(dbapi_cursor.fetchall()) def yield_per(self, result, dbapi_cursor, num): diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index dd4250ffc40..f0b58b634f9 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -77,9 +77,11 @@ from .interfaces import _CoreSingleExecuteParams from .interfaces import _DBAPICursorDescription from .interfaces import _DBAPIMultiExecuteParams + from .interfaces import _DBAPISingleExecuteParams from .interfaces import _ExecuteOptions from .interfaces import _MutableCoreSingleExecuteParams from .interfaces import _ParamStyle + from .interfaces import ConnectArgsType from .interfaces import DBAPIConnection from .interfaces import IsolationLevel from .row import Row @@ -99,6 +101,7 @@ from ..sql.type_api import _ResultProcessorType from ..sql.type_api import TypeEngine + # When we're handed literal SQL, ensure it's a SELECT query SERVER_SIDE_CURSOR_RE = re.compile(r"\s*SELECT", re.I | re.UNICODE) @@ -437,7 +440,7 @@ def loaded_dbapi(self) -> ModuleType: def _bind_typing_render_casts(self): return self.bind_typing is interfaces.BindTyping.RENDER_CASTS - def _ensure_has_table_connection(self, arg): + def _ensure_has_table_connection(self, arg: Connection) -> None: if not isinstance(arg, Connection): raise exc.ArgumentError( "The argument passed to Dialect.has_table() should be a " @@ -515,7 +518,7 @@ def builtin_connect(dbapi_conn, conn_rec): else: return None - def initialize(self, connection): + def initialize(self, connection: Connection) -> None: try: self.server_version_info = self._get_server_version_info( connection @@ -551,7 +554,7 @@ def initialize(self, connection): % (self.label_length, self.max_identifier_length) ) - def on_connect(self): + def on_connect(self) -> Optional[Callable[[Any], Any]]: # inherits the docstring from interfaces.Dialect.on_connect return None @@ -610,18 +613,18 @@ def has_schema( ) -> bool: return schema_name in self.get_schema_names(connection, **kw) - def validate_identifier(self, ident): + def validate_identifier(self, ident: str) -> None: if len(ident) > self.max_identifier_length: raise exc.IdentifierError( "Identifier '%s' exceeds maximum length of %d characters" % (ident, self.max_identifier_length) ) - def connect(self, *cargs, **cparams): + def connect(self, *cargs: Any, **cparams: Any) -> DBAPIConnection: # inherits the docstring from interfaces.Dialect.connect - return self.loaded_dbapi.connect(*cargs, **cparams) + return self.loaded_dbapi.connect(*cargs, **cparams) # type: ignore[no-any-return] # NOQA: E501 - def create_connect_args(self, url): + def create_connect_args(self, url: URL) -> 
ConnectArgsType: # inherits the docstring from interfaces.Dialect.create_connect_args opts = url.translate_connect_args() opts.update(url.query) @@ -944,7 +947,14 @@ def do_execute(self, cursor, statement, parameters, context=None): def do_execute_no_params(self, cursor, statement, context=None): cursor.execute(statement) - def is_disconnect(self, e, connection, cursor): + def is_disconnect( + self, + e: Exception, + connection: Union[ + pool.PoolProxiedConnection, interfaces.DBAPIConnection, None + ], + cursor: Optional[interfaces.DBAPICursor], + ) -> bool: return False @util.memoized_instancemethod @@ -1660,7 +1670,12 @@ def prefetch_cols(self) -> Optional[Sequence[Column[Any]]]: def no_parameters(self): return self.execution_options.get("no_parameters", False) - def _execute_scalar(self, stmt, type_, parameters=None): + def _execute_scalar( + self, + stmt: str, + type_: Optional[TypeEngine[Any]], + parameters: Optional[_DBAPISingleExecuteParams] = None, + ) -> Any: """Execute a string statement on the current cursor, returning a scalar result. @@ -1734,7 +1749,7 @@ def _use_server_side_cursor(self): return use_server_side - def create_cursor(self): + def create_cursor(self) -> DBAPICursor: if ( # inlining initial preference checks for SS cursors self.dialect.supports_server_side_cursors @@ -1755,10 +1770,10 @@ def create_cursor(self): def fetchall_for_returning(self, cursor): return cursor.fetchall() - def create_default_cursor(self): + def create_default_cursor(self) -> DBAPICursor: return self._dbapi_connection.cursor() - def create_server_side_cursor(self): + def create_server_side_cursor(self) -> DBAPICursor: raise NotImplementedError() def pre_exec(self): diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 9fb39db78bd..1823c97fc31 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -122,7 +122,7 @@ def close(self) -> None: ... def commit(self) -> None: ... - def cursor(self) -> DBAPICursor: ... + def cursor(self, *args: Any, **kwargs: Any) -> DBAPICursor: ... def rollback(self) -> None: ... @@ -779,6 +779,12 @@ def loaded_dbapi(self) -> ModuleType: max_identifier_length: int """The maximum length of identifier names.""" + max_index_name_length: Optional[int] + """The maximum length of index names if different from + ``max_identifier_length``.""" + max_constraint_name_length: Optional[int] + """The maximum length of constraint names if different from + ``max_identifier_length``.""" supports_server_side_cursors: bool """indicates if the dialect supports server side cursors""" @@ -1282,8 +1288,6 @@ def initialize(self, connection: Connection) -> None: """ - pass - if TYPE_CHECKING: def _overrides_default(self, method_name: str) -> bool: ... @@ -2482,7 +2486,7 @@ def get_default_isolation_level( def get_isolation_level_values( self, dbapi_conn: DBAPIConnection - ) -> List[IsolationLevel]: + ) -> Sequence[IsolationLevel]: """return a sequence of string isolation level names that are accepted by this dialect. 
@@ -2656,6 +2660,9 @@ def get_dialect_pool_class(self, url: URL) -> Type[Pool]: """return a Pool class to use for a given URL""" raise NotImplementedError() + def validate_identifier(self, ident: str) -> None: + """Validates an identifier name, raising an exception if invalid""" + class CreateEnginePlugin: """A set of hooks intended to augment the construction of an diff --git a/lib/sqlalchemy/pool/base.py b/lib/sqlalchemy/pool/base.py index 32fdc414a74..b070bff197d 100644 --- a/lib/sqlalchemy/pool/base.py +++ b/lib/sqlalchemy/pool/base.py @@ -1075,7 +1075,7 @@ class PoolProxiedConnection(ManagesConnection): def commit(self) -> None: ... - def cursor(self) -> DBAPICursor: ... + def cursor(self, *args: Any, **kwargs: Any) -> DBAPICursor: ... def rollback(self) -> None: ... diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index 802ce75700b..e174833fbdc 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -75,7 +75,7 @@ _T = TypeVar("_T", bound=Any) -def _is_literal(element): +def _is_literal(element: Any) -> bool: """Return whether or not the element is a "literal" in the context of a SQL expression construct. diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index da476849ea0..146dde7a1bc 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -74,38 +74,49 @@ from .base import _from_objects from .base import _NONE_NAME from .base import _SentinelDefaultCharacterization -from .base import Executable from .base import NO_ARG -from .elements import ClauseElement from .elements import quoted_name -from .schema import Column from .sqltypes import TupleType -from .type_api import TypeEngine from .visitors import prefix_anon_map -from .visitors import Visitable from .. import exc from .. 
import util from ..util import FastIntFlag from ..util.typing import Literal from ..util.typing import Protocol +from ..util.typing import Self from ..util.typing import TypedDict if typing.TYPE_CHECKING: from .annotation import _AnnotationDict from .base import _AmbiguousTableNameMap from .base import CompileState + from .base import Executable from .cache_key import CacheKey from .ddl import ExecutableDDLElement from .dml import Insert + from .dml import Update from .dml import UpdateBase + from .dml import UpdateDMLState from .dml import ValuesBase from .elements import _truncated_label + from .elements import BinaryExpression from .elements import BindParameter + from .elements import ClauseElement from .elements import ColumnClause from .elements import ColumnElement + from .elements import False_ from .elements import Label + from .elements import Null + from .elements import True_ from .functions import Function + from .schema import Column + from .schema import Constraint + from .schema import ForeignKeyConstraint + from .schema import Index + from .schema import PrimaryKeyConstraint from .schema import Table + from .schema import UniqueConstraint + from .selectable import _ColumnsClauseElement from .selectable import AliasedReturnsRows from .selectable import CompoundSelectState from .selectable import CTE @@ -115,6 +126,10 @@ from .selectable import Select from .selectable import SelectState from .type_api import _BindProcessorType + from .type_api import TypeDecorator + from .type_api import TypeEngine + from .type_api import UserDefinedType + from .visitors import Visitable from ..engine.cursor import CursorResultMetaData from ..engine.interfaces import _CoreSingleExecuteParams from ..engine.interfaces import _DBAPIAnyExecuteParams @@ -126,6 +141,7 @@ from ..engine.interfaces import Dialect from ..engine.interfaces import SchemaTranslateMapType + _FromHintsType = Dict["FromClause", str] RESERVED_WORDS = { @@ -870,6 +886,7 @@ def __init__( self.string = self.process(self.statement, **compile_kwargs) if render_schema_translate: + assert schema_translate_map is not None self.string = self.preparer._render_schema_translates( self.string, schema_translate_map ) @@ -902,7 +919,7 @@ def visit_unsupported_compilation(self, element, err, **kw): raise exc.UnsupportedCompilationError(self, type(element)) from err @property - def sql_compiler(self): + def sql_compiler(self) -> SQLCompiler: """Return a Compiled that is capable of processing SQL expressions. If this compiler is one, it would likely just return 'self'. @@ -1791,7 +1808,7 @@ def is_subquery(self): return len(self.stack) > 1 @property - def sql_compiler(self): + def sql_compiler(self) -> Self: return self def construct_expanded_state( @@ -2342,7 +2359,7 @@ def get(row, parameters): return get - def default_from(self): + def default_from(self) -> str: """Called when a SELECT statement has no froms, and no FROM clause is to be appended. 
@@ -2734,16 +2751,16 @@ def visit_textual_select( return text - def visit_null(self, expr, **kw): + def visit_null(self, expr: Null, **kw: Any) -> str: return "NULL" - def visit_true(self, expr, **kw): + def visit_true(self, expr: True_, **kw: Any) -> str: if self.dialect.supports_native_boolean: return "true" else: return "1" - def visit_false(self, expr, **kw): + def visit_false(self, expr: False_, **kw: Any) -> str: if self.dialect.supports_native_boolean: return "false" else: @@ -2985,7 +3002,7 @@ def visit_sequence(self, sequence, **kw): % self.dialect.name ) - def function_argspec(self, func, **kwargs): + def function_argspec(self, func: Function[Any], **kwargs: Any) -> str: return func.clause_expr._compiler_dispatch(self, **kwargs) def visit_compound_select( @@ -3449,8 +3466,12 @@ def visit_custom_op_unary_modifier(self, element, operator, **kw): ) def _generate_generic_binary( - self, binary, opstring, eager_grouping=False, **kw - ): + self, + binary: BinaryExpression[Any], + opstring: str, + eager_grouping: bool = False, + **kw: Any, + ) -> str: _in_operator_expression = kw.get("_in_operator_expression", False) kw["_in_operator_expression"] = True @@ -3619,19 +3640,25 @@ def visit_not_between_op_binary(self, binary, operator, **kw): **kw, ) - def visit_regexp_match_op_binary(self, binary, operator, **kw): + def visit_regexp_match_op_binary( + self, binary: BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: raise exc.CompileError( "%s dialect does not support regular expressions" % self.dialect.name ) - def visit_not_regexp_match_op_binary(self, binary, operator, **kw): + def visit_not_regexp_match_op_binary( + self, binary: BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: raise exc.CompileError( "%s dialect does not support regular expressions" % self.dialect.name ) - def visit_regexp_replace_op_binary(self, binary, operator, **kw): + def visit_regexp_replace_op_binary( + self, binary: BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: raise exc.CompileError( "%s dialect does not support regular expression replacements" % self.dialect.name @@ -3838,7 +3865,9 @@ def render_literal_bindparam( else: return self.render_literal_value(value, bindparam.type) - def render_literal_value(self, value, type_): + def render_literal_value( + self, value: Any, type_: sqltypes.TypeEngine[Any] + ) -> str: """Render the value of a bind parameter as a quoted literal. This is used for statement sections that do not accept bind parameters @@ -4612,7 +4641,9 @@ def format_from_hint_text(self, sqltext, table, hint, iscrud): def get_select_hint_text(self, byfroms): return None - def get_from_hint_text(self, table, text): + def get_from_hint_text( + self, table: FromClause, text: Optional[str] + ) -> Optional[str]: return None def get_crud_hint_text(self, table, text): @@ -5097,7 +5128,7 @@ def get_cte_preamble(self, recursive): else: return "WITH" - def get_select_precolumns(self, select, **kw): + def get_select_precolumns(self, select: Select[Any], **kw: Any) -> str: """Called when building a ``SELECT`` statement, position is just before column list. 
@@ -5142,7 +5173,7 @@ def for_update_clause(self, select, **kw): def returning_clause( self, stmt: UpdateBase, - returning_cols: Sequence[ColumnElement[Any]], + returning_cols: Sequence[_ColumnsClauseElement], *, populate_result_map: bool, **kw: Any, @@ -6155,11 +6186,18 @@ def update_from_clause( "criteria within UPDATE" ) - def visit_update(self, update_stmt, visiting_cte=None, **kw): - compile_state = update_stmt._compile_state_factory( - update_stmt, self, **kw + def visit_update( + self, + update_stmt: Update, + visiting_cte: Optional[CTE] = None, + **kw: Any, + ) -> str: + compile_state = update_stmt._compile_state_factory( # type: ignore[call-arg] # noqa: E501 + update_stmt, self, **kw # type: ignore[arg-type] ) - update_stmt = compile_state.statement + if TYPE_CHECKING: + assert isinstance(compile_state, UpdateDMLState) + update_stmt = compile_state.statement # type: ignore[assignment] if visiting_cte is not None: kw["visiting_cte"] = visiting_cte @@ -6297,7 +6335,7 @@ def visit_update(self, update_stmt, visiting_cte=None, **kw): return text def delete_extra_from_clause( - self, update_stmt, from_table, extra_froms, from_hints, **kw + self, delete_stmt, from_table, extra_froms, from_hints, **kw ): """Provide a hook to override the generation of an DELETE..FROM clause. @@ -6519,7 +6557,7 @@ def visit_sequence(self, sequence, **kw): def returning_clause( self, stmt: UpdateBase, - returning_cols: Sequence[ColumnElement[Any]], + returning_cols: Sequence[_ColumnsClauseElement], *, populate_result_map: bool, **kw: Any, @@ -6540,7 +6578,7 @@ def update_from_clause( ) def delete_extra_from_clause( - self, update_stmt, from_table, extra_froms, from_hints, **kw + self, delete_stmt, from_table, extra_froms, from_hints, **kw ): kw["asfrom"] = True return ", " + ", ".join( @@ -6587,8 +6625,8 @@ def __init__( compile_kwargs: Mapping[str, Any] = ..., ): ... - @util.memoized_property - def sql_compiler(self): + @util.ro_memoized_property + def sql_compiler(self) -> SQLCompiler: return self.dialect.statement_compiler( self.dialect, None, schema_translate_map=self.schema_translate_map ) @@ -6752,7 +6790,7 @@ def visit_drop_table(self, drop, **kw): def visit_drop_view(self, drop, **kw): return "\nDROP VIEW " + self.preparer.format_table(drop.element) - def _verify_index_table(self, index): + def _verify_index_table(self, index: Index) -> None: if index.table is None: raise exc.CompileError( "Index '%s' is not associated with any table." 
% index.name @@ -6803,7 +6841,9 @@ def visit_drop_index(self, drop, **kw): return text + self._prepared_index_name(index, include_schema=True) - def _prepared_index_name(self, index, include_schema=False): + def _prepared_index_name( + self, index: Index, include_schema: bool = False + ) -> str: if index.table is not None: effective_schema = self.preparer.schema_for_object(index.table) else: @@ -6950,13 +6990,13 @@ def create_table_suffix(self, table): def post_create_table(self, table): return "" - def get_column_default_string(self, column): + def get_column_default_string(self, column: Column[Any]) -> Optional[str]: if isinstance(column.server_default, schema.DefaultClause): return self.render_default_string(column.server_default.arg) else: return None - def render_default_string(self, default): + def render_default_string(self, default: Union[Visitable, str]) -> str: if isinstance(default, str): return self.sql_compiler.render_literal_value( default, sqltypes.STRINGTYPE @@ -6994,7 +7034,9 @@ def visit_column_check_constraint(self, constraint, **kw): text += self.define_constraint_deferrability(constraint) return text - def visit_primary_key_constraint(self, constraint, **kw): + def visit_primary_key_constraint( + self, constraint: PrimaryKeyConstraint, **kw: Any + ) -> str: if len(constraint) == 0: return "" text = "" @@ -7043,7 +7085,9 @@ def define_constraint_remote_table(self, constraint, table, preparer): return preparer.format_table(table) - def visit_unique_constraint(self, constraint, **kw): + def visit_unique_constraint( + self, constraint: UniqueConstraint, **kw: Any + ) -> str: if len(constraint) == 0: return "" text = "" @@ -7058,10 +7102,14 @@ def visit_unique_constraint(self, constraint, **kw): text += self.define_constraint_deferrability(constraint) return text - def define_unique_constraint_distinct(self, constraint, **kw): + def define_unique_constraint_distinct( + self, constraint: UniqueConstraint, **kw: Any + ) -> str: return "" - def define_constraint_cascades(self, constraint): + def define_constraint_cascades( + self, constraint: ForeignKeyConstraint + ) -> str: text = "" if constraint.ondelete is not None: text += " ON DELETE %s" % self.preparer.validate_sql_phrase( @@ -7073,7 +7121,7 @@ def define_constraint_cascades(self, constraint): ) return text - def define_constraint_deferrability(self, constraint): + def define_constraint_deferrability(self, constraint: Constraint) -> str: text = "" if constraint.deferrable is not None: if constraint.deferrable: @@ -7113,19 +7161,21 @@ def visit_identity_column(self, identity, **kw): class GenericTypeCompiler(TypeCompiler): - def visit_FLOAT(self, type_, **kw): + def visit_FLOAT(self, type_: sqltypes.Float[Any], **kw: Any) -> str: return "FLOAT" - def visit_DOUBLE(self, type_, **kw): + def visit_DOUBLE(self, type_: sqltypes.Double[Any], **kw: Any) -> str: return "DOUBLE" - def visit_DOUBLE_PRECISION(self, type_, **kw): + def visit_DOUBLE_PRECISION( + self, type_: sqltypes.DOUBLE_PRECISION[Any], **kw: Any + ) -> str: return "DOUBLE PRECISION" - def visit_REAL(self, type_, **kw): + def visit_REAL(self, type_: sqltypes.REAL[Any], **kw: Any) -> str: return "REAL" - def visit_NUMERIC(self, type_, **kw): + def visit_NUMERIC(self, type_: sqltypes.Numeric[Any], **kw: Any) -> str: if type_.precision is None: return "NUMERIC" elif type_.scale is None: @@ -7136,7 +7186,7 @@ def visit_NUMERIC(self, type_, **kw): "scale": type_.scale, } - def visit_DECIMAL(self, type_, **kw): + def visit_DECIMAL(self, type_: sqltypes.DECIMAL[Any], 
**kw: Any) -> str: if type_.precision is None: return "DECIMAL" elif type_.scale is None: @@ -7147,128 +7197,138 @@ def visit_DECIMAL(self, type_, **kw): "scale": type_.scale, } - def visit_INTEGER(self, type_, **kw): + def visit_INTEGER(self, type_: sqltypes.Integer, **kw: Any) -> str: return "INTEGER" - def visit_SMALLINT(self, type_, **kw): + def visit_SMALLINT(self, type_: sqltypes.SmallInteger, **kw: Any) -> str: return "SMALLINT" - def visit_BIGINT(self, type_, **kw): + def visit_BIGINT(self, type_: sqltypes.BigInteger, **kw: Any) -> str: return "BIGINT" - def visit_TIMESTAMP(self, type_, **kw): + def visit_TIMESTAMP(self, type_: sqltypes.TIMESTAMP, **kw: Any) -> str: return "TIMESTAMP" - def visit_DATETIME(self, type_, **kw): + def visit_DATETIME(self, type_: sqltypes.DateTime, **kw: Any) -> str: return "DATETIME" - def visit_DATE(self, type_, **kw): + def visit_DATE(self, type_: sqltypes.Date, **kw: Any) -> str: return "DATE" - def visit_TIME(self, type_, **kw): + def visit_TIME(self, type_: sqltypes.Time, **kw: Any) -> str: return "TIME" - def visit_CLOB(self, type_, **kw): + def visit_CLOB(self, type_: sqltypes.CLOB, **kw: Any) -> str: return "CLOB" - def visit_NCLOB(self, type_, **kw): + def visit_NCLOB(self, type_: sqltypes.Text, **kw: Any) -> str: return "NCLOB" - def _render_string_type(self, type_, name, length_override=None): + def _render_string_type( + self, name: str, length: Optional[int], collation: Optional[str] + ) -> str: text = name - if length_override: - text += "(%d)" % length_override - elif type_.length: - text += "(%d)" % type_.length - if type_.collation: - text += ' COLLATE "%s"' % type_.collation + if length: + text += f"({length})" + if collation: + text += f' COLLATE "{collation}"' return text - def visit_CHAR(self, type_, **kw): - return self._render_string_type(type_, "CHAR") + def visit_CHAR(self, type_: sqltypes.CHAR, **kw: Any) -> str: + return self._render_string_type("CHAR", type_.length, type_.collation) - def visit_NCHAR(self, type_, **kw): - return self._render_string_type(type_, "NCHAR") + def visit_NCHAR(self, type_: sqltypes.NCHAR, **kw: Any) -> str: + return self._render_string_type("NCHAR", type_.length, type_.collation) - def visit_VARCHAR(self, type_, **kw): - return self._render_string_type(type_, "VARCHAR") + def visit_VARCHAR(self, type_: sqltypes.String, **kw: Any) -> str: + return self._render_string_type( + "VARCHAR", type_.length, type_.collation + ) - def visit_NVARCHAR(self, type_, **kw): - return self._render_string_type(type_, "NVARCHAR") + def visit_NVARCHAR(self, type_: sqltypes.NVARCHAR, **kw: Any) -> str: + return self._render_string_type( + "NVARCHAR", type_.length, type_.collation + ) - def visit_TEXT(self, type_, **kw): - return self._render_string_type(type_, "TEXT") + def visit_TEXT(self, type_: sqltypes.Text, **kw: Any) -> str: + return self._render_string_type("TEXT", type_.length, type_.collation) - def visit_UUID(self, type_, **kw): + def visit_UUID(self, type_: sqltypes.Uuid[Any], **kw: Any) -> str: return "UUID" - def visit_BLOB(self, type_, **kw): + def visit_BLOB(self, type_: sqltypes.LargeBinary, **kw: Any) -> str: return "BLOB" - def visit_BINARY(self, type_, **kw): + def visit_BINARY(self, type_: sqltypes.BINARY, **kw: Any) -> str: return "BINARY" + (type_.length and "(%d)" % type_.length or "") - def visit_VARBINARY(self, type_, **kw): + def visit_VARBINARY(self, type_: sqltypes.VARBINARY, **kw: Any) -> str: return "VARBINARY" + (type_.length and "(%d)" % type_.length or "") - def visit_BOOLEAN(self, 
type_, **kw): + def visit_BOOLEAN(self, type_: sqltypes.Boolean, **kw: Any) -> str: return "BOOLEAN" - def visit_uuid(self, type_, **kw): + def visit_uuid(self, type_: sqltypes.Uuid[Any], **kw: Any) -> str: if not type_.native_uuid or not self.dialect.supports_native_uuid: - return self._render_string_type(type_, "CHAR", length_override=32) + return self._render_string_type("CHAR", length=32, collation=None) else: return self.visit_UUID(type_, **kw) - def visit_large_binary(self, type_, **kw): + def visit_large_binary( + self, type_: sqltypes.LargeBinary, **kw: Any + ) -> str: return self.visit_BLOB(type_, **kw) - def visit_boolean(self, type_, **kw): + def visit_boolean(self, type_: sqltypes.Boolean, **kw: Any) -> str: return self.visit_BOOLEAN(type_, **kw) - def visit_time(self, type_, **kw): + def visit_time(self, type_: sqltypes.Time, **kw: Any) -> str: return self.visit_TIME(type_, **kw) - def visit_datetime(self, type_, **kw): + def visit_datetime(self, type_: sqltypes.DateTime, **kw: Any) -> str: return self.visit_DATETIME(type_, **kw) - def visit_date(self, type_, **kw): + def visit_date(self, type_: sqltypes.Date, **kw: Any) -> str: return self.visit_DATE(type_, **kw) - def visit_big_integer(self, type_, **kw): + def visit_big_integer(self, type_: sqltypes.BigInteger, **kw: Any) -> str: return self.visit_BIGINT(type_, **kw) - def visit_small_integer(self, type_, **kw): + def visit_small_integer( + self, type_: sqltypes.SmallInteger, **kw: Any + ) -> str: return self.visit_SMALLINT(type_, **kw) - def visit_integer(self, type_, **kw): + def visit_integer(self, type_: sqltypes.Integer, **kw: Any) -> str: return self.visit_INTEGER(type_, **kw) - def visit_real(self, type_, **kw): + def visit_real(self, type_: sqltypes.REAL[Any], **kw: Any) -> str: return self.visit_REAL(type_, **kw) - def visit_float(self, type_, **kw): + def visit_float(self, type_: sqltypes.Float[Any], **kw: Any) -> str: return self.visit_FLOAT(type_, **kw) - def visit_double(self, type_, **kw): + def visit_double(self, type_: sqltypes.Double[Any], **kw: Any) -> str: return self.visit_DOUBLE(type_, **kw) - def visit_numeric(self, type_, **kw): + def visit_numeric(self, type_: sqltypes.Numeric[Any], **kw: Any) -> str: return self.visit_NUMERIC(type_, **kw) - def visit_string(self, type_, **kw): + def visit_string(self, type_: sqltypes.String, **kw: Any) -> str: return self.visit_VARCHAR(type_, **kw) - def visit_unicode(self, type_, **kw): + def visit_unicode(self, type_: sqltypes.Unicode, **kw: Any) -> str: return self.visit_VARCHAR(type_, **kw) - def visit_text(self, type_, **kw): + def visit_text(self, type_: sqltypes.Text, **kw: Any) -> str: return self.visit_TEXT(type_, **kw) - def visit_unicode_text(self, type_, **kw): + def visit_unicode_text( + self, type_: sqltypes.UnicodeText, **kw: Any + ) -> str: return self.visit_TEXT(type_, **kw) - def visit_enum(self, type_, **kw): + def visit_enum(self, type_: sqltypes.Enum, **kw: Any) -> str: return self.visit_VARCHAR(type_, **kw) def visit_null(self, type_, **kw): @@ -7278,10 +7338,14 @@ def visit_null(self, type_, **kw): "type on this Column?" 
% type_ ) - def visit_type_decorator(self, type_, **kw): + def visit_type_decorator( + self, type_: TypeDecorator[Any], **kw: Any + ) -> str: return self.process(type_.type_engine(self.dialect), **kw) - def visit_user_defined(self, type_, **kw): + def visit_user_defined( + self, type_: UserDefinedType[Any], **kw: Any + ) -> str: return type_.get_col_spec(**kw) @@ -7356,12 +7420,12 @@ class IdentifierPreparer: def __init__( self, - dialect, - initial_quote='"', - final_quote=None, - escape_quote='"', - quote_case_sensitive_collations=True, - omit_schema=False, + dialect: Dialect, + initial_quote: str = '"', + final_quote: Optional[str] = None, + escape_quote: str = '"', + quote_case_sensitive_collations: bool = True, + omit_schema: bool = False, ): """Construct a new ``IdentifierPreparer`` object. @@ -7414,7 +7478,9 @@ def symbol_getter(obj): prep._includes_none_schema_translate = includes_none return prep - def _render_schema_translates(self, statement, schema_translate_map): + def _render_schema_translates( + self, statement: str, schema_translate_map: SchemaTranslateMapType + ) -> str: d = schema_translate_map if None in d: if not self._includes_none_schema_translate: @@ -7426,7 +7492,7 @@ def _render_schema_translates(self, statement, schema_translate_map): "schema_translate_map dictionaries." ) - d["_none"] = d[None] + d["_none"] = d[None] # type: ignore[index] def replace(m): name = m.group(2) @@ -7619,7 +7685,9 @@ def format_collation(self, collation_name): else: return collation_name - def format_sequence(self, sequence, use_schema=True): + def format_sequence( + self, sequence: schema.Sequence, use_schema: bool = True + ) -> str: name = self.quote(sequence.name) effective_schema = self.schema_for_object(sequence) @@ -7656,7 +7724,9 @@ def format_savepoint(self, savepoint, name=None): return ident @util.preload_module("sqlalchemy.sql.naming") - def format_constraint(self, constraint, _alembic_quote=True): + def format_constraint( + self, constraint: Union[Constraint, Index], _alembic_quote: bool = True + ) -> Optional[str]: naming = util.preloaded.sql_naming if constraint.name is _NONE_NAME: @@ -7669,6 +7739,7 @@ def format_constraint(self, constraint, _alembic_quote=True): else: name = constraint.name + assert name is not None if constraint.__visit_name__ == "index": return self.truncate_and_render_index_name( name, _alembic_quote=_alembic_quote @@ -7678,7 +7749,9 @@ def format_constraint(self, constraint, _alembic_quote=True): name, _alembic_quote=_alembic_quote ) - def truncate_and_render_index_name(self, name, _alembic_quote=True): + def truncate_and_render_index_name( + self, name: str, _alembic_quote: bool = True + ) -> str: # calculate these at format time so that ad-hoc changes # to dialect.max_identifier_length etc. can be reflected # as IdentifierPreparer is long lived @@ -7690,7 +7763,9 @@ def truncate_and_render_index_name(self, name, _alembic_quote=True): name, max_, _alembic_quote ) - def truncate_and_render_constraint_name(self, name, _alembic_quote=True): + def truncate_and_render_constraint_name( + self, name: str, _alembic_quote: bool = True + ) -> str: # calculate these at format time so that ad-hoc changes # to dialect.max_identifier_length etc. 
can be reflected # as IdentifierPreparer is long lived @@ -7702,7 +7777,9 @@ def truncate_and_render_constraint_name(self, name, _alembic_quote=True): name, max_, _alembic_quote ) - def _truncate_and_render_maxlen_name(self, name, max_, _alembic_quote): + def _truncate_and_render_maxlen_name( + self, name: str, max_: int, _alembic_quote: bool + ) -> str: if isinstance(name, elements._truncated_label): if len(name) > max_: name = name[0 : max_ - 8] + "_" + util.md5_hex(name)[-4:] @@ -7714,13 +7791,21 @@ def _truncate_and_render_maxlen_name(self, name, max_, _alembic_quote): else: return self.quote(name) - def format_index(self, index): - return self.format_constraint(index) + def format_index(self, index: Index) -> str: + name = self.format_constraint(index) + assert name is not None + return name - def format_table(self, table, use_schema=True, name=None): + def format_table( + self, + table: FromClause, + use_schema: bool = True, + name: Optional[str] = None, + ) -> str: """Prepare a quoted table and schema name.""" - if name is None: + if TYPE_CHECKING: + assert isinstance(table, NamedFromClause) name = table.name result = self.quote(name) @@ -7752,17 +7837,18 @@ def format_label_name( def format_column( self, - column, - use_table=False, - name=None, - table_name=None, - use_schema=False, - anon_map=None, - ): + column: ColumnElement[Any], + use_table: bool = False, + name: Optional[str] = None, + table_name: Optional[str] = None, + use_schema: bool = False, + anon_map: Optional[Mapping[str, Any]] = None, + ) -> str: """Prepare a quoted column name.""" if name is None: name = column.name + assert name is not None if anon_map is not None and isinstance( name, elements._truncated_label @@ -7830,7 +7916,7 @@ def _r_identifiers(self): ) return r - def unformat_identifiers(self, identifiers): + def unformat_identifiers(self, identifiers: str) -> Sequence[str]: """Unpack 'schema.table.column'-like strings into components.""" r = self._r_identifiers diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index 81a49151cc3..a81f5f9cbe1 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -17,11 +17,14 @@ import typing from typing import Any from typing import Callable +from typing import Generic from typing import Iterable from typing import List from typing import Optional from typing import Sequence as typing_Sequence from typing import Tuple +from typing import TypeVar +from typing import Union from . 
import roles from .base import _generative @@ -38,10 +41,12 @@ from .compiler import Compiled from .compiler import DDLCompiler from .elements import BindParameter + from .schema import Column from .schema import Constraint from .schema import ForeignKeyConstraint + from .schema import Index from .schema import SchemaItem - from .schema import Sequence + from .schema import Sequence as Sequence # noqa: F401 from .schema import Table from .selectable import TableClause from ..engine.base import Connection @@ -50,6 +55,8 @@ from ..engine.interfaces import Dialect from ..engine.interfaces import SchemaTranslateMapType +_SI = TypeVar("_SI", bound=Union["SchemaItem", str]) + class BaseDDLElement(ClauseElement): """The root of DDL constructs, including those that are sub-elements @@ -87,7 +94,7 @@ class DDLIfCallable(Protocol): def __call__( self, ddl: BaseDDLElement, - target: SchemaItem, + target: Union[SchemaItem, str], bind: Optional[Connection], tables: Optional[List[Table]] = None, state: Optional[Any] = None, @@ -106,7 +113,7 @@ class DDLIf(typing.NamedTuple): def _should_execute( self, ddl: BaseDDLElement, - target: SchemaItem, + target: Union[SchemaItem, str], bind: Optional[Connection], compiler: Optional[DDLCompiler] = None, **kw: Any, @@ -172,7 +179,7 @@ class ExecutableDDLElement(roles.DDLRole, Executable, BaseDDLElement): """ _ddl_if: Optional[DDLIf] = None - target: Optional[SchemaItem] = None + target: Union[SchemaItem, str, None] = None def _execute_on_connection( self, connection, distilled_params, execution_options @@ -415,7 +422,7 @@ def __repr__(self): ) -class _CreateDropBase(ExecutableDDLElement): +class _CreateDropBase(ExecutableDDLElement, Generic[_SI]): """Base class for DDL constructs that represent CREATE and DROP or equivalents. @@ -425,15 +432,13 @@ class _CreateDropBase(ExecutableDDLElement): """ - def __init__( - self, - element, - ): + def __init__(self, element: _SI) -> None: self.element = self.target = element self._ddl_if = getattr(element, "_ddl_if", None) @property def stringify_dialect(self): + assert not isinstance(self.element, str) return self.element.create_drop_stringify_dialect def _create_rule_disable(self, compiler): @@ -447,19 +452,19 @@ def _create_rule_disable(self, compiler): return False -class _CreateBase(_CreateDropBase): - def __init__(self, element, if_not_exists=False): +class _CreateBase(_CreateDropBase[_SI]): + def __init__(self, element: _SI, if_not_exists: bool = False) -> None: super().__init__(element) self.if_not_exists = if_not_exists -class _DropBase(_CreateDropBase): - def __init__(self, element, if_exists=False): +class _DropBase(_CreateDropBase[_SI]): + def __init__(self, element: _SI, if_exists: bool = False) -> None: super().__init__(element) self.if_exists = if_exists -class CreateSchema(_CreateBase): +class CreateSchema(_CreateBase[str]): """Represent a CREATE SCHEMA statement. The argument here is the string name of the schema. @@ -474,13 +479,13 @@ def __init__( self, name: str, if_not_exists: bool = False, - ): + ) -> None: """Create a new :class:`.CreateSchema` construct.""" super().__init__(element=name, if_not_exists=if_not_exists) -class DropSchema(_DropBase): +class DropSchema(_DropBase[str]): """Represent a DROP SCHEMA statement. The argument here is the string name of the schema. 
@@ -496,14 +501,14 @@ def __init__( name: str, cascade: bool = False, if_exists: bool = False, - ): + ) -> None: """Create a new :class:`.DropSchema` construct.""" super().__init__(element=name, if_exists=if_exists) self.cascade = cascade -class CreateTable(_CreateBase): +class CreateTable(_CreateBase["Table"]): """Represent a CREATE TABLE statement.""" __visit_name__ = "create_table" @@ -515,7 +520,7 @@ def __init__( typing_Sequence[ForeignKeyConstraint] ] = None, if_not_exists: bool = False, - ): + ) -> None: """Create a :class:`.CreateTable` construct. :param element: a :class:`_schema.Table` that's the subject @@ -537,7 +542,7 @@ def __init__( self.include_foreign_key_constraints = include_foreign_key_constraints -class _DropView(_DropBase): +class _DropView(_DropBase["Table"]): """Semi-public 'DROP VIEW' construct. Used by the test suite for dialect-agnostic drops of views. @@ -549,7 +554,9 @@ class _DropView(_DropBase): class CreateConstraint(BaseDDLElement): - def __init__(self, element: Constraint): + element: Constraint + + def __init__(self, element: Constraint) -> None: self.element = element @@ -666,16 +673,18 @@ def skip_xmin(element, compiler, **kw): __visit_name__ = "create_column" - def __init__(self, element): + element: Column[Any] + + def __init__(self, element: Column[Any]) -> None: self.element = element -class DropTable(_DropBase): +class DropTable(_DropBase["Table"]): """Represent a DROP TABLE statement.""" __visit_name__ = "drop_table" - def __init__(self, element: Table, if_exists: bool = False): + def __init__(self, element: Table, if_exists: bool = False) -> None: """Create a :class:`.DropTable` construct. :param element: a :class:`_schema.Table` that's the subject @@ -690,30 +699,24 @@ def __init__(self, element: Table, if_exists: bool = False): super().__init__(element, if_exists=if_exists) -class CreateSequence(_CreateBase): +class CreateSequence(_CreateBase["Sequence"]): """Represent a CREATE SEQUENCE statement.""" __visit_name__ = "create_sequence" - def __init__(self, element: Sequence, if_not_exists: bool = False): - super().__init__(element, if_not_exists=if_not_exists) - -class DropSequence(_DropBase): +class DropSequence(_DropBase["Sequence"]): """Represent a DROP SEQUENCE statement.""" __visit_name__ = "drop_sequence" - def __init__(self, element: Sequence, if_exists: bool = False): - super().__init__(element, if_exists=if_exists) - -class CreateIndex(_CreateBase): +class CreateIndex(_CreateBase["Index"]): """Represent a CREATE INDEX statement.""" __visit_name__ = "create_index" - def __init__(self, element, if_not_exists=False): + def __init__(self, element: Index, if_not_exists: bool = False) -> None: """Create a :class:`.Createindex` construct. :param element: a :class:`_schema.Index` that's the subject @@ -727,12 +730,12 @@ def __init__(self, element, if_not_exists=False): super().__init__(element, if_not_exists=if_not_exists) -class DropIndex(_DropBase): +class DropIndex(_DropBase["Index"]): """Represent a DROP INDEX statement.""" __visit_name__ = "drop_index" - def __init__(self, element, if_exists=False): + def __init__(self, element: Index, if_exists: bool = False) -> None: """Create a :class:`.DropIndex` construct. 
:param element: a :class:`_schema.Index` that's the subject @@ -746,7 +749,7 @@ def __init__(self, element, if_exists=False): super().__init__(element, if_exists=if_exists) -class AddConstraint(_CreateBase): +class AddConstraint(_CreateBase["Constraint"]): """Represent an ALTER TABLE ADD CONSTRAINT statement.""" __visit_name__ = "add_constraint" @@ -756,7 +759,7 @@ def __init__( element: Constraint, *, isolate_from_table: bool = True, - ): + ) -> None: """Construct a new :class:`.AddConstraint` construct. :param element: a :class:`.Constraint` object @@ -780,7 +783,7 @@ def __init__( ) -class DropConstraint(_DropBase): +class DropConstraint(_DropBase["Constraint"]): """Represent an ALTER TABLE DROP CONSTRAINT statement.""" __visit_name__ = "drop_constraint" @@ -793,7 +796,7 @@ def __init__( if_exists: bool = False, isolate_from_table: bool = True, **kw: Any, - ): + ) -> None: """Construct a new :class:`.DropConstraint` construct. :param element: a :class:`.Constraint` object @@ -821,13 +824,13 @@ def __init__( ) -class SetTableComment(_CreateDropBase): +class SetTableComment(_CreateDropBase["Table"]): """Represent a COMMENT ON TABLE IS statement.""" __visit_name__ = "set_table_comment" -class DropTableComment(_CreateDropBase): +class DropTableComment(_CreateDropBase["Table"]): """Represent a COMMENT ON TABLE '' statement. Note this varies a lot across database backends. @@ -837,25 +840,25 @@ class DropTableComment(_CreateDropBase): __visit_name__ = "drop_table_comment" -class SetColumnComment(_CreateDropBase): +class SetColumnComment(_CreateDropBase["Column[Any]"]): """Represent a COMMENT ON COLUMN IS statement.""" __visit_name__ = "set_column_comment" -class DropColumnComment(_CreateDropBase): +class DropColumnComment(_CreateDropBase["Column[Any]"]): """Represent a COMMENT ON COLUMN IS NULL statement.""" __visit_name__ = "drop_column_comment" -class SetConstraintComment(_CreateDropBase): +class SetConstraintComment(_CreateDropBase["Constraint"]): """Represent a COMMENT ON CONSTRAINT IS statement.""" __visit_name__ = "set_constraint_comment" -class DropConstraintComment(_CreateDropBase): +class DropConstraintComment(_CreateDropBase["Constraint"]): """Represent a COMMENT ON CONSTRAINT IS NULL statement.""" __visit_name__ = "drop_constraint_comment" diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 0a0d432e030..b259f96463e 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -2215,8 +2215,9 @@ class TypeClause(DQLDMLClauseElement): _traverse_internals: _TraverseInternalsType = [ ("type", InternalTraversal.dp_type) ] + type: TypeEngine[Any] - def __init__(self, type_): + def __init__(self, type_: TypeEngine[Any]): self.type = type_ @@ -3883,10 +3884,9 @@ class BinaryExpression(OperatorExpression[_T]): """ - modifiers: Optional[Mapping[str, Any]] - left: ColumnElement[Any] right: ColumnElement[Any] + modifiers: Mapping[str, Any] def __init__( self, diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index cde52209470..1c316eecf62 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -6,9 +6,7 @@ # the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: allow-untyped-defs, allow-untyped-calls -"""SQL specific types. 
- -""" +"""SQL specific types.""" from __future__ import annotations import collections.abc as collections_abc @@ -23,6 +21,7 @@ from typing import Dict from typing import Iterable from typing import List +from typing import Mapping from typing import Optional from typing import overload from typing import Sequence @@ -248,10 +247,14 @@ def process(value): return process - def bind_processor(self, dialect): + def bind_processor( + self, dialect: Dialect + ) -> Optional[_BindProcessorType[str]]: return None - def result_processor(self, dialect, coltype): + def result_processor( + self, dialect: Dialect, coltype: object + ) -> Optional[_ResultProcessorType[str]]: return None @property @@ -872,6 +875,8 @@ def literal_processor(self, dialect): class _Binary(TypeEngine[bytes]): """Define base behavior for binary types.""" + length: Optional[int] + def __init__(self, length: Optional[int] = None): self.length = length @@ -1196,6 +1201,9 @@ def _we_are_the_impl(typ): return _we_are_the_impl(variant_mapping["_default"]) +_EnumTupleArg = Union[Sequence[enum.Enum], Sequence[str]] + + class Enum(String, SchemaType, Emulated, TypeEngine[Union[str, enum.Enum]]): """Generic Enum Type. @@ -1272,7 +1280,18 @@ class MyEnum(enum.Enum): __visit_name__ = "enum" - def __init__(self, *enums: object, **kw: Any): + values_callable: Optional[Callable[[Type[enum.Enum]], Sequence[str]]] + enum_class: Optional[Type[enum.Enum]] + _valid_lookup: Dict[Union[enum.Enum, str, None], Optional[str]] + _object_lookup: Dict[Optional[str], Union[enum.Enum, str, None]] + + @overload + def __init__(self, enums: Type[enum.Enum], **kw: Any) -> None: ... + + @overload + def __init__(self, *enums: str, **kw: Any) -> None: ... + + def __init__(self, *enums: Union[str, Type[enum.Enum]], **kw: Any) -> None: r"""Construct an enum. Keyword arguments which don't apply to a specific backend are ignored @@ -1404,7 +1423,7 @@ class was used, its name (converted to lower case) is used by .. versionchanged:: 2.0 This parameter now defaults to True. """ - self._enum_init(enums, kw) + self._enum_init(enums, kw) # type: ignore[arg-type] @property def _enums_argument(self): @@ -1413,7 +1432,7 @@ def _enums_argument(self): else: return self.enums - def _enum_init(self, enums, kw): + def _enum_init(self, enums: _EnumTupleArg, kw: Dict[str, Any]) -> None: """internal init for :class:`.Enum` and subclasses. 
friendly init helper used by subclasses to remove @@ -1472,15 +1491,19 @@ def _enum_init(self, enums, kw): _adapted_from=kw.pop("_adapted_from", None), ) - def _parse_into_values(self, enums, kw): + def _parse_into_values( + self, enums: _EnumTupleArg, kw: Any + ) -> Tuple[Sequence[str], _EnumTupleArg]: if not enums and "_enums" in kw: enums = kw.pop("_enums") if len(enums) == 1 and hasattr(enums[0], "__members__"): - self.enum_class = enums[0] + self.enum_class = enums[0] # type: ignore[assignment] + assert self.enum_class is not None _members = self.enum_class.__members__ + members: Mapping[str, enum.Enum] if self._omit_aliases is True: # remove aliases members = OrderedDict( @@ -1496,7 +1519,7 @@ def _parse_into_values(self, enums, kw): return values, objects else: self.enum_class = None - return enums, enums + return enums, enums # type: ignore[return-value] def _resolve_for_literal(self, value: Any) -> Enum: tv = type(value) @@ -1586,7 +1609,12 @@ def process_literal(pt): self._generic_type_affinity(_enums=enum_args, **kw), # type: ignore # noqa: E501 ) - def _setup_for_values(self, values, objects, kw): + def _setup_for_values( + self, + values: Sequence[str], + objects: _EnumTupleArg, + kw: Any, + ) -> None: self.enums = list(values) self._valid_lookup = dict(zip(reversed(objects), reversed(values))) @@ -1653,9 +1681,10 @@ def _adapt_expression( comparator_factory = Comparator - def _object_value_for_elem(self, elem): + def _object_value_for_elem(self, elem: str) -> Union[str, enum.Enum]: try: - return self._object_lookup[elem] + # Value will not be None beacuse key is not None + return self._object_lookup[elem] # type: ignore[return-value] except KeyError as err: raise LookupError( "'%s' is not among the defined enum values. " @@ -3616,6 +3645,7 @@ class Uuid(Emulated, TypeEngine[_UUID_RETURN]): __visit_name__ = "uuid" + length: Optional[int] = None collation: Optional[str] = None @overload @@ -3667,7 +3697,9 @@ def coerce_compared_value(self, op, value): else: return super().coerce_compared_value(op, value) - def bind_processor(self, dialect): + def bind_processor( + self, dialect: Dialect + ) -> Optional[_BindProcessorType[_UUID_RETURN]]: character_based_uuid = ( not dialect.supports_native_uuid or not self.native_uuid ) diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 8cdb323b2a6..6e2dfef6659 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -1391,6 +1391,10 @@ def coerce_compared_value( return self + if TYPE_CHECKING: + + def get_col_spec(self, **kw: Any) -> str: ... + class Emulated(TypeEngineMixin): """Mixin for base types that emulate the behavior of a DB-native type. 
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index 29cd0e2b005..06ca1532887 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -480,7 +480,7 @@ def surface_selectables(clause): stack.append(elem.element) -def surface_selectables_only(clause): +def surface_selectables_only(clause: ClauseElement) -> Iterator[ClauseElement]: stack = [clause] while stack: elem = stack.pop() diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index 12283eba94b..c5e00a636d7 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -454,7 +454,9 @@ def to_column_set(x: Any) -> Set[Any]: return x -def update_copy(d, _new=None, **kw): +def update_copy( + d: Dict[Any, Any], _new: Optional[Dict[Any, Any]] = None, **kw: Any +) -> Dict[Any, Any]: """Copy the given dict and update with the given values.""" d = d.copy() diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index 62fd47c6a33..32237833e78 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -56,6 +56,7 @@ from typing_extensions import Self as Self # 3.11 from typing_extensions import TypeAliasType as TypeAliasType # 3.12 from typing_extensions import Never as Never # 3.11 + from typing_extensions import LiteralString as LiteralString # 3.11 _T = TypeVar("_T", bound=Any) _KT = TypeVar("_KT") diff --git a/pyproject.toml b/pyproject.toml index 5f3bd34e532..59107eb23cb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,6 +65,8 @@ reportTypedDictNotRequiredAccess = "warning" mypy_path = "./lib/" show_error_codes = true incremental = true +# would be nice to enable this but too many error are surfaceds +# enable_error_code = "ignore-without-code" [[tool.mypy.overrides]] diff --git a/test/dialect/oracle/test_dialect.py b/test/dialect/oracle/test_dialect.py index 8ea523fb7e5..1f8a23f70dc 100644 --- a/test/dialect/oracle/test_dialect.py +++ b/test/dialect/oracle/test_dialect.py @@ -681,7 +681,6 @@ def server_version_info(conn): dialect._get_server_version_info = server_version_info dialect.get_isolation_level = Mock() - dialect._check_unicode_returns = Mock() dialect._check_unicode_description = Mock() dialect._get_default_schema_name = Mock() dialect._detect_decimal_char = Mock() From 6883304451acfa020c47e37d942c2c8023f2dbbc Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Fri, 14 Mar 2025 17:01:50 -0400 Subject: [PATCH 469/544] Add type annotations to `postgresql.array` Improved static typing for `postgresql.array()` by making the type parameter (the type of array's elements) inferred from the `clauses` and `type_` arguments while also ensuring they are consistent. Also completed type annotations of `postgresql.ARRAY` following commit 0bf7e02afbec557eb3a5607db407f27deb7aac77 and added type annotations for functions `postgresql.Any()` and `postgresql.All()`. Finally, fixed shadowing `typing.Any` by the `Any()` function through aliasing as `typing_Any`. 
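For illustration, a minimal sketch of the inference this enables, based on the
typing tests added to `test/typing/plain_files/dialects/postgresql/pg_stuff.py`
in this change (the types shown in the comments are what a type checker such as
mypy or pyright is expected to report, not runtime values):

    from sqlalchemy import Text
    from sqlalchemy.dialects.postgresql import array

    # element type inferred from the clauses
    ints = array(range(2))        # type checker reports: array[int]

    # element type taken from the explicit type_ argument
    strs = array([], type_=Text)  # type checker reports: array[str]

    # clauses and type_ that disagree are now flagged statically
    array([0], type_=Text)        # mypy: Cannot infer type argument 1 of "array"

An explicit `type_` argument takes precedence over inference from the
elements; with neither present the element type remains NULLTYPE as before.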
Related to #6810 Closes: #12384 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12384 Pull-request-sha: 78eea29f1de850afda036502974521969629de7e Change-Id: I5d35d15ec8ba4d58eeb9bf00abb710e2e585731f (cherry picked from commit 75c8e112c9362f89787d8fc25a6a200700052450) --- lib/sqlalchemy/dialects/postgresql/array.py | 141 +++++++++++------- lib/sqlalchemy/dialects/postgresql/json.py | 2 +- .../dialects/postgresql/pg_stuff.py | 18 +++ 3 files changed, 109 insertions(+), 52 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index 7708769cb53..8cbe0c48cf9 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -4,15 +4,18 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors from __future__ import annotations import re -from typing import Any +from typing import Any as typing_Any +from typing import Iterable from typing import Optional +from typing import Sequence +from typing import TYPE_CHECKING from typing import TypeVar +from typing import Union from .operators import CONTAINED_BY from .operators import CONTAINS @@ -21,28 +24,50 @@ from ... import util from ...sql import expression from ...sql import operators -from ...sql._typing import _TypeEngineArgument - -_T = TypeVar("_T", bound=Any) - - -def Any(other, arrexpr, operator=operators.eq): +if TYPE_CHECKING: + from ...engine.interfaces import Dialect + from ...sql._typing import _ColumnExpressionArgument + from ...sql._typing import _TypeEngineArgument + from ...sql.elements import ColumnElement + from ...sql.elements import Grouping + from ...sql.expression import BindParameter + from ...sql.operators import OperatorType + from ...sql.selectable import _SelectIterable + from ...sql.type_api import _BindProcessorType + from ...sql.type_api import _LiteralProcessorType + from ...sql.type_api import _ResultProcessorType + from ...sql.type_api import TypeEngine + from ...util.typing import Self + + +_T = TypeVar("_T", bound=typing_Any) + + +def Any( + other: typing_Any, + arrexpr: _ColumnExpressionArgument[_T], + operator: OperatorType = operators.eq, +) -> ColumnElement[bool]: """A synonym for the ARRAY-level :meth:`.ARRAY.Comparator.any` method. See that method for details. """ - return arrexpr.any(other, operator) + return arrexpr.any(other, operator) # type: ignore[no-any-return, union-attr] # noqa: E501 -def All(other, arrexpr, operator=operators.eq): +def All( + other: typing_Any, + arrexpr: _ColumnExpressionArgument[_T], + operator: OperatorType = operators.eq, +) -> ColumnElement[bool]: """A synonym for the ARRAY-level :meth:`.ARRAY.Comparator.all` method. See that method for details. 
""" - return arrexpr.all(other, operator) + return arrexpr.all(other, operator) # type: ignore[no-any-return, union-attr] # noqa: E501 class array(expression.ExpressionClauseList[_T]): @@ -107,16 +132,19 @@ class array(expression.ExpressionClauseList[_T]): stringify_dialect = "postgresql" inherit_cache = True - def __init__(self, clauses, **kw): - type_arg = kw.pop("type_", None) + def __init__( + self, + clauses: Iterable[_T], + *, + type_: Optional[_TypeEngineArgument[_T]] = None, + **kw: typing_Any, + ): super().__init__(operators.comma_op, *clauses, **kw) - self._type_tuple = [arg.type for arg in self.clauses] - main_type = ( - type_arg - if type_arg is not None - else self._type_tuple[0] if self._type_tuple else sqltypes.NULLTYPE + type_ + if type_ is not None + else self.clauses[0].type if self.clauses else sqltypes.NULLTYPE ) if isinstance(main_type, ARRAY): @@ -127,15 +155,21 @@ def __init__(self, clauses, **kw): if main_type.dimensions is not None else 2 ), - ) + ) # type: ignore[assignment] else: - self.type = ARRAY(main_type) + self.type = ARRAY(main_type) # type: ignore[assignment] @property - def _select_iterable(self): + def _select_iterable(self) -> _SelectIterable: return (self,) - def _bind_param(self, operator, obj, _assume_scalar=False, type_=None): + def _bind_param( + self, + operator: OperatorType, + obj: typing_Any, + type_: Optional[TypeEngine[_T]] = None, + _assume_scalar: bool = False, + ) -> BindParameter[_T]: if _assume_scalar or operator is operators.getitem: return expression.BindParameter( None, @@ -154,9 +188,11 @@ def _bind_param(self, operator, obj, _assume_scalar=False, type_=None): ) for o in obj ] - ) + ) # type: ignore[return-value] - def self_group(self, against=None): + def self_group( + self, against: Optional[OperatorType] = None + ) -> Union[Self, Grouping[_T]]: if against in (operators.any_op, operators.all_op, operators.getitem): return expression.Grouping(self) else: @@ -237,7 +273,7 @@ class SomeOrmClass(Base): def __init__( self, - item_type: _TypeEngineArgument[Any], + item_type: _TypeEngineArgument[typing_Any], as_tuple: bool = False, dimensions: Optional[int] = None, zero_indexes: bool = False, @@ -296,7 +332,9 @@ class Comparator(sqltypes.ARRAY.Comparator): """ - def contains(self, other, **kwargs): + def contains( + self, other: typing_Any, **kwargs: typing_Any + ) -> ColumnElement[bool]: """Boolean expression. Test if elements are a superset of the elements of the argument array expression. @@ -305,7 +343,7 @@ def contains(self, other, **kwargs): """ return self.operate(CONTAINS, other, result_type=sqltypes.Boolean) - def contained_by(self, other): + def contained_by(self, other: typing_Any) -> ColumnElement[bool]: """Boolean expression. Test if elements are a proper subset of the elements of the argument array expression. """ @@ -313,7 +351,7 @@ def contained_by(self, other): CONTAINED_BY, other, result_type=sqltypes.Boolean ) - def overlap(self, other): + def overlap(self, other: typing_Any) -> ColumnElement[bool]: """Boolean expression. Test if array has elements in common with an argument array expression. 
""" @@ -321,35 +359,26 @@ def overlap(self, other): comparator_factory = Comparator - @property - def hashable(self): - return self.as_tuple - - @property - def python_type(self): - return list - - def compare_values(self, x, y): - return x == y - @util.memoized_property - def _against_native_enum(self): + def _against_native_enum(self) -> bool: return ( isinstance(self.item_type, sqltypes.Enum) and self.item_type.native_enum ) - def literal_processor(self, dialect): + def literal_processor( + self, dialect: Dialect + ) -> Optional[_LiteralProcessorType[_T]]: item_proc = self.item_type.dialect_impl(dialect).literal_processor( dialect ) if item_proc is None: return None - def to_str(elements): + def to_str(elements: Iterable[typing_Any]) -> str: return f"ARRAY[{', '.join(elements)}]" - def process(value): + def process(value: Sequence[typing_Any]) -> str: inner = self._apply_item_processor( value, item_proc, self.dimensions, to_str ) @@ -357,12 +386,16 @@ def process(value): return process - def bind_processor(self, dialect): + def bind_processor( + self, dialect: Dialect + ) -> Optional[_BindProcessorType[Sequence[typing_Any]]]: item_proc = self.item_type.dialect_impl(dialect).bind_processor( dialect ) - def process(value): + def process( + value: Optional[Sequence[typing_Any]], + ) -> Optional[list[typing_Any]]: if value is None: return value else: @@ -372,12 +405,16 @@ def process(value): return process - def result_processor(self, dialect, coltype): + def result_processor( + self, dialect: Dialect, coltype: object + ) -> _ResultProcessorType[Sequence[typing_Any]]: item_proc = self.item_type.dialect_impl(dialect).result_processor( dialect, coltype ) - def process(value): + def process( + value: Sequence[typing_Any], + ) -> Optional[Sequence[typing_Any]]: if value is None: return value else: @@ -392,11 +429,13 @@ def process(value): super_rp = process pattern = re.compile(r"^{(.*)}$") - def handle_raw_string(value): - inner = pattern.match(value).group(1) + def handle_raw_string(value: str) -> list[str]: + inner = pattern.match(value).group(1) # type: ignore[union-attr] # noqa: E501 return _split_enum_values(inner) - def process(value): + def process( + value: Sequence[typing_Any], + ) -> Optional[Sequence[typing_Any]]: if value is None: return value # isinstance(value, str) is required to handle @@ -411,7 +450,7 @@ def process(value): return process -def _split_enum_values(array_string): +def _split_enum_values(array_string: str) -> list[str]: if '"' not in array_string: # no escape char is present so it can just split on the comma return array_string.split(",") if array_string else [] diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index 663be8b7a2b..06f8db5b2af 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -337,7 +337,7 @@ def delete_path( .. 
versionadded:: 2.0 """ if not isinstance(array, _pg_array): - array = _pg_array(array) # type: ignore[no-untyped-call] + array = _pg_array(array) right_side = cast(array, ARRAY(sqltypes.TEXT)) return self.operate(DELETE_PATH, right_side, result_type=JSONB) diff --git a/test/typing/plain_files/dialects/postgresql/pg_stuff.py b/test/typing/plain_files/dialects/postgresql/pg_stuff.py index 5e56efba98c..45ec981bbae 100644 --- a/test/typing/plain_files/dialects/postgresql/pg_stuff.py +++ b/test/typing/plain_files/dialects/postgresql/pg_stuff.py @@ -99,3 +99,21 @@ class Test(Base): # EXPECTED_TYPE: Select[Tuple[Range[int], Sequence[Range[int]]]] reveal_type(range_col_stmt) + +array_from_ints = array(range(2)) + +# EXPECTED_TYPE: array[int] +reveal_type(array_from_ints) + +array_of_strings = array([], type_=Text) + +# EXPECTED_TYPE: array[str] +reveal_type(array_of_strings) + +array_of_ints = array([0], type_=Integer) + +# EXPECTED_TYPE: array[int] +reveal_type(array_of_ints) + +# EXPECTED_MYPY: Cannot infer type argument 1 of "array" +array([0], type_=Text) From f6296c19f557111c5b34a8403d97a76b701280b3 Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Thu, 13 Mar 2025 08:43:53 -0400 Subject: [PATCH 470/544] Support column list for foreign key ON DELETE SET actions on PostgreSQL Added support for specifying a list of columns for ``SET NULL`` and ``SET DEFAULT`` actions of ``ON DELETE`` clause of foreign key definition on PostgreSQL. Pull request courtesy Denis Laxalde. Fixes: #11595 Closes: #12421 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12421 Pull-request-sha: d0394db7066ba8a8eaf3d3972d779f3e170e9406 Change-Id: I036a559ae4a8efafe9ba64d776a840bd785a7397 (cherry picked from commit 39bb17442ce6ac9a3dde5e2b72376b77ffce5e28) --- doc/build/changelog/unreleased_20/11595.rst | 11 +++++ doc/build/core/constraints.rst | 14 +++++- lib/sqlalchemy/dialects/postgresql/base.py | 40 ++++++++++++++++- lib/sqlalchemy/sql/compiler.py | 23 +++++++--- lib/sqlalchemy/sql/schema.py | 28 +++++++++--- test/dialect/postgresql/test_compiler.py | 42 ++++++++++++++++++ test/dialect/postgresql/test_reflection.py | 49 +++++++++++++++++++++ test/sql/test_compiler.py | 6 ++- 8 files changed, 198 insertions(+), 15 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/11595.rst diff --git a/doc/build/changelog/unreleased_20/11595.rst b/doc/build/changelog/unreleased_20/11595.rst new file mode 100644 index 00000000000..faefd245c04 --- /dev/null +++ b/doc/build/changelog/unreleased_20/11595.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: usecase, postgresql + :tickets: 11595 + + Added support for specifying a list of columns for ``SET NULL`` and ``SET + DEFAULT`` actions of ``ON DELETE`` clause of foreign key definition on + PostgreSQL. Pull request courtesy Denis Laxalde. + + .. seealso:: + + :ref:`postgresql_constraint_options` diff --git a/doc/build/core/constraints.rst b/doc/build/core/constraints.rst index c63ad858e2c..9251bbf8306 100644 --- a/doc/build/core/constraints.rst +++ b/doc/build/core/constraints.rst @@ -308,8 +308,12 @@ arguments. The value is any string which will be output after the appropriate ), ) -Note that these clauses require ``InnoDB`` tables when used with MySQL. -They may also not be supported on other databases. +Note that some backends have special requirements for cascades to function: + +* MySQL / MariaDB - the ``InnoDB`` storage engine should be used (this is + typically the default in modern databases) +* SQLite - constraints are not enabled by default. 
+ See :ref:`sqlite_foreign_keys` .. seealso:: @@ -320,6 +324,12 @@ They may also not be supported on other databases. :ref:`passive_deletes_many_to_many` + :ref:`postgresql_constraint_options` - indicates additional options + available for foreign key cascades such as column lists + + :ref:`sqlite_foreign_keys` - background on enabling foreign key support + with SQLite + .. _schema_unique_constraint: UNIQUE Constraint diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index c8f15d14471..45c83d7209c 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1264,6 +1264,29 @@ def update(): `_ - in the PostgreSQL documentation. +* Column list with foreign key ``ON DELETE SET`` actions: This applies to + :class:`.ForeignKey` and :class:`.ForeignKeyConstraint`, the :paramref:`.ForeignKey.ondelete` + parameter will accept on the PostgreSQL backend only a string list of column + names inside parenthesis, following the ``SET NULL`` or ``SET DEFAULT`` + phrases, which will limit the set of columns that are subject to the + action:: + + fktable = Table( + "fktable", + metadata, + Column("tid", Integer), + Column("id", Integer), + Column("fk_id_del_set_null", Integer), + ForeignKeyConstraint( + columns=["tid", "fk_id_del_set_null"], + refcolumns=[pktable.c.tid, pktable.c.id], + ondelete="SET NULL (fk_id_del_set_null)", + ), + ) + + .. versionadded:: 2.0.40 + + .. _postgresql_table_valued_overview: Table values, Table and Column valued functions, Row and Tuple objects @@ -1673,6 +1696,7 @@ def update(): "verbose", } + colspecs = { sqltypes.ARRAY: _array.ARRAY, sqltypes.Interval: INTERVAL, @@ -2257,6 +2281,19 @@ def visit_foreign_key_constraint(self, constraint, **kw): text += self._define_constraint_validity(constraint) return text + @util.memoized_property + def _fk_ondelete_pattern(self): + return re.compile( + r"^(?:RESTRICT|CASCADE|SET (?:NULL|DEFAULT)(?:\s*\(.+\))?" + r"|NO ACTION)$", + re.I, + ) + + def define_constraint_ondelete_cascade(self, constraint): + return " ON DELETE %s" % self.preparer.validate_sql_phrase( + constraint.ondelete, self._fk_ondelete_pattern + ) + def visit_create_enum_type(self, create, **kw): type_ = create.element @@ -4258,7 +4295,8 @@ def _fk_regex_pattern(self): r"[\s]?(ON UPDATE " r"(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?" r"[\s]?(ON DELETE " - r"(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?" + r"(CASCADE|RESTRICT|NO ACTION|" + r"SET (?:NULL|DEFAULT)(?:\s\(.+\))?)+)?" r"[\s]?(DEFERRABLE|NOT DEFERRABLE)?" r"[\s]?(INITIALLY (DEFERRED|IMMEDIATE)+)?" 
) diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 146dde7a1bc..f55f0d96bb7 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -7112,15 +7112,26 @@ def define_constraint_cascades( ) -> str: text = "" if constraint.ondelete is not None: - text += " ON DELETE %s" % self.preparer.validate_sql_phrase( - constraint.ondelete, FK_ON_DELETE - ) + text += self.define_constraint_ondelete_cascade(constraint) + if constraint.onupdate is not None: - text += " ON UPDATE %s" % self.preparer.validate_sql_phrase( - constraint.onupdate, FK_ON_UPDATE - ) + text += self.define_constraint_onupdate_cascade(constraint) return text + def define_constraint_ondelete_cascade( + self, constraint: ForeignKeyConstraint + ) -> str: + return " ON DELETE %s" % self.preparer.validate_sql_phrase( + constraint.ondelete, FK_ON_DELETE + ) + + def define_constraint_onupdate_cascade( + self, constraint: ForeignKeyConstraint + ) -> str: + return " ON UPDATE %s" % self.preparer.validate_sql_phrase( + constraint.onupdate, FK_ON_UPDATE + ) + def define_constraint_deferrability(self, constraint: Constraint) -> str: text = "" if constraint.deferrable is not None: diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index a6c24ce618d..d65b2de06ae 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -2837,9 +2837,18 @@ def __init__( issuing DDL for this constraint. Typical values include CASCADE, DELETE and RESTRICT. + .. seealso:: + + :ref:`on_update_on_delete` + :param ondelete: Optional string. If set, emit ON DELETE when issuing DDL for this constraint. Typical values include CASCADE, - SET NULL and RESTRICT. + SET NULL and RESTRICT. Some dialects may allow for additional + syntaxes. + + .. seealso:: + + :ref:`on_update_on_delete` :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when issuing DDL for this constraint. @@ -4623,12 +4632,21 @@ def __init__( :param name: Optional, the in-database name of the key. :param onupdate: Optional string. If set, emit ON UPDATE when - issuing DDL for this constraint. Typical values include CASCADE, - DELETE and RESTRICT. + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + + .. seealso:: + + :ref:`on_update_on_delete` :param ondelete: Optional string. If set, emit ON DELETE when - issuing DDL for this constraint. Typical values include CASCADE, - SET NULL and RESTRICT. + issuing DDL for this constraint. Typical values include CASCADE, + SET NULL and RESTRICT. Some dialects may allow for additional + syntaxes. + + .. seealso:: + + :ref:`on_update_on_delete` :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when issuing DDL for this constraint. 
diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index 55a43d20d1e..34ab31395f2 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -1140,6 +1140,48 @@ def test_create_foreign_key_column_not_valid(self): ")", ) + def test_create_foreign_key_constraint_ondelete_column_list(self): + m = MetaData() + pktable = Table( + "pktable", + m, + Column("tid", Integer, primary_key=True), + Column("id", Integer, primary_key=True), + ) + fktable = Table( + "fktable", + m, + Column("tid", Integer), + Column("id", Integer), + Column("fk_id_del_set_null", Integer), + Column("fk_id_del_set_default", Integer, server_default=text("0")), + ForeignKeyConstraint( + columns=["tid", "fk_id_del_set_null"], + refcolumns=[pktable.c.tid, pktable.c.id], + ondelete="SET NULL (fk_id_del_set_null)", + ), + ForeignKeyConstraint( + columns=["tid", "fk_id_del_set_default"], + refcolumns=[pktable.c.tid, pktable.c.id], + ondelete="SET DEFAULT(fk_id_del_set_default)", + ), + ) + + self.assert_compile( + schema.CreateTable(fktable), + "CREATE TABLE fktable (" + "tid INTEGER, id INTEGER, " + "fk_id_del_set_null INTEGER, " + "fk_id_del_set_default INTEGER DEFAULT 0, " + "FOREIGN KEY(tid, fk_id_del_set_null)" + " REFERENCES pktable (tid, id)" + " ON DELETE SET NULL (fk_id_del_set_null), " + "FOREIGN KEY(tid, fk_id_del_set_default)" + " REFERENCES pktable (tid, id)" + " ON DELETE SET DEFAULT(fk_id_del_set_default)" + ")", + ) + def test_exclude_constraint_min(self): m = MetaData() tbl = Table("testtbl", m, Column("room", Integer, primary_key=True)) diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 4d889c6775f..20844a0eaea 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -7,6 +7,7 @@ from sqlalchemy import Column from sqlalchemy import exc from sqlalchemy import ForeignKey +from sqlalchemy import ForeignKeyConstraint from sqlalchemy import Identity from sqlalchemy import Index from sqlalchemy import inspect @@ -20,6 +21,7 @@ from sqlalchemy import Table from sqlalchemy import testing from sqlalchemy import Text +from sqlalchemy import text from sqlalchemy import UniqueConstraint from sqlalchemy.dialects.postgresql import ARRAY from sqlalchemy.dialects.postgresql import base as postgresql @@ -908,6 +910,53 @@ def test_reflected_primary_key_order(self, metadata, connection): subject = Table("subject", meta2, autoload_with=connection) eq_(subject.primary_key.columns.keys(), ["p2", "p1"]) + def test_reflected_foreign_key_ondelete_column_list( + self, metadata, connection + ): + meta1 = metadata + pktable = Table( + "pktable", + meta1, + Column("tid", Integer, primary_key=True), + Column("id", Integer, primary_key=True), + ) + Table( + "fktable", + meta1, + Column("tid", Integer), + Column("id", Integer), + Column("fk_id_del_set_null", Integer), + Column("fk_id_del_set_default", Integer, server_default=text("0")), + ForeignKeyConstraint( + name="fktable_tid_fk_id_del_set_null_fkey", + columns=["tid", "fk_id_del_set_null"], + refcolumns=[pktable.c.tid, pktable.c.id], + ondelete="SET NULL (fk_id_del_set_null)", + ), + ForeignKeyConstraint( + name="fktable_tid_fk_id_del_set_default_fkey", + columns=["tid", "fk_id_del_set_default"], + refcolumns=[pktable.c.tid, pktable.c.id], + ondelete="SET DEFAULT(fk_id_del_set_default)", + ), + ) + + meta1.create_all(connection) + meta2 = MetaData() + fktable = Table("fktable", meta2, 
autoload_with=connection) + fkey_set_null = next( + c + for c in fktable.foreign_key_constraints + if c.name == "fktable_tid_fk_id_del_set_null_fkey" + ) + eq_(fkey_set_null.ondelete, "SET NULL (fk_id_del_set_null)") + fkey_set_default = next( + c + for c in fktable.foreign_key_constraints + if c.name == "fktable_tid_fk_id_del_set_default_fkey" + ) + eq_(fkey_set_default.ondelete, "SET DEFAULT (fk_id_del_set_default)") + def test_pg_weirdchar_reflection(self, metadata, connection): meta1 = metadata subject = Table( diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index 3e8fca59a88..4d546b57b38 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -12,6 +12,7 @@ import datetime import decimal +import re from typing import TYPE_CHECKING from sqlalchemy import alias @@ -6674,6 +6675,9 @@ def test_fk_illegal_sql_phrases(self): "FOO RESTRICT", "CASCADE WRONG", "SET NULL", + # test that PostgreSQL's syntax added in #11595 is not + # accepted by base compiler + "SET NULL(postgresql_db.some_column)", ): const = schema.AddConstraint( schema.ForeignKeyConstraint( @@ -6682,7 +6686,7 @@ def test_fk_illegal_sql_phrases(self): ) assert_raises_message( exc.CompileError, - r"Unexpected SQL phrase: '%s'" % phrase, + rf"Unexpected SQL phrase: '{re.escape(phrase)}'", const.compile, ) From a936360ef01ab78b83d0c16ebbd61b1c55801ac2 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 12 Mar 2025 16:25:48 -0400 Subject: [PATCH 471/544] expand paren rules for default rendering, sqlite/mysql Expanded the rules for when to apply parenthesis to a server default in DDL to suit the general case of a default string that contains non-word characters such as spaces or operators and is not a string literal. Fixed issue in MySQL server default reflection where a default that has spaces would not be correctly reflected. Additionally, expanded the rules for when to apply parenthesis to a server default in DDL to suit the general case of a default string that contains non-word characters such as spaces or operators and is not a string literal. Fixes: #12425 Change-Id: Ie40703dcd5fdc135025d676c01baba57ff3b71ad (cherry picked from commit 1afb820427545e259397b98851a910d7379b2eb8) --- doc/build/changelog/unreleased_20/12425.rst | 18 +++++ doc/build/orm/extensions/asyncio.rst | 2 +- lib/sqlalchemy/dialects/mysql/base.py | 9 +-- lib/sqlalchemy/dialects/mysql/reflection.py | 2 +- lib/sqlalchemy/dialects/sqlite/base.py | 11 +-- lib/sqlalchemy/testing/assertions.py | 4 +- lib/sqlalchemy/testing/requirements.py | 13 ++++ .../testing/suite/test_reflection.py | 44 ++++++++++++ test/dialect/mysql/test_compiler.py | 2 +- test/dialect/mysql/test_query.py | 34 ++++++++++ test/dialect/test_sqlite.py | 67 ++++++++++++------- test/requirements.py | 27 ++++++-- 12 files changed, 193 insertions(+), 40 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12425.rst diff --git a/doc/build/changelog/unreleased_20/12425.rst b/doc/build/changelog/unreleased_20/12425.rst new file mode 100644 index 00000000000..fbc1f8a4ef2 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12425.rst @@ -0,0 +1,18 @@ +.. change:: + :tags: bug, sqlite + :tickets: 12425 + + Expanded the rules for when to apply parenthesis to a server default in DDL + to suit the general case of a default string that contains non-word + characters such as spaces or operators and is not a string literal. + +.. 
change:: + :tags: bug, mysql + :tickets: 12425 + + Fixed issue in MySQL server default reflection where a default that has + spaces would not be correctly reflected. Additionally, expanded the rules + for when to apply parenthesis to a server default in DDL to suit the + general case of a default string that contains non-word characters such as + spaces or operators and is not a string literal. + diff --git a/doc/build/orm/extensions/asyncio.rst b/doc/build/orm/extensions/asyncio.rst index fbd965d15d9..5b881054304 100644 --- a/doc/build/orm/extensions/asyncio.rst +++ b/doc/build/orm/extensions/asyncio.rst @@ -281,7 +281,7 @@ configuration: CREATE TABLE a ( id INTEGER NOT NULL, data VARCHAR NOT NULL, - create_date DATETIME DEFAULT (CURRENT_TIMESTAMP) NOT NULL, + create_date DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, PRIMARY KEY (id) ) ... diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 8bae6193b51..122a7cb2e5e 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1928,12 +1928,13 @@ def get_column_specification(self, column, **kw): colspec.append("AUTO_INCREMENT") else: default = self.get_column_default_string(column) + if default is not None: if ( - isinstance( - column.server_default.arg, functions.FunctionElement - ) - and self.dialect._support_default_function + self.dialect._support_default_function + and not re.match(r"^\s*[\'\"\(]", default) + and "ON UPDATE" not in default + and re.match(r".*\W.*", default) ): colspec.append(f"DEFAULT ({default})") else: diff --git a/lib/sqlalchemy/dialects/mysql/reflection.py b/lib/sqlalchemy/dialects/mysql/reflection.py index 3998be977d9..d62390bb845 100644 --- a/lib/sqlalchemy/dialects/mysql/reflection.py +++ b/lib/sqlalchemy/dialects/mysql/reflection.py @@ -451,7 +451,7 @@ def _prep_regexes(self): r"(?: +COLLATE +(?P[\w_]+))?" r"(?: +(?P(?:NOT )?NULL))?" r"(?: +DEFAULT +(?P" - r"(?:NULL|'(?:''|[^'])*'|[\-\w\.\(\)]+" + r"(?:NULL|'(?:''|[^'])*'|\(.+?\)|[\-\w\.\(\)]+" r"(?: +ON UPDATE [\-\w\.\(\)]+)?)" r"))?" r"(?: +(?:GENERATED ALWAYS)? 
?AS +(?P\(" diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 96b2414ccec..c09fbb32ccc 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -932,7 +932,6 @@ def set_sqlite_pragma(dbapi_connection, connection_record): from ...engine import reflection from ...engine.reflection import ReflectionDefaults from ...sql import coercions -from ...sql import ColumnElement from ...sql import compiler from ...sql import elements from ...sql import roles @@ -1594,9 +1593,13 @@ def get_column_specification(self, column, **kwargs): colspec = self.preparer.format_column(column) + " " + coltype default = self.get_column_default_string(column) if default is not None: - if isinstance(column.server_default.arg, ColumnElement): - default = "(" + default + ")" - colspec += " DEFAULT " + default + + if not re.match(r"""^\s*[\'\"\(]""", default) and re.match( + r".*\W.*", default + ): + colspec += f" DEFAULT ({default})" + else: + colspec += f" DEFAULT {default}" if not column.nullable: colspec += " NOT NULL" diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py index 8364c15f8ff..719692125fb 100644 --- a/lib/sqlalchemy/testing/assertions.py +++ b/lib/sqlalchemy/testing/assertions.py @@ -274,8 +274,8 @@ def int_within_variance(expected, received, variance): ) -def eq_regex(a, b, msg=None): - assert re.match(b, a), msg or "%r !~ %r" % (a, b) +def eq_regex(a, b, msg=None, flags=0): + assert re.match(b, a, flags), msg or "%r !~ %r" % (a, b) def eq_(a, b, msg=None): diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index 93541dca70e..a2c3aa531dc 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -1168,6 +1168,19 @@ def cast_precision_numerics_many_significant_digits(self): """ return self.precision_numerics_many_significant_digits + @property + def server_defaults(self): + """Target backend supports server side defaults for columns""" + + return exclusions.closed() + + @property + def expression_server_defaults(self): + """Target backend supports server side defaults with SQL expressions + for columns""" + + return exclusions.closed() + @property def implicit_decimal_binds(self): """target backend will return a selected Decimal as a Decimal, not diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index 2837e9fe0a3..0f2a2062a8e 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -14,6 +14,7 @@ from .. import config from .. import engines from .. import eq_ +from .. import eq_regex from .. import expect_raises from .. import expect_raises_message from .. import expect_warnings @@ -23,6 +24,8 @@ from ..provision import temp_table_keyword_args from ..schema import Column from ..schema import Table +from ... import Boolean +from ... import DateTime from ... import event from ... import ForeignKey from ... 
import func @@ -2883,6 +2886,47 @@ def test_get_foreign_key_options( eq_(opts, expected) # eq_(dict((k, opts[k]) for k in opts if opts[k]), expected) + @testing.combinations( + (Integer, sa.text("10"), r"'?10'?"), + (Integer, "10", r"'?10'?"), + (Boolean, sa.true(), r"1|true"), + ( + Integer, + sa.text("3 + 5"), + r"3\+5", + testing.requires.expression_server_defaults, + ), + ( + Integer, + sa.text("(3 * 5)"), + r"3\*5", + testing.requires.expression_server_defaults, + ), + (DateTime, func.now(), r"current_timestamp|now|getdate"), + ( + Integer, + sa.literal_column("3") + sa.literal_column("5"), + r"3\+5", + testing.requires.expression_server_defaults, + ), + argnames="datatype, default, expected_reg", + ) + @testing.requires.server_defaults + def test_server_defaults( + self, metadata, connection, datatype, default, expected_reg + ): + t = Table( + "t", + metadata, + Column("id", Integer, primary_key=True), + Column("thecol", datatype, server_default=default), + ) + t.create(connection) + + reflected = inspect(connection).get_columns("t")[1]["default"] + reflected_sanitized = re.sub(r"[\(\) \']", "", reflected) + eq_regex(reflected_sanitized, expected_reg, flags=re.IGNORECASE) + class NormalizedNameTest(fixtures.TablesTest): __requires__ = ("denormalized_names",) diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py index 8387d4e07c6..f9cfeba05b8 100644 --- a/test/dialect/mysql/test_compiler.py +++ b/test/dialect/mysql/test_compiler.py @@ -446,7 +446,7 @@ def test_create_server_default_with_function_using( self.assert_compile( schema.CreateTable(tbl), "CREATE TABLE testtbl (" - "time DATETIME DEFAULT (CURRENT_TIMESTAMP), " + "time DATETIME DEFAULT CURRENT_TIMESTAMP, " "name VARCHAR(255) DEFAULT 'some str', " "description VARCHAR(255) DEFAULT (lower('hi')), " "data JSON DEFAULT (json_object()))", diff --git a/test/dialect/mysql/test_query.py b/test/dialect/mysql/test_query.py index 9cbc38378fb..96650dab564 100644 --- a/test/dialect/mysql/test_query.py +++ b/test/dialect/mysql/test_query.py @@ -5,16 +5,21 @@ from sqlalchemy import cast from sqlalchemy import Column from sqlalchemy import Computed +from sqlalchemy import DateTime from sqlalchemy import exc from sqlalchemy import false from sqlalchemy import ForeignKey +from sqlalchemy import func from sqlalchemy import Integer +from sqlalchemy import literal_column from sqlalchemy import MetaData from sqlalchemy import or_ from sqlalchemy import schema from sqlalchemy import select from sqlalchemy import String from sqlalchemy import Table +from sqlalchemy import testing +from sqlalchemy import text from sqlalchemy import true from sqlalchemy.testing import assert_raises from sqlalchemy.testing import combinations @@ -50,6 +55,35 @@ def test_is_boolean_symbols_despite_no_native(self, connection): ) +class ServerDefaultCreateTest(fixtures.TestBase): + @testing.combinations( + (Integer, text("10")), + (Integer, text("'10'")), + (Integer, "10"), + (Boolean, true()), + (Integer, text("3+5"), testing.requires.mysql_expression_defaults), + (Integer, text("3 + 5"), testing.requires.mysql_expression_defaults), + (Integer, text("(3 * 5)"), testing.requires.mysql_expression_defaults), + (DateTime, func.now()), + ( + Integer, + literal_column("3") + literal_column("5"), + testing.requires.mysql_expression_defaults, + ), + argnames="datatype, default", + ) + def test_create_server_defaults( + self, connection, metadata, datatype, default + ): + t = Table( + "t", + metadata, + Column("id", Integer, primary_key=True), + 
Column("thecol", datatype, server_default=default), + ) + t.create(connection) + + class MatchTest(fixtures.TablesTest): __only_on__ = "mysql", "mariadb" __backend__ = True diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index 819bf8aa06b..c2c63e9ef06 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -1032,39 +1032,60 @@ def test_constraints_with_schemas(self): ")", ) - def test_column_defaults_ddl(self): + @testing.combinations( + ( + Boolean(create_constraint=True), + sql.false(), + "BOOLEAN DEFAULT 0, CHECK (x IN (0, 1))", + ), + ( + String(), + func.sqlite_version(), + "VARCHAR DEFAULT (sqlite_version())", + ), + (Integer(), func.abs(-5) + 17, "INTEGER DEFAULT (abs(-5) + 17)"), + ( + # test #12425 + String(), + func.now(), + "VARCHAR DEFAULT CURRENT_TIMESTAMP", + ), + ( + # test #12425 + String(), + func.datetime(func.now(), "localtime"), + "VARCHAR DEFAULT (datetime(CURRENT_TIMESTAMP, 'localtime'))", + ), + ( + # test #12425 + String(), + text("datetime(CURRENT_TIMESTAMP, 'localtime')"), + "VARCHAR DEFAULT (datetime(CURRENT_TIMESTAMP, 'localtime'))", + ), + ( + # default with leading spaces that should not be + # parenthesized + String, + text(" 'some default'"), + "VARCHAR DEFAULT 'some default'", + ), + (String, text("'some default'"), "VARCHAR DEFAULT 'some default'"), + argnames="datatype,default,expected", + ) + def test_column_defaults_ddl(self, datatype, default, expected): t = Table( "t", MetaData(), Column( "x", - Boolean(create_constraint=True), - server_default=sql.false(), + datatype, + server_default=default, ), ) self.assert_compile( CreateTable(t), - "CREATE TABLE t (x BOOLEAN DEFAULT (0), CHECK (x IN (0, 1)))", - ) - - t = Table( - "t", - MetaData(), - Column("x", String(), server_default=func.sqlite_version()), - ) - self.assert_compile( - CreateTable(t), - "CREATE TABLE t (x VARCHAR DEFAULT (sqlite_version()))", - ) - - t = Table( - "t", - MetaData(), - Column("x", Integer(), server_default=func.abs(-5) + 17), - ) - self.assert_compile( - CreateTable(t), "CREATE TABLE t (x INTEGER DEFAULT (abs(-5) + 17))" + f"CREATE TABLE t (x {expected})", ) def test_create_partial_index(self): diff --git a/test/requirements.py b/test/requirements.py index 12c25ece1aa..2311f6e35fc 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -1,7 +1,4 @@ -"""Requirements specific to SQLAlchemy's own unit tests. - - -""" +"""Requirements specific to SQLAlchemy's own unit tests.""" from sqlalchemy import exc from sqlalchemy.sql import sqltypes @@ -212,6 +209,19 @@ def non_native_boolean_unconstrained(self): ] ) + @property + def server_defaults(self): + """Target backend supports server side defaults for columns""" + + return exclusions.open() + + @property + def expression_server_defaults(self): + return skip_if( + lambda config: against(config, "mysql", "mariadb") + and not self._mysql_expression_defaults(config) + ) + @property def qmark_paramstyle(self): return only_on(["sqlite", "+pyodbc"]) @@ -1818,6 +1828,15 @@ def _mysql_check_constraints_dont_exist(self, config): # 2. 
they dont enforce check constraints return not self._mysql_check_constraints_exist(config) + def _mysql_expression_defaults(self, config): + return (against(config, ["mysql", "mariadb"])) and ( + config.db.dialect._support_default_function + ) + + @property + def mysql_expression_defaults(self): + return only_if(self._mysql_expression_defaults) + def _mysql_not_mariadb_102(self, config): return (against(config, ["mysql", "mariadb"])) and ( not config.db.dialect._is_mariadb From 79421dfee1d960e4b2ad72f52ea59d835e9768bf Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 17 Mar 2025 21:33:31 +0100 Subject: [PATCH 472/544] fix rst target for Insert Change-Id: Iee0b8e90223722c40b25c309c47fd6175680ca0e (cherry picked from commit 6047ccd72b7ec6e3730845985ec46fa3a7dce07d) --- doc/build/changelog/unreleased_20/12363.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/changelog/unreleased_20/12363.rst b/doc/build/changelog/unreleased_20/12363.rst index e04e51fe0de..35aa9dbdf0d 100644 --- a/doc/build/changelog/unreleased_20/12363.rst +++ b/doc/build/changelog/unreleased_20/12363.rst @@ -3,7 +3,7 @@ :tickets: 12363 Fixed issue in :class:`.CTE` constructs involving multiple DDL - :class:`.Insert` statements with multiple VALUES parameter sets where the + :class:`_sql.Insert` statements with multiple VALUES parameter sets where the bound parameter names generated for these parameter sets would conflict, generating a compile time error. From 9516633f5bdfb1e3e463a75f205d682def50e7f4 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 17 Mar 2025 16:46:12 -0400 Subject: [PATCH 473/544] ensure SQL expressions w/o bool pass through to correct typing error Fixed regression which occurred as of 2.0.37 where the checked :class:`.ArgumentError` that's raised when an inappropriate type or object is used inside of a :class:`.Mapped` annotation would raise ``TypeError`` with "boolean value of this clause is not defined" if the object resolved into a SQL expression in a boolean context, for programs where future annotations mode was not enabled. This case is now handled explicitly and a new error message has also been tailored for this case. In addition, as there are at least half a dozen distinct error scenarios for intepretation of the :class:`.Mapped` construct, these scenarios have all been unified under a new subclass of :class:`.ArgumentError` called :class:`.MappedAnnotationError`, to provide some continuity between these different scenarios, even though specific messaging remains distinct. Fixes: #12329 Change-Id: I0193e3479c84a48b364df8655f050e2e84151122 (cherry picked from commit b19a09812c2b0806cc063e42993216fc1ead6ed2) --- doc/build/changelog/unreleased_20/12329.rst | 16 ++ lib/sqlalchemy/orm/decl_base.py | 2 +- lib/sqlalchemy/orm/exc.py | 9 + lib/sqlalchemy/orm/properties.py | 15 +- lib/sqlalchemy/orm/util.py | 11 +- lib/sqlalchemy/util/typing.py | 17 +- .../test_tm_future_annotations_sync.py | 192 ++++++++++++++++-- test/orm/declarative/test_typed_mapping.py | 192 ++++++++++++++++-- 8 files changed, 416 insertions(+), 38 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12329.rst diff --git a/doc/build/changelog/unreleased_20/12329.rst b/doc/build/changelog/unreleased_20/12329.rst new file mode 100644 index 00000000000..9e4d1519a5c --- /dev/null +++ b/doc/build/changelog/unreleased_20/12329.rst @@ -0,0 +1,16 @@ +.. 
change:: + :tags: bug, orm + :tickets: 12329 + + Fixed regression which occurred as of 2.0.37 where the checked + :class:`.ArgumentError` that's raised when an inappropriate type or object + is used inside of a :class:`.Mapped` annotation would raise ``TypeError`` + with "boolean value of this clause is not defined" if the object resolved + into a SQL expression in a boolean context, for programs where future + annotations mode was not enabled. This case is now handled explicitly and + a new error message has also been tailored for this case. In addition, as + there are at least half a dozen distinct error scenarios for intepretation + of the :class:`.Mapped` construct, these scenarios have all been unified + under a new subclass of :class:`.ArgumentError` called + :class:`.MappedAnnotationError`, to provide some continuity between these + different scenarios, even though specific messaging remains distinct. diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index c480994d8fd..f17717b53cc 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -1577,7 +1577,7 @@ def _extract_mappable_attributes(self) -> None: is_dataclass, ) except NameError as ne: - raise exc.ArgumentError( + raise orm_exc.MappedAnnotationError( f"Could not resolve all types within mapped " f'annotation: "{annotation}". Ensure all ' f"types are written correctly and are " diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py index 0494edf983a..a2f7c9f78a3 100644 --- a/lib/sqlalchemy/orm/exc.py +++ b/lib/sqlalchemy/orm/exc.py @@ -65,6 +65,15 @@ class FlushError(sa_exc.SQLAlchemyError): """A invalid condition was detected during flush().""" +class MappedAnnotationError(sa_exc.ArgumentError): + """Raised when ORM annotated declarative cannot interpret the + expression present inside of the :class:`.Mapped` construct. + + .. versionadded:: 2.0.40 + + """ + + class UnmappedError(sa_exc.InvalidRequestError): """Base for exceptions that involve expected mappings not present.""" diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index a41c520cdb2..218285cab88 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -28,6 +28,7 @@ from typing import Union from . import attributes +from . import exc as orm_exc from . import strategy_options from .base import _DeclarativeMapped from .base import class_mapper @@ -56,6 +57,7 @@ from ..util.typing import de_optionalize_union_types from ..util.typing import get_args from ..util.typing import includes_none +from ..util.typing import is_a_type from ..util.typing import is_fwd_ref from ..util.typing import is_pep593 from ..util.typing import is_pep695 @@ -862,16 +864,23 @@ def _init_column_for_annotation( isinstance(our_type, type) and issubclass(our_type, TypeEngine) ): - raise sa_exc.ArgumentError( + raise orm_exc.MappedAnnotationError( f"The type provided inside the {self.column.key!r} " "attribute Mapped annotation is the SQLAlchemy type " f"{our_type}. Expected a Python type instead" ) - else: - raise sa_exc.ArgumentError( + elif is_a_type(our_type): + raise orm_exc.MappedAnnotationError( "Could not locate SQLAlchemy Core type for Python " f"type {our_type} inside the {self.column.key!r} " "attribute Mapped annotation" ) + else: + raise orm_exc.MappedAnnotationError( + f"The object provided inside the {self.column.key!r} " + "attribute Mapped annotation is not a Python type, " + f"it's the object {our_type!r}. Expected a Python " + "type." 
+ ) self.column._set_type(new_sqltype) diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 48282b2d562..874c4f53b15 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -35,6 +35,7 @@ from . import attributes # noqa from . import exc +from . import exc as orm_exc from ._typing import _O from ._typing import insp_is_aliased_class from ._typing import insp_is_mapper @@ -2306,7 +2307,7 @@ def _extract_mapped_subtype( if raw_annotation is None: if required: - raise sa_exc.ArgumentError( + raise orm_exc.MappedAnnotationError( f"Python typing annotation is required for attribute " f'"{cls.__name__}.{key}" when primary argument(s) for ' f'"{attr_cls.__name__}" construct are None or not present' @@ -2326,14 +2327,14 @@ def _extract_mapped_subtype( str_cleanup_fn=_cleanup_mapped_str_annotation, ) except _CleanupError as ce: - raise sa_exc.ArgumentError( + raise orm_exc.MappedAnnotationError( f"Could not interpret annotation {raw_annotation}. " "Check that it uses names that are correctly imported at the " "module level. See chained stack trace for more hints." ) from ce except NameError as ne: if raiseerr and "Mapped[" in raw_annotation: # type: ignore - raise sa_exc.ArgumentError( + raise orm_exc.MappedAnnotationError( f"Could not interpret annotation {raw_annotation}. " "Check that it uses names that are correctly imported at the " "module level. See chained stack trace for more hints." @@ -2362,7 +2363,7 @@ def _extract_mapped_subtype( ): return None - raise sa_exc.ArgumentError( + raise orm_exc.MappedAnnotationError( f'Type annotation for "{cls.__name__}.{key}" ' "can't be correctly interpreted for " "Annotated Declarative Table form. ORM annotations " @@ -2383,7 +2384,7 @@ def _extract_mapped_subtype( return annotated, None if len(annotated.__args__) != 1: - raise sa_exc.ArgumentError( + raise orm_exc.MappedAnnotationError( "Expected sub-type for Mapped[] annotation" ) diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index 32237833e78..c0f04f385b8 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -572,7 +572,22 @@ def includes_none(type_: Any) -> bool: return any(includes_none(t) for t in pep695_values(type_)) if is_newtype(type_): return includes_none(type_.__supertype__) - return type_ in (NoneFwd, NoneType, None) + try: + return type_ in (NoneFwd, NoneType, None) + except TypeError: + # if type_ is Column, mapped_column(), etc. the use of "in" + # resolves to ``__eq__()`` which then gives us an expression object + # that can't resolve to boolean. 
just catch it all via exception + return False + + +def is_a_type(type_: Any) -> bool: + return ( + isinstance(type_, type) + or hasattr(type_, "__origin__") + or type_.__module__ in ("typing", "typing_extensions") + or type(type_).__mro__[0].__module__ in ("typing", "typing_extensions") + ) def is_union(type_: Any) -> TypeGuard[ArgsTypeProtocol]: diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index d0e5e05ac69..f41f9e2d985 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -13,6 +13,7 @@ from decimal import Decimal import enum import inspect as _py_inspect +import re import typing from typing import Any from typing import cast @@ -67,6 +68,7 @@ from sqlalchemy.orm import declared_attr from sqlalchemy.orm import deferred from sqlalchemy.orm import DynamicMapped +from sqlalchemy.orm import exc as orm_exc from sqlalchemy.orm import foreign from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column @@ -614,19 +616,179 @@ class User(decl_base): id: Mapped[int] = mapped_column(primary_key=True) data: Mapped[MyClass] = mapped_column() - def test_construct_lhs_sqlalchemy_type(self, decl_base): - with expect_raises_message( - sa_exc.ArgumentError, - "The type provided inside the 'data' attribute Mapped " - "annotation is the SQLAlchemy type .*BigInteger.*. Expected " - "a Python type instead", - ): + @testing.variation( + "argtype", + [ + "type", + ("column", testing.requires.python310), + ("mapped_column", testing.requires.python310), + "column_class", + "ref_to_type", + ("ref_to_column", testing.requires.python310), + ], + ) + def test_construct_lhs_sqlalchemy_type(self, decl_base, argtype): + """test for #12329. - class User(decl_base): - __tablename__ = "users" + of note here are all the different messages we have for when the + wrong thing is put into Mapped[], and in fact in #12329 we added + another one. - id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[BigInteger] = mapped_column() + This is a lot of different messages, but at the same time they + occur at different places in the interpretation of types. If + we were to centralize all these messages, we'd still likely end up + doing distinct messages for each scenario, so instead we added + a new ArgumentError subclass MappedAnnotationError that provides + some commonality to all of these cases. + + + """ + expect_future_annotations = "annotations" in globals() + + if argtype.type: + with expect_raises_message( + orm_exc.MappedAnnotationError, + # properties.py -> _init_column_for_annotation, type is + # a SQL type + "The type provided inside the 'data' attribute Mapped " + "annotation is the SQLAlchemy type .*BigInteger.*. Expected " + "a Python type instead", + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[BigInteger] = mapped_column() + + elif argtype.column: + with expect_raises_message( + orm_exc.MappedAnnotationError, + # util.py -> _extract_mapped_subtype + ( + re.escape( + "Could not interpret annotation " + "Mapped[Column('q', BigInteger)]." 
+ ) + if expect_future_annotations + # properties.py -> _init_column_for_annotation, object is + # not a SQL type or a python type, it's just some object + else re.escape( + "The object provided inside the 'data' attribute " + "Mapped annotation is not a Python type, it's the " + "object Column('q', BigInteger(), table=None). " + "Expected a Python type." + ) + ), + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[Column("q", BigInteger)] = ( # noqa: F821 + mapped_column() + ) + + elif argtype.mapped_column: + with expect_raises_message( + orm_exc.MappedAnnotationError, + # properties.py -> _init_column_for_annotation, object is + # not a SQL type or a python type, it's just some object + # interestingly, this raises at the same point for both + # future annotations mode and legacy annotations mode + r"The object provided inside the 'data' attribute " + "Mapped annotation is not a Python type, it's the object " + r"\. " + "Expected a Python type.", + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + big_integer: Mapped[int] = mapped_column() + data: Mapped[big_integer] = mapped_column() + + elif argtype.column_class: + with expect_raises_message( + orm_exc.MappedAnnotationError, + # properties.py -> _init_column_for_annotation, type is not + # a SQL type + re.escape( + "Could not locate SQLAlchemy Core type for Python type " + " inside the " + "'data' attribute Mapped annotation" + ), + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[Column] = mapped_column() + + elif argtype.ref_to_type: + mytype = BigInteger + with expect_raises_message( + orm_exc.MappedAnnotationError, + ( + # decl_base.py -> _exract_mappable_attributes + re.escape( + "Could not resolve all types within mapped " + 'annotation: "Mapped[mytype]"' + ) + if expect_future_annotations + # properties.py -> _init_column_for_annotation, type is + # a SQL type + else re.escape( + "The type provided inside the 'data' attribute Mapped " + "annotation is the SQLAlchemy type " + ". " + "Expected a Python type instead" + ) + ), + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[mytype] = mapped_column() + + elif argtype.ref_to_column: + mycol = Column("q", BigInteger) + + with expect_raises_message( + orm_exc.MappedAnnotationError, + # decl_base.py -> _exract_mappable_attributes + ( + re.escape( + "Could not resolve all types within mapped " + 'annotation: "Mapped[mycol]"' + ) + if expect_future_annotations + else + # properties.py -> _init_column_for_annotation, object is + # not a SQL type or a python type, it's just some object + re.escape( + "The object provided inside the 'data' attribute " + "Mapped " + "annotation is not a Python type, it's the object " + "Column('q', BigInteger(), table=None). " + "Expected a Python type." 
+ ) + ), + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[mycol] = mapped_column() + + else: + argtype.fail() def test_construct_rhs_type_override_lhs(self, decl_base): class Element(decl_base): @@ -933,8 +1095,9 @@ class Test(decl_base): length = 99 if in_map.value else None else: with expect_raises_message( - exc.ArgumentError, - "Could not locate SQLAlchemy Core type for Python type", + orm_exc.MappedAnnotationError, + r"Could not locate SQLAlchemy Core type for Python type .*tat " + "inside the 'data' attribute Mapped annotation", ): declare() return @@ -2382,7 +2545,8 @@ class int_sub(int): ) with expect_raises_message( - sa_exc.ArgumentError, "Could not locate SQLAlchemy Core type" + orm_exc.MappedAnnotationError, + "Could not locate SQLAlchemy Core type", ): class MyClass(Base): diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index f44e5cd63b0..0ff4bc60398 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -4,6 +4,7 @@ from decimal import Decimal import enum import inspect as _py_inspect +import re import typing from typing import Any from typing import cast @@ -58,6 +59,7 @@ from sqlalchemy.orm import declared_attr from sqlalchemy.orm import deferred from sqlalchemy.orm import DynamicMapped +from sqlalchemy.orm import exc as orm_exc from sqlalchemy.orm import foreign from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column @@ -605,19 +607,179 @@ class User(decl_base): id: Mapped[int] = mapped_column(primary_key=True) data: Mapped[MyClass] = mapped_column() - def test_construct_lhs_sqlalchemy_type(self, decl_base): - with expect_raises_message( - sa_exc.ArgumentError, - "The type provided inside the 'data' attribute Mapped " - "annotation is the SQLAlchemy type .*BigInteger.*. Expected " - "a Python type instead", - ): + @testing.variation( + "argtype", + [ + "type", + ("column", testing.requires.python310), + ("mapped_column", testing.requires.python310), + "column_class", + "ref_to_type", + ("ref_to_column", testing.requires.python310), + ], + ) + def test_construct_lhs_sqlalchemy_type(self, decl_base, argtype): + """test for #12329. - class User(decl_base): - __tablename__ = "users" + of note here are all the different messages we have for when the + wrong thing is put into Mapped[], and in fact in #12329 we added + another one. - id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[BigInteger] = mapped_column() + This is a lot of different messages, but at the same time they + occur at different places in the interpretation of types. If + we were to centralize all these messages, we'd still likely end up + doing distinct messages for each scenario, so instead we added + a new ArgumentError subclass MappedAnnotationError that provides + some commonality to all of these cases. + + + """ + expect_future_annotations = "annotations" in globals() + + if argtype.type: + with expect_raises_message( + orm_exc.MappedAnnotationError, + # properties.py -> _init_column_for_annotation, type is + # a SQL type + "The type provided inside the 'data' attribute Mapped " + "annotation is the SQLAlchemy type .*BigInteger.*. 
Expected " + "a Python type instead", + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[BigInteger] = mapped_column() + + elif argtype.column: + with expect_raises_message( + orm_exc.MappedAnnotationError, + # util.py -> _extract_mapped_subtype + ( + re.escape( + "Could not interpret annotation " + "Mapped[Column('q', BigInteger)]." + ) + if expect_future_annotations + # properties.py -> _init_column_for_annotation, object is + # not a SQL type or a python type, it's just some object + else re.escape( + "The object provided inside the 'data' attribute " + "Mapped annotation is not a Python type, it's the " + "object Column('q', BigInteger(), table=None). " + "Expected a Python type." + ) + ), + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[Column("q", BigInteger)] = ( # noqa: F821 + mapped_column() + ) + + elif argtype.mapped_column: + with expect_raises_message( + orm_exc.MappedAnnotationError, + # properties.py -> _init_column_for_annotation, object is + # not a SQL type or a python type, it's just some object + # interestingly, this raises at the same point for both + # future annotations mode and legacy annotations mode + r"The object provided inside the 'data' attribute " + "Mapped annotation is not a Python type, it's the object " + r"\. " + "Expected a Python type.", + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + big_integer: Mapped[int] = mapped_column() + data: Mapped[big_integer] = mapped_column() + + elif argtype.column_class: + with expect_raises_message( + orm_exc.MappedAnnotationError, + # properties.py -> _init_column_for_annotation, type is not + # a SQL type + re.escape( + "Could not locate SQLAlchemy Core type for Python type " + " inside the " + "'data' attribute Mapped annotation" + ), + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[Column] = mapped_column() + + elif argtype.ref_to_type: + mytype = BigInteger + with expect_raises_message( + orm_exc.MappedAnnotationError, + ( + # decl_base.py -> _exract_mappable_attributes + re.escape( + "Could not resolve all types within mapped " + 'annotation: "Mapped[mytype]"' + ) + if expect_future_annotations + # properties.py -> _init_column_for_annotation, type is + # a SQL type + else re.escape( + "The type provided inside the 'data' attribute Mapped " + "annotation is the SQLAlchemy type " + ". " + "Expected a Python type instead" + ) + ), + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[mytype] = mapped_column() + + elif argtype.ref_to_column: + mycol = Column("q", BigInteger) + + with expect_raises_message( + orm_exc.MappedAnnotationError, + # decl_base.py -> _exract_mappable_attributes + ( + re.escape( + "Could not resolve all types within mapped " + 'annotation: "Mapped[mycol]"' + ) + if expect_future_annotations + else + # properties.py -> _init_column_for_annotation, object is + # not a SQL type or a python type, it's just some object + re.escape( + "The object provided inside the 'data' attribute " + "Mapped " + "annotation is not a Python type, it's the object " + "Column('q', BigInteger(), table=None). " + "Expected a Python type." 
+ ) + ), + ): + + class User(decl_base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[mycol] = mapped_column() + + else: + argtype.fail() def test_construct_rhs_type_override_lhs(self, decl_base): class Element(decl_base): @@ -924,8 +1086,9 @@ class Test(decl_base): length = 99 if in_map.value else None else: with expect_raises_message( - exc.ArgumentError, - "Could not locate SQLAlchemy Core type for Python type", + orm_exc.MappedAnnotationError, + r"Could not locate SQLAlchemy Core type for Python type .*tat " + "inside the 'data' attribute Mapped annotation", ): declare() return @@ -2373,7 +2536,8 @@ class int_sub(int): ) with expect_raises_message( - sa_exc.ArgumentError, "Could not locate SQLAlchemy Core type" + orm_exc.MappedAnnotationError, + "Could not locate SQLAlchemy Core type", ): class MyClass(Base): From 370fe6b6b7a4dbbdf44e8c48ba7ab85b0655cbf6 Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Tue, 18 Mar 2025 12:23:01 -0400 Subject: [PATCH 474/544] Make ARRAY generic on the item_type Now `Column(type_=ARRAY(Integer)` is inferred as `Column[Sequence[int]]` instead as `Column[Sequence[Any]]` previously. This only works with the `type_` argument to Column, but that's not new. This follows from a suggestion at https://github.com/sqlalchemy/sqlalchemy/pull/12386#issuecomment-2694056069. Related to #6810. Closes: #12443 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12443 Pull-request-sha: 2fff4e89cd0b72d9444ce3f3d845b152770fc55d Change-Id: I87b828fd82d10fbf157141db3c31f0ec8149caad (cherry picked from commit 500adfafcb782c5b22ff49e00192a2ed42ed09b6) --- lib/sqlalchemy/dialects/postgresql/array.py | 8 ++++---- lib/sqlalchemy/sql/sqltypes.py | 10 +++++----- .../typing/plain_files/dialects/postgresql/pg_stuff.py | 6 ++++++ 3 files changed, 15 insertions(+), 9 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index 8cbe0c48cf9..54590ad7660 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -199,7 +199,7 @@ def self_group( return self -class ARRAY(sqltypes.ARRAY): +class ARRAY(sqltypes.ARRAY[_T]): """PostgreSQL ARRAY type. The :class:`_postgresql.ARRAY` type is constructed in the same way @@ -273,7 +273,7 @@ class SomeOrmClass(Base): def __init__( self, - item_type: _TypeEngineArgument[typing_Any], + item_type: _TypeEngineArgument[_T], as_tuple: bool = False, dimensions: Optional[int] = None, zero_indexes: bool = False, @@ -322,7 +322,7 @@ def __init__( self.dimensions = dimensions self.zero_indexes = zero_indexes - class Comparator(sqltypes.ARRAY.Comparator): + class Comparator(sqltypes.ARRAY.Comparator[_T]): """Define comparison operations for :class:`_types.ARRAY`. 
Note that these operations are in addition to those provided @@ -363,7 +363,7 @@ def overlap(self, other: typing_Any) -> ColumnElement[bool]: def _against_native_enum(self) -> bool: return ( isinstance(self.item_type, sqltypes.Enum) - and self.item_type.native_enum + and self.item_type.native_enum # type: ignore[attr-defined] ) def literal_processor( diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index 1c316eecf62..d0d89e73168 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -2801,7 +2801,7 @@ def process(value): class ARRAY( - SchemaEventTarget, Indexable, Concatenable, TypeEngine[Sequence[Any]] + SchemaEventTarget, Indexable, Concatenable, TypeEngine[Sequence[_T]] ): """Represent a SQL Array type. @@ -2924,7 +2924,7 @@ class SomeOrmClass(Base): def __init__( self, - item_type: _TypeEngineArgument[Any], + item_type: _TypeEngineArgument[_T], as_tuple: bool = False, dimensions: Optional[int] = None, zero_indexes: bool = False, @@ -2973,8 +2973,8 @@ def __init__( self.zero_indexes = zero_indexes class Comparator( - Indexable.Comparator[Sequence[Any]], - Concatenable.Comparator[Sequence[Any]], + Indexable.Comparator[Sequence[_T]], + Concatenable.Comparator[Sequence[_T]], ): """Define comparison operations for :class:`_types.ARRAY`. @@ -2985,7 +2985,7 @@ class Comparator( __slots__ = () - type: ARRAY + type: ARRAY[_T] @overload def _setup_getitem( diff --git a/test/typing/plain_files/dialects/postgresql/pg_stuff.py b/test/typing/plain_files/dialects/postgresql/pg_stuff.py index 45ec981bbae..bc05ef8c441 100644 --- a/test/typing/plain_files/dialects/postgresql/pg_stuff.py +++ b/test/typing/plain_files/dialects/postgresql/pg_stuff.py @@ -117,3 +117,9 @@ class Test(Base): # EXPECTED_MYPY: Cannot infer type argument 1 of "array" array([0], type_=Text) + +# EXPECTED_TYPE: ARRAY[str] +reveal_type(ARRAY(Text)) + +# EXPECTED_TYPE: Column[Sequence[int]] +reveal_type(Column(type_=ARRAY(Integer))) From ee4721c542601632f50539bc4daed549a4029f37 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 19 Mar 2025 08:59:54 -0400 Subject: [PATCH 475/544] remove attrs w/ orm annotated declarative example as pointed out at https://github.com/sqlalchemy/sqlalchemy/discussions/12449, ORM annotated declarative is not compatible with attrs, declarative cannot be used with attrs. Change-Id: Ief6d1dca65b96164f48264a999c85bcae8dc3bb1 (cherry picked from commit 780d37777ea26bf88fa36388b516664fa0c11955) --- doc/build/orm/dataclasses.rst | 110 ++++++---------------------------- 1 file changed, 17 insertions(+), 93 deletions(-) diff --git a/doc/build/orm/dataclasses.rst b/doc/build/orm/dataclasses.rst index 7f6c2670d96..7f377ca3996 100644 --- a/doc/build/orm/dataclasses.rst +++ b/doc/build/orm/dataclasses.rst @@ -933,6 +933,11 @@ applies when using this mapping style. Applying ORM mappings to an existing attrs class ------------------------------------------------- +.. warning:: The ``attrs`` library is not part of SQLAlchemy's continuous + integration testing, and compatibility with this library may change without + notice due to incompatibilities introduced by either side. + + The attrs_ library is a popular third party library that provides similar features as dataclasses, with many additional features provided not found in ordinary dataclasses. @@ -942,103 +947,27 @@ initiates a process to scan the class for attributes that define the class' behavior, which are then used to generate methods, documentation, and annotations. 
-The SQLAlchemy ORM supports mapping an attrs_ class using **Declarative with -Imperative Table** or **Imperative** mapping. The general form of these two -styles is fully equivalent to the -:ref:`orm_declarative_dataclasses_declarative_table` and -:ref:`orm_declarative_dataclasses_imperative_table` mapping forms used with -dataclasses, where the inline attribute directives used by dataclasses or attrs -are unchanged, and SQLAlchemy's table-oriented instrumentation is applied at -runtime. +The SQLAlchemy ORM supports mapping an attrs_ class using **Imperative** mapping. +The general form of this style is equivalent to the +:ref:`orm_imperative_dataclasses` mapping form used with +dataclasses, where the class construction uses ``attrs`` alone, with ORM mappings +applied after the fact without any class attribute scanning. The ``@define`` decorator of attrs_ by default replaces the annotated class with a new __slots__ based class, which is not supported. When using the old style annotation ``@attr.s`` or using ``define(slots=False)``, the class -does not get replaced. Furthermore attrs removes its own class-bound attributes +does not get replaced. Furthermore ``attrs`` removes its own class-bound attributes after the decorator runs, so that SQLAlchemy's mapping process takes over these attributes without any issue. Both decorators, ``@attr.s`` and ``@define(slots=False)`` work with SQLAlchemy. -Mapping attrs with Declarative "Imperative Table" -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -In the "Declarative with Imperative Table" style, a :class:`_schema.Table` -object is declared inline with the declarative class. The -``@define`` decorator is applied to the class first, then the -:meth:`_orm.registry.mapped` decorator second:: - - from __future__ import annotations - - from typing import List - from typing import Optional - - from attrs import define - from sqlalchemy import Column - from sqlalchemy import ForeignKey - from sqlalchemy import Integer - from sqlalchemy import MetaData - from sqlalchemy import String - from sqlalchemy import Table - from sqlalchemy.orm import Mapped - from sqlalchemy.orm import registry - from sqlalchemy.orm import relationship - - mapper_registry = registry() - - - @mapper_registry.mapped - @define(slots=False) - class User: - __table__ = Table( - "user", - mapper_registry.metadata, - Column("id", Integer, primary_key=True), - Column("name", String(50)), - Column("FullName", String(50), key="fullname"), - Column("nickname", String(12)), - ) - id: Mapped[int] - name: Mapped[str] - fullname: Mapped[str] - nickname: Mapped[str] - addresses: Mapped[List[Address]] - - __mapper_args__ = { # type: ignore - "properties": { - "addresses": relationship("Address"), - } - } - - - @mapper_registry.mapped - @define(slots=False) - class Address: - __table__ = Table( - "address", - mapper_registry.metadata, - Column("id", Integer, primary_key=True), - Column("user_id", Integer, ForeignKey("user.id")), - Column("email_address", String(50)), - ) - id: Mapped[int] - user_id: Mapped[int] - email_address: Mapped[Optional[str]] - -.. note:: The ``attrs`` ``slots=True`` option, which enables ``__slots__`` on - a mapped class, cannot be used with SQLAlchemy mappings without fully - implementing alternative - :ref:`attribute instrumentation `, as mapped - classes normally rely upon direct access to ``__dict__`` for state storage. - Behavior is undefined when this option is present. +.. 
versionchanged:: 2.0 SQLAlchemy integration with ``attrs`` works only + with imperative mapping style, that is, not using Declarative. + The introduction of ORM Annotated Declarative style is not cross-compatible + with ``attrs``. - - -Mapping attrs with Imperative Mapping -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Just as is the case with dataclasses, we can make use of -:meth:`_orm.registry.map_imperatively` to map an existing ``attrs`` class -as well:: +The ``attrs`` class is built first. The SQLAlchemy ORM mapping can be +applied after the fact using :meth:`_orm.registry.map_imperatively`:: from __future__ import annotations @@ -1102,11 +1031,6 @@ as well:: mapper_registry.map_imperatively(Address, address) -The above form is equivalent to the previous example using -Declarative with Imperative Table. - - - .. _dataclass: https://docs.python.org/3/library/dataclasses.html .. _dataclasses: https://docs.python.org/3/library/dataclasses.html .. _attrs: https://pypi.org/project/attrs/ From a20334aa7d556003ae936015c44df5b090240d21 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 19 Mar 2025 18:30:21 -0400 Subject: [PATCH 476/544] skip FROM disambiguation for immediate alias of table Fixed regression caused by :ticket:`7471` leading to a SQL compilation issue where name disambiguation for two same-named FROM clauses with table aliasing in use at the same time would produce invalid SQL in the FROM clause with two "AS" clauses for the aliased table, due to double aliasing. Fixes: #12451 Change-Id: I981823f8f2cdf3992d65ace93a21fc20d1d74cda (cherry picked from commit 9ea3be0681dc09338e53b63cea4803de80ebcdc7) --- doc/build/changelog/unreleased_20/12451.rst | 8 ++ lib/sqlalchemy/sql/compiler.py | 7 +- test/sql/test_compiler.py | 111 ++++++++++++++------ 3 files changed, 92 insertions(+), 34 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12451.rst diff --git a/doc/build/changelog/unreleased_20/12451.rst b/doc/build/changelog/unreleased_20/12451.rst new file mode 100644 index 00000000000..71b6983ad32 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12451.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, sql + :tickets: 12451 + + Fixed regression caused by :ticket:`7471` leading to a SQL compilation + issue where name disambiguation for two same-named FROM clauses with table + aliasing in use at the same time would produce invalid SQL in the FROM + clause with two "AS" clauses for the aliased table, due to double aliasing. 
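(Illustrative sketch, not part of the diffs that follow: the double-aliasing case fixed here combines an explicit alias of a table with a same-named table in another schema, mirroring the test cases added below.)

    from sqlalchemy import column, select, table

    t1 = table("some_table", column("id"), column("q"))
    t2 = table("some_table", column("id"), column("p"), schema="foo")

    # combining an alias of t1 with the schema-qualified t2 exercises the
    # name disambiguation introduced by #7471; before this fix the aliased
    # table could render with two "AS" clauses in the FROM clause.
    print(select(t2, t1.alias()))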
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index f55f0d96bb7..9130cdb2c38 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -5263,6 +5263,7 @@ def visit_table( use_schema=True, from_linter=None, ambiguous_table_name_map=None, + enclosing_alias=None, **kwargs, ): if from_linter: @@ -5281,7 +5282,11 @@ def visit_table( ret = self.preparer.quote(table.name) if ( - not effective_schema + ( + enclosing_alias is None + or enclosing_alias.element is not table + ) + and not effective_schema and ambiguous_table_name_map and table.name in ambiguous_table_name_map ): diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index 4d546b57b38..12ba24e170d 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -6905,65 +6905,59 @@ def test_schema_translate_crud(self): render_schema_translate=True, ) - def test_schema_non_schema_disambiguation(self): - """test #7471""" - - t1 = table("some_table", column("id"), column("q")) - t2 = table("some_table", column("id"), column("p"), schema="foo") - - self.assert_compile( - select(t1, t2), + @testing.combinations( + ( + lambda t1, t2: select(t1, t2), "SELECT some_table_1.id, some_table_1.q, " "foo.some_table.id AS id_1, foo.some_table.p " "FROM some_table AS some_table_1, foo.some_table", - ) - - self.assert_compile( - select(t1, t2).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL), + ), + ( + lambda t1, t2: select(t1, t2).set_label_style( + LABEL_STYLE_TABLENAME_PLUS_COL + ), # the original "tablename_colname" label is preserved despite # the alias of some_table "SELECT some_table_1.id AS some_table_id, some_table_1.q AS " "some_table_q, foo.some_table.id AS foo_some_table_id, " "foo.some_table.p AS foo_some_table_p " "FROM some_table AS some_table_1, foo.some_table", - ) - - self.assert_compile( - select(t1, t2).join_from(t1, t2, t1.c.id == t2.c.id), + ), + ( + lambda t1, t2: select(t1, t2).join_from( + t1, t2, t1.c.id == t2.c.id + ), "SELECT some_table_1.id, some_table_1.q, " "foo.some_table.id AS id_1, foo.some_table.p " "FROM some_table AS some_table_1 " "JOIN foo.some_table ON some_table_1.id = foo.some_table.id", - ) - - self.assert_compile( - select(t1, t2).where(t1.c.id == t2.c.id), + ), + ( + lambda t1, t2: select(t1, t2).where(t1.c.id == t2.c.id), "SELECT some_table_1.id, some_table_1.q, " "foo.some_table.id AS id_1, foo.some_table.p " "FROM some_table AS some_table_1, foo.some_table " "WHERE some_table_1.id = foo.some_table.id", - ) - - self.assert_compile( - select(t1).where(t1.c.id == t2.c.id), + ), + ( + lambda t1, t2: select(t1).where(t1.c.id == t2.c.id), "SELECT some_table_1.id, some_table_1.q " "FROM some_table AS some_table_1, foo.some_table " "WHERE some_table_1.id = foo.some_table.id", - ) - - subq = select(t1).where(t1.c.id == t2.c.id).subquery() - self.assert_compile( - select(t2).select_from(t2).join(subq, t2.c.id == subq.c.id), + ), + ( + lambda t2, subq: select(t2) + .select_from(t2) + .join(subq, t2.c.id == subq.c.id), "SELECT foo.some_table.id, foo.some_table.p " "FROM foo.some_table JOIN " "(SELECT some_table_1.id AS id, some_table_1.q AS q " "FROM some_table AS some_table_1, foo.some_table " "WHERE some_table_1.id = foo.some_table.id) AS anon_1 " "ON foo.some_table.id = anon_1.id", - ) - - self.assert_compile( - select(t1, subq.c.id) + ), + ( + lambda t1, subq: select(t1, subq.c.id) .select_from(t1) .join(subq, t1.c.id == subq.c.id), # some_table is only aliased inside the subquery. 
this is not @@ -6975,8 +6969,59 @@ def test_schema_non_schema_disambiguation(self): "FROM some_table AS some_table_1, foo.some_table " "WHERE some_table_1.id = foo.some_table.id) AS anon_1 " "ON some_table.id = anon_1.id", + ), + ( + # issue #12451 + lambda t1alias, t2: select(t2, t1alias), + "SELECT foo.some_table.id, foo.some_table.p, " + "some_table_1.id AS id_1, some_table_1.q FROM foo.some_table, " + "some_table AS some_table_1", + ), + ( + # issue #12451 + lambda t1alias, t2: select(t2).join( + t1alias, t1alias.c.q == t2.c.p + ), + "SELECT foo.some_table.id, foo.some_table.p FROM foo.some_table " + "JOIN some_table AS some_table_1 " + "ON some_table_1.q = foo.some_table.p", + ), + ( + # issue #12451 + lambda t1alias, t2: select(t1alias).join( + t2, t1alias.c.q == t2.c.p + ), + "SELECT some_table_1.id, some_table_1.q " + "FROM some_table AS some_table_1 " + "JOIN foo.some_table ON some_table_1.q = foo.some_table.p", + ), + ( + # issue #12451 + lambda t1alias, t2alias: select(t1alias, t2alias).join( + t2alias, t1alias.c.q == t2alias.c.p + ), + "SELECT some_table_1.id, some_table_1.q, " + "some_table_2.id AS id_1, some_table_2.p " + "FROM some_table AS some_table_1 " + "JOIN foo.some_table AS some_table_2 " + "ON some_table_1.q = some_table_2.p", + ), + ) + def test_schema_non_schema_disambiguation(self, stmt, expected): + """test #7471, and its regression #12451""" + + t1 = table("some_table", column("id"), column("q")) + t2 = table("some_table", column("id"), column("p"), schema="foo") + t1alias = t1.alias() + t2alias = t2.alias() + subq = select(t1).where(t1.c.id == t2.c.id).subquery() + + stmt = testing.resolve_lambda( + stmt, t1=t1, t2=t2, subq=subq, t1alias=t1alias, t2alias=t2alias ) + self.assert_compile(stmt, expected) + def test_alias(self): a = alias(table4, "remtable") self.assert_compile( From cd2b95192bd292cbfe99a0eaae2a6b86bece5a7a Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Wed, 19 Mar 2025 04:17:27 -0400 Subject: [PATCH 477/544] Cast empty PostgreSQL ARRAY from the type specified to array() When building a PostgreSQL ``ARRAY`` literal using :class:`_postgresql.array` with an empty ``clauses`` argument, the :paramref:`_postgresql.array.type_` parameter is now significant in that it will be used to render the resulting ``ARRAY[]`` SQL expression with a cast, such as ``ARRAY[]::INTEGER``. Pull request courtesy Denis Laxalde. Fixes: #12432 Closes: #12435 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12435 Pull-request-sha: 9633d3c15d42026f8f45f5a4d201a5d72e57b8d4 Change-Id: I29ed7bd0562b82351d22de0658fb46c31cfe44f6 (cherry picked from commit 588cc6ed8e95f3fdd0920fd49a0992e7739662fc) --- doc/build/changelog/unreleased_20/12432.rst | 9 ++++ lib/sqlalchemy/dialects/postgresql/array.py | 41 +++++++++++++-- lib/sqlalchemy/dialects/postgresql/base.py | 2 + test/dialect/postgresql/test_compiler.py | 57 +++++++++++++++++++++ test/dialect/postgresql/test_query.py | 4 ++ test/sql/test_compare.py | 2 + 6 files changed, 112 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12432.rst diff --git a/doc/build/changelog/unreleased_20/12432.rst b/doc/build/changelog/unreleased_20/12432.rst new file mode 100644 index 00000000000..ff781fbd803 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12432.rst @@ -0,0 +1,9 @@ +.. 
change:: + :tags: usecase, postgresql + :tickets: 12432 + + When building a PostgreSQL ``ARRAY`` literal using + :class:`_postgresql.array` with an empty ``clauses`` argument, the + :paramref:`_postgresql.array.type_` parameter is now significant in that it + will be used to render the resulting ``ARRAY[]`` SQL expression with a + cast, such as ``ARRAY[]::INTEGER``. Pull request courtesy Denis Laxalde. diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index 54590ad7660..9e094973c34 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -24,6 +24,7 @@ from ... import util from ...sql import expression from ...sql import operators +from ...sql.visitors import InternalTraversal if TYPE_CHECKING: from ...engine.interfaces import Dialect @@ -38,6 +39,7 @@ from ...sql.type_api import _LiteralProcessorType from ...sql.type_api import _ResultProcessorType from ...sql.type_api import TypeEngine + from ...sql.visitors import _TraverseInternalsType from ...util.typing import Self @@ -91,11 +93,32 @@ class array(expression.ExpressionClauseList[_T]): ARRAY[%(param_3)s, %(param_4)s, %(param_5)s]) AS anon_1 An instance of :class:`.array` will always have the datatype - :class:`_types.ARRAY`. The "inner" type of the array is inferred from - the values present, unless the ``type_`` keyword argument is passed:: + :class:`_types.ARRAY`. The "inner" type of the array is inferred from the + values present, unless the :paramref:`_postgresql.array.type_` keyword + argument is passed:: array(["foo", "bar"], type_=CHAR) + When constructing an empty array, the :paramref:`_postgresql.array.type_` + argument is particularly important as PostgreSQL server typically requires + a cast to be rendered for the inner type in order to render an empty array. + SQLAlchemy's compilation for the empty array will produce this cast so + that:: + + stmt = array([], type_=Integer) + print(stmt.compile(dialect=postgresql.dialect())) + + Produces: + + .. sourcecode:: sql + + ARRAY[]::INTEGER[] + + As required by PostgreSQL for empty arrays. + + .. versionadded:: 2.0.40 added support to render empty PostgreSQL array + literals with a required cast. + Multidimensional arrays are produced by nesting :class:`.array` constructs. The dimensionality of the final :class:`_types.ARRAY` type is calculated by @@ -130,7 +153,11 @@ class array(expression.ExpressionClauseList[_T]): __visit_name__ = "array" stringify_dialect = "postgresql" - inherit_cache = True + + _traverse_internals: _TraverseInternalsType = [ + ("clauses", InternalTraversal.dp_clauseelement_tuple), + ("type", InternalTraversal.dp_type), + ] def __init__( self, @@ -139,6 +166,14 @@ def __init__( type_: Optional[_TypeEngineArgument[_T]] = None, **kw: typing_Any, ): + r"""Construct an ARRAY literal. + + :param clauses: iterable, such as a list, containing elements to be + rendered in the array + :param type\_: optional type. If omitted, the type is inferred + from the contents of the array. 
+ + """ super().__init__(operators.comma_op, *clauses, **kw) main_type = ( diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 45c83d7209c..ae97cc0caa5 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1813,6 +1813,8 @@ def render_bind_cast(self, type_, dbapi_type, sqltext): }""" def visit_array(self, element, **kw): + if not element.clauses and not element.type.item_type._isnull: + return "ARRAY[]::%s" % element.type.compile(self.dialect) return "ARRAY[%s]" % self.visit_clauselist(element, **kw) def visit_slice(self, element, **kw): diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index 34ab31395f2..2bb8d292655 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -1,3 +1,5 @@ +import random + from sqlalchemy import and_ from sqlalchemy import BigInteger from sqlalchemy import bindparam @@ -35,6 +37,7 @@ from sqlalchemy import types as sqltypes from sqlalchemy import UniqueConstraint from sqlalchemy import update +from sqlalchemy import VARCHAR from sqlalchemy.dialects import postgresql from sqlalchemy.dialects.postgresql import aggregate_order_by from sqlalchemy.dialects.postgresql import ARRAY as PG_ARRAY @@ -1986,6 +1989,14 @@ def test_array_literal_type(self): String, ) + @testing.combinations( + ("with type_", Date, "ARRAY[]::DATE[]"), + ("no type_", None, "ARRAY[]"), + id_="iaa", + ) + def test_array_literal_empty(self, type_, expected): + self.assert_compile(postgresql.array([], type_=type_), expected) + def test_array_literal(self): self.assert_compile( func.array_dims( @@ -4101,3 +4112,49 @@ def test_aggregate_order_by(self): ), compare_values=False, ) + + def test_array_equivalent_keys_one_element(self): + self._run_cache_key_equal_fixture( + lambda: ( + array([random.randint(0, 10)]), + array([random.randint(0, 10)], type_=Integer), + array([random.randint(0, 10)], type_=Integer), + ), + compare_values=False, + ) + + def test_array_equivalent_keys_two_elements(self): + self._run_cache_key_equal_fixture( + lambda: ( + array([random.randint(0, 10), random.randint(0, 10)]), + array( + [random.randint(0, 10), random.randint(0, 10)], + type_=Integer, + ), + array( + [random.randint(0, 10), random.randint(0, 10)], + type_=Integer, + ), + ), + compare_values=False, + ) + + def test_array_heterogeneous(self): + self._run_cache_key_fixture( + lambda: ( + array([], type_=Integer), + array([], type_=Text), + array([]), + array([random.choice(["t1", "t2", "t3"])]), + array( + [ + random.choice(["t1", "t2", "t3"]), + random.choice(["t1", "t2", "t3"]), + ] + ), + array([random.choice(["t1", "t2", "t3"])], type_=Text), + array([random.choice(["t1", "t2", "t3"])], type_=VARCHAR(30)), + array([random.randint(0, 10), random.randint(0, 10)]), + ), + compare_values=False, + ) diff --git a/test/dialect/postgresql/test_query.py b/test/dialect/postgresql/test_query.py index f8bb9dbc79d..c55cd0a5d7c 100644 --- a/test/dialect/postgresql/test_query.py +++ b/test/dialect/postgresql/test_query.py @@ -1640,6 +1640,10 @@ def test_with_ordinality_star(self, connection): eq_(connection.execute(stmt).all(), [(4, 1), (3, 2), (2, 3), (1, 4)]) + def test_array_empty_with_type(self, connection): + stmt = select(postgresql.array([], type_=Integer)) + eq_(connection.execute(stmt).all(), [([],)]) + def test_plain_old_unnest(self, connection): fn = func.unnest( postgresql.array(["one", "two", "three", "four"]) 
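(Illustrative sketch mirroring the compiler tests above; not part of the diff that follows.)

    from sqlalchemy import Integer
    from sqlalchemy.dialects import postgresql

    # without type_, an empty literal still renders as plain ARRAY[]
    print(postgresql.array([]).compile(dialect=postgresql.dialect()))

    # with type_, the new behavior adds a cast so PostgreSQL can type the
    # empty array, e.g. ARRAY[]::INTEGER[]
    print(
        postgresql.array([], type_=Integer).compile(
            dialect=postgresql.dialect()
        )
    )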
diff --git a/test/sql/test_compare.py b/test/sql/test_compare.py index 1adfdbf14d7..96e3e7661d2 100644 --- a/test/sql/test_compare.py +++ b/test/sql/test_compare.py @@ -1449,6 +1449,7 @@ class HasCacheKeySubclass(fixtures.TestBase): "modifiers", }, "next_value": {"sequence"}, + "array": ({"type", "clauses"}), } ignore_keys = { @@ -1627,6 +1628,7 @@ def test_traverse_internals(self, cls: type): {"_with_options", "_raw_columns", "_setup_joins"}, {"args"}, ), + "array": ({"type", "clauses"}, {"clauses", "type_"}), "next_value": ({"sequence"}, {"seq"}), } From 00665af1d51603052ab894d1b9c18d49711e8863 Mon Sep 17 00:00:00 2001 From: Stefanie Molin <24376333+stefmolin@users.noreply.github.com> Date: Tue, 25 Mar 2025 15:05:44 -0400 Subject: [PATCH 478/544] Add missing imports to example (#12453) (cherry picked from commit aae34df0b5aa7dfe02bdc19744b1b6bc8533ee91) --- lib/sqlalchemy/sql/_selectable_constructors.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/sqlalchemy/sql/_selectable_constructors.py b/lib/sqlalchemy/sql/_selectable_constructors.py index 69427334a32..ae83efa5d79 100644 --- a/lib/sqlalchemy/sql/_selectable_constructors.py +++ b/lib/sqlalchemy/sql/_selectable_constructors.py @@ -691,6 +691,8 @@ def values( from sqlalchemy import column from sqlalchemy import values + from sqlalchemy import Integer + from sqlalchemy import String value_expr = values( column("id", Integer), From 6476a87d822c5db8cb771ce85c7926222f419a5d Mon Sep 17 00:00:00 2001 From: Chris Withers Date: Tue, 25 Mar 2025 15:05:23 -0400 Subject: [PATCH 479/544] Increase minimum required greenlet version Add a lower bound constraint on the greenlet version to 1. Closes: #12459 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12459 Pull-request-sha: 4bd856b9c164df984f05c094c977686470ed4244 Change-Id: I200861f1706bf261c2e586b96e8cc35dceb7670b (cherry picked from commit 938e0fee9b834aca8b22034c75ffadefdfbaaf5f) --- setup.cfg | 4 ++-- tox.ini | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/setup.cfg b/setup.cfg index 21ab1374257..9b42a19a037 100644 --- a/setup.cfg +++ b/setup.cfg @@ -39,12 +39,12 @@ package_dir = install_requires = importlib-metadata;python_version<"3.8" - greenlet != 0.4.17;(python_version<"3.14" and (platform_machine=='aarch64' or (platform_machine=='ppc64le' or (platform_machine=='x86_64' or (platform_machine=='amd64' or (platform_machine=='AMD64' or (platform_machine=='win32' or platform_machine=='WIN32'))))))) + greenlet >= 1;(python_version<"3.14" and (platform_machine=='aarch64' or (platform_machine=='ppc64le' or (platform_machine=='x86_64' or (platform_machine=='amd64' or (platform_machine=='AMD64' or (platform_machine=='win32' or platform_machine=='WIN32'))))))) typing-extensions >= 4.6.0 [options.extras_require] asyncio = - greenlet!=0.4.17 + greenlet >= 1 mypy = mypy >= 0.910 mssql = pyodbc diff --git a/tox.ini b/tox.ini index ae11373548a..576346aec62 100644 --- a/tox.ini +++ b/tox.ini @@ -190,7 +190,7 @@ commands= [testenv:pep484] deps= - greenlet != 0.4.17 + greenlet >= 1 importlib_metadata; python_version < '3.8' mypy >= 1.14.0 types-greenlet @@ -207,7 +207,7 @@ extras = deps= pytest>=7.0.0rc1,<8.4 pytest-xdist - greenlet != 0.4.17 + greenlet >= 1 importlib_metadata; python_version < '3.8' mypy >= 1.2.0,<1.11.0 patch==1.* From 56600630ffec6929c167c053fb852b0d77d55f14 Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Mon, 24 Mar 2025 16:35:07 -0400 Subject: [PATCH 480/544] Type array_agg() The return type of `array_agg()` is declared as a `Sequence[T]` 
where `T` is bound to the type of input argument. This is implemented by making `array_agg()` inheriting from `ReturnTypeFromArgs` which provides appropriate overloads of `__init__()` to support this. This usage of ReturnTypeFromArgs is a bit different from previous ones as the return type of the function is not exactly the same as that of its arguments, but a "collection" (a generic, namely a Sequence here) of the argument types. Accordingly, we adjust the code of `tools/generate_sql_functions.py` to retrieve the "collection" type from 'fn_class' annotation and generate expected return type. Also add a couple of hand-written typing tests for PostgreSQL. Related to #6810 Closes: #12461 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12461 Pull-request-sha: ba27cbb8639dcd35127ab6a2928b7b5b3667e287 Change-Id: I3fd538cc7092a0492c26970f0b825bf70ddb66cd (cherry picked from commit 543acbd8d1c7e3037877ca74a6b05f62592ef153) --- lib/sqlalchemy/sql/functions.py | 47 ++++++++-- .../dialects/postgresql/pg_stuff.py | 8 ++ test/typing/plain_files/sql/functions.py | 86 ++++++++++--------- tools/generate_sql_functions.py | 22 ++++- 4 files changed, 112 insertions(+), 51 deletions(-) diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index ea02279d480..bd7d6877c3e 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -6,9 +6,7 @@ # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""SQL function API, factories, and built-in functions. - -""" +"""SQL function API, factories, and built-in functions.""" from __future__ import annotations @@ -990,8 +988,41 @@ def aggregate_strings(self) -> Type[aggregate_strings]: ... @property def ansifunction(self) -> Type[AnsiFunction[Any]]: ... - @property - def array_agg(self) -> Type[array_agg[Any]]: ... + # set ColumnElement[_T] as a separate overload, to appease mypy + # which seems to not want to accept _T from _ColumnExpressionArgument. + # this is even if all non-generic types are removed from it, so + # reasons remain unclear for why this does not work + + @overload + def array_agg( + self, + col: ColumnElement[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], + **kwargs: Any, + ) -> array_agg[_T]: ... + + @overload + def array_agg( + self, + col: _ColumnExpressionArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], + **kwargs: Any, + ) -> array_agg[_T]: ... + + @overload + def array_agg( + self, + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], + **kwargs: Any, + ) -> array_agg[_T]: ... + + def array_agg( + self, + col: _ColumnExpressionOrLiteralArgument[_T], + *args: _ColumnExpressionOrLiteralArgument[Any], + **kwargs: Any, + ) -> array_agg[_T]: ... @property def cast(self) -> Type[Cast[Any]]: ... @@ -1575,7 +1606,9 @@ def __init__(self, *args: _ColumnExpressionArgument[Any], **kwargs: Any): class ReturnTypeFromArgs(GenericFunction[_T]): - """Define a function whose return type is the same as its arguments.""" + """Define a function whose return type is bound to the type of its + arguments. + """ inherit_cache = True @@ -1807,7 +1840,7 @@ class user(AnsiFunction[str]): inherit_cache = True -class array_agg(GenericFunction[_T]): +class array_agg(ReturnTypeFromArgs[Sequence[_T]]): """Support for the ARRAY_AGG function. 
The ``func.array_agg(expr)`` construct returns an expression of diff --git a/test/typing/plain_files/dialects/postgresql/pg_stuff.py b/test/typing/plain_files/dialects/postgresql/pg_stuff.py index bc05ef8c441..3dbb9498787 100644 --- a/test/typing/plain_files/dialects/postgresql/pg_stuff.py +++ b/test/typing/plain_files/dialects/postgresql/pg_stuff.py @@ -123,3 +123,11 @@ class Test(Base): # EXPECTED_TYPE: Column[Sequence[int]] reveal_type(Column(type_=ARRAY(Integer))) + +stmt_array_agg = select(func.array_agg(Column("num", type_=Integer))) + +# EXPECTED_TYPE: Select[Tuple[Sequence[int]]] +reveal_type(stmt_array_agg) + +# EXPECTED_TYPE: Select[Tuple[Sequence[str]]] +reveal_type(select(func.array_agg(Test.ident_str))) diff --git a/test/typing/plain_files/sql/functions.py b/test/typing/plain_files/sql/functions.py index f657a48571a..e1cea4193e4 100644 --- a/test/typing/plain_files/sql/functions.py +++ b/test/typing/plain_files/sql/functions.py @@ -21,137 +21,143 @@ reveal_type(stmt1) -stmt2 = select(func.char_length(column("x"))) +stmt2 = select(func.array_agg(column("x", Integer))) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*Sequence\[.*int\]\]\] reveal_type(stmt2) -stmt3 = select(func.coalesce(column("x", Integer))) +stmt3 = select(func.char_length(column("x"))) # EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt3) -stmt4 = select(func.concat()) +stmt4 = select(func.coalesce(column("x", Integer))) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt4) -stmt5 = select(func.count(column("x"))) +stmt5 = select(func.concat()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] reveal_type(stmt5) -stmt6 = select(func.cume_dist()) +stmt6 = select(func.count(column("x"))) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*Decimal\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt6) -stmt7 = select(func.current_date()) +stmt7 = select(func.cume_dist()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*date\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*Decimal\]\] reveal_type(stmt7) -stmt8 = select(func.current_time()) +stmt8 = select(func.current_date()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*time\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*date\]\] reveal_type(stmt8) -stmt9 = select(func.current_timestamp()) +stmt9 = select(func.current_time()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*time\]\] reveal_type(stmt9) -stmt10 = select(func.current_user()) +stmt10 = select(func.current_timestamp()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] reveal_type(stmt10) -stmt11 = select(func.dense_rank()) +stmt11 = select(func.current_user()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] reveal_type(stmt11) -stmt12 = select(func.localtime()) +stmt12 = select(func.dense_rank()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt12) -stmt13 = select(func.localtimestamp()) +stmt13 = select(func.localtime()) # EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] reveal_type(stmt13) -stmt14 = select(func.max(column("x", Integer))) +stmt14 = select(func.localtimestamp()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] reveal_type(stmt14) -stmt15 = select(func.min(column("x", Integer))) +stmt15 = 
select(func.max(column("x", Integer))) # EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt15) -stmt16 = select(func.next_value(Sequence("x_seq"))) +stmt16 = select(func.min(column("x", Integer))) # EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt16) -stmt17 = select(func.now()) +stmt17 = select(func.next_value(Sequence("x_seq"))) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt17) -stmt18 = select(func.percent_rank()) +stmt18 = select(func.now()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*Decimal\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] reveal_type(stmt18) -stmt19 = select(func.rank()) +stmt19 = select(func.percent_rank()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*Decimal\]\] reveal_type(stmt19) -stmt20 = select(func.session_user()) +stmt20 = select(func.rank()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt20) -stmt21 = select(func.sum(column("x", Integer))) +stmt21 = select(func.session_user()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] reveal_type(stmt21) -stmt22 = select(func.sysdate()) +stmt22 = select(func.sum(column("x", Integer))) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] reveal_type(stmt22) -stmt23 = select(func.user()) +stmt23 = select(func.sysdate()) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*datetime\]\] reveal_type(stmt23) + +stmt24 = select(func.user()) + +# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*str\]\] +reveal_type(stmt24) + # END GENERATED FUNCTION TYPING TESTS stmt_count: Select[Tuple[int, int, int]] = select( diff --git a/tools/generate_sql_functions.py b/tools/generate_sql_functions.py index 0e5104352f5..5049ce52066 100644 --- a/tools/generate_sql_functions.py +++ b/tools/generate_sql_functions.py @@ -1,6 +1,4 @@ -"""Generate inline stubs for generic functions on func - -""" +"""Generate inline stubs for generic functions on func""" # mypy: ignore-errors @@ -10,6 +8,9 @@ import re from tempfile import NamedTemporaryFile import textwrap +import typing + +import typing_extensions from sqlalchemy.sql.functions import _registry from sqlalchemy.sql.functions import ReturnTypeFromArgs @@ -168,12 +169,25 @@ def {key}(self) -> Type[{_type}]:{_reserved_word} if issubclass(fn_class, ReturnTypeFromArgs): count += 1 + # Would be ReturnTypeFromArgs + (orig_base,) = typing_extensions.get_original_bases( + fn_class + ) + # Type parameter of ReturnTypeFromArgs + (rtype,) = typing.get_args(orig_base) + # The origin type, if rtype is a generic + orig_type = typing.get_origin(rtype) + if orig_type is not None: + coltype = rf".*{orig_type.__name__}\[.*int\]" + else: + coltype = ".*int" + buf.write( textwrap.indent( rf""" stmt{count} = select(func.{key}(column('x', Integer))) -# EXPECTED_RE_TYPE: .*Select\[Tuple\[.*int\]\] +# EXPECTED_RE_TYPE: .*Select\[Tuple\[{coltype}\]\] reveal_type(stmt{count}) """, From 4655b1bbb5e810c3627a70135bf2a4b1d7af5eee Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Wed, 26 Mar 2025 21:43:10 +0100 Subject: [PATCH 481/544] document sqlite truncate_microseconds in DATETIME and TIME Change-Id: I93412d951b466343f2cf9b6d513ad46d17f5d8ee (cherry picked from commit a9b37199133eea81ebdf062439352ef2745d3c00) --- lib/sqlalchemy/dialects/sqlite/base.py | 8 ++++++++ 1 file changed, 8 insertions(+) 
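(Usage sketch for the parameter documented below; table and column names are illustrative only.)

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.dialects.sqlite import DATETIME, TIME

    metadata = MetaData()

    # truncate_microseconds drops the microsecond portion of bound values;
    # it cannot be combined with storage_format or regexp.
    log = Table(
        "log",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("created_at", DATETIME(truncate_microseconds=True)),
        Column("start_time", TIME(truncate_microseconds=True)),
    )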
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index c09fbb32ccc..bf632f1fa4f 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -1048,6 +1048,10 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime): regexp=r"(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)", ) + :param truncate_microseconds: when ``True`` microseconds will be truncated + from the datetime. Can't be specified together with ``storage_format`` + or ``regexp``. + :param storage_format: format string which will be applied to the dict with keys year, month, day, hour, minute, second, and microsecond. @@ -1234,6 +1238,10 @@ class TIME(_DateTimeMixin, sqltypes.Time): regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?"), ) + :param truncate_microseconds: when ``True`` microseconds will be truncated + from the time. Can't be specified together with ``storage_format`` + or ``regexp``. + :param storage_format: format string which will be applied to the dict with keys hour, minute, second, and microsecond. From 8f875ae8640e7b996092c5b3fcb86d9cc1980b1d Mon Sep 17 00:00:00 2001 From: Daraan Date: Wed, 26 Mar 2025 14:27:46 -0400 Subject: [PATCH 482/544] compatibility with typing_extensions 4.13 and type statement Fixed regression caused by ``typing_extension==4.13.0`` that introduced a different implementation for ``TypeAliasType`` while SQLAlchemy assumed that it would be equivalent to the ``typing`` version. Added test regarding generic TypeAliasType Fixes: #12473 Closes: #12472 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12472 Pull-request-sha: 8861a5acfb8e81663413ff144b41abf64779b6fd Change-Id: I053019a222546a625ed6d588314ae9f5b34c2f8a (cherry picked from commit 61970f9d2b7809116b5a9d339b45d910e276b428) --- doc/build/changelog/unreleased_20/12473.rst | 7 + lib/sqlalchemy/orm/decl_api.py | 2 +- lib/sqlalchemy/util/typing.py | 67 +++-- test/base/test_typing_utils.py | 233 ++++++++++++++++-- .../test_tm_future_annotations_sync.py | 104 +++++++- test/orm/declarative/test_typed_mapping.py | 104 +++++++- 6 files changed, 465 insertions(+), 52 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12473.rst diff --git a/doc/build/changelog/unreleased_20/12473.rst b/doc/build/changelog/unreleased_20/12473.rst new file mode 100644 index 00000000000..5127d92dd2a --- /dev/null +++ b/doc/build/changelog/unreleased_20/12473.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, typing + :tickets: 12473 + + Fixed regression caused by ``typing_extension==4.13.0`` that introduced + a different implementation for ``TypeAliasType`` while SQLAlchemy assumed + that it would be equivalent to the ``typing`` version. diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index c32851deab2..60468237ee0 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -1244,7 +1244,7 @@ def _resolve_type( search = ( (python_type, python_type_type), - *((lt, python_type_type) for lt in LITERAL_TYPES), # type: ignore[arg-type] # noqa: E501 + *((lt, python_type_type) for lt in LITERAL_TYPES), ) else: python_type_type = python_type.__origin__ diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index c0f04f385b8..e68b42b7ed0 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -33,6 +33,8 @@ from typing import TypeVar from typing import Union +import typing_extensions + from . 
import compat if True: # zimports removes the tailing comments @@ -65,14 +67,6 @@ _VT = TypeVar("_VT") _VT_co = TypeVar("_VT_co", covariant=True) -if compat.py38: - # typing_extensions.Literal is different from typing.Literal until - # Python 3.10.1 - LITERAL_TYPES = frozenset([typing.Literal, Literal]) -else: - LITERAL_TYPES = frozenset([Literal]) - - if compat.py310: # why they took until py310 to put this in stdlib is beyond me, # I've been wanting it since py27 @@ -331,7 +325,7 @@ def resolve_name_to_real_class_name(name: str, module_name: str) -> str: def is_pep593(type_: Optional[Any]) -> bool: - return type_ is not None and get_origin(type_) is Annotated + return type_ is not None and get_origin(type_) in _type_tuples.Annotated def is_non_string_iterable(obj: Any) -> TypeGuard[Iterable[Any]]: @@ -341,7 +335,7 @@ def is_non_string_iterable(obj: Any) -> TypeGuard[Iterable[Any]]: def is_literal(type_: Any) -> bool: - return get_origin(type_) in LITERAL_TYPES + return get_origin(type_) in _type_tuples.Literal def is_newtype(type_: Optional[_AnnotationScanType]) -> TypeGuard[NewType]: @@ -349,7 +343,7 @@ def is_newtype(type_: Optional[_AnnotationScanType]) -> TypeGuard[NewType]: # doesn't work in 3.8, 3.7 as it passes a closure, not an # object instance - # return isinstance(type_, NewType) + # isinstance(type, type_instances.NewType) def is_generic(type_: _AnnotationScanType) -> TypeGuard[GenericProtocol[Any]]: @@ -357,7 +351,13 @@ def is_generic(type_: _AnnotationScanType) -> TypeGuard[GenericProtocol[Any]]: def is_pep695(type_: _AnnotationScanType) -> TypeGuard[TypeAliasType]: - return isinstance(type_, TypeAliasType) + # NOTE: a generic TAT does not instance check as TypeAliasType outside of + # python 3.10. For sqlalchemy use cases it's fine to consider it a TAT + # though. + # NOTE: things seems to work also without this additional check + if is_generic(type_): + return is_pep695(type_.__origin__) + return isinstance(type_, _type_instances.TypeAliasType) def flatten_newtype(type_: NewType) -> Type[Any]: @@ -376,15 +376,15 @@ def pep695_values(type_: _AnnotationScanType) -> Set[Any]: """ _seen = set() - def recursive_value(type_): - if type_ in _seen: + def recursive_value(inner_type): + if inner_type in _seen: # recursion are not supported (at least it's flagged as # an error by pyright). Just avoid infinite loop - return type_ - _seen.add(type_) - if not is_pep695(type_): - return type_ - value = type_.__value__ + return inner_type + _seen.add(inner_type) + if not is_pep695(inner_type): + return inner_type + value = inner_type.__value__ if not is_union(value): return value return [recursive_value(t) for t in value.__args__] @@ -411,7 +411,7 @@ def is_fwd_ref( ) -> TypeGuard[ForwardRef]: if check_for_plain_string and isinstance(type_, str): return True - elif isinstance(type_, ForwardRef): + elif isinstance(type_, _type_instances.ForwardRef): return True elif check_generic and is_generic(type_): return any( @@ -703,3 +703,30 @@ def __get__(self, instance: object, owner: Any) -> _FN: ... def __set__(self, instance: Any, value: _FN) -> None: ... def __delete__(self, instance: Any) -> None: ... 
+ + +class _TypingInstances: + def __getattr__(self, key: str) -> tuple[type, ...]: + types = tuple( + { + t + for t in [ + getattr(typing, key, None), + getattr(typing_extensions, key, None), + ] + if t is not None + } + ) + if not types: + raise AttributeError(key) + self.__dict__[key] = types + return types + + +_type_tuples = _TypingInstances() +if TYPE_CHECKING: + _type_instances = typing_extensions +else: + _type_instances = _type_tuples + +LITERAL_TYPES = _type_tuples.Literal diff --git a/test/base/test_typing_utils.py b/test/base/test_typing_utils.py index 67e7bf41432..f6afed47eed 100644 --- a/test/base/test_typing_utils.py +++ b/test/base/test_typing_utils.py @@ -39,63 +39,144 @@ def null_union_types(): return res +def generic_unions(): + # remove new-style unions `int | str` that are not generic + res = union_types() + null_union_types() + if py310: + new_ut = type(int | str) + res = [t for t in res if not isinstance(t, new_ut)] + return res + + def make_fw_ref(anno: str) -> typing.ForwardRef: return typing.Union[anno] -TA_int = typing_extensions.TypeAliasType("TA_int", int) -TA_union = typing_extensions.TypeAliasType("TA_union", typing.Union[int, str]) -TA_null_union = typing_extensions.TypeAliasType( - "TA_null_union", typing.Union[int, str, None] +TypeAliasType = getattr( + typing, "TypeAliasType", typing_extensions.TypeAliasType ) -TA_null_union2 = typing_extensions.TypeAliasType( + +TA_int = TypeAliasType("TA_int", int) +TAext_int = typing_extensions.TypeAliasType("TAext_int", int) +TA_union = TypeAliasType("TA_union", typing.Union[int, str]) +TAext_union = typing_extensions.TypeAliasType( + "TAext_union", typing.Union[int, str] +) +TA_null_union = TypeAliasType("TA_null_union", typing.Union[int, str, None]) +TAext_null_union = typing_extensions.TypeAliasType( + "TAext_null_union", typing.Union[int, str, None] +) +TA_null_union2 = TypeAliasType( "TA_null_union2", typing.Union[int, str, "None"] ) -TA_null_union3 = typing_extensions.TypeAliasType( +TAext_null_union2 = typing_extensions.TypeAliasType( + "TAext_null_union2", typing.Union[int, str, "None"] +) +TA_null_union3 = TypeAliasType( "TA_null_union3", typing.Union[int, "typing.Union[None, bool]"] ) -TA_null_union4 = typing_extensions.TypeAliasType( +TAext_null_union3 = typing_extensions.TypeAliasType( + "TAext_null_union3", typing.Union[int, "typing.Union[None, bool]"] +) +TA_null_union4 = TypeAliasType( "TA_null_union4", typing.Union[int, "TA_null_union2"] ) -TA_union_ta = typing_extensions.TypeAliasType( - "TA_union_ta", typing.Union[TA_int, str] +TAext_null_union4 = typing_extensions.TypeAliasType( + "TAext_null_union4", typing.Union[int, "TAext_null_union2"] +) +TA_union_ta = TypeAliasType("TA_union_ta", typing.Union[TA_int, str]) +TAext_union_ta = typing_extensions.TypeAliasType( + "TAext_union_ta", typing.Union[TAext_int, str] ) -TA_null_union_ta = typing_extensions.TypeAliasType( +TA_null_union_ta = TypeAliasType( "TA_null_union_ta", typing.Union[TA_null_union, float] ) -TA_list = typing_extensions.TypeAliasType( +TAext_null_union_ta = typing_extensions.TypeAliasType( + "TAext_null_union_ta", typing.Union[TAext_null_union, float] +) +TA_list = TypeAliasType( "TA_list", typing.Union[int, str, typing.List["TA_list"]] ) +TAext_list = typing_extensions.TypeAliasType( + "TAext_list", typing.Union[int, str, typing.List["TAext_list"]] +) # these below not valid. 
Verify that it does not cause exceptions in any case -TA_recursive = typing_extensions.TypeAliasType( - "TA_recursive", typing.Union["TA_recursive", str] +TA_recursive = TypeAliasType("TA_recursive", typing.Union["TA_recursive", str]) +TAext_recursive = typing_extensions.TypeAliasType( + "TAext_recursive", typing.Union["TAext_recursive", str] ) -TA_null_recursive = typing_extensions.TypeAliasType( +TA_null_recursive = TypeAliasType( "TA_null_recursive", typing.Union[TA_recursive, None] ) -TA_recursive_a = typing_extensions.TypeAliasType( +TAext_null_recursive = typing_extensions.TypeAliasType( + "TAext_null_recursive", typing.Union[TAext_recursive, None] +) +TA_recursive_a = TypeAliasType( "TA_recursive_a", typing.Union["TA_recursive_b", int] ) -TA_recursive_b = typing_extensions.TypeAliasType( +TAext_recursive_a = typing_extensions.TypeAliasType( + "TAext_recursive_a", typing.Union["TAext_recursive_b", int] +) +TA_recursive_b = TypeAliasType( "TA_recursive_b", typing.Union["TA_recursive_a", str] ) +TAext_recursive_b = typing_extensions.TypeAliasType( + "TAext_recursive_b", typing.Union["TAext_recursive_a", str] +) +TA_generic = TypeAliasType("TA_generic", typing.List[TV], type_params=(TV,)) +TAext_generic = typing_extensions.TypeAliasType( + "TAext_generic", typing.List[TV], type_params=(TV,) +) +TA_generic_typed = TA_generic[int] +TAext_generic_typed = TAext_generic[int] +TA_generic_null = TypeAliasType( + "TA_generic_null", typing.Union[typing.List[TV], None], type_params=(TV,) +) +TAext_generic_null = typing_extensions.TypeAliasType( + "TAext_generic_null", + typing.Union[typing.List[TV], None], + type_params=(TV,), +) +TA_generic_null_typed = TA_generic_null[str] +TAext_generic_null_typed = TAext_generic_null[str] def type_aliases(): return [ TA_int, + TAext_int, TA_union, + TAext_union, TA_null_union, + TAext_null_union, TA_null_union2, + TAext_null_union2, TA_null_union3, + TAext_null_union3, TA_null_union4, + TAext_null_union4, TA_union_ta, + TAext_union_ta, TA_null_union_ta, + TAext_null_union_ta, TA_list, + TAext_list, TA_recursive, + TAext_recursive, TA_null_recursive, + TAext_null_recursive, TA_recursive_a, + TAext_recursive_a, TA_recursive_b, + TAext_recursive_b, + TA_generic, + TAext_generic, + TA_generic_typed, + TAext_generic_typed, + TA_generic_null, + TAext_generic_null, + TA_generic_null_typed, + TAext_generic_null_typed, ] @@ -144,11 +225,14 @@ def exec_code(code: str, *vars: str) -> typing.Any: class TestTestingThings(fixtures.TestBase): def test_unions_are_the_same(self): + # the point of this test is to reduce the cases to test since + # some symbols are the same in typing and typing_extensions. 
+ # If a test starts failing then additional cases should be added, + # similar to what it's done for TypeAliasType + # no need to test typing_extensions.Union, typing_extensions.Optional is_(typing.Union, typing_extensions.Union) is_(typing.Optional, typing_extensions.Optional) - if py312: - is_(typing.TypeAliasType, typing_extensions.TypeAliasType) def test_make_union(self): v = int, str @@ -223,8 +307,19 @@ class W(typing.Generic[TV]): eq_(sa_typing.is_generic(t), False) eq_(sa_typing.is_generic(t[int]), True) + generics = [ + TA_generic_typed, + TAext_generic_typed, + TA_generic_null_typed, + TAext_generic_null_typed, + *annotated_l(), + *generic_unions(), + ] + for t in all_types(): - eq_(sa_typing.is_literal(t), False) + # use is since union compare equal between new/old style + exp = any(t is k for k in generics) + eq_(sa_typing.is_generic(t), exp, t) def test_is_pep695(self): eq_(sa_typing.is_pep695(str), False) @@ -235,6 +330,7 @@ def test_is_pep695(self): for t in type_aliases(): eq_(sa_typing.is_pep695(t), True) + @requires.python38 def test_pep695_value(self): eq_(sa_typing.pep695_values(int), {int}) eq_( @@ -251,41 +347,100 @@ def test_pep695_value(self): sa_typing.pep695_values(typing.Union[int, TA_int]), {typing.Union[int, TA_int]}, ) + eq_( + sa_typing.pep695_values(typing.Union[int, TAext_int]), + {typing.Union[int, TAext_int]}, + ) eq_(sa_typing.pep695_values(TA_int), {int}) + eq_(sa_typing.pep695_values(TAext_int), {int}) eq_(sa_typing.pep695_values(TA_union), {int, str}) + eq_(sa_typing.pep695_values(TAext_union), {int, str}) eq_(sa_typing.pep695_values(TA_null_union), {int, str, None}) + eq_(sa_typing.pep695_values(TAext_null_union), {int, str, None}) eq_(sa_typing.pep695_values(TA_null_union2), {int, str, None}) + eq_(sa_typing.pep695_values(TAext_null_union2), {int, str, None}) eq_( sa_typing.pep695_values(TA_null_union3), {int, typing.ForwardRef("typing.Union[None, bool]")}, ) + eq_( + sa_typing.pep695_values(TAext_null_union3), + {int, typing.ForwardRef("typing.Union[None, bool]")}, + ) eq_( sa_typing.pep695_values(TA_null_union4), {int, typing.ForwardRef("TA_null_union2")}, ) + eq_( + sa_typing.pep695_values(TAext_null_union4), + {int, typing.ForwardRef("TAext_null_union2")}, + ) eq_(sa_typing.pep695_values(TA_union_ta), {int, str}) + eq_(sa_typing.pep695_values(TAext_union_ta), {int, str}) eq_(sa_typing.pep695_values(TA_null_union_ta), {int, str, None, float}) + eq_( + sa_typing.pep695_values(TAext_null_union_ta), + {int, str, None, float}, + ) eq_( sa_typing.pep695_values(TA_list), {int, str, typing.List[typing.ForwardRef("TA_list")]}, ) + eq_( + sa_typing.pep695_values(TAext_list), + {int, str, typing.List[typing.ForwardRef("TAext_list")]}, + ) eq_( sa_typing.pep695_values(TA_recursive), {typing.ForwardRef("TA_recursive"), str}, ) + eq_( + sa_typing.pep695_values(TAext_recursive), + {typing.ForwardRef("TAext_recursive"), str}, + ) eq_( sa_typing.pep695_values(TA_null_recursive), {typing.ForwardRef("TA_recursive"), str, None}, ) + eq_( + sa_typing.pep695_values(TAext_null_recursive), + {typing.ForwardRef("TAext_recursive"), str, None}, + ) eq_( sa_typing.pep695_values(TA_recursive_a), {typing.ForwardRef("TA_recursive_b"), int}, ) + eq_( + sa_typing.pep695_values(TAext_recursive_a), + {typing.ForwardRef("TAext_recursive_b"), int}, + ) eq_( sa_typing.pep695_values(TA_recursive_b), {typing.ForwardRef("TA_recursive_a"), str}, ) + eq_( + sa_typing.pep695_values(TAext_recursive_b), + {typing.ForwardRef("TAext_recursive_a"), str}, + ) + # generics + 
eq_(sa_typing.pep695_values(TA_generic), {typing.List[TV]}) + eq_(sa_typing.pep695_values(TAext_generic), {typing.List[TV]}) + eq_(sa_typing.pep695_values(TA_generic_typed), {typing.List[TV]}) + eq_(sa_typing.pep695_values(TAext_generic_typed), {typing.List[TV]}) + eq_(sa_typing.pep695_values(TA_generic_null), {None, typing.List[TV]}) + eq_( + sa_typing.pep695_values(TAext_generic_null), + {None, typing.List[TV]}, + ) + eq_( + sa_typing.pep695_values(TA_generic_null_typed), + {None, typing.List[TV]}, + ) + eq_( + sa_typing.pep695_values(TAext_generic_null_typed), + {None, typing.List[TV]}, + ) def test_is_fwd_ref(self): eq_(sa_typing.is_fwd_ref(int), False) @@ -348,7 +503,12 @@ def test_make_union_type(self): sa_typing.make_union_type(bool, TA_int, NT_str), typing.Union[bool, TA_int, NT_str], ) + eq_( + sa_typing.make_union_type(bool, TAext_int, NT_str), + typing.Union[bool, TAext_int, NT_str], + ) + @requires.python38 def test_includes_none(self): eq_(sa_typing.includes_none(None), True) eq_(sa_typing.includes_none(type(None)), True) @@ -361,11 +521,12 @@ def test_includes_none(self): eq_(sa_typing.includes_none(t), True, str(t)) # TODO: these are false negatives - false_negative = { + false_negatives = { TA_null_union4, # does not evaluate FW ref + TAext_null_union4, # does not evaluate FW ref } for t in type_aliases() + new_types(): - if t in false_negative: + if t in false_negatives: exp = False else: exp = "null" in t.__name__ @@ -380,6 +541,9 @@ def test_includes_none(self): # nested things eq_(sa_typing.includes_none(typing.Union[int, "None"]), True) eq_(sa_typing.includes_none(typing.Union[bool, TA_null_union]), True) + eq_( + sa_typing.includes_none(typing.Union[bool, TAext_null_union]), True + ) eq_(sa_typing.includes_none(typing.Union[bool, NT_null]), True) # nested fw eq_( @@ -399,6 +563,10 @@ def test_includes_none(self): eq_( sa_typing.includes_none(typing.Union[bool, "TA_null_union"]), False ) + eq_( + sa_typing.includes_none(typing.Union[bool, "TAext_null_union"]), + False, + ) eq_(sa_typing.includes_none(typing.Union[bool, "NT_null"]), False) def test_is_union(self): @@ -407,3 +575,26 @@ def test_is_union(self): eq_(sa_typing.is_union(t), True) for t in type_aliases() + new_types() + annotated_l(): eq_(sa_typing.is_union(t), False) + + def test_TypingInstances(self): + is_(sa_typing._type_tuples, sa_typing._type_instances) + is_( + isinstance(sa_typing._type_instances, sa_typing._TypingInstances), + True, + ) + + # cached + is_( + sa_typing._type_instances.Literal, + sa_typing._type_instances.Literal, + ) + + for k in ["Literal", "Annotated", "TypeAliasType"]: + types = set() + ti = getattr(sa_typing._type_instances, k) + for lib in [typing, typing_extensions]: + lt = getattr(lib, k, None) + if lt is not None: + types.add(lt) + is_(lt in ti, True) + eq_(len(ti), len(types), k) diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py index f41f9e2d985..5b17e3e6e54 100644 --- a/test/orm/declarative/test_tm_future_annotations_sync.py +++ b/test/orm/declarative/test_tm_future_annotations_sync.py @@ -105,6 +105,8 @@ from sqlalchemy.util import compat from sqlalchemy.util.typing import Annotated +TV = typing.TypeVar("TV") + class _SomeDict1(TypedDict): type: Literal["1"] @@ -137,7 +139,16 @@ class _SomeDict2(TypedDict): ) _JsonPep695 = TypeAliasType("_JsonPep695", _JsonPep604) +TypingTypeAliasType = getattr(typing, "TypeAliasType", TypeAliasType) + _StrPep695 = TypeAliasType("_StrPep695", str) 
+_TypingStrPep695 = TypingTypeAliasType("_TypingStrPep695", str) +_GenericPep695 = TypeAliasType("_GenericPep695", List[TV], type_params=(TV,)) +_TypingGenericPep695 = TypingTypeAliasType( + "_TypingGenericPep695", List[TV], type_params=(TV,) +) +_GenericPep695Typed = _GenericPep695[int] +_TypingGenericPep695Typed = _TypingGenericPep695[int] _UnionPep695 = TypeAliasType("_UnionPep695", Union[_SomeDict1, _SomeDict2]) strtypalias_keyword = TypeAliasType( "strtypalias_keyword", Annotated[str, mapped_column(info={"hi": "there"})] @@ -152,6 +163,9 @@ class _SomeDict2(TypedDict): _Literal695 = TypeAliasType( "_Literal695", Literal["to-do", "in-progress", "done"] ) +_TypingLiteral695 = TypingTypeAliasType( + "_TypingLiteral695", Literal["to-do", "in-progress", "done"] +) _RecursiveLiteral695 = TypeAliasType("_RecursiveLiteral695", _Literal695) @@ -1107,20 +1121,52 @@ class Test(decl_base): nullable = "null" in option.name or "optional" in option.name eq_(col.nullable, nullable) + @testing.variation( + "type_", + [ + "str_extension", + "str_typing", + "generic_extension", + "generic_typing", + "generic_typed_extension", + "generic_typed_typing", + ], + ) @testing.requires.python312 def test_pep695_typealias_as_typemap_keys( - self, decl_base: Type[DeclarativeBase] + self, decl_base: Type[DeclarativeBase], type_ ): """test #10807""" decl_base.registry.update_type_annotation_map( - {_UnionPep695: JSON, _StrPep695: String(30)} + { + _UnionPep695: JSON, + _StrPep695: String(30), + _TypingStrPep695: String(30), + _GenericPep695: String(30), + _TypingGenericPep695: String(30), + _GenericPep695Typed: String(30), + _TypingGenericPep695Typed: String(30), + } ) class Test(decl_base): __tablename__ = "test" id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[_StrPep695] + if type_.str_extension: + data: Mapped[_StrPep695] + elif type_.str_typing: + data: Mapped[_TypingStrPep695] + elif type_.generic_extension: + data: Mapped[_GenericPep695] + elif type_.generic_typing: + data: Mapped[_TypingGenericPep695] + elif type_.generic_typed_extension: + data: Mapped[_GenericPep695Typed] + elif type_.generic_typed_typing: + data: Mapped[_TypingGenericPep695Typed] + else: + type_.fail() structure: Mapped[_UnionPep695] eq_(Test.__table__.c.data.type._type_affinity, String) @@ -1177,7 +1223,20 @@ class MyClass(decl_base): else: eq_(MyClass.data_one.type.length, None) - @testing.variation("type_", ["literal", "recursive", "not_literal"]) + @testing.variation( + "type_", + [ + "literal", + "literal_typing", + "recursive", + "not_literal", + "not_literal_typing", + "generic", + "generic_typing", + "generic_typed", + "generic_typed_typing", + ], + ) @testing.combinations(True, False, argnames="in_map") @testing.requires.python312 def test_pep695_literal_defaults_to_enum(self, decl_base, type_, in_map): @@ -1192,8 +1251,20 @@ class Foo(decl_base): status: Mapped[_RecursiveLiteral695] # noqa: F821 elif type_.literal: status: Mapped[_Literal695] # noqa: F821 + elif type_.literal_typing: + status: Mapped[_TypingLiteral695] # noqa: F821 elif type_.not_literal: status: Mapped[_StrPep695] # noqa: F821 + elif type_.not_literal_typing: + status: Mapped[_TypingStrPep695] # noqa: F821 + elif type_.generic: + status: Mapped[_GenericPep695] # noqa: F821 + elif type_.generic_typing: + status: Mapped[_TypingGenericPep695] # noqa: F821 + elif type_.generic_typed: + status: Mapped[_GenericPep695Typed] # noqa: F821 + elif type_.generic_typed_typing: + status: Mapped[_TypingGenericPep695Typed] # noqa: F821 else: type_.fail() @@ 
-1203,8 +1274,14 @@ class Foo(decl_base): decl_base.registry.update_type_annotation_map( { _Literal695: Enum(enum.Enum), # noqa: F821 + _TypingLiteral695: Enum(enum.Enum), # noqa: F821 _RecursiveLiteral695: Enum(enum.Enum), # noqa: F821 _StrPep695: Enum(enum.Enum), # noqa: F821 + _TypingStrPep695: Enum(enum.Enum), # noqa: F821 + _GenericPep695: Enum(enum.Enum), # noqa: F821 + _TypingGenericPep695: Enum(enum.Enum), # noqa: F821 + _GenericPep695Typed: Enum(enum.Enum), # noqa: F821 + _TypingGenericPep695Typed: Enum(enum.Enum), # noqa: F821 } ) if type_.recursive: @@ -1215,7 +1292,7 @@ class Foo(decl_base): "recursing TypeAliasType", ): Foo = declare() - elif type_.literal: + elif type_.literal or type_.literal_typing: Foo = declare() else: with expect_raises_message( @@ -1227,6 +1304,23 @@ class Foo(decl_base): ): declare() return + elif ( + type_.generic + or type_.generic_typing + or type_.generic_typed + or type_.generic_typed_typing + ): + # This behaves like 2.1 -> rationale is that no-one asked to + # support such types and in 2.1 will already be like this + # so it makes little sense to add support this late in the 2.0 + # series + with expect_raises_message( + exc.ArgumentError, + "Could not locate SQLAlchemy Core type for Python type " + ".+ inside the 'status' attribute Mapped annotation", + ): + declare() + return else: with expect_deprecated( "Matching the provided TypeAliasType '.*' on its " diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py index 0ff4bc60398..acc07ba7d4c 100644 --- a/test/orm/declarative/test_typed_mapping.py +++ b/test/orm/declarative/test_typed_mapping.py @@ -96,6 +96,8 @@ from sqlalchemy.util import compat from sqlalchemy.util.typing import Annotated +TV = typing.TypeVar("TV") + class _SomeDict1(TypedDict): type: Literal["1"] @@ -128,7 +130,16 @@ class _SomeDict2(TypedDict): ) _JsonPep695 = TypeAliasType("_JsonPep695", _JsonPep604) +TypingTypeAliasType = getattr(typing, "TypeAliasType", TypeAliasType) + _StrPep695 = TypeAliasType("_StrPep695", str) +_TypingStrPep695 = TypingTypeAliasType("_TypingStrPep695", str) +_GenericPep695 = TypeAliasType("_GenericPep695", List[TV], type_params=(TV,)) +_TypingGenericPep695 = TypingTypeAliasType( + "_TypingGenericPep695", List[TV], type_params=(TV,) +) +_GenericPep695Typed = _GenericPep695[int] +_TypingGenericPep695Typed = _TypingGenericPep695[int] _UnionPep695 = TypeAliasType("_UnionPep695", Union[_SomeDict1, _SomeDict2]) strtypalias_keyword = TypeAliasType( "strtypalias_keyword", Annotated[str, mapped_column(info={"hi": "there"})] @@ -143,6 +154,9 @@ class _SomeDict2(TypedDict): _Literal695 = TypeAliasType( "_Literal695", Literal["to-do", "in-progress", "done"] ) +_TypingLiteral695 = TypingTypeAliasType( + "_TypingLiteral695", Literal["to-do", "in-progress", "done"] +) _RecursiveLiteral695 = TypeAliasType("_RecursiveLiteral695", _Literal695) @@ -1098,20 +1112,52 @@ class Test(decl_base): nullable = "null" in option.name or "optional" in option.name eq_(col.nullable, nullable) + @testing.variation( + "type_", + [ + "str_extension", + "str_typing", + "generic_extension", + "generic_typing", + "generic_typed_extension", + "generic_typed_typing", + ], + ) @testing.requires.python312 def test_pep695_typealias_as_typemap_keys( - self, decl_base: Type[DeclarativeBase] + self, decl_base: Type[DeclarativeBase], type_ ): """test #10807""" decl_base.registry.update_type_annotation_map( - {_UnionPep695: JSON, _StrPep695: String(30)} + { + _UnionPep695: JSON, + _StrPep695: 
String(30), + _TypingStrPep695: String(30), + _GenericPep695: String(30), + _TypingGenericPep695: String(30), + _GenericPep695Typed: String(30), + _TypingGenericPep695Typed: String(30), + } ) class Test(decl_base): __tablename__ = "test" id: Mapped[int] = mapped_column(primary_key=True) - data: Mapped[_StrPep695] + if type_.str_extension: + data: Mapped[_StrPep695] + elif type_.str_typing: + data: Mapped[_TypingStrPep695] + elif type_.generic_extension: + data: Mapped[_GenericPep695] + elif type_.generic_typing: + data: Mapped[_TypingGenericPep695] + elif type_.generic_typed_extension: + data: Mapped[_GenericPep695Typed] + elif type_.generic_typed_typing: + data: Mapped[_TypingGenericPep695Typed] + else: + type_.fail() structure: Mapped[_UnionPep695] eq_(Test.__table__.c.data.type._type_affinity, String) @@ -1168,7 +1214,20 @@ class MyClass(decl_base): else: eq_(MyClass.data_one.type.length, None) - @testing.variation("type_", ["literal", "recursive", "not_literal"]) + @testing.variation( + "type_", + [ + "literal", + "literal_typing", + "recursive", + "not_literal", + "not_literal_typing", + "generic", + "generic_typing", + "generic_typed", + "generic_typed_typing", + ], + ) @testing.combinations(True, False, argnames="in_map") @testing.requires.python312 def test_pep695_literal_defaults_to_enum(self, decl_base, type_, in_map): @@ -1183,8 +1242,20 @@ class Foo(decl_base): status: Mapped[_RecursiveLiteral695] # noqa: F821 elif type_.literal: status: Mapped[_Literal695] # noqa: F821 + elif type_.literal_typing: + status: Mapped[_TypingLiteral695] # noqa: F821 elif type_.not_literal: status: Mapped[_StrPep695] # noqa: F821 + elif type_.not_literal_typing: + status: Mapped[_TypingStrPep695] # noqa: F821 + elif type_.generic: + status: Mapped[_GenericPep695] # noqa: F821 + elif type_.generic_typing: + status: Mapped[_TypingGenericPep695] # noqa: F821 + elif type_.generic_typed: + status: Mapped[_GenericPep695Typed] # noqa: F821 + elif type_.generic_typed_typing: + status: Mapped[_TypingGenericPep695Typed] # noqa: F821 else: type_.fail() @@ -1194,8 +1265,14 @@ class Foo(decl_base): decl_base.registry.update_type_annotation_map( { _Literal695: Enum(enum.Enum), # noqa: F821 + _TypingLiteral695: Enum(enum.Enum), # noqa: F821 _RecursiveLiteral695: Enum(enum.Enum), # noqa: F821 _StrPep695: Enum(enum.Enum), # noqa: F821 + _TypingStrPep695: Enum(enum.Enum), # noqa: F821 + _GenericPep695: Enum(enum.Enum), # noqa: F821 + _TypingGenericPep695: Enum(enum.Enum), # noqa: F821 + _GenericPep695Typed: Enum(enum.Enum), # noqa: F821 + _TypingGenericPep695Typed: Enum(enum.Enum), # noqa: F821 } ) if type_.recursive: @@ -1206,7 +1283,7 @@ class Foo(decl_base): "recursing TypeAliasType", ): Foo = declare() - elif type_.literal: + elif type_.literal or type_.literal_typing: Foo = declare() else: with expect_raises_message( @@ -1218,6 +1295,23 @@ class Foo(decl_base): ): declare() return + elif ( + type_.generic + or type_.generic_typing + or type_.generic_typed + or type_.generic_typed_typing + ): + # This behaves like 2.1 -> rationale is that no-one asked to + # support such types and in 2.1 will already be like this + # so it makes little sense to add support this late in the 2.0 + # series + with expect_raises_message( + exc.ArgumentError, + "Could not locate SQLAlchemy Core type for Python type " + ".+ inside the 'status' attribute Mapped annotation", + ): + declare() + return else: with expect_deprecated( "Matching the provided TypeAliasType '.*' on its " From 1dedb8bf9bbdbab2317f484b8c26e9ee58480370 Mon Sep 17 
00:00:00 2001 From: Kaan Date: Wed, 19 Mar 2025 11:58:30 -0400 Subject: [PATCH 483/544] Implement GROUPS frame spec for window functions Implemented support for the GROUPS frame specification in window functions by adding :paramref:`_sql.over.groups` option to :func:`_sql.over` and :meth:`.FunctionElement.over`. Pull request courtesy Kaan Dikmen. Fixes: #12450 Closes: #12445 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12445 Pull-request-sha: c0808e135f15c7fef3a3abcf28465673f38eb428 Change-Id: I9ff504a9c9650485830c4a0eaf44162898a3a2ad (cherry picked from commit 0bbc515f904446d3f0beede54321b628f32fbdad) --- doc/build/changelog/unreleased_20/12450.rst | 7 +++ lib/sqlalchemy/sql/_elements_constructors.py | 18 ++++-- lib/sqlalchemy/sql/compiler.py | 8 ++- lib/sqlalchemy/sql/elements.py | 27 +++++---- lib/sqlalchemy/sql/functions.py | 2 + test/ext/test_serializer.py | 10 ++++ test/sql/test_compare.py | 1 + test/sql/test_compiler.py | 62 +++++++++++++++++++- test/sql/test_functions.py | 28 +++++++++ 9 files changed, 144 insertions(+), 19 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12450.rst diff --git a/doc/build/changelog/unreleased_20/12450.rst b/doc/build/changelog/unreleased_20/12450.rst new file mode 100644 index 00000000000..dde46985a57 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12450.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: sql, usecase + :tickets: 12450 + + Implemented support for the GROUPS frame specification in window functions + by adding :paramref:`_sql.over.groups` option to :func:`_sql.over` + and :meth:`.FunctionElement.over`. Pull request courtesy Kaan Dikmen. diff --git a/lib/sqlalchemy/sql/_elements_constructors.py b/lib/sqlalchemy/sql/_elements_constructors.py index b628fcc9b52..3359998f3d8 100644 --- a/lib/sqlalchemy/sql/_elements_constructors.py +++ b/lib/sqlalchemy/sql/_elements_constructors.py @@ -1508,6 +1508,7 @@ def over( order_by: Optional[_ByArgument] = None, range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, + groups: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, ) -> Over[_T]: r"""Produce an :class:`.Over` object against a function. @@ -1525,8 +1526,9 @@ def over( ROW_NUMBER() OVER(ORDER BY some_column) - Ranges are also possible using the :paramref:`.expression.over.range_` - and :paramref:`.expression.over.rows` parameters. These + Ranges are also possible using the :paramref:`.expression.over.range_`, + :paramref:`.expression.over.rows`, and :paramref:`.expression.over.groups` + parameters. These mutually-exclusive parameters each accept a 2-tuple, which contains a combination of integers and None:: @@ -1559,6 +1561,10 @@ def over( func.row_number().over(order_by="x", range_=(1, 3)) + * GROUPS BETWEEN 1 FOLLOWING AND 3 FOLLOWING:: + + func.row_number().over(order_by="x", groups=(1, 3)) + :param element: a :class:`.FunctionElement`, :class:`.WithinGroup`, or other compatible construct. :param partition_by: a column element or string, or a list @@ -1570,10 +1576,14 @@ def over( :param range\_: optional range clause for the window. This is a tuple value which can contain integer values or ``None``, and will render a RANGE BETWEEN PRECEDING / FOLLOWING clause. - :param rows: optional rows clause for the window. This is a tuple value which can contain integer values or None, and will render a ROWS BETWEEN PRECEDING / FOLLOWING clause. + :param groups: optional groups clause for the window. 
This is a + tuple value which can contain integer values or ``None``, + and will render a GROUPS BETWEEN PRECEDING / FOLLOWING clause. + + .. versionadded:: 2.0.40 This function is also available from the :data:`~.expression.func` construct itself via the :meth:`.FunctionElement.over` method. @@ -1587,7 +1597,7 @@ def over( :func:`_expression.within_group` """ # noqa: E501 - return Over(element, partition_by, order_by, range_, rows) + return Over(element, partition_by, order_by, range_, rows, groups) @_document_text_coercion("text", ":func:`.text`", ":paramref:`.text.text`") diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 9130cdb2c38..098667f92b6 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -2896,14 +2896,18 @@ def _format_frame_clause(self, range_, **kw): def visit_over(self, over, **kwargs): text = over.element._compiler_dispatch(self, **kwargs) - if over.range_: + if over.range_ is not None: range_ = "RANGE BETWEEN %s" % self._format_frame_clause( over.range_, **kwargs ) - elif over.rows: + elif over.rows is not None: range_ = "ROWS BETWEEN %s" % self._format_frame_clause( over.rows, **kwargs ) + elif over.groups is not None: + range_ = "GROUPS BETWEEN %s" % self._format_frame_clause( + over.groups, **kwargs + ) else: range_ = None diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index b259f96463e..88cb2529d88 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -4194,6 +4194,7 @@ class Over(ColumnElement[_T]): ("partition_by", InternalTraversal.dp_clauseelement), ("range_", InternalTraversal.dp_plain_obj), ("rows", InternalTraversal.dp_plain_obj), + ("groups", InternalTraversal.dp_plain_obj), ] order_by: Optional[ClauseList] = None @@ -4205,6 +4206,7 @@ class Over(ColumnElement[_T]): range_: Optional[typing_Tuple[_IntOrRange, _IntOrRange]] rows: Optional[typing_Tuple[_IntOrRange, _IntOrRange]] + groups: Optional[typing_Tuple[_IntOrRange, _IntOrRange]] def __init__( self, @@ -4213,6 +4215,7 @@ def __init__( order_by: Optional[_ByArgument] = None, range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, + groups: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, ): self.element = element if order_by is not None: @@ -4225,19 +4228,14 @@ def __init__( _literal_as_text_role=roles.ByOfRole, ) - if range_: - self.range_ = self._interpret_range(range_) - if rows: - raise exc.ArgumentError( - "'range_' and 'rows' are mutually exclusive" - ) - else: - self.rows = None - elif rows: - self.rows = self._interpret_range(rows) - self.range_ = None + if sum(bool(item) for item in (range_, rows, groups)) > 1: + raise exc.ArgumentError( + "only one of 'rows', 'range_', or 'groups' may be provided" + ) else: - self.rows = self.range_ = None + self.range_ = self._interpret_range(range_) if range_ else None + self.rows = self._interpret_range(rows) if rows else None + self.groups = self._interpret_range(groups) if groups else None def __reduce__(self): return self.__class__, ( @@ -4246,6 +4244,7 @@ def __reduce__(self): self.order_by, self.range_, self.rows, + self.groups, ) def _interpret_range( @@ -4360,6 +4359,7 @@ def over( order_by: Optional[_ByArgument] = None, rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, + groups: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, ) -> Over[_T]: 
"""Produce an OVER clause against this :class:`.WithinGroup` construct. @@ -4374,6 +4374,7 @@ def over( order_by=order_by, range_=range_, rows=rows, + groups=groups, ) @overload @@ -4491,6 +4492,7 @@ def over( ] = None, range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, + groups: Optional[typing_Tuple[Optional[int], Optional[int]]] = None, ) -> Over[_T]: """Produce an OVER clause against this filtered function. @@ -4516,6 +4518,7 @@ def over( order_by=order_by, range_=range_, rows=rows, + groups=groups, ) def within_group( diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index ea02279d480..b0f6655cb16 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -435,6 +435,7 @@ def over( order_by: Optional[_ByArgument] = None, rows: Optional[Tuple[Optional[int], Optional[int]]] = None, range_: Optional[Tuple[Optional[int], Optional[int]]] = None, + groups: Optional[Tuple[Optional[int], Optional[int]]] = None, ) -> Over[_T]: """Produce an OVER clause against this function. @@ -466,6 +467,7 @@ def over( order_by=order_by, rows=rows, range_=range_, + groups=groups, ) def within_group( diff --git a/test/ext/test_serializer.py b/test/ext/test_serializer.py index 40544f3ba03..fb92c752a67 100644 --- a/test/ext/test_serializer.py +++ b/test/ext/test_serializer.py @@ -301,6 +301,16 @@ def test_unicode(self): "max(users.name) OVER (ROWS BETWEEN CURRENT " "ROW AND UNBOUNDED FOLLOWING)", ), + ( + lambda: func.max(users.c.name).over(groups=(None, 0)), + "max(users.name) OVER (GROUPS BETWEEN UNBOUNDED " + "PRECEDING AND CURRENT ROW)", + ), + ( + lambda: func.max(users.c.name).over(groups=(0, None)), + "max(users.name) OVER (GROUPS BETWEEN CURRENT " + "ROW AND UNBOUNDED FOLLOWING)", + ), ) def test_over(self, over_fn, sql): o = over_fn() diff --git a/test/sql/test_compare.py b/test/sql/test_compare.py index 96e3e7661d2..04577704421 100644 --- a/test/sql/test_compare.py +++ b/test/sql/test_compare.py @@ -440,6 +440,7 @@ class CoreFixtures: func.row_number().over(order_by=table_a.c.a, range_=(0, 10)), func.row_number().over(order_by=table_a.c.a, range_=(None, 10)), func.row_number().over(order_by=table_a.c.a, rows=(None, 20)), + func.row_number().over(order_by=table_a.c.a, groups=(None, 20)), func.row_number().over(order_by=table_a.c.b), func.row_number().over( order_by=table_a.c.a, partition_by=table_a.c.b diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index 12ba24e170d..7c43e60db3f 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -3213,6 +3213,41 @@ def test_over_framespec(self): checkparams={"param_1": 10, "param_2": 1}, ) + self.assert_compile( + select(func.row_number().over(order_by=expr, groups=(None, 0))), + "SELECT row_number() OVER " + "(ORDER BY mytable.myid GROUPS BETWEEN " + "UNBOUNDED PRECEDING AND CURRENT ROW)" + " AS anon_1 FROM mytable", + ) + + self.assert_compile( + select(func.row_number().over(order_by=expr, groups=(-5, 10))), + "SELECT row_number() OVER " + "(ORDER BY mytable.myid GROUPS BETWEEN " + ":param_1 PRECEDING AND :param_2 FOLLOWING)" + " AS anon_1 FROM mytable", + checkparams={"param_1": 5, "param_2": 10}, + ) + + self.assert_compile( + select(func.row_number().over(order_by=expr, groups=(1, 10))), + "SELECT row_number() OVER " + "(ORDER BY mytable.myid GROUPS BETWEEN " + ":param_1 FOLLOWING AND :param_2 FOLLOWING)" + " AS anon_1 FROM mytable", + checkparams={"param_1": 1, "param_2": 10}, + ) 
+ + self.assert_compile( + select(func.row_number().over(order_by=expr, groups=(-10, -1))), + "SELECT row_number() OVER " + "(ORDER BY mytable.myid GROUPS BETWEEN " + ":param_1 PRECEDING AND :param_2 PRECEDING)" + " AS anon_1 FROM mytable", + checkparams={"param_1": 10, "param_2": 1}, + ) + def test_over_invalid_framespecs(self): assert_raises_message( exc.ArgumentError, @@ -3230,10 +3265,35 @@ def test_over_invalid_framespecs(self): assert_raises_message( exc.ArgumentError, - "'range_' and 'rows' are mutually exclusive", + "only one of 'rows', 'range_', or 'groups' may be provided", + func.row_number().over, + range_=(-5, 8), + rows=(-2, 5), + ) + + assert_raises_message( + exc.ArgumentError, + "only one of 'rows', 'range_', or 'groups' may be provided", + func.row_number().over, + range_=(-5, 8), + groups=(None, None), + ) + + assert_raises_message( + exc.ArgumentError, + "only one of 'rows', 'range_', or 'groups' may be provided", + func.row_number().over, + rows=(-2, 5), + groups=(None, None), + ) + + assert_raises_message( + exc.ArgumentError, + "only one of 'rows', 'range_', or 'groups' may be provided", func.row_number().over, range_=(-5, 8), rows=(-2, 5), + groups=(None, None), ) def test_over_within_group(self): diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py index 163df0a0d71..28cdb03a965 100644 --- a/test/sql/test_functions.py +++ b/test/sql/test_functions.py @@ -844,6 +844,34 @@ def test_funcfilter_windowing_rows(self): "AS anon_1 FROM mytable", ) + def test_funcfilter_windowing_groups(self): + self.assert_compile( + select( + func.rank() + .filter(table1.c.name > "foo") + .over(groups=(1, 5), partition_by=["description"]) + ), + "SELECT rank() FILTER (WHERE mytable.name > :name_1) " + "OVER (PARTITION BY mytable.description GROUPS BETWEEN :param_1 " + "FOLLOWING AND :param_2 FOLLOWING) " + "AS anon_1 FROM mytable", + ) + + def test_funcfilter_windowing_groups_positional(self): + self.assert_compile( + select( + func.rank() + .filter(table1.c.name > "foo") + .over(groups=(1, 5), partition_by=["description"]) + ), + "SELECT rank() FILTER (WHERE mytable.name > ?) " + "OVER (PARTITION BY mytable.description GROUPS BETWEEN ? " + "FOLLOWING AND ? FOLLOWING) " + "AS anon_1 FROM mytable", + checkpositional=("foo", 1, 5), + dialect="default_qmark", + ) + def test_funcfilter_more_criteria(self): ff = func.rank().filter(table1.c.name > "foo") ff2 = ff.filter(table1.c.myid == 1) From 171dadaa6ab7d0c6bfa8d09ae947654863238042 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 26 Mar 2025 13:55:46 -0400 Subject: [PATCH 484/544] implement AsyncSessionTransaction._regenerate_proxy_for_target Fixed issue where :meth:`.AsyncSession.get_transaction` and :meth:`.AsyncSession.get_nested_transaction` would fail with ``NotImplementedError`` if the "proxy transaction" used by :class:`.AsyncSession` were garbage collected and needed regeneration. 
Fixes: #12471 Change-Id: Ia8055524618df706d7958786a500cdd25d9d8eaf (cherry picked from commit 0202673a34b1b0cbbda6e2cb06012f77df642085) --- doc/build/changelog/unreleased_20/12471.rst | 8 +++++ lib/sqlalchemy/ext/asyncio/base.py | 14 ++++----- lib/sqlalchemy/ext/asyncio/engine.py | 8 +++-- lib/sqlalchemy/ext/asyncio/session.py | 23 ++++++++++++-- test/ext/asyncio/test_session_py3k.py | 33 +++++++++++++++++++++ 5 files changed, 74 insertions(+), 12 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12471.rst diff --git a/doc/build/changelog/unreleased_20/12471.rst b/doc/build/changelog/unreleased_20/12471.rst new file mode 100644 index 00000000000..d3178b712a1 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12471.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, asyncio + :tickets: 12471 + + Fixed issue where :meth:`.AsyncSession.get_transaction` and + :meth:`.AsyncSession.get_nested_transaction` would fail with + ``NotImplementedError`` if the "proxy transaction" used by + :class:`.AsyncSession` were garbage collected and needed regeneration. diff --git a/lib/sqlalchemy/ext/asyncio/base.py b/lib/sqlalchemy/ext/asyncio/base.py index b53d53b1a4e..ce2c439f160 100644 --- a/lib/sqlalchemy/ext/asyncio/base.py +++ b/lib/sqlalchemy/ext/asyncio/base.py @@ -71,26 +71,26 @@ def _target_gced( cls._proxy_objects.pop(ref, None) @classmethod - def _regenerate_proxy_for_target(cls, target: _PT) -> Self: + def _regenerate_proxy_for_target( + cls, target: _PT, **additional_kw: Any + ) -> Self: raise NotImplementedError() @overload @classmethod def _retrieve_proxy_for_target( - cls, - target: _PT, - regenerate: Literal[True] = ..., + cls, target: _PT, regenerate: Literal[True] = ..., **additional_kw: Any ) -> Self: ... @overload @classmethod def _retrieve_proxy_for_target( - cls, target: _PT, regenerate: bool = True + cls, target: _PT, regenerate: bool = True, **additional_kw: Any ) -> Optional[Self]: ... 
@classmethod def _retrieve_proxy_for_target( - cls, target: _PT, regenerate: bool = True + cls, target: _PT, regenerate: bool = True, **additional_kw: Any ) -> Optional[Self]: try: proxy_ref = cls._proxy_objects[weakref.ref(target)] @@ -102,7 +102,7 @@ def _retrieve_proxy_for_target( return proxy # type: ignore if regenerate: - return cls._regenerate_proxy_for_target(target) + return cls._regenerate_proxy_for_target(target, **additional_kw) else: return None diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index 2c9b499f534..65c019954c2 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -255,7 +255,7 @@ def __init__( @classmethod def _regenerate_proxy_for_target( - cls, target: Connection + cls, target: Connection, **additional_kw: Any # noqa: U100 ) -> AsyncConnection: return AsyncConnection( AsyncEngine._retrieve_proxy_for_target(target.engine), target @@ -1041,7 +1041,9 @@ def _proxied(self) -> Engine: return self.sync_engine @classmethod - def _regenerate_proxy_for_target(cls, target: Engine) -> AsyncEngine: + def _regenerate_proxy_for_target( + cls, target: Engine, **additional_kw: Any # noqa: U100 + ) -> AsyncEngine: return AsyncEngine(target) @contextlib.asynccontextmanager @@ -1344,7 +1346,7 @@ def __init__(self, connection: AsyncConnection, nested: bool = False): @classmethod def _regenerate_proxy_for_target( - cls, target: Transaction + cls, target: Transaction, **additional_kw: Any # noqa: U100 ) -> AsyncTransaction: sync_connection = target.connection sync_transaction = target diff --git a/lib/sqlalchemy/ext/asyncio/session.py b/lib/sqlalchemy/ext/asyncio/session.py index bb276943cdb..1b8c9695c7f 100644 --- a/lib/sqlalchemy/ext/asyncio/session.py +++ b/lib/sqlalchemy/ext/asyncio/session.py @@ -812,7 +812,9 @@ def get_transaction(self) -> Optional[AsyncSessionTransaction]: """ trans = self.sync_session.get_transaction() if trans is not None: - return AsyncSessionTransaction._retrieve_proxy_for_target(trans) + return AsyncSessionTransaction._retrieve_proxy_for_target( + trans, async_session=self + ) else: return None @@ -828,7 +830,9 @@ def get_nested_transaction(self) -> Optional[AsyncSessionTransaction]: trans = self.sync_session.get_nested_transaction() if trans is not None: - return AsyncSessionTransaction._retrieve_proxy_for_target(trans) + return AsyncSessionTransaction._retrieve_proxy_for_target( + trans, async_session=self + ) else: return None @@ -1865,6 +1869,21 @@ async def commit(self) -> None: await greenlet_spawn(self._sync_transaction().commit) + @classmethod + def _regenerate_proxy_for_target( # type: ignore[override] + cls, + target: SessionTransaction, + async_session: AsyncSession, + **additional_kw: Any, # noqa: U100 + ) -> AsyncSessionTransaction: + sync_transaction = target + nested = target.nested + obj = cls.__new__(cls) + obj.session = async_session + obj.sync_transaction = obj._assign_proxied(sync_transaction) + obj.nested = nested + return obj + async def start( self, is_ctxmanager: bool = False ) -> AsyncSessionTransaction: diff --git a/test/ext/asyncio/test_session_py3k.py b/test/ext/asyncio/test_session_py3k.py index 2d6ce09da3a..5f9bf2e089e 100644 --- a/test/ext/asyncio/test_session_py3k.py +++ b/test/ext/asyncio/test_session_py3k.py @@ -38,6 +38,7 @@ from sqlalchemy.testing import expect_raises_message from sqlalchemy.testing import fixtures from sqlalchemy.testing import is_ +from sqlalchemy.testing import is_not from sqlalchemy.testing import is_true from 
sqlalchemy.testing import mock from sqlalchemy.testing.assertions import expect_deprecated @@ -934,6 +935,38 @@ async def test_get_transaction(self, async_session): is_(async_session.get_transaction(), None) is_(async_session.get_nested_transaction(), None) + @async_test + async def test_get_transaction_gced(self, async_session): + """test #12471 + + this tests that the AsyncSessionTransaction is regenerated if + we don't have any reference to it beforehand. + + """ + is_(async_session.get_transaction(), None) + is_(async_session.get_nested_transaction(), None) + + await async_session.begin() + + trans = async_session.get_transaction() + is_not(trans, None) + is_(trans.session, async_session) + is_false(trans.nested) + is_( + trans.sync_transaction, + async_session.sync_session.get_transaction(), + ) + + await async_session.begin_nested() + nested = async_session.get_nested_transaction() + is_not(nested, None) + is_true(nested.nested) + is_(nested.session, async_session) + is_( + nested.sync_transaction, + async_session.sync_session.get_nested_transaction(), + ) + @async_test async def test_async_object_session(self, async_engine): User = self.classes.User From 7163b9dd3091167d9be2be0510ff313f479d6ae2 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 27 Mar 2025 12:47:43 -0400 Subject: [PATCH 485/544] changelog update Change-Id: I03202183f4045030bc2940c43d637edc3524b5d4 (cherry picked from commit dd0b44b123738ba9289e120d3e3d8238d7741ea7) --- doc/build/changelog/unreleased_20/12473.rst | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/doc/build/changelog/unreleased_20/12473.rst b/doc/build/changelog/unreleased_20/12473.rst index 5127d92dd2a..a09a5fbfba2 100644 --- a/doc/build/changelog/unreleased_20/12473.rst +++ b/doc/build/changelog/unreleased_20/12473.rst @@ -1,7 +1,9 @@ .. change:: - :tags: bug, typing + :tags: bug, orm :tickets: 12473 - Fixed regression caused by ``typing_extension==4.13.0`` that introduced - a different implementation for ``TypeAliasType`` while SQLAlchemy assumed - that it would be equivalent to the ``typing`` version. + Fixed regression in ORM Annotated Declarative class interpretation caused + by ``typing_extension==4.13.0`` that introduced a different implementation + for ``TypeAliasType`` while SQLAlchemy assumed that it would be equivalent + to the ``typing`` version, leading to pep-695 type annotations not + resolving to SQL types as expected. 
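In minimal form, the mapping pattern affected by this regression looks like
the following sketch (class, table and alias names are illustrative; on
Python 3.12 the ``type`` statement may be used instead of constructing
``TypeAliasType`` directly)::

    from typing_extensions import TypeAliasType

    from sqlalchemy import String
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    Str50 = TypeAliasType("Str50", str)

    class Base(DeclarativeBase):
        # the alias object itself is the key in the type annotation map
        type_annotation_map = {Str50: String(50)}

    class Test(Base):
        __tablename__ = "test"

        id: Mapped[int] = mapped_column(primary_key=True)
        # with the fix, this resolves to String(50) whether TypeAliasType
        # comes from typing or from typing_extensions
        data: Mapped[Str50]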
From b4d7376b4c074121917ac9ea509ac60346328acc Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 27 Mar 2025 12:48:32 -0400 Subject: [PATCH 486/544] - 2.0.40 --- doc/build/changelog/changelog_20.rst | 117 +++++++++++++++++++- doc/build/changelog/unreleased_20/11595.rst | 11 -- doc/build/changelog/unreleased_20/12329.rst | 16 --- doc/build/changelog/unreleased_20/12332.rst | 10 -- doc/build/changelog/unreleased_20/12363.rst | 9 -- doc/build/changelog/unreleased_20/12425.rst | 18 --- doc/build/changelog/unreleased_20/12432.rst | 9 -- doc/build/changelog/unreleased_20/12450.rst | 7 -- doc/build/changelog/unreleased_20/12451.rst | 8 -- doc/build/changelog/unreleased_20/12471.rst | 8 -- doc/build/changelog/unreleased_20/12473.rst | 9 -- doc/build/conf.py | 4 +- 12 files changed, 118 insertions(+), 108 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/11595.rst delete mode 100644 doc/build/changelog/unreleased_20/12329.rst delete mode 100644 doc/build/changelog/unreleased_20/12332.rst delete mode 100644 doc/build/changelog/unreleased_20/12363.rst delete mode 100644 doc/build/changelog/unreleased_20/12425.rst delete mode 100644 doc/build/changelog/unreleased_20/12432.rst delete mode 100644 doc/build/changelog/unreleased_20/12450.rst delete mode 100644 doc/build/changelog/unreleased_20/12451.rst delete mode 100644 doc/build/changelog/unreleased_20/12471.rst delete mode 100644 doc/build/changelog/unreleased_20/12473.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 38ed6399c9a..86be90b42a8 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,122 @@ .. changelog:: :version: 2.0.40 - :include_notes_from: unreleased_20 + :released: March 27, 2025 + + .. change:: + :tags: usecase, postgresql + :tickets: 11595 + + Added support for specifying a list of columns for ``SET NULL`` and ``SET + DEFAULT`` actions of ``ON DELETE`` clause of foreign key definition on + PostgreSQL. Pull request courtesy Denis Laxalde. + + .. seealso:: + + :ref:`postgresql_constraint_options` + + .. change:: + :tags: bug, orm + :tickets: 12329 + + Fixed regression which occurred as of 2.0.37 where the checked + :class:`.ArgumentError` that's raised when an inappropriate type or object + is used inside of a :class:`.Mapped` annotation would raise ``TypeError`` + with "boolean value of this clause is not defined" if the object resolved + into a SQL expression in a boolean context, for programs where future + annotations mode was not enabled. This case is now handled explicitly and + a new error message has also been tailored for this case. In addition, as + there are at least half a dozen distinct error scenarios for intepretation + of the :class:`.Mapped` construct, these scenarios have all been unified + under a new subclass of :class:`.ArgumentError` called + :class:`.MappedAnnotationError`, to provide some continuity between these + different scenarios, even though specific messaging remains distinct. + + .. change:: + :tags: bug, mysql + :tickets: 12332 + + Support has been re-added for the MySQL-Connector/Python DBAPI using the + ``mysql+mysqlconnector://`` URL scheme. The DBAPI now works against + modern MySQL versions as well as MariaDB versions (in the latter case it's + required to pass charset/collation explicitly). Note however that + server side cursor support is disabled due to unresolved issues with this + driver. + + .. 
change:: + :tags: bug, sql + :tickets: 12363 + + Fixed issue in :class:`.CTE` constructs involving multiple DDL + :class:`_sql.Insert` statements with multiple VALUES parameter sets where the + bound parameter names generated for these parameter sets would conflict, + generating a compile time error. + + + .. change:: + :tags: bug, sqlite + :tickets: 12425 + + Expanded the rules for when to apply parenthesis to a server default in DDL + to suit the general case of a default string that contains non-word + characters such as spaces or operators and is not a string literal. + + .. change:: + :tags: bug, mysql + :tickets: 12425 + + Fixed issue in MySQL server default reflection where a default that has + spaces would not be correctly reflected. Additionally, expanded the rules + for when to apply parenthesis to a server default in DDL to suit the + general case of a default string that contains non-word characters such as + spaces or operators and is not a string literal. + + + .. change:: + :tags: usecase, postgresql + :tickets: 12432 + + When building a PostgreSQL ``ARRAY`` literal using + :class:`_postgresql.array` with an empty ``clauses`` argument, the + :paramref:`_postgresql.array.type_` parameter is now significant in that it + will be used to render the resulting ``ARRAY[]`` SQL expression with a + cast, such as ``ARRAY[]::INTEGER``. Pull request courtesy Denis Laxalde. + + .. change:: + :tags: sql, usecase + :tickets: 12450 + + Implemented support for the GROUPS frame specification in window functions + by adding :paramref:`_sql.over.groups` option to :func:`_sql.over` + and :meth:`.FunctionElement.over`. Pull request courtesy Kaan Dikmen. + + .. change:: + :tags: bug, sql + :tickets: 12451 + + Fixed regression caused by :ticket:`7471` leading to a SQL compilation + issue where name disambiguation for two same-named FROM clauses with table + aliasing in use at the same time would produce invalid SQL in the FROM + clause with two "AS" clauses for the aliased table, due to double aliasing. + + .. change:: + :tags: bug, asyncio + :tickets: 12471 + + Fixed issue where :meth:`.AsyncSession.get_transaction` and + :meth:`.AsyncSession.get_nested_transaction` would fail with + ``NotImplementedError`` if the "proxy transaction" used by + :class:`.AsyncSession` were garbage collected and needed regeneration. + + .. change:: + :tags: bug, orm + :tickets: 12473 + + Fixed regression in ORM Annotated Declarative class interpretation caused + by ``typing_extension==4.13.0`` that introduced a different implementation + for ``TypeAliasType`` while SQLAlchemy assumed that it would be equivalent + to the ``typing`` version, leading to pep-695 type annotations not + resolving to SQL types as expected. .. changelog:: :version: 2.0.39 diff --git a/doc/build/changelog/unreleased_20/11595.rst b/doc/build/changelog/unreleased_20/11595.rst deleted file mode 100644 index faefd245c04..00000000000 --- a/doc/build/changelog/unreleased_20/11595.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: usecase, postgresql - :tickets: 11595 - - Added support for specifying a list of columns for ``SET NULL`` and ``SET - DEFAULT`` actions of ``ON DELETE`` clause of foreign key definition on - PostgreSQL. Pull request courtesy Denis Laxalde. - - .. 
seealso:: - - :ref:`postgresql_constraint_options` diff --git a/doc/build/changelog/unreleased_20/12329.rst b/doc/build/changelog/unreleased_20/12329.rst deleted file mode 100644 index 9e4d1519a5c..00000000000 --- a/doc/build/changelog/unreleased_20/12329.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 12329 - - Fixed regression which occurred as of 2.0.37 where the checked - :class:`.ArgumentError` that's raised when an inappropriate type or object - is used inside of a :class:`.Mapped` annotation would raise ``TypeError`` - with "boolean value of this clause is not defined" if the object resolved - into a SQL expression in a boolean context, for programs where future - annotations mode was not enabled. This case is now handled explicitly and - a new error message has also been tailored for this case. In addition, as - there are at least half a dozen distinct error scenarios for intepretation - of the :class:`.Mapped` construct, these scenarios have all been unified - under a new subclass of :class:`.ArgumentError` called - :class:`.MappedAnnotationError`, to provide some continuity between these - different scenarios, even though specific messaging remains distinct. diff --git a/doc/build/changelog/unreleased_20/12332.rst b/doc/build/changelog/unreleased_20/12332.rst deleted file mode 100644 index a6c1d4e2fb1..00000000000 --- a/doc/build/changelog/unreleased_20/12332.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. change:: - :tags: bug, mysql - :tickets: 12332 - - Support has been re-added for the MySQL-Connector/Python DBAPI using the - ``mysql+mysqlconnector://`` URL scheme. The DBAPI now works against - modern MySQL versions as well as MariaDB versions (in the latter case it's - required to pass charset/collation explicitly). Note however that - server side cursor support is disabled due to unresolved issues with this - driver. diff --git a/doc/build/changelog/unreleased_20/12363.rst b/doc/build/changelog/unreleased_20/12363.rst deleted file mode 100644 index 35aa9dbdf0d..00000000000 --- a/doc/build/changelog/unreleased_20/12363.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 12363 - - Fixed issue in :class:`.CTE` constructs involving multiple DDL - :class:`_sql.Insert` statements with multiple VALUES parameter sets where the - bound parameter names generated for these parameter sets would conflict, - generating a compile time error. - diff --git a/doc/build/changelog/unreleased_20/12425.rst b/doc/build/changelog/unreleased_20/12425.rst deleted file mode 100644 index fbc1f8a4ef2..00000000000 --- a/doc/build/changelog/unreleased_20/12425.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. change:: - :tags: bug, sqlite - :tickets: 12425 - - Expanded the rules for when to apply parenthesis to a server default in DDL - to suit the general case of a default string that contains non-word - characters such as spaces or operators and is not a string literal. - -.. change:: - :tags: bug, mysql - :tickets: 12425 - - Fixed issue in MySQL server default reflection where a default that has - spaces would not be correctly reflected. Additionally, expanded the rules - for when to apply parenthesis to a server default in DDL to suit the - general case of a default string that contains non-word characters such as - spaces or operators and is not a string literal. 
- diff --git a/doc/build/changelog/unreleased_20/12432.rst b/doc/build/changelog/unreleased_20/12432.rst deleted file mode 100644 index ff781fbd803..00000000000 --- a/doc/build/changelog/unreleased_20/12432.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: usecase, postgresql - :tickets: 12432 - - When building a PostgreSQL ``ARRAY`` literal using - :class:`_postgresql.array` with an empty ``clauses`` argument, the - :paramref:`_postgresql.array.type_` parameter is now significant in that it - will be used to render the resulting ``ARRAY[]`` SQL expression with a - cast, such as ``ARRAY[]::INTEGER``. Pull request courtesy Denis Laxalde. diff --git a/doc/build/changelog/unreleased_20/12450.rst b/doc/build/changelog/unreleased_20/12450.rst deleted file mode 100644 index dde46985a57..00000000000 --- a/doc/build/changelog/unreleased_20/12450.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: sql, usecase - :tickets: 12450 - - Implemented support for the GROUPS frame specification in window functions - by adding :paramref:`_sql.over.groups` option to :func:`_sql.over` - and :meth:`.FunctionElement.over`. Pull request courtesy Kaan Dikmen. diff --git a/doc/build/changelog/unreleased_20/12451.rst b/doc/build/changelog/unreleased_20/12451.rst deleted file mode 100644 index 71b6983ad32..00000000000 --- a/doc/build/changelog/unreleased_20/12451.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, sql - :tickets: 12451 - - Fixed regression caused by :ticket:`7471` leading to a SQL compilation - issue where name disambiguation for two same-named FROM clauses with table - aliasing in use at the same time would produce invalid SQL in the FROM - clause with two "AS" clauses for the aliased table, due to double aliasing. diff --git a/doc/build/changelog/unreleased_20/12471.rst b/doc/build/changelog/unreleased_20/12471.rst deleted file mode 100644 index d3178b712a1..00000000000 --- a/doc/build/changelog/unreleased_20/12471.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, asyncio - :tickets: 12471 - - Fixed issue where :meth:`.AsyncSession.get_transaction` and - :meth:`.AsyncSession.get_nested_transaction` would fail with - ``NotImplementedError`` if the "proxy transaction" used by - :class:`.AsyncSession` were garbage collected and needed regeneration. diff --git a/doc/build/changelog/unreleased_20/12473.rst b/doc/build/changelog/unreleased_20/12473.rst deleted file mode 100644 index a09a5fbfba2..00000000000 --- a/doc/build/changelog/unreleased_20/12473.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, orm - :tickets: 12473 - - Fixed regression in ORM Annotated Declarative class interpretation caused - by ``typing_extension==4.13.0`` that introduced a different implementation - for ``TypeAliasType`` while SQLAlchemy assumed that it would be equivalent - to the ``typing`` version, leading to pep-695 type annotations not - resolving to SQL types as expected. diff --git a/doc/build/conf.py b/doc/build/conf.py index 999ea7672f9..846f1bd3304 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. 
-release = "2.0.39" +release = "2.0.40" -release_date = "March 11, 2025" +release_date = "March 27, 2025" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 2ca3b424fbbdf4891d6173ce07073aed09bab481 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 27 Mar 2025 13:52:56 -0400 Subject: [PATCH 487/544] Version 2.0.41 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 86be90b42a8..b87bce8e239 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.41 + :include_notes_from: unreleased_20 + .. changelog:: :version: 2.0.40 :released: March 27, 2025 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index f219838dcd4..71b701c920c 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.40" +__version__ = "2.0.41" def __go(lcls: Any) -> None: From 654f71bbf718981906b94c04d2a9ea7de71c624f Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Tue, 25 Mar 2025 04:51:30 -0400 Subject: [PATCH 488/544] Add type annotations to postgresql.pg_catalog Related to #6810. Closes: #12462 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12462 Pull-request-sha: 5a131cc9a94a2c9efa0e888fe504ebc03d84c7f0 Change-Id: Ie4494d61f815edefef6a896499db4292fd94a22a (cherry picked from commit 864f79d7c421cfa01b6e01eb95b76ffe77ff44d1) --- .../dialects/postgresql/pg_catalog.py | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py index 78f390a2118..4841056cf9d 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py +++ b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py @@ -4,7 +4,13 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors + +from __future__ import annotations + +from typing import Any +from typing import Optional +from typing import Sequence +from typing import TYPE_CHECKING from .array import ARRAY from .types import OID @@ -23,31 +29,37 @@ from ...types import Text from ...types import TypeDecorator +if TYPE_CHECKING: + from ...engine.interfaces import Dialect + from ...sql.type_api import _ResultProcessorType + # types -class NAME(TypeDecorator): +class NAME(TypeDecorator[str]): impl = String(64, collation="C") cache_ok = True -class PG_NODE_TREE(TypeDecorator): +class PG_NODE_TREE(TypeDecorator[str]): impl = Text(collation="C") cache_ok = True -class INT2VECTOR(TypeDecorator): +class INT2VECTOR(TypeDecorator[Sequence[int]]): impl = ARRAY(SmallInteger) cache_ok = True -class OIDVECTOR(TypeDecorator): +class OIDVECTOR(TypeDecorator[Sequence[int]]): impl = ARRAY(OID) cache_ok = True class _SpaceVector: - def result_processor(self, dialect, coltype): - def process(value): + def result_processor( + self, dialect: Dialect, coltype: object + ) -> _ResultProcessorType[list[int]]: + def process(value: Any) -> Optional[list[int]]: if value is None: return value return [int(p) for p in value.split(" ")] From a9411d95afe80f45b06080b74de56120c57e4dea Mon Sep 17 00:00:00 
2001 From: Federico Caselli Date: Tue, 1 Apr 2025 23:49:36 +0200 Subject: [PATCH 489/544] minor cleanup of postgresql index reflection query Change-Id: I669ea8e99c6b69cb70263b0cacd80d3ed0fab39c (cherry picked from commit 08619693794ebcd6671448658ce4c8bce7763ff0) --- lib/sqlalchemy/dialects/postgresql/base.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index ae97cc0caa5..98265c5d6a0 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -4414,7 +4414,10 @@ def get_indexes(self, connection, table_name, schema=None, **kw): @util.memoized_property def _index_query(self): - pg_class_index = pg_catalog.pg_class.alias("cls_idx") + # NOTE: pg_index is used as from two times to improve performance, + # since extraing all the index information from `idx_sq` to avoid + # the second pg_index use leads to a worse performing query in + # particular when querying for a single table (as of pg 17) # NOTE: repeating oids clause improve query performance # subquery to get the columns @@ -4496,13 +4499,13 @@ def _index_query(self): return ( select( pg_catalog.pg_index.c.indrelid, - pg_class_index.c.relname.label("relname_index"), + pg_catalog.pg_class.c.relname, pg_catalog.pg_index.c.indisunique, pg_catalog.pg_constraint.c.conrelid.is_not(None).label( "has_constraint" ), pg_catalog.pg_index.c.indoption, - pg_class_index.c.reloptions, + pg_catalog.pg_class.c.reloptions, pg_catalog.pg_am.c.amname, # NOTE: pg_get_expr is very fast so this case has almost no # performance impact @@ -4527,12 +4530,12 @@ def _index_query(self): ~pg_catalog.pg_index.c.indisprimary, ) .join( - pg_class_index, - pg_catalog.pg_index.c.indexrelid == pg_class_index.c.oid, + pg_catalog.pg_class, + pg_catalog.pg_index.c.indexrelid == pg_catalog.pg_class.c.oid, ) .join( pg_catalog.pg_am, - pg_class_index.c.relam == pg_catalog.pg_am.c.oid, + pg_catalog.pg_class.c.relam == pg_catalog.pg_am.c.oid, ) .outerjoin( cols_sq, @@ -4549,7 +4552,9 @@ def _index_query(self): == sql.any_(_array.array(("p", "u", "x"))), ), ) - .order_by(pg_catalog.pg_index.c.indrelid, pg_class_index.c.relname) + .order_by( + pg_catalog.pg_index.c.indrelid, pg_catalog.pg_class.c.relname + ) ) def get_multi_indexes( @@ -4584,7 +4589,7 @@ def get_multi_indexes( continue for row in result_by_oid[oid]: - index_name = row["relname_index"] + index_name = row["relname"] table_indexes = indexes[(schema, table_name)] From fd506ea168a22853c4fdc1b6325e2a8c6b769a8d Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Tue, 1 Apr 2025 13:30:48 -0400 Subject: [PATCH 490/544] Support postgresql_include in UniqueConstraint and PrimaryKeyConstraint This is supported both for schema definition and reflection. Fixes #10665. 
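As a rough usage sketch (mirroring the documentation example added in this patch; the table and column names are illustrative only), the new keyword renders an ``INCLUDE`` clause in the constraint DDL on PostgreSQL 11+:

```python
from sqlalchemy import Column, Integer, MetaData, Table, UniqueConstraint

metadata = MetaData()

# renders "UNIQUE (id) INCLUDE (value)" under the PostgreSQL dialect
tbl = Table(
    "mytable",
    metadata,
    Column("id", Integer, nullable=False),
    Column("value", Integer, nullable=False),
    UniqueConstraint("id", postgresql_include=["value"]),
)
```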
Closes: #12485 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12485 Pull-request-sha: 1aabea7b55ece9fc0c6e069b777d4404ac01f964 Change-Id: I81d23966f84390dd1b03f0d13284ce6d883ee24e (cherry picked from commit 3b7725dd1243134341cf1bfb331ed4501fc882e8) --- doc/build/changelog/unreleased_20/10665.rst | 11 + lib/sqlalchemy/dialects/postgresql/base.py | 217 ++++++++++++------ lib/sqlalchemy/engine/reflection.py | 5 +- .../testing/suite/test_reflection.py | 2 + test/dialect/postgresql/test_compiler.py | 35 +++ test/dialect/postgresql/test_reflection.py | 46 ++++ 6 files changed, 251 insertions(+), 65 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/10665.rst diff --git a/doc/build/changelog/unreleased_20/10665.rst b/doc/build/changelog/unreleased_20/10665.rst new file mode 100644 index 00000000000..967dda14b1d --- /dev/null +++ b/doc/build/changelog/unreleased_20/10665.rst @@ -0,0 +1,11 @@ +.. change:: + :tags: usecase, postgresql + :tickets: 10665 + + Added support for ``postgresql_include`` keyword argument to + :class:`_schema.UniqueConstraint` and :class:`_schema.PrimaryKeyConstraint`. + Pull request courtesy Denis Laxalde. + + .. seealso:: + + :ref:`postgresql_constraint_options` diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 98265c5d6a0..0fd030b49bb 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -978,6 +978,8 @@ def set_search_path(dbapi_connection, connection_record): Several extensions to the :class:`.Index` construct are available, specific to the PostgreSQL dialect. +.. _postgresql_covering_indexes: + Covering Indexes ^^^^^^^^^^^^^^^^ @@ -990,6 +992,10 @@ def set_search_path(dbapi_connection, connection_record): Note that this feature requires PostgreSQL 11 or later. +.. seealso:: + + :ref:`postgresql_constraint_options` + .. versionadded:: 1.4 .. _postgresql_partial_indexes: @@ -1264,6 +1270,42 @@ def update(): `_ - in the PostgreSQL documentation. +* ``INCLUDE``: This option adds one or more columns as a "payload" to the + unique index created automatically by PostgreSQL for the constraint. + For example, the following table definition:: + + Table( + "mytable", + metadata, + Column("id", Integer, nullable=False), + Column("value", Integer, nullable=False), + UniqueConstraint("id", postgresql_include=["value"]), + ) + + would produce the DDL statement + + .. sourcecode:: sql + + CREATE TABLE mytable ( + id INTEGER NOT NULL, + value INTEGER NOT NULL, + UNIQUE (id) INCLUDE (value) + ) + + Note that this feature requires PostgreSQL 11 or later. + + .. versionadded:: 2.0.41 + + .. seealso:: + + :ref:`postgresql_covering_indexes` + + .. seealso:: + + `PostgreSQL CREATE TABLE options + `_ - + in the PostgreSQL documentation. 
+ * Column list with foreign key ``ON DELETE SET`` actions: This applies to :class:`.ForeignKey` and :class:`.ForeignKeyConstraint`, the :paramref:`.ForeignKey.ondelete` parameter will accept on the PostgreSQL backend only a string list of column @@ -2260,6 +2302,18 @@ def _define_constraint_validity(self, constraint): not_valid = constraint.dialect_options["postgresql"]["not_valid"] return " NOT VALID" if not_valid else "" + def _define_include(self, obj): + includeclause = obj.dialect_options["postgresql"]["include"] + if not includeclause: + return "" + inclusions = [ + obj.table.c[col] if isinstance(col, str) else col + for col in includeclause + ] + return " INCLUDE (%s)" % ", ".join( + [self.preparer.quote(c.name) for c in inclusions] + ) + def visit_check_constraint(self, constraint, **kw): if constraint._type_bound: typ = list(constraint.columns)[0].type @@ -2283,6 +2337,16 @@ def visit_foreign_key_constraint(self, constraint, **kw): text += self._define_constraint_validity(constraint) return text + def visit_primary_key_constraint(self, constraint, **kw): + text = super().visit_primary_key_constraint(constraint) + text += self._define_include(constraint) + return text + + def visit_unique_constraint(self, constraint, **kw): + text = super().visit_unique_constraint(constraint) + text += self._define_include(constraint) + return text + @util.memoized_property def _fk_ondelete_pattern(self): return re.compile( @@ -2397,15 +2461,7 @@ def visit_create_index(self, create, **kw): ) ) - includeclause = index.dialect_options["postgresql"]["include"] - if includeclause: - inclusions = [ - index.table.c[col] if isinstance(col, str) else col - for col in includeclause - ] - text += " INCLUDE (%s)" % ", ".join( - [preparer.quote(c.name) for c in inclusions] - ) + text += self._define_include(index) nulls_not_distinct = index.dialect_options["postgresql"][ "nulls_not_distinct" @@ -3153,9 +3209,16 @@ class PGDialect(default.DefaultDialect): "not_valid": False, }, ), + ( + schema.PrimaryKeyConstraint, + {"include": None}, + ), ( schema.UniqueConstraint, - {"nulls_not_distinct": None}, + { + "include": None, + "nulls_not_distinct": None, + }, ), ] @@ -4037,21 +4100,35 @@ def _get_table_oids( result = connection.execute(oid_q, params) return result.all() - @lru_cache() - def _constraint_query(self, is_unique): + @util.memoized_property + def _constraint_query(self): + if self.server_version_info >= (11, 0): + indnkeyatts = pg_catalog.pg_index.c.indnkeyatts + else: + indnkeyatts = sql.null().label("indnkeyatts") + + if self.server_version_info >= (15,): + indnullsnotdistinct = pg_catalog.pg_index.c.indnullsnotdistinct + else: + indnullsnotdistinct = sql.false().label("indnullsnotdistinct") + con_sq = ( select( pg_catalog.pg_constraint.c.conrelid, pg_catalog.pg_constraint.c.conname, - pg_catalog.pg_constraint.c.conindid, - sql.func.unnest(pg_catalog.pg_constraint.c.conkey).label( - "attnum" - ), + sql.func.unnest(pg_catalog.pg_index.c.indkey).label("attnum"), sql.func.generate_subscripts( - pg_catalog.pg_constraint.c.conkey, 1 + pg_catalog.pg_index.c.indkey, 1 ).label("ord"), + indnkeyatts, + indnullsnotdistinct, pg_catalog.pg_description.c.description, ) + .join( + pg_catalog.pg_index, + pg_catalog.pg_constraint.c.conindid + == pg_catalog.pg_index.c.indexrelid, + ) .outerjoin( pg_catalog.pg_description, pg_catalog.pg_description.c.objoid @@ -4060,6 +4137,9 @@ def _constraint_query(self, is_unique): .where( pg_catalog.pg_constraint.c.contype == bindparam("contype"), 
pg_catalog.pg_constraint.c.conrelid.in_(bindparam("oids")), + # NOTE: filtering also on pg_index.indrelid for oids does + # not seem to have a performance effect, but it may be an + # option if perf problems are reported ) .subquery("con") ) @@ -4068,9 +4148,10 @@ def _constraint_query(self, is_unique): select( con_sq.c.conrelid, con_sq.c.conname, - con_sq.c.conindid, con_sq.c.description, con_sq.c.ord, + con_sq.c.indnkeyatts, + con_sq.c.indnullsnotdistinct, pg_catalog.pg_attribute.c.attname, ) .select_from(pg_catalog.pg_attribute) @@ -4093,7 +4174,7 @@ def _constraint_query(self, is_unique): .subquery("attr") ) - constraint_query = ( + return ( select( attr_sq.c.conrelid, sql.func.array_agg( @@ -4105,31 +4186,15 @@ def _constraint_query(self, is_unique): ).label("cols"), attr_sq.c.conname, sql.func.min(attr_sq.c.description).label("description"), + sql.func.min(attr_sq.c.indnkeyatts).label("indnkeyatts"), + sql.func.bool_and(attr_sq.c.indnullsnotdistinct).label( + "indnullsnotdistinct" + ), ) .group_by(attr_sq.c.conrelid, attr_sq.c.conname) .order_by(attr_sq.c.conrelid, attr_sq.c.conname) ) - if is_unique: - if self.server_version_info >= (15,): - constraint_query = constraint_query.join( - pg_catalog.pg_index, - attr_sq.c.conindid == pg_catalog.pg_index.c.indexrelid, - ).add_columns( - sql.func.bool_and( - pg_catalog.pg_index.c.indnullsnotdistinct - ).label("indnullsnotdistinct") - ) - else: - constraint_query = constraint_query.add_columns( - sql.false().label("indnullsnotdistinct") - ) - else: - constraint_query = constraint_query.add_columns( - sql.null().label("extra") - ) - return constraint_query - def _reflect_constraint( self, connection, contype, schema, filter_names, scope, kind, **kw ): @@ -4145,26 +4210,45 @@ def _reflect_constraint( batches[0:3000] = [] result = connection.execute( - self._constraint_query(is_unique), + self._constraint_query, {"oids": [r[0] for r in batch], "contype": contype}, - ) + ).mappings() result_by_oid = defaultdict(list) - for oid, cols, constraint_name, comment, extra in result: - result_by_oid[oid].append( - (cols, constraint_name, comment, extra) - ) + for row_dict in result: + result_by_oid[row_dict["conrelid"]].append(row_dict) for oid, tablename in batch: for_oid = result_by_oid.get(oid, ()) if for_oid: - for cols, constraint, comment, extra in for_oid: - if is_unique: - yield tablename, cols, constraint, comment, { - "nullsnotdistinct": extra - } + for row in for_oid: + # See note in get_multi_indexes + all_cols = row["cols"] + indnkeyatts = row["indnkeyatts"] + if ( + indnkeyatts is not None + and len(all_cols) > indnkeyatts + ): + inc_cols = all_cols[indnkeyatts:] + cst_cols = all_cols[:indnkeyatts] else: - yield tablename, cols, constraint, comment, None + inc_cols = [] + cst_cols = all_cols + + opts = {} + if self.server_version_info >= (11,): + opts["postgresql_include"] = inc_cols + if is_unique: + opts["postgresql_nulls_not_distinct"] = row[ + "indnullsnotdistinct" + ] + yield ( + tablename, + cst_cols, + row["conname"], + row["description"], + opts, + ) else: yield tablename, None, None, None, None @@ -4190,20 +4274,27 @@ def get_multi_pk_constraint( # only a single pk can be present for each table. 
Return an entry # even if a table has no primary key default = ReflectionDefaults.pk_constraint + + def pk_constraint(pk_name, cols, comment, opts): + info = { + "constrained_columns": cols, + "name": pk_name, + "comment": comment, + } + if opts: + info["dialect_options"] = opts + return info + return ( ( (schema, table_name), ( - { - "constrained_columns": [] if cols is None else cols, - "name": pk_name, - "comment": comment, - } + pk_constraint(pk_name, cols, comment, opts) if pk_name is not None else default() ), ) - for table_name, cols, pk_name, comment, _ in result + for table_name, cols, pk_name, comment, opts in result ) @reflection.cache @@ -4599,7 +4690,10 @@ def get_multi_indexes( # "The number of key columns in the index, not counting any # included columns, which are merely stored and do not # participate in the index semantics" - if indnkeyatts and len(all_elements) > indnkeyatts: + if ( + indnkeyatts is not None + and len(all_elements) > indnkeyatts + ): # this is a "covering index" which has INCLUDE columns # as well as regular index columns inc_cols = all_elements[indnkeyatts:] @@ -4729,12 +4823,7 @@ def get_multi_unique_constraints( "comment": comment, } if options: - if options["nullsnotdistinct"]: - uc_dict["dialect_options"] = { - "postgresql_nulls_not_distinct": options[ - "nullsnotdistinct" - ] - } + uc_dict["dialect_options"] = options uniques[(schema, table_name)].append(uc_dict) return uniques.items() diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 5d754c6703d..23009c64a4c 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -1714,9 +1714,12 @@ def _reflect_pk( if pk in cols_by_orig_name and pk not in exclude_columns ] - # update pk constraint name and comment + # update pk constraint name, comment and dialect_kwargs table.primary_key.name = pk_cons.get("name") table.primary_key.comment = pk_cons.get("comment", None) + dialect_options = pk_cons.get("dialect_options") + if dialect_options: + table.primary_key.dialect_kwargs.update(dialect_options) # tell the PKConstraint to re-initialize # its column collection diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index 0f2a2062a8e..47e012aba3b 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -1954,6 +1954,8 @@ def test_get_unique_constraints(self, metadata, connection, use_schema): if dupe: names_that_duplicate_index.add(dupe) eq_(refl.pop("comment", None), None) + # ignore dialect_options + refl.pop("dialect_options", None) eq_(orig, refl) reflected_metadata = MetaData() diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index 2bb8d292655..2a763593b2e 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -22,6 +22,7 @@ from sqlalchemy import literal from sqlalchemy import MetaData from sqlalchemy import null +from sqlalchemy import PrimaryKeyConstraint from sqlalchemy import schema from sqlalchemy import select from sqlalchemy import Sequence @@ -793,6 +794,40 @@ def test_nulls_not_distinct(self, expr_fn, expected): expr = testing.resolve_lambda(expr_fn, tbl=tbl) self.assert_compile(expr, expected, dialect=dd) + @testing.combinations( + ( + lambda tbl: schema.AddConstraint( + UniqueConstraint(tbl.c.id, postgresql_include=[tbl.c.value]) + ), + "ALTER TABLE foo ADD UNIQUE (id) INCLUDE (value)", + ), + ( + lambda 
tbl: schema.AddConstraint( + PrimaryKeyConstraint( + tbl.c.id, postgresql_include=[tbl.c.value, "misc"] + ) + ), + "ALTER TABLE foo ADD PRIMARY KEY (id) INCLUDE (value, misc)", + ), + ( + lambda tbl: schema.CreateIndex( + Index("idx", tbl.c.id, postgresql_include=[tbl.c.value]) + ), + "CREATE INDEX idx ON foo (id) INCLUDE (value)", + ), + ) + def test_include(self, expr_fn, expected): + m = MetaData() + tbl = Table( + "foo", + m, + Column("id", Integer, nullable=False), + Column("value", Integer, nullable=False), + Column("misc", String), + ) + expr = testing.resolve_lambda(expr_fn, tbl=tbl) + self.assert_compile(expr, expected) + def test_create_index_with_labeled_ops(self): m = MetaData() tbl = Table( diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 20844a0eaea..ebe751b5b34 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -1770,6 +1770,7 @@ def test_nullsnotdistinct(self, metadata, connection): "column_names": ["y"], "name": "unq1", "dialect_options": { + "postgresql_include": [], "postgresql_nulls_not_distinct": True, }, "comment": None, @@ -2602,6 +2603,51 @@ def all_none(): connection.execute(sa_ddl.DropConstraintComment(cst)) all_none() + @testing.skip_if("postgresql < 11.0", "not supported") + def test_reflection_constraints_with_include(self, connection, metadata): + Table( + "foo", + metadata, + Column("id", Integer, nullable=False), + Column("value", Integer, nullable=False), + Column("foo", String), + Column("arr", ARRAY(Integer)), + Column("bar", SmallInteger), + ) + metadata.create_all(connection) + connection.exec_driver_sql( + "ALTER TABLE foo ADD UNIQUE (id) INCLUDE (value)" + ) + connection.exec_driver_sql( + "ALTER TABLE foo " + "ADD PRIMARY KEY (id) INCLUDE (arr, foo, bar, value)" + ) + + unq = inspect(connection).get_unique_constraints("foo") + expected_unq = [ + { + "column_names": ["id"], + "name": "foo_id_value_key", + "dialect_options": { + "postgresql_nulls_not_distinct": False, + "postgresql_include": ["value"], + }, + "comment": None, + } + ] + eq_(unq, expected_unq) + + pk = inspect(connection).get_pk_constraint("foo") + expected_pk = { + "comment": None, + "constrained_columns": ["id"], + "dialect_options": { + "postgresql_include": ["arr", "foo", "bar", "value"] + }, + "name": "foo_pkey", + } + eq_(pk, expected_pk) + class CustomTypeReflectionTest(fixtures.TestBase): class CustomType: From ade87b49f5878ecceb1b4cc6f2f11013a6d268f2 Mon Sep 17 00:00:00 2001 From: Alexander Ruehe Date: Tue, 1 Apr 2025 17:52:12 -0400 Subject: [PATCH 491/544] ensure ON UPDATE test is case insensitive Fixed regression caused by the DEFAULT rendering changes in 2.0.40 :ticket:`12425` where using lowercase `on update` in a MySQL server default would incorrectly apply parenthesis, leading to errors when MySQL interpreted the rendered DDL. Pull request courtesy Alexander Ruehe. 
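A minimal sketch of the kind of definition affected, using the same ``text()`` default as the new test cases (table and column names are illustrative):

```python
from sqlalchemy import Column, DateTime, MetaData, Table, text

metadata = MetaData()

# with this fix the lowercase "on update" default is emitted as-is,
# e.g. "updated DATETIME DEFAULT now() on update now()", rather than
# being wrapped in parenthesis
t = Table(
    "t",
    metadata,
    Column(
        "updated", DateTime, server_default=text("now() on update now()")
    ),
)
```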
Fixes: #12488 Closes: #12489 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12489 Pull-request-sha: b9008f747d21bc06a4006c99a47fc6aa99407636 Change-Id: If5281c52415e4ddb6c2f8aee191d2335f6673b35 (cherry picked from commit 6f8f4a7d620f19afce8b8d43c25ff5ca5a466038) --- doc/build/changelog/unreleased_20/12488.rst | 8 +++++++ lib/sqlalchemy/dialects/mysql/base.py | 2 +- test/dialect/mysql/test_compiler.py | 25 +++++++++++++++++++-- test/dialect/mysql/test_query.py | 15 +++++++++++++ 4 files changed, 47 insertions(+), 3 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12488.rst diff --git a/doc/build/changelog/unreleased_20/12488.rst b/doc/build/changelog/unreleased_20/12488.rst new file mode 100644 index 00000000000..d81d025bdd8 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12488.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, mysql + :tickets: 12488 + + Fixed regression caused by the DEFAULT rendering changes in 2.0.40 + :ticket:`12425` where using lowercase `on update` in a MySQL server default + would incorrectly apply parenthesis, leading to errors when MySQL + interpreted the rendered DDL. Pull request courtesy Alexander Ruehe. diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 122a7cb2e5e..50976310a75 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1933,7 +1933,7 @@ def get_column_specification(self, column, **kw): if ( self.dialect._support_default_function and not re.match(r"^\s*[\'\"\(]", default) - and "ON UPDATE" not in default + and not re.search(r"ON +UPDATE", default, re.I) and re.match(r".*\W.*", default) ): colspec.append(f"DEFAULT ({default})") diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py index f9cfeba05b8..4a799234d93 100644 --- a/test/dialect/mysql/test_compiler.py +++ b/test/dialect/mysql/test_compiler.py @@ -438,6 +438,21 @@ def test_create_server_default_with_function_using( "description", String(255), server_default=func.lower("hi") ), Column("data", JSON, server_default=func.json_object()), + Column( + "updated1", + DateTime, + server_default=text("now() on update now()"), + ), + Column( + "updated2", + DateTime, + server_default=text("now() On UpDate now()"), + ), + Column( + "updated3", + DateTime, + server_default=text("now() ON UPDATE now()"), + ), ) eq_(dialect._support_default_function, has_brackets) @@ -449,7 +464,10 @@ def test_create_server_default_with_function_using( "time DATETIME DEFAULT CURRENT_TIMESTAMP, " "name VARCHAR(255) DEFAULT 'some str', " "description VARCHAR(255) DEFAULT (lower('hi')), " - "data JSON DEFAULT (json_object()))", + "data JSON DEFAULT (json_object()), " + "updated1 DATETIME DEFAULT now() on update now(), " + "updated2 DATETIME DEFAULT now() On UpDate now(), " + "updated3 DATETIME DEFAULT now() ON UPDATE now())", dialect=dialect, ) else: @@ -459,7 +477,10 @@ def test_create_server_default_with_function_using( "time DATETIME DEFAULT CURRENT_TIMESTAMP, " "name VARCHAR(255) DEFAULT 'some str', " "description VARCHAR(255) DEFAULT lower('hi'), " - "data JSON DEFAULT json_object())", + "data JSON DEFAULT json_object(), " + "updated1 DATETIME DEFAULT now() on update now(), " + "updated2 DATETIME DEFAULT now() On UpDate now(), " + "updated3 DATETIME DEFAULT now() ON UPDATE now())", dialect=dialect, ) diff --git a/test/dialect/mysql/test_query.py b/test/dialect/mysql/test_query.py index 96650dab564..a5111931312 100644 --- a/test/dialect/mysql/test_query.py +++ 
b/test/dialect/mysql/test_query.py @@ -56,6 +56,9 @@ def test_is_boolean_symbols_despite_no_native(self, connection): class ServerDefaultCreateTest(fixtures.TestBase): + __only_on__ = "mysql", "mariadb" + __backend__ = True + @testing.combinations( (Integer, text("10")), (Integer, text("'10'")), @@ -70,6 +73,18 @@ class ServerDefaultCreateTest(fixtures.TestBase): literal_column("3") + literal_column("5"), testing.requires.mysql_expression_defaults, ), + ( + DateTime, + text("now() ON UPDATE now()"), + ), + ( + DateTime, + text("now() on update now()"), + ), + ( + DateTime, + text("now() ON UPDATE now()"), + ), argnames="datatype, default", ) def test_create_server_defaults( From 0f90792d8deae227b2f7ba7b1259d9f2d791f426 Mon Sep 17 00:00:00 2001 From: Adriaan Joubert <45142747+adriaanjoubert@users.noreply.github.com> Date: Thu, 3 Apr 2025 20:56:29 +0300 Subject: [PATCH 492/544] Fix typo (#12495) (cherry picked from commit 51007fe428d87e5d5bfc2c04cd4224fda2e00879) --- doc/build/errors.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/errors.rst b/doc/build/errors.rst index 237d5d0ab3b..99701ba2790 100644 --- a/doc/build/errors.rst +++ b/doc/build/errors.rst @@ -136,7 +136,7 @@ What causes an application to use up all the connections that it has available? upon to release resources in a timely manner. A common reason this can occur is that the application uses ORM sessions and - does not call :meth:`.Session.close` upon them one the work involving that + does not call :meth:`.Session.close` upon them once the work involving that session is complete. Solution is to make sure ORM sessions if using the ORM, or engine-bound :class:`_engine.Connection` objects if using Core, are explicitly closed at the end of the work being done, either via the appropriate From 25558376887903302886e6539903126ca881769d Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 24 Mar 2025 21:50:45 +0100 Subject: [PATCH 493/544] improve overloads applied to generic functions try again to remove the overloads to the generic function generator (like coalesce, array_agg, etc). As of mypy 1.15 it still does not work, but a simpler version is added in this change Change-Id: I8b97ae00298ec6f6bf8580090e5defff71e1ceb0 (cherry picked from commit 5cc6a65c61798078959455f5d74f535681c119b7) --- lib/sqlalchemy/sql/functions.py | 107 ++++++++++-------- .../typing/plain_files/sql/functions_again.py | 6 + tools/generate_sql_functions.py | 12 +- 3 files changed, 68 insertions(+), 57 deletions(-) diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index 0e52a8bb736..cd63e82339e 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -5,7 +5,6 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php - """SQL function API, factories, and built-in functions.""" from __future__ import annotations
:param \*clauses: list of column expressions that form the arguments @@ -779,7 +780,7 @@ def _gen_cache_key(self, anon_map: Any, bindparams: Any) -> Any: def __init__( self, fn: FunctionElement[Any], left_index: int, right_index: int - ): + ) -> None: self.sql_function = fn self.left_index = left_index self.right_index = right_index @@ -831,7 +832,7 @@ def __init__( fn: FunctionElement[_T], name: str, type_: Optional[_TypeEngineArgument[_T]] = None, - ): + ) -> None: self.fn = fn self.name = name @@ -930,7 +931,7 @@ class _FunctionGenerator: """ # noqa - def __init__(self, **opts: Any): + def __init__(self, **opts: Any) -> None: self.__names: List[str] = [] self.opts = opts @@ -990,10 +991,10 @@ def aggregate_strings(self) -> Type[aggregate_strings]: ... @property def ansifunction(self) -> Type[AnsiFunction[Any]]: ... - # set ColumnElement[_T] as a separate overload, to appease mypy - # which seems to not want to accept _T from _ColumnExpressionArgument. - # this is even if all non-generic types are removed from it, so - # reasons remain unclear for why this does not work + # set ColumnElement[_T] as a separate overload, to appease + # mypy which seems to not want to accept _T from + # _ColumnExpressionArgument. Seems somewhat related to the covariant + # _HasClauseElement as of mypy 1.15 @overload def array_agg( @@ -1014,7 +1015,7 @@ def array_agg( @overload def array_agg( self, - col: _ColumnExpressionOrLiteralArgument[_T], + col: _T, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> array_agg[_T]: ... @@ -1032,10 +1033,10 @@ def cast(self) -> Type[Cast[Any]]: ... @property def char_length(self) -> Type[char_length]: ... - # set ColumnElement[_T] as a separate overload, to appease mypy - # which seems to not want to accept _T from _ColumnExpressionArgument. - # this is even if all non-generic types are removed from it, so - # reasons remain unclear for why this does not work + # set ColumnElement[_T] as a separate overload, to appease + # mypy which seems to not want to accept _T from + # _ColumnExpressionArgument. Seems somewhat related to the covariant + # _HasClauseElement as of mypy 1.15 @overload def coalesce( @@ -1056,7 +1057,7 @@ def coalesce( @overload def coalesce( self, - col: _ColumnExpressionOrLiteralArgument[_T], + col: _T, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> coalesce[_T]: ... @@ -1107,10 +1108,10 @@ def localtime(self) -> Type[localtime]: ... @property def localtimestamp(self) -> Type[localtimestamp]: ... - # set ColumnElement[_T] as a separate overload, to appease mypy - # which seems to not want to accept _T from _ColumnExpressionArgument. - # this is even if all non-generic types are removed from it, so - # reasons remain unclear for why this does not work + # set ColumnElement[_T] as a separate overload, to appease + # mypy which seems to not want to accept _T from + # _ColumnExpressionArgument. Seems somewhat related to the covariant + # _HasClauseElement as of mypy 1.15 @overload def max( # noqa: A001 @@ -1131,7 +1132,7 @@ def max( # noqa: A001 @overload def max( # noqa: A001 self, - col: _ColumnExpressionOrLiteralArgument[_T], + col: _T, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> max[_T]: ... @@ -1143,10 +1144,10 @@ def max( # noqa: A001 **kwargs: Any, ) -> max[_T]: ... - # set ColumnElement[_T] as a separate overload, to appease mypy - # which seems to not want to accept _T from _ColumnExpressionArgument. 
- # this is even if all non-generic types are removed from it, so - # reasons remain unclear for why this does not work + # set ColumnElement[_T] as a separate overload, to appease + # mypy which seems to not want to accept _T from + # _ColumnExpressionArgument. Seems somewhat related to the covariant + # _HasClauseElement as of mypy 1.15 @overload def min( # noqa: A001 @@ -1167,7 +1168,7 @@ def min( # noqa: A001 @overload def min( # noqa: A001 self, - col: _ColumnExpressionOrLiteralArgument[_T], + col: _T, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> min[_T]: ... @@ -1212,10 +1213,10 @@ def rollup(self) -> Type[rollup[Any]]: ... @property def session_user(self) -> Type[session_user]: ... - # set ColumnElement[_T] as a separate overload, to appease mypy - # which seems to not want to accept _T from _ColumnExpressionArgument. - # this is even if all non-generic types are removed from it, so - # reasons remain unclear for why this does not work + # set ColumnElement[_T] as a separate overload, to appease + # mypy which seems to not want to accept _T from + # _ColumnExpressionArgument. Seems somewhat related to the covariant + # _HasClauseElement as of mypy 1.15 @overload def sum( # noqa: A001 @@ -1236,7 +1237,7 @@ def sum( # noqa: A001 @overload def sum( # noqa: A001 self, - col: _ColumnExpressionOrLiteralArgument[_T], + col: _T, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> sum[_T]: ... @@ -1332,7 +1333,7 @@ def __init__( *clauses: _ColumnExpressionOrLiteralArgument[_T], type_: None = ..., packagenames: Optional[Tuple[str, ...]] = ..., - ): ... + ) -> None: ... @overload def __init__( @@ -1341,7 +1342,7 @@ def __init__( *clauses: _ColumnExpressionOrLiteralArgument[Any], type_: _TypeEngineArgument[_T] = ..., packagenames: Optional[Tuple[str, ...]] = ..., - ): ... + ) -> None: ... def __init__( self, @@ -1349,7 +1350,7 @@ def __init__( *clauses: _ColumnExpressionOrLiteralArgument[Any], type_: Optional[_TypeEngineArgument[_T]] = None, packagenames: Optional[Tuple[str, ...]] = None, - ): + ) -> None: """Construct a :class:`.Function`. The :data:`.func` construct is normally used to construct @@ -1531,7 +1532,7 @@ def _register_generic_function( def __init__( self, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any - ): + ) -> None: parsed_args = kwargs.pop("_parsed_args", None) if parsed_args is None: parsed_args = [ @@ -1578,7 +1579,7 @@ class next_value(GenericFunction[int]): ("sequence", InternalTraversal.dp_named_ddl_element) ] - def __init__(self, seq: schema.Sequence, **kw: Any): + def __init__(self, seq: schema.Sequence, **kw: Any) -> None: assert isinstance( seq, schema.Sequence ), "next_value() accepts a Sequence object as input." @@ -1603,7 +1604,9 @@ class AnsiFunction(GenericFunction[_T]): inherit_cache = True - def __init__(self, *args: _ColumnExpressionArgument[Any], **kwargs: Any): + def __init__( + self, *args: _ColumnExpressionArgument[Any], **kwargs: Any + ) -> None: GenericFunction.__init__(self, *args, **kwargs) @@ -1614,10 +1617,10 @@ class ReturnTypeFromArgs(GenericFunction[_T]): inherit_cache = True - # set ColumnElement[_T] as a separate overload, to appease mypy which seems - # to not want to accept _T from _ColumnExpressionArgument. this is even if - # all non-generic types are removed from it, so reasons remain unclear for - # why this does not work + # set ColumnElement[_T] as a separate overload, to appease + # mypy which seems to not want to accept _T from + # _ColumnExpressionArgument. 
Seems somewhat related to the covariant + # _HasClauseElement as of mypy 1.15 @overload def __init__( @@ -1625,7 +1628,7 @@ def __init__( col: ColumnElement[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ): ... + ) -> None: ... @overload def __init__( @@ -1633,19 +1636,19 @@ def __init__( col: _ColumnExpressionArgument[_T], *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ): ... + ) -> None: ... @overload def __init__( self, - col: _ColumnExpressionOrLiteralArgument[_T], + col: _T, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, - ): ... + ) -> None: ... def __init__( - self, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any - ): + self, *args: _ColumnExpressionOrLiteralArgument[_T], **kwargs: Any + ) -> None: fn_args: Sequence[ColumnElement[Any]] = [ coercions.expect( roles.ExpressionElementRole, @@ -1727,7 +1730,7 @@ class char_length(GenericFunction[int]): type = sqltypes.Integer() inherit_cache = True - def __init__(self, arg: _ColumnExpressionArgument[str], **kw: Any): + def __init__(self, arg: _ColumnExpressionArgument[str], **kw: Any) -> None: # slight hack to limit to just one positional argument # not sure why this one function has this special treatment super().__init__(arg, **kw) @@ -1773,7 +1776,7 @@ def __init__( _ColumnExpressionArgument[Any], _StarOrOne, None ] = None, **kwargs: Any, - ): + ) -> None: if expression is None: expression = literal_column("*") super().__init__(expression, **kwargs) @@ -1862,7 +1865,9 @@ class array_agg(ReturnTypeFromArgs[Sequence[_T]]): inherit_cache = True - def __init__(self, *args: _ColumnExpressionArgument[Any], **kwargs: Any): + def __init__( + self, *args: _ColumnExpressionArgument[Any], **kwargs: Any + ) -> None: fn_args: Sequence[ColumnElement[Any]] = [ coercions.expect( roles.ExpressionElementRole, c, apply_propagate_attrs=self @@ -2095,5 +2100,7 @@ class aggregate_strings(GenericFunction[str]): _has_args = True inherit_cache = True - def __init__(self, clause: _ColumnExpressionArgument[Any], separator: str): + def __init__( + self, clause: _ColumnExpressionArgument[Any], separator: str + ) -> None: super().__init__(clause, separator) diff --git a/test/typing/plain_files/sql/functions_again.py b/test/typing/plain_files/sql/functions_again.py index 67888790f6b..24b720f6710 100644 --- a/test/typing/plain_files/sql/functions_again.py +++ b/test/typing/plain_files/sql/functions_again.py @@ -1,4 +1,6 @@ +from sqlalchemy import column from sqlalchemy import func +from sqlalchemy import Integer from sqlalchemy import select from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped @@ -53,6 +55,10 @@ class Foo(Base): # test #10818 # EXPECTED_TYPE: coalesce[str] reveal_type(func.coalesce(Foo.c, "a", "b")) +# EXPECTED_TYPE: coalesce[str] +reveal_type(func.coalesce("a", "b")) +# EXPECTED_TYPE: coalesce[int] +reveal_type(func.coalesce(column("x", Integer), 3)) stmt2 = select(Foo.a, func.coalesce(Foo.c, "a", "b")).group_by(Foo.a) diff --git a/tools/generate_sql_functions.py b/tools/generate_sql_functions.py index 5049ce52066..624fbb75ed2 100644 --- a/tools/generate_sql_functions.py +++ b/tools/generate_sql_functions.py @@ -67,10 +67,10 @@ def process_functions(filename: str, cmd: code_writer_cmd) -> str: textwrap.indent( f""" -# set ColumnElement[_T] as a separate overload, to appease mypy -# which seems to not want to accept _T from _ColumnExpressionArgument. 
-# this is even if all non-generic types are removed from it, so -# reasons remain unclear for why this does not work +# set ColumnElement[_T] as a separate overload, to appease +# mypy which seems to not want to accept _T from +# _ColumnExpressionArgument. Seems somewhat related to the covariant +# _HasClauseElement as of mypy 1.15 @overload def {key}( {' # noqa: A001' if is_reserved_word else ''} @@ -90,17 +90,15 @@ def {key}( {' # noqa: A001' if is_reserved_word else ''} ) -> {fn_class.__name__}[_T]: ... - @overload def {key}( {' # noqa: A001' if is_reserved_word else ''} self, - col: _ColumnExpressionOrLiteralArgument[_T], + col: _T, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any, ) -> {fn_class.__name__}[_T]: ... - def {key}( {' # noqa: A001' if is_reserved_word else ''} self, col: _ColumnExpressionOrLiteralArgument[_T], From b74d6ca27de18dd5436a32fbeeef8e072845f040 Mon Sep 17 00:00:00 2001 From: krave1986 Date: Fri, 4 Apr 2025 02:55:36 +0800 Subject: [PATCH 494/544] docs: Fix substr function starting index in hybrid_property example (#12482) (cherry picked from commit 0c1824c666c55ae19051feb4970060385c674bb3) --- doc/build/orm/mapped_attributes.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/orm/mapped_attributes.rst b/doc/build/orm/mapped_attributes.rst index d0610f4e0fa..b114680132e 100644 --- a/doc/build/orm/mapped_attributes.rst +++ b/doc/build/orm/mapped_attributes.rst @@ -234,7 +234,7 @@ logic:: """Produce a SQL expression that represents the value of the _email column, minus the last twelve characters.""" - return func.substr(cls._email, 0, func.length(cls._email) - 12) + return func.substr(cls._email, 1, func.length(cls._email) - 12) Above, accessing the ``email`` property of an instance of ``EmailAddress`` will return the value of the ``_email`` attribute, removing or adding the @@ -249,7 +249,7 @@ attribute, a SQL function is rendered which produces the same effect: {execsql}SELECT address.email AS address_email, address.id AS address_id FROM address WHERE substr(address.email, ?, length(address.email) - ?) = ? - (0, 12, 'address') + (1, 12, 'address') {stop} Read more about Hybrids at :ref:`hybrids_toplevel`. From 1162710a63d0ab6fa5206c81a9f1379bac23242b Mon Sep 17 00:00:00 2001 From: Inada Naoki Date: Mon, 7 Apr 2025 19:55:48 -0400 Subject: [PATCH 495/544] optimize `@util.decorator` ### Description util.decorator uses code generation + eval to create signature matching wrapper. It consumes some CPU because we can not use pyc cache. Additionally, each wrapped function has own globals for function annotations. By stripping function annotations from eval-ed code, compile time and memory usage are saved. 
```python from sqlalchemy.util import decorator from sqlalchemy import * import timeit import tracemalloc import sqlalchemy.orm._orm_constructors @decorator def with_print(fn, *args, **kwargs): res = fn(*args, **kwargs) print(f"{fn.__name__}(*{args}, **{kwargs}) => {res}") return res # test PI = 3.14 def f(): @with_print def add(x: int|float, *, y: int|float=PI) -> int|float: return x + y return add add = f() add(1) print(add.__annotations__) # benchmark print(timeit.timeit(f, number=1000)*1000, "us") # memory tracemalloc.start(1) [f() for _ in range(1000)] mem, peak = tracemalloc.get_traced_memory() tracemalloc.stop() print(f"{mem=}, {peak=}") ``` Result: ``` $ .venv/bin/python -VV Python 3.14.0a6 (main, Mar 17 2025, 21:27:10) [Clang 20.1.0 ] $ .venv/bin/python sample.py add(*(1,), **{'y': 3.14}) => 4.140000000000001 {'x': int | float, 'y': int | float, 'return': int | float} 35.93937499681488 us mem=9252896, peak=9300808 $ git switch - Switched to branch 'opt-decorator' $ .venv/bin/python sample.py add(*(1,), **{'y': 3.14}) => 4.140000000000001 {'x': int | float, 'y': int | float, 'return': int | float} 23.32574996398762 us mem=1439032, peak=1476423 ``` ### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [x] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. Closes: #12502 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12502 Pull-request-sha: 34409cbbfd2dee65bf86a85a87e415c9af47dc62 Change-Id: I88b88eb6eb018608bc2881459f58564881d06641 (cherry picked from commit 370f13fe88ec5e4ee2400e23717db1e13df102bf) --- lib/sqlalchemy/util/langhelpers.py | 60 +++++++++++++++--------------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index b4086f1d579..a5ec5190f14 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -250,10 +250,30 @@ def decorate(fn: _Fn) -> _Fn: if not inspect.isfunction(fn) and not inspect.ismethod(fn): raise Exception("not a decoratable function") - spec = compat.inspect_getfullargspec(fn) - env: Dict[str, Any] = {} + # Python 3.14 defer creating __annotations__ until its used. + # We do not want to create __annotations__ now. + annofunc = getattr(fn, "__annotate__", None) + if annofunc is not None: + fn.__annotate__ = None # type: ignore[union-attr] + try: + spec = compat.inspect_getfullargspec(fn) + finally: + fn.__annotate__ = annofunc # type: ignore[union-attr] + else: + spec = compat.inspect_getfullargspec(fn) - spec = _update_argspec_defaults_into_env(spec, env) + # Do not generate code for annotations. + # update_wrapper() copies the annotation from fn to decorated. + # We use dummy defaults for code generation to avoid having + # copy of large globals for compiling. + # We copy __defaults__ and __kwdefaults__ from fn to decorated. 
+ empty_defaults = (None,) * len(spec.defaults or ()) + empty_kwdefaults = dict.fromkeys(spec.kwonlydefaults or ()) + spec = spec._replace( + annotations={}, + defaults=empty_defaults, + kwonlydefaults=empty_kwdefaults, + ) names = ( tuple(cast("Tuple[str, ...]", spec[0])) @@ -298,43 +318,23 @@ def decorate(fn: _Fn) -> _Fn: % metadata ) - mod = sys.modules[fn.__module__] - env.update(vars(mod)) - env.update({targ_name: target, fn_name: fn, "__name__": fn.__module__}) + env: Dict[str, Any] = { + targ_name: target, + fn_name: fn, + "__name__": fn.__module__, + } decorated = cast( types.FunctionType, _exec_code_in_env(code, env, fn.__name__), ) - decorated.__defaults__ = getattr(fn, "__func__", fn).__defaults__ - - decorated.__wrapped__ = fn # type: ignore[attr-defined] + decorated.__defaults__ = fn.__defaults__ + decorated.__kwdefaults__ = fn.__kwdefaults__ # type: ignore return update_wrapper(decorated, fn) # type: ignore[return-value] return update_wrapper(decorate, target) # type: ignore[return-value] -def _update_argspec_defaults_into_env(spec, env): - """given a FullArgSpec, convert defaults to be symbol names in an env.""" - - if spec.defaults: - new_defaults = [] - i = 0 - for arg in spec.defaults: - if type(arg).__module__ not in ("builtins", "__builtin__"): - name = "x%d" % i - env[name] = arg - new_defaults.append(name) - i += 1 - else: - new_defaults.append(arg) - elem = list(spec) - elem[3] = tuple(new_defaults) - return compat.FullArgSpec(*elem) - else: - return spec - - def _exec_code_in_env( code: Union[str, types.CodeType], env: Dict[str, Any], fn_name: str ) -> Callable[..., Any]: From 81539b6d84eb024d58f81b9d7d1885ed0f199357 Mon Sep 17 00:00:00 2001 From: Inada Naoki Date: Wed, 9 Apr 2025 05:43:25 +0900 Subject: [PATCH 496/544] orm.exc.NoResultFound => exc.NoResultFound (#12509) * s/orm.exc.NoResultFound/exc.NoResultFound/ * use _exc (cherry picked from commit d5a913c8aefad763539f8fd88b99118bcabb19a2) --- lib/sqlalchemy/engine/result.py | 4 ++-- lib/sqlalchemy/ext/asyncio/scoping.py | 3 +-- lib/sqlalchemy/ext/asyncio/session.py | 3 +-- lib/sqlalchemy/orm/query.py | 11 +++++------ lib/sqlalchemy/orm/scoping.py | 3 +-- lib/sqlalchemy/orm/session.py | 3 +-- 6 files changed, 11 insertions(+), 16 deletions(-) diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 3c81fc60520..ed099da05d1 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -1489,8 +1489,8 @@ def scalar_one_or_none(self) -> Optional[Any]: def one(self) -> Row[_TP]: """Return exactly one row or raise an exception. - Raises :class:`.NoResultFound` if the result returns no - rows, or :class:`.MultipleResultsFound` if multiple rows + Raises :class:`_exc.NoResultFound` if the result returns no + rows, or :class:`_exc.MultipleResultsFound` if multiple rows would be returned. .. note:: This method returns one **row**, e.g. tuple, by default. diff --git a/lib/sqlalchemy/ext/asyncio/scoping.py b/lib/sqlalchemy/ext/asyncio/scoping.py index 7ecab37b400..d2a9a51b231 100644 --- a/lib/sqlalchemy/ext/asyncio/scoping.py +++ b/lib/sqlalchemy/ext/asyncio/scoping.py @@ -1175,8 +1175,7 @@ async def get_one( Proxied for the :class:`_asyncio.AsyncSession` class on behalf of the :class:`_asyncio.scoping.async_scoped_session` class. - Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects - no rows. + Raises :class:`_exc.NoResultFound` if the query selects no rows. 
..versionadded: 2.0.22 diff --git a/lib/sqlalchemy/ext/asyncio/session.py b/lib/sqlalchemy/ext/asyncio/session.py index 1b8c9695c7f..68cbb59bfd6 100644 --- a/lib/sqlalchemy/ext/asyncio/session.py +++ b/lib/sqlalchemy/ext/asyncio/session.py @@ -628,8 +628,7 @@ async def get_one( """Return an instance based on the given primary key identifier, or raise an exception if not found. - Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects - no rows. + Raises :class:`_exc.NoResultFound` if the query selects no rows. ..versionadded: 2.0.22 diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index af496b245f4..3489c15fd6f 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -2787,11 +2787,10 @@ def one_or_none(self) -> Optional[_T]: def one(self) -> _T: """Return exactly one result or raise an exception. - Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects - no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` - if multiple object identities are returned, or if multiple - rows are returned for a query that returns only scalar values - as opposed to full identity-mapped entities. + Raises :class:`_exc.NoResultFound` if the query selects no rows. + Raises :class:`_exc.MultipleResultsFound` if multiple object identities + are returned, or if multiple rows are returned for a query that returns + only scalar values as opposed to full identity-mapped entities. Calling :meth:`.one` results in an execution of the underlying query. @@ -2811,7 +2810,7 @@ def one(self) -> _T: def scalar(self) -> Any: """Return the first element of the first result or None if no rows present. If multiple rows are returned, - raises MultipleResultsFound. + raises :class:`_exc.MultipleResultsFound`. >>> session.query(Item).scalar() diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py index a0e9f17e4fa..df5a6534dce 100644 --- a/lib/sqlalchemy/orm/scoping.py +++ b/lib/sqlalchemy/orm/scoping.py @@ -1086,8 +1086,7 @@ def get_one( Proxied for the :class:`_orm.Session` class on behalf of the :class:`_orm.scoping.scoped_session` class. - Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query - selects no rows. + Raises :class:`_exc.NoResultFound` if the query selects no rows. For a detailed documentation of the arguments see the method :meth:`.Session.get`. diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index 6cd7cd63390..f8ad6fa6a4b 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -3718,8 +3718,7 @@ def get_one( """Return exactly one instance based on the given primary key identifier, or raise an exception if not found. - Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query - selects no rows. + Raises :class:`_exc.NoResultFound` if the query selects no rows. For a detailed documentation of the arguments see the method :meth:`.Session.get`. From 5278af0c0fcbcfd05cdb81d79df6e82e8f07088b Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Wed, 9 Apr 2025 03:04:20 -0400 Subject: [PATCH 497/544] Type postgresql.aggregate_order_by() Overloading of `__init__()` is needed, probably for the same reason as it is in `ReturnTypeFromArgs`. Related to #6810. 
Closes: #12463 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12463 Pull-request-sha: 701d979e20c6ca3e32b79145c20441407007122f Change-Id: I7e1bb4d2c48dfb3461725c7079aaa72c66f1dc03 (cherry picked from commit 09c1d3ccaccd93e0b8affa751c40c250aeedbaa5) --- lib/sqlalchemy/dialects/postgresql/ext.py | 53 +++++++++++++++---- .../dialects/postgresql/pg_stuff.py | 23 ++++++++ 2 files changed, 67 insertions(+), 9 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/ext.py b/lib/sqlalchemy/dialects/postgresql/ext.py index 94466ae0a13..54bacd94471 100644 --- a/lib/sqlalchemy/dialects/postgresql/ext.py +++ b/lib/sqlalchemy/dialects/postgresql/ext.py @@ -8,6 +8,10 @@ from __future__ import annotations from typing import Any +from typing import Iterable +from typing import List +from typing import Optional +from typing import overload from typing import TYPE_CHECKING from typing import TypeVar @@ -23,13 +27,19 @@ from ...sql.sqltypes import TEXT from ...sql.visitors import InternalTraversal -_T = TypeVar("_T", bound=Any) - if TYPE_CHECKING: + from ...sql._typing import _ColumnExpressionArgument + from ...sql.elements import ClauseElement + from ...sql.elements import ColumnElement + from ...sql.operators import OperatorType + from ...sql.selectable import FromClause + from ...sql.visitors import _CloneCallableType from ...sql.visitors import _TraverseInternalsType +_T = TypeVar("_T", bound=Any) -class aggregate_order_by(expression.ColumnElement): + +class aggregate_order_by(expression.ColumnElement[_T]): """Represent a PostgreSQL aggregate order by expression. E.g.:: @@ -75,11 +85,32 @@ class aggregate_order_by(expression.ColumnElement): ("order_by", InternalTraversal.dp_clauseelement), ] - def __init__(self, target, *order_by): - self.target = coercions.expect(roles.ExpressionElementRole, target) + @overload + def __init__( + self, + target: ColumnElement[_T], + *order_by: _ColumnExpressionArgument[Any], + ): ... + + @overload + def __init__( + self, + target: _ColumnExpressionArgument[_T], + *order_by: _ColumnExpressionArgument[Any], + ): ... 
+ + def __init__( + self, + target: _ColumnExpressionArgument[_T], + *order_by: _ColumnExpressionArgument[Any], + ): + self.target: ClauseElement = coercions.expect( + roles.ExpressionElementRole, target + ) self.type = self.target.type _lob = len(order_by) + self.order_by: ClauseElement if _lob == 0: raise TypeError("at least one ORDER BY element is required") elif _lob == 1: @@ -91,18 +122,22 @@ def __init__(self, target, *order_by): *order_by, _literal_as_text_role=roles.ExpressionElementRole ) - def self_group(self, against=None): + def self_group( + self, against: Optional[OperatorType] = None + ) -> ClauseElement: return self - def get_children(self, **kwargs): + def get_children(self, **kwargs: Any) -> Iterable[ClauseElement]: return self.target, self.order_by - def _copy_internals(self, clone=elements._clone, **kw): + def _copy_internals( + self, clone: _CloneCallableType = elements._clone, **kw: Any + ) -> None: self.target = clone(self.target, **kw) self.order_by = clone(self.order_by, **kw) @property - def _from_objects(self): + def _from_objects(self) -> List[FromClause]: return self.target._from_objects + self.order_by._from_objects diff --git a/test/typing/plain_files/dialects/postgresql/pg_stuff.py b/test/typing/plain_files/dialects/postgresql/pg_stuff.py index 3dbb9498787..0f1e588bd95 100644 --- a/test/typing/plain_files/dialects/postgresql/pg_stuff.py +++ b/test/typing/plain_files/dialects/postgresql/pg_stuff.py @@ -10,6 +10,7 @@ from sqlalchemy import select from sqlalchemy import Text from sqlalchemy import UniqueConstraint +from sqlalchemy.dialects.postgresql import aggregate_order_by from sqlalchemy.dialects.postgresql import ARRAY from sqlalchemy.dialects.postgresql import array from sqlalchemy.dialects.postgresql import DATERANGE @@ -131,3 +132,25 @@ class Test(Base): # EXPECTED_TYPE: Select[Tuple[Sequence[str]]] reveal_type(select(func.array_agg(Test.ident_str))) + +stmt_array_agg_order_by_1 = select( + func.array_agg( + aggregate_order_by( + Column("title", type_=Text), + Column("date", type_=DATERANGE).desc(), + Column("id", type_=Integer), + ), + ) +) + +# EXPECTED_TYPE: Select[Tuple[Sequence[str]]] +reveal_type(stmt_array_agg_order_by_1) + +stmt_array_agg_order_by_2 = select( + func.array_agg( + aggregate_order_by(Test.ident_str, Test.id.desc(), Test.ident), + ) +) + +# EXPECTED_TYPE: Select[Tuple[Sequence[str]]] +reveal_type(stmt_array_agg_order_by_2) From ecbb398850ec65a47d63249cfa35c0dd6549af55 Mon Sep 17 00:00:00 2001 From: Matt John Date: Tue, 15 Apr 2025 20:05:36 +0100 Subject: [PATCH 498/544] chore: Fix typo of psycopg2 in comment (#12526) This is the first example in the documentation of a particular connector, which mgith result in copy+pastes, resulting in an error (cherry picked from commit f2a9ecde29bb9d5daadd0626054ff8b54865c781) --- lib/sqlalchemy/dialects/postgresql/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 0fd030b49bb..f8c1d5c3bb1 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -266,7 +266,7 @@ def use_identity(element, compiler, **kw): from sqlalchemy import event postgresql_engine = create_engine( - "postgresql+pyscopg2://scott:tiger@hostname/dbname", + "postgresql+psycopg2://scott:tiger@hostname/dbname", # disable default reset-on-return scheme pool_reset_on_return=None, ) From 90609d9a28f6329a7d0c861aba7180f29cb0e1d0 Mon Sep 17 00:00:00 2001 From: Ryu Juheon 
Date: Fri, 18 Apr 2025 04:48:54 +0900 Subject: [PATCH 499/544] chore: add type hint for reconstructor (#12527) * chore: add type hint for reconstructor * chore: fix attr-defined * chore: use defined typevar * chore: ignore type error (cherry picked from commit 299284cec65076fd4c76bf1efaae60b60f4d4f7b) --- lib/sqlalchemy/orm/mapper.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index eab2be558f6..5844854f9d0 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -4304,7 +4304,7 @@ def _dispose_registries(registries: Set[_RegistryType], cascade: bool) -> None: reg._new_mappers = False -def reconstructor(fn): +def reconstructor(fn: _Fn) -> _Fn: """Decorate a method as the 'reconstructor' hook. Designates a single method as the "reconstructor", an ``__init__``-like @@ -4330,7 +4330,7 @@ def reconstructor(fn): :meth:`.InstanceEvents.load` """ - fn.__sa_reconstructor__ = True + fn.__sa_reconstructor__ = True # type: ignore[attr-defined] return fn From fceab85d1a9a3615e65bf488b7a408361907df66 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 21 Apr 2025 09:44:40 -0400 Subject: [PATCH 500/544] disable mysql/connector-python, again Just as we got this driver "working", a new regression is introduced in version 9.3.0 which prevents basic binary string persistence [1]. I would say we need to leave this driver off for another few years until something changes with its upstream maintenance. [1] https://bugs.mysql.com/bug.php?id=118025 Change-Id: If876f63ebb9a6f7dfa0b316df044afa469a154f2 (cherry picked from commit 3217acc1131048aa67744e032fe8816407d8dfba) --- lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 10 +++++++++- tox.ini | 5 ++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index 71ac58601c1..faeae16abd5 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -22,11 +22,19 @@ with features such as server side cursors which remain disabled until upstream issues are repaired. +.. warning:: The MySQL Connector/Python driver published by Oracle is subject + to frequent, major regressions of essential functionality such as being able + to correctly persist simple binary strings which indicate it is not well + tested. The SQLAlchemy project is not able to maintain this dialect fully as + regressions in the driver prevent it from being included in continuous + integration. + .. versionchanged:: 2.0.39 The MySQL Connector/Python dialect has been updated to support the latest version of this DBAPI. Previously, MySQL Connector/Python - was not fully supported. + was not fully supported. However, support remains limited due to ongoing + regressions introduced in this driver. 
Connecting to MariaDB with MySQL Connector/Python -------------------------------------------------- diff --git a/tox.ini b/tox.ini index 576346aec62..76469ebef7f 100644 --- a/tox.ini +++ b/tox.ini @@ -38,7 +38,6 @@ extras= mysql: mysql mysql: pymysql mysql: mariadb_connector - mysql: mysql_connector oracle: oracle oracle: oracle_oracledb @@ -146,8 +145,8 @@ setenv= memusage: WORKERS={env:TOX_WORKERS:-n2} mysql: MYSQL={env:TOX_MYSQL:--db mysql} - mysql: EXTRA_MYSQL_DRIVERS={env:EXTRA_MYSQL_DRIVERS:--dbdriver mysqldb --dbdriver pymysql --dbdriver asyncmy --dbdriver aiomysql --dbdriver mariadbconnector --dbdriver mysqlconnector} - mysql-nogreenlet: EXTRA_MYSQL_DRIVERS={env:EXTRA_MYSQL_DRIVERS:--dbdriver mysqldb --dbdriver pymysql --dbdriver mariadbconnector --dbdriver mysqlconnector} + mysql: EXTRA_MYSQL_DRIVERS={env:EXTRA_MYSQL_DRIVERS:--dbdriver mysqldb --dbdriver pymysql --dbdriver asyncmy --dbdriver aiomysql --dbdriver mariadbconnector} + mysql-nogreenlet: EXTRA_MYSQL_DRIVERS={env:EXTRA_MYSQL_DRIVERS:--dbdriver mysqldb --dbdriver pymysql --dbdriver mariadbconnector} mssql: MSSQL={env:TOX_MSSQL:--db mssql} mssql: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver aioodbc --dbdriver pymssql} From b10cd8607ed861dc27a789d859caec38f03440da Mon Sep 17 00:00:00 2001 From: Shamil Date: Thu, 17 Apr 2025 11:23:21 -0400 Subject: [PATCH 501/544] refactor: simplify and clean up dialect-specific code **Title:** Removed unused variables and redundant functions across multiple dialects. Improves code readability and reduces maintenance complexity without altering functionality. ### Description This pull request introduces several minor refactorings across different dialect modules: - **MSSQL:** - Simplified the initialization of the `fkeys` dictionary in `_get_foreign_keys` using `util.defaultdict` directly. - **MySQL:** Removed the unused variable in `_get_table_comment`. `rp` - **PostgreSQL (_psycopg_common):** Removed the unused variable `cursor` in `do_ping`. - **PostgreSQL (base):** Removed the unused variable `args` in `_get_column_info`. - **SQLite:** Removed the unused variable `new_filename` in `generate_driver_url`. These changes focus purely on code cleanup and simplification, removing dead code and improving clarity. They do not alter the existing logic or functionality of the dialects. 
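As a plain-Python sketch (not code from this patch) of the `util.defaultdict`
simplification described above for the MSSQL `_get_foreign_keys` change; the
constraint and column names used here are illustrative only:

    from collections import defaultdict

    # a lambda factory replaces the separately-defined fkey_rec() helper
    fkeys = defaultdict(
        lambda: {
            "name": None,
            "constrained_columns": [],
            "referred_columns": [],
        }
    )
    # grouping rows by constraint name simply appends into the defaults
    fkeys["fk_user"]["constrained_columns"].append("user_id")
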
### Checklist This pull request is: - [ ] A documentation / typographical / small typing error fix - [x] A short code fix - _Note: This is a general cleanup refactor rather than a fix for a specific reported issue._ - [ ] A new feature implementation **Have a nice day!** Closes: #12534 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12534 Pull-request-sha: 2c7ae17b73192ba6bff6bec953b307a88ea31847 Change-Id: I1ec3b48f42aea7e45bc20f81add03051eb30bb98 (cherry picked from commit bb5bfb4beb35450ee8db7a173b9b438e065a90a9) --- lib/sqlalchemy/dialects/mssql/base.py | 9 +++------ lib/sqlalchemy/dialects/mysql/base.py | 1 - lib/sqlalchemy/dialects/postgresql/_psycopg_common.py | 1 - lib/sqlalchemy/dialects/postgresql/base.py | 1 - lib/sqlalchemy/dialects/sqlite/provision.py | 2 -- 5 files changed, 3 insertions(+), 11 deletions(-) diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 916809e7684..f641ff03ea8 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -3991,10 +3991,8 @@ def get_foreign_keys( ) # group rows by constraint ID, to handle multi-column FKs - fkeys = [] - - def fkey_rec(): - return { + fkeys = util.defaultdict( + lambda: { "name": None, "constrained_columns": [], "referred_schema": None, @@ -4002,8 +4000,7 @@ def fkey_rec(): "referred_columns": [], "options": {}, } - - fkeys = util.defaultdict(fkey_rec) + ) for r in connection.execute(s).all(): ( diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 50976310a75..a78c4e0f747 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -3478,7 +3478,6 @@ def _show_create_table( full_name = self.identifier_preparer.format_table(table) st = "SHOW CREATE TABLE %s" % full_name - rp = None try: rp = connection.execution_options( skip_user_error_events=True diff --git a/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py b/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py index d827e054ccf..9b09868bd3a 100644 --- a/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py +++ b/lib/sqlalchemy/dialects/postgresql/_psycopg_common.py @@ -171,7 +171,6 @@ def _do_autocommit(self, connection, value): connection.autocommit = value def do_ping(self, dbapi_connection): - cursor = None before_autocommit = dbapi_connection.autocommit if not before_autocommit: diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index f8c1d5c3bb1..9d4257cf0e4 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -3935,7 +3935,6 @@ def _reflect_type( schema_type = ENUM enum = enums[enum_or_domain_key] - args = tuple(enum["labels"]) kwargs["name"] = enum["name"] if not enum["visible"]: diff --git a/lib/sqlalchemy/dialects/sqlite/provision.py b/lib/sqlalchemy/dialects/sqlite/provision.py index 97f882e7f28..e1df005e72c 100644 --- a/lib/sqlalchemy/dialects/sqlite/provision.py +++ b/lib/sqlalchemy/dialects/sqlite/provision.py @@ -52,8 +52,6 @@ def _format_url(url, driver, ident): assert "test_schema" not in filename tokens = re.split(r"[_\.]", filename) - new_filename = f"{driver}" - for token in tokens: if token in _drivernames: if driver is None: From 3ca84922d77f1be601c0989d67ac3995a9de2b56 Mon Sep 17 00:00:00 2001 From: Shamil Date: Mon, 21 Apr 2025 12:36:21 -0400 Subject: [PATCH 502/544] refactor (sql): simplify and optimize internal SQL handling Replaced redundant variable assignments 
with direct operations. Used `dict.get()` for safer dictionary lookups to streamline logic. Improves code readability and reduces unnecessary lines. Closes: #12538 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12538 Pull-request-sha: d322d1508cfc37668099e6624816aba9c647ad51 Change-Id: Ib3dfc7086ec35117fdad65e136a17aa014b96ae5 (cherry picked from commit 93b0be7009b4f6efd091fda31229353f929f4cc9) --- lib/sqlalchemy/sql/cache_key.py | 2 +- lib/sqlalchemy/sql/compiler.py | 2 +- lib/sqlalchemy/sql/crud.py | 2 +- lib/sqlalchemy/sql/lambdas.py | 7 ++----- 4 files changed, 5 insertions(+), 8 deletions(-) diff --git a/lib/sqlalchemy/sql/cache_key.py b/lib/sqlalchemy/sql/cache_key.py index 1f562f2e67b..cec0450aa61 100644 --- a/lib/sqlalchemy/sql/cache_key.py +++ b/lib/sqlalchemy/sql/cache_key.py @@ -516,7 +516,7 @@ def _whats_different(self, other: CacheKey) -> Iterator[str]: e2, ) else: - pickup_index = stack.pop(-1) + stack.pop(-1) break def _diff(self, other: CacheKey) -> str: diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 098667f92b6..f171256d4a1 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -4292,7 +4292,7 @@ def visit_alias( inner = "(%s)" % (inner,) return inner else: - enclosing_alias = kwargs["enclosing_alias"] = alias + kwargs["enclosing_alias"] = alias if asfrom or ashint: if isinstance(alias.name, elements._truncated_label): diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py index c0c0c86bb9c..4a592ff7b97 100644 --- a/lib/sqlalchemy/sql/crud.py +++ b/lib/sqlalchemy/sql/crud.py @@ -241,7 +241,7 @@ def _get_crud_params( stmt_parameter_tuples = list(spd.items()) spd_str_key = {_column_as_key(key) for key in spd} else: - stmt_parameter_tuples = spd = spd_str_key = None + stmt_parameter_tuples = spd_str_key = None # if we have statement parameters - set defaults in the # compiled params diff --git a/lib/sqlalchemy/sql/lambdas.py b/lib/sqlalchemy/sql/lambdas.py index 8d70f800e74..ce755c1f832 100644 --- a/lib/sqlalchemy/sql/lambdas.py +++ b/lib/sqlalchemy/sql/lambdas.py @@ -256,10 +256,7 @@ def _retrieve_tracker_rec(self, fn, apply_propagate_attrs, opts): self.closure_cache_key = cache_key - try: - rec = lambda_cache[tracker_key + cache_key] - except KeyError: - rec = None + rec = lambda_cache.get(tracker_key + cache_key) else: cache_key = _cache_key.NO_CACHE rec = None @@ -1173,7 +1170,7 @@ def _instrument_and_run_function(self, lambda_element): closure_pywrappers.append(bind) else: value = fn.__globals__[name] - new_globals[name] = bind = PyWrapper(fn, name, value) + new_globals[name] = PyWrapper(fn, name, value) # rewrite the original fn. things that look like they will # become bound parameters are wrapped in a PyWrapper. From 939afc1e3ca7caf18214a0788a61e525490eefea Mon Sep 17 00:00:00 2001 From: Shamil Date: Thu, 17 Apr 2025 15:48:19 -0400 Subject: [PATCH 503/544] refactor: clean up unused variables in engine module Removed unused variables to improve code clarity and maintainability. This change simplifies logic in `base.py`, `default.py`, and `result.py`. No functionality was altered. 
Closes: #12535 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12535 Pull-request-sha: a9d849f3a4f3abe9aff49279c4cc81aa26aeaa9b Change-Id: If78b18dbd33733c631f8b5aad7d55261fbc4817b (cherry picked from commit d1d81f80a3764e3ebc38481fb6fd82cf6295dcf9) (cherry picked from commit ba433ff9f567b0c58f5f78c2d5cf262f9e5e9c43) --- lib/sqlalchemy/engine/base.py | 4 +--- lib/sqlalchemy/engine/default.py | 4 +--- lib/sqlalchemy/engine/result.py | 1 - 3 files changed, 2 insertions(+), 7 deletions(-) diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index cbf11acf5ac..e9925eb9300 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -2428,9 +2428,7 @@ def _handle_dbapi_exception_noconnection( break if sqlalchemy_exception and is_disconnect != ctx.is_disconnect: - sqlalchemy_exception.connection_invalidated = is_disconnect = ( - ctx.is_disconnect - ) + sqlalchemy_exception.connection_invalidated = ctx.is_disconnect if newraise: raise newraise.with_traceback(exc_info[2]) from e diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index f0b58b634f9..69c6dc1b623 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -739,8 +739,6 @@ def _do_ping_w_event(self, dbapi_connection: DBAPIConnection) -> bool: raise def do_ping(self, dbapi_connection: DBAPIConnection) -> bool: - cursor = None - cursor = dbapi_connection.cursor() try: cursor.execute(self._dialect_specific_select_one) @@ -1844,7 +1842,7 @@ def _setup_result_proxy(self): if self.is_crud or self.is_text: result = self._setup_dml_or_text_result() - yp = sr = False + yp = False else: yp = exec_opt.get("yield_per", None) sr = self._is_server_side or exec_opt.get("stream_results", False) diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index ed099da05d1..5d597fd5f49 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -809,7 +809,6 @@ def _only_one_row( "was required" ) else: - next_row = _NO_ROW # if we checked for second row then that would have # closed us :) self._soft_close(hard=True) From 4bfdafe337aacc7ac03058063458091502f650f1 Mon Sep 17 00:00:00 2001 From: Shamil Date: Mon, 21 Apr 2025 12:35:43 -0400 Subject: [PATCH 504/544] refactor(testing-and-utils): Remove unused code and fix style issues This PR includes several small refactorings and style fixes aimed at improving code cleanliness, primarily within the test suite and tooling. Key changes: * Removed assignments to unused variables in various test files (`test_dialect.py`, `test_reflection.py`, `test_select.py`). * Removed an unused variable in the pytest plugin (`pytestplugin.py`). * Removed an unused variable in the topological sort utility (`topological.py`). * Fixed a minor style issue (removed an extra blank line) in the `cython_imports.py` script. 
Closes: #12539 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12539 Pull-request-sha: 837c1e6cb17f0ff31444d5161329c318b52e48e7 Change-Id: Ifa37fb956bc3cacd31967f08bdaa4254e16911c2 (cherry picked from commit 64f45d0a6b4ad41cf570a8f0e09b86fba0ebb043) (cherry picked from commit e15cb0779b42829027faea3496fa0d5163d9e2f5) --- lib/sqlalchemy/testing/plugin/pytestplugin.py | 1 - lib/sqlalchemy/testing/suite/test_dialect.py | 4 ++-- lib/sqlalchemy/testing/suite/test_reflection.py | 6 +++--- lib/sqlalchemy/testing/suite/test_select.py | 2 +- lib/sqlalchemy/util/topological.py | 2 +- 5 files changed, 7 insertions(+), 8 deletions(-) diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py index f6d47c631ce..e5b63adf295 100644 --- a/lib/sqlalchemy/testing/plugin/pytestplugin.py +++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py @@ -270,7 +270,6 @@ def setup_test_classes(): for test_class in test_classes: # transfer legacy __backend__ and __sparse_backend__ symbols # to be markers - add_markers = set() if getattr(test_class.cls, "__backend__", False) or getattr( test_class.cls, "__only_on__", False ): diff --git a/lib/sqlalchemy/testing/suite/test_dialect.py b/lib/sqlalchemy/testing/suite/test_dialect.py index ae67cc10adc..ebbb9e435a0 100644 --- a/lib/sqlalchemy/testing/suite/test_dialect.py +++ b/lib/sqlalchemy/testing/suite/test_dialect.py @@ -537,7 +537,7 @@ def test_round_trip_same_named_column( t.c[name].in_(["some name", "some other_name"]) ) - row = connection.execute(stmt).first() + connection.execute(stmt).first() @testing.fixture def multirow_fixture(self, metadata, connection): @@ -621,7 +621,7 @@ def go(stmt, executemany, id_param_name, expect_success): f"current server capabilities does not support " f".*RETURNING when executemany is used", ): - result = connection.execute( + connection.execute( stmt, [ {id_param_name: 1, "data": "d1"}, diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index 47e012aba3b..d3d8b37dfa7 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -460,7 +460,7 @@ def test_get_table_options(self, name): is_true(isinstance(res, dict)) else: with expect_raises(NotImplementedError): - res = insp.get_table_options(name) + insp.get_table_options(name) @quote_fixtures @testing.requires.view_column_reflection @@ -2047,7 +2047,7 @@ def test_get_table_options(self, use_schema): is_true(isinstance(res, dict)) else: with expect_raises(NotImplementedError): - res = insp.get_table_options("users", schema=schema) + insp.get_table_options("users", schema=schema) @testing.combinations((True, testing.requires.schemas), False) def test_multi_get_table_options(self, use_schema): @@ -2063,7 +2063,7 @@ def test_multi_get_table_options(self, use_schema): eq_(res, exp) else: with expect_raises(NotImplementedError): - res = insp.get_multi_table_options() + insp.get_multi_table_options() @testing.fixture def get_multi_exp(self, connection): diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py index b9e8b11efec..d67d7698767 100644 --- a/lib/sqlalchemy/testing/suite/test_select.py +++ b/lib/sqlalchemy/testing/suite/test_select.py @@ -1780,7 +1780,7 @@ def define_tables(cls, metadata): ) def test_autoincrement_with_identity(self, connection): - res = connection.execute(self.tables.tbl.insert(), {"desc": "row"}) + connection.execute(self.tables.tbl.insert(), 
{"desc": "row"}) res = connection.execute(self.tables.tbl.select()).first() eq_(res, (1, "row")) diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py index 393c855abca..82f22a01957 100644 --- a/lib/sqlalchemy/util/topological.py +++ b/lib/sqlalchemy/util/topological.py @@ -112,7 +112,7 @@ def find_cycles( todo.remove(node) break else: - node = stack.pop() + stack.pop() return output From 244fb956aa53bbd64d01f9ec9aaec8bfa70f157f Mon Sep 17 00:00:00 2001 From: Christoph Heer Date: Thu, 24 Apr 2025 22:00:52 +0200 Subject: [PATCH 505/544] Update entry for sqlalchemy-hana (#12553) (cherry picked from commit 686b3423d2a20325ccae4d5cf998774885f52c9f) --- doc/build/dialects/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 9f18cbba22e..535b13552a4 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -124,7 +124,7 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | SAP ASE (fork of former Sybase dialect) | sqlalchemy-sybase_ | +------------------------------------------------+---------------------------------------+ -| SAP Hana [1]_ | sqlalchemy-hana_ | +| SAP HANA | sqlalchemy-hana_ | +------------------------------------------------+---------------------------------------+ | SAP Sybase SQL Anywhere | sqlalchemy-sqlany_ | +------------------------------------------------+---------------------------------------+ From b74b004ea124c365a5f2ec3828792e3433717f2c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 26 Apr 2025 11:32:30 -0400 Subject: [PATCH 506/544] fix reference cycles/ perf in DialectKWArgs Identified some unnecessary cycles and overhead in how this is implemented. since we want to add this to Select, needs these improvements. Change-Id: I4324db14aaf52ab87a8b7fa49ebf1b6624bc2dcb (cherry picked from commit ce3bbfcc4550e72a603640e533bc736715c5d76b) --- lib/sqlalchemy/sql/base.py | 13 ++++---- lib/sqlalchemy/util/langhelpers.py | 3 ++ test/aaa_profiling/test_memusage.py | 47 +++++++++++++++++++++++++++++ 3 files changed, 57 insertions(+), 6 deletions(-) diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 7ccef84e0d5..8b9883ff1c9 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -363,6 +363,8 @@ class _DialectArgView(MutableMapping[str, Any]): """ + __slots__ = ("obj",) + def __init__(self, obj): self.obj = obj @@ -521,7 +523,7 @@ def argument_for(cls, dialect_name, argument_name, default): construct_arg_dictionary[cls] = {} construct_arg_dictionary[cls][argument_name] = default - @util.memoized_property + @property def dialect_kwargs(self): """A collection of keyword arguments specified as dialect-specific options to this construct. 
@@ -549,14 +551,15 @@ def kwargs(self): _kw_registry = util.PopulateDict(_kw_reg_for_dialect) - def _kw_reg_for_dialect_cls(self, dialect_name): + @classmethod + def _kw_reg_for_dialect_cls(cls, dialect_name): construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name] d = _DialectArgDict() if construct_arg_dictionary is None: d._defaults.update({"*": None}) else: - for cls in reversed(self.__class__.__mro__): + for cls in reversed(cls.__mro__): if cls in construct_arg_dictionary: d._defaults.update(construct_arg_dictionary[cls]) return d @@ -580,9 +583,7 @@ def dialect_options(self): """ - return util.PopulateDict( - util.portable_instancemethod(self._kw_reg_for_dialect_cls) - ) + return util.PopulateDict(self._kw_reg_for_dialect_cls) def _validate_dialect_kwargs(self, kwargs: Dict[str, Any]) -> None: # validate remaining kwargs that they all specify DB prefixes diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index a5ec5190f14..ae640c5ec28 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -385,6 +385,9 @@ def load(): self.impls[name] = load + def deregister(self, name: str) -> None: + del self.impls[name] + def _inspect_func_args(fn): try: diff --git a/test/aaa_profiling/test_memusage.py b/test/aaa_profiling/test_memusage.py index fc6be0f0960..69952ac1890 100644 --- a/test/aaa_profiling/test_memusage.py +++ b/test/aaa_profiling/test_memusage.py @@ -7,6 +7,7 @@ import sqlalchemy as sa from sqlalchemy import and_ +from sqlalchemy import ClauseElement from sqlalchemy import ForeignKey from sqlalchemy import func from sqlalchemy import inspect @@ -20,8 +21,10 @@ from sqlalchemy import util from sqlalchemy.dialects import mysql from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects import registry from sqlalchemy.dialects import sqlite from sqlalchemy.engine import result +from sqlalchemy.engine.default import DefaultDialect from sqlalchemy.engine.processors import to_decimal_processor_factory from sqlalchemy.orm import aliased from sqlalchemy.orm import attributes @@ -39,6 +42,7 @@ from sqlalchemy.orm.session import _sessions from sqlalchemy.sql import column from sqlalchemy.sql import util as sql_util +from sqlalchemy.sql.base import DialectKWArgs from sqlalchemy.sql.util import visit_binary_product from sqlalchemy.sql.visitors import cloned_traverse from sqlalchemy.sql.visitors import replacement_traverse @@ -1192,6 +1196,22 @@ def go(): metadata.drop_all(self.engine) +class SomeFoo(DialectKWArgs, ClauseElement): + pass + + +class FooDialect(DefaultDialect): + construct_arguments = [ + ( + SomeFoo, + { + "bar": False, + "bat": False, + }, + ) + ] + + @testing.add_to_marker.memory_intensive class CycleTest(_fixtures.FixtureTest): __requires__ = ("cpython", "no_windows") @@ -1216,6 +1236,33 @@ def go(): go() + @testing.fixture + def foo_dialect(self): + registry.register("foo", __name__, "FooDialect") + + yield + registry.deregister("foo") + + def test_dialect_kwargs(self, foo_dialect): + + @assert_cycles() + def go(): + ff = SomeFoo() + + ff._validate_dialect_kwargs({"foo_bar": True}) + + eq_(ff.dialect_options["foo"]["bar"], True) + + eq_(ff.dialect_options["foo"]["bat"], False) + + eq_(ff.dialect_kwargs["foo_bar"], True) + eq_(ff.dialect_kwargs["foo_bat"], False) + + ff.dialect_kwargs["foo_bat"] = True + eq_(ff.dialect_options["foo"]["bat"], True) + + go() + def test_session_execute_orm(self): User, Address = self.classes("User", "Address") configure_mappers() From 
6f850bd4e58e401d75c1b622a0f088c4eb9deb60 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Thu, 24 Apr 2025 18:02:32 -0400 Subject: [PATCH 507/544] refactor (orm): remove unused variables and simplify key lookups Redundant variables and unnecessary conditions were removed across several modules. Improved readability and reduced code complexity without changing functionality. Closes: #12537 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12537 Pull-request-sha: ab53f8c3487e8cfb4d4a0235c27d8a5b8557d193 Change-Id: I910d65729fdbc96933f9822c553924d37e89e201 (cherry picked from commit 29895487915b8858deb2f8ac4a88d92917641c55) --- lib/sqlalchemy/orm/clsregistry.py | 4 ++-- lib/sqlalchemy/orm/context.py | 4 +--- lib/sqlalchemy/orm/decl_base.py | 2 -- lib/sqlalchemy/orm/dependency.py | 2 +- lib/sqlalchemy/orm/properties.py | 2 -- lib/sqlalchemy/orm/relationships.py | 5 ----- lib/sqlalchemy/orm/session.py | 9 +-------- lib/sqlalchemy/orm/strategies.py | 3 --- lib/sqlalchemy/orm/strategy_options.py | 1 - 9 files changed, 5 insertions(+), 27 deletions(-) diff --git a/lib/sqlalchemy/orm/clsregistry.py b/lib/sqlalchemy/orm/clsregistry.py index 70307ec7679..fd4828e8559 100644 --- a/lib/sqlalchemy/orm/clsregistry.py +++ b/lib/sqlalchemy/orm/clsregistry.py @@ -72,7 +72,7 @@ def add_class( # class already exists. existing = decl_class_registry[classname] if not isinstance(existing, _MultipleClassMarker): - existing = decl_class_registry[classname] = _MultipleClassMarker( + decl_class_registry[classname] = _MultipleClassMarker( [cls, cast("Type[Any]", existing)] ) else: @@ -317,7 +317,7 @@ def add_class(self, name: str, cls: Type[Any]) -> None: else: raise else: - existing = self.contents[name] = _MultipleClassMarker( + self.contents[name] = _MultipleClassMarker( [cls], on_remove=lambda: self._remove_item(name) ) diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index b04d6d48c28..d5ed61de53f 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -231,7 +231,7 @@ def _init_global_attributes( if compiler is None: # this is the legacy / testing only ORM _compile_state() use case. # there is no need to apply criteria options for this. - self.global_attributes = ga = {} + self.global_attributes = {} assert toplevel return else: @@ -1864,8 +1864,6 @@ def _join(self, args, entities_collection): "selectable/table as join target" ) - of_type = None - if isinstance(onclause, interfaces.PropComparator): # descriptor/property given (or determined); this tells us # explicitly what the expected "left" side of the join is. 
diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index f17717b53cc..1176b504186 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -1296,8 +1296,6 @@ def _collect_annotation( or isinstance(attr_value, _MappedAttribute) ) ) - else: - is_dataclass_field = False is_dataclass_field = False extracted = _extract_mapped_subtype( diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py index 5953062459e..b055240a353 100644 --- a/lib/sqlalchemy/orm/dependency.py +++ b/lib/sqlalchemy/orm/dependency.py @@ -1054,7 +1054,7 @@ def presort_saves(self, uowcommit, states): # so that prop_has_changes() returns True for state in states: if self._pks_changed(uowcommit, state): - history = uowcommit.get_attribute_history( + uowcommit.get_attribute_history( state, self.key, attributes.PASSIVE_OFF ) diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index 218285cab88..75ad5b1ca0e 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -847,8 +847,6 @@ def _init_column_for_annotation( ) if sqltype._isnull and not self.column.foreign_keys: - new_sqltype = None - checks: List[Any] if our_type_is_pep593: checks = [our_type, raw_pep_593_type] diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index 0d0bc708941..eae00338f10 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -1758,8 +1758,6 @@ def declarative_scan( extracted_mapped_annotation: Optional[_AnnotationScanType], is_dataclass_field: bool, ) -> None: - argument = extracted_mapped_annotation - if extracted_mapped_annotation is None: if self.argument is None: self._raise_for_required(key, cls) @@ -2912,9 +2910,6 @@ def _check_foreign_cols( ) -> None: """Check the foreign key columns collected and emit error messages.""" - - can_sync = False - foreign_cols = self._gather_columns_with_annotation( join_condition, "foreign" ) diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index f8ad6fa6a4b..ca7b2c2b59f 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -4014,14 +4014,7 @@ def _merge( else: key_is_persistent = True - if key in self.identity_map: - try: - merged = self.identity_map[key] - except KeyError: - # object was GC'ed right as we checked for it - merged = None - else: - merged = None + merged = self.identity_map.get(key) if merged is None: if key_is_persistent and key in _resolve_conflict_map: diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index f2d165145a1..d9eaa2b388e 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -1435,7 +1435,6 @@ def _load_for_path( alternate_effective_path = path._truncate_recursive() extra_options = (new_opt,) else: - new_opt = None alternate_effective_path = path extra_options = () @@ -2165,8 +2164,6 @@ def setup_query( path = path[self.parent_property] - with_polymorphic = None - user_defined_adapter = ( self._init_user_defined_eager_proc( loadopt, compile_state, compile_state.attributes diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index f4f292ee7ec..f2e6948a7ba 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -1103,7 +1103,6 @@ def _reconcile_query_entities_with_us(self, mapper_entities, raiseerr): """ path = self.path - ezero = None for ent in mapper_entities: ezero = 
ent.entity_zero if ezero and orm_util._entity_corresponds_to( From abc40e50cd9b45f8571343dedbfe83f6d209c9db Mon Sep 17 00:00:00 2001 From: Ross Patterson Date: Tue, 29 Apr 2025 13:14:09 -0700 Subject: [PATCH 508/544] Fix simple typo (#12555) (cherry picked from commit 35c7fa9e9e591b120b5d20cf4125f46a3f23a251) --- doc/build/core/custom_types.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/core/custom_types.rst b/doc/build/core/custom_types.rst index 5390824dda8..4b27f2f18a2 100644 --- a/doc/build/core/custom_types.rst +++ b/doc/build/core/custom_types.rst @@ -15,7 +15,7 @@ A frequent need is to force the "string" version of a type, that is the one rendered in a CREATE TABLE statement or other SQL function like CAST, to be changed. For example, an application may want to force the rendering of ``BINARY`` for all platforms -except for one, in which is wants ``BLOB`` to be rendered. Usage +except for one, in which it wants ``BLOB`` to be rendered. Usage of an existing generic type, in this case :class:`.LargeBinary`, is preferred for most use cases. But to control types more accurately, a compilation directive that is per-dialect From 6b222d772400500ca7efbb02350bb6d8608f6bf1 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 28 Apr 2025 23:44:50 +0200 Subject: [PATCH 509/544] add correct typing for row getitem The overloads were broken in 8a4c27589500bc57605bb8f28c215f5f0ae5066d Change-Id: I3736b15e95ead28537e25169a54521e991f763da (cherry picked from commit 4ac02007e030232f57226aafbb9313c8ff186a62) --- lib/sqlalchemy/engine/result.py | 8 ++ lib/sqlalchemy/testing/fixtures/mypy.py | 30 +++++- .../plain_files/engine/engine_result.py | 94 +++++++++++++++++++ 3 files changed, 127 insertions(+), 5 deletions(-) create mode 100644 test/typing/plain_files/engine/engine_result.py diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 5d597fd5f49..b84fb3d1cb5 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -722,6 +722,14 @@ def manyrows( return manyrows + @overload + def _only_one_row( + self: ResultInternal[Row[Any]], + raise_for_second_row: bool, + raise_for_none: bool, + scalar: Literal[True], + ) -> Any: ... + @overload def _only_one_row( self, diff --git a/lib/sqlalchemy/testing/fixtures/mypy.py b/lib/sqlalchemy/testing/fixtures/mypy.py index 0832d89246f..849df4dc30a 100644 --- a/lib/sqlalchemy/testing/fixtures/mypy.py +++ b/lib/sqlalchemy/testing/fixtures/mypy.py @@ -143,7 +143,9 @@ def _collect_messages(self, path): from sqlalchemy.ext.mypy.util import mypy_14 expected_messages = [] - expected_re = re.compile(r"\s*# EXPECTED(_MYPY)?(_RE)?(_TYPE)?: (.+)") + expected_re = re.compile( + r"\s*# EXPECTED(_MYPY)?(_RE)?(_ROW)?(_TYPE)?: (.+)" + ) py_ver_re = re.compile(r"^#\s*PYTHON_VERSION\s?>=\s?(\d+\.\d+)") with open(path) as file_: current_assert_messages = [] @@ -161,9 +163,24 @@ def _collect_messages(self, path): if m: is_mypy = bool(m.group(1)) is_re = bool(m.group(2)) - is_type = bool(m.group(3)) + is_row = bool(m.group(3)) + is_type = bool(m.group(4)) + + expected_msg = re.sub(r"# noqa[:]? ?.*", "", m.group(5)) + if is_row: + expected_msg = re.sub( + r"Row\[([^\]]+)\]", + lambda m: f"tuple[{m.group(1)}, fallback=s" + f"qlalchemy.engine.row.{m.group(0)}]", + expected_msg, + ) + # For some reason it does not use or syntax (|) + expected_msg = re.sub( + r"Optional\[(.*)\]", + lambda m: f"Union[{m.group(1)}, None]", + expected_msg, + ) - expected_msg = re.sub(r"# noqa[:]? 
?.*", "", m.group(4)) if is_type: if not is_re: # the goal here is that we can cut-and-paste @@ -243,7 +260,9 @@ def _collect_messages(self, path): return expected_messages - def _check_output(self, path, expected_messages, stdout, stderr, exitcode): + def _check_output( + self, path, expected_messages, stdout: str, stderr, exitcode + ): not_located = [] filename = os.path.basename(path) if expected_messages: @@ -263,7 +282,8 @@ def _check_output(self, path, expected_messages, stdout, stderr, exitcode): ): while raw_lines: ol = raw_lines.pop(0) - if not re.match(r".+\.py:\d+: note: +def \[.*", ol): + if not re.match(r".+\.py:\d+: note: +def .*", ol): + raw_lines.insert(0, ol) break elif re.match( r".+\.py:\d+: note: .*(?:perhaps|suggestion)", e, re.I diff --git a/test/typing/plain_files/engine/engine_result.py b/test/typing/plain_files/engine/engine_result.py new file mode 100644 index 00000000000..7ff20b7846d --- /dev/null +++ b/test/typing/plain_files/engine/engine_result.py @@ -0,0 +1,94 @@ +from typing import reveal_type +from typing import Tuple + +from sqlalchemy import column +from sqlalchemy.engine import Result +from sqlalchemy.engine import Row + + +def row_one(row: Row[Tuple[int, str, bool]]) -> None: + # EXPECTED_TYPE: Any + reveal_type(row[0]) + # EXPECTED_TYPE: Any + reveal_type(row[1]) + # EXPECTED_TYPE: Any + reveal_type(row[2]) + + # EXPECTED_MYPY: No overload variant of "__getitem__" of "Row" matches argument type "str" # noqa: E501 + row["a"] + + # EXPECTED_TYPE: RowMapping + reveal_type(row._mapping) + rm = row._mapping + # EXPECTED_TYPE: Any + reveal_type(rm["foo"]) + # EXPECTED_TYPE: Any + reveal_type(rm[column("bar")]) + + # EXPECTED_MYPY: Invalid index type "int" for "RowMapping"; expected type "str | SQLCoreOperations[Any]" # noqa: E501 + rm[3] + + +def result_one( + res: Result[Tuple[int, str]], r_single: Result[Tuple[float]] +) -> None: + # EXPECTED_TYPE: Row[Tuple[int, str]] + reveal_type(res.one()) + # EXPECTED_TYPE: Union[Row[Tuple[int, str]], None] + reveal_type(res.one_or_none()) + # EXPECTED_TYPE: Union[Row[Tuple[int, str]], None] + reveal_type(res.fetchone()) + # EXPECTED_TYPE: Union[Row[Tuple[int, str]], None] + reveal_type(res.first()) + # EXPECTED_TYPE: Sequence[Row[Tuple[int, str]]] + reveal_type(res.all()) + # EXPECTED_TYPE: Sequence[Row[Tuple[int, str]]] + reveal_type(res.fetchmany()) + # EXPECTED_TYPE: Sequence[Row[Tuple[int, str]]] + reveal_type(res.fetchall()) + # EXPECTED_TYPE: Row[Tuple[int, str]] + reveal_type(next(res)) + for rf in res: + # EXPECTED_TYPE: Row[Tuple[int, str]] + reveal_type(rf) + for rp in res.partitions(): + # EXPECTED_TYPE: Sequence[Row[Tuple[int, str]]] + reveal_type(rp) + + # EXPECTED_TYPE: ScalarResult[Any] + res_s = reveal_type(res.scalars()) + # EXPECTED_TYPE: ScalarResult[Any] + res_s = reveal_type(res.scalars(0)) + # EXPECTED_TYPE: Any + reveal_type(res_s.one()) + # EXPECTED_TYPE: ScalarResult[Any] + reveal_type(res.scalars(1)) + # EXPECTED_TYPE: MappingResult + reveal_type(res.mappings()) + # EXPECTED_TYPE: FrozenResult[Tuple[int, str]] + reveal_type(res.freeze()) + + # EXPECTED_TYPE: Any + reveal_type(res.scalar_one()) + # EXPECTED_TYPE: Union[Any, None] + reveal_type(res.scalar_one_or_none()) + # EXPECTED_TYPE: Any + reveal_type(res.scalar()) + + # EXPECTED_TYPE: ScalarResult[float] + res_s2 = reveal_type(r_single.scalars()) + # EXPECTED_TYPE: ScalarResult[float] + res_s2 = reveal_type(r_single.scalars(0)) + # EXPECTED_TYPE: float + reveal_type(res_s2.one()) + # EXPECTED_TYPE: ScalarResult[Any] + 
reveal_type(r_single.scalars(1)) + # EXPECTED_TYPE: MappingResult + reveal_type(r_single.mappings()) + + # EXPECTED_TYPE: float + reveal_type(r_single.scalar_one()) + # EXPECTED_TYPE: Union[float, None] + reveal_type(r_single.scalar_one_or_none()) + # EXPECTED_TYPE: Union[float, None] + reveal_type(r_single.scalar()) From 09b70d72f555ba7f9dc731e60bf104d451bbd257 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 1 May 2025 09:43:29 -0400 Subject: [PATCH 510/544] fix sqlite localtimestamp function Fixed and added test support for a few SQLite SQL functions hardcoded into the compiler most notably the "localtimestamp" function which rendered with incorrect internal quoting. Fixes: #12566 Change-Id: Id5bd8dc7841f0afab7df031ba5c0854dab845a1d (cherry picked from commit d689e465edf11308b0efba018aa84c3d79ccbaab) --- doc/build/changelog/unreleased_20/12566.rst | 7 +++++++ lib/sqlalchemy/dialects/sqlite/base.py | 2 +- test/dialect/test_sqlite.py | 12 +++++++++++- 3 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12566.rst diff --git a/doc/build/changelog/unreleased_20/12566.rst b/doc/build/changelog/unreleased_20/12566.rst new file mode 100644 index 00000000000..194936f9675 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12566.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, sqlite + :tickets: 12566 + + Fixed and added test support for a few SQLite SQL functions hardcoded into + the compiler most notably the "localtimestamp" function which rendered with + incorrect internal quoting. diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index bf632f1fa4f..719c0860b4f 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -1367,7 +1367,7 @@ def visit_now_func(self, fn, **kw): return "CURRENT_TIMESTAMP" def visit_localtimestamp_func(self, func, **kw): - return 'DATETIME(CURRENT_TIMESTAMP, "localtime")' + return "DATETIME(CURRENT_TIMESTAMP, 'localtime')" def visit_true(self, expr, **kw): return "1" diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index c2c63e9ef06..0d8c671402d 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -785,6 +785,16 @@ def test_column_computed(self, text, persisted): " y INTEGER GENERATED ALWAYS AS (x + 2)%s)" % text, ) + @testing.combinations( + (func.localtimestamp(),), + (func.now(),), + (func.char_length("test"),), + (func.aggregate_strings("abc", ","),), + argnames="fn", + ) + def test_builtin_functions_roundtrip(self, fn, connection): + connection.execute(select(fn)) + class AttachedDBTest(fixtures.TablesTest): __only_on__ = "sqlite" @@ -969,7 +979,7 @@ def test_is_distinct_from(self): def test_localtime(self): self.assert_compile( - func.localtimestamp(), 'DATETIME(CURRENT_TIMESTAMP, "localtime")' + func.localtimestamp(), "DATETIME(CURRENT_TIMESTAMP, 'localtime')" ) def test_constraints_with_schemas(self): From 4a492417bbba6b8fade155c968377a181b67bbf1 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 1 May 2025 09:49:33 -0400 Subject: [PATCH 511/544] add black dependency for format_docs_code this doesnt run if black is not installed, so use a python env for it Change-Id: I567d454917e7e8e4be2b7a21ffc511900f16457c (cherry picked from commit 667a5d397ff50b24d4d4cf7e600d51fe84188949) --- .pre-commit-config.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1d58505b79f..35e10ee29d2 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,6 +33,8 @@ repos: - id: black-docs name: Format docs code block with black entry: python tools/format_docs_code.py -f - language: system + language: python types: [rst] exclude: README.* + additional_dependencies: + - black==24.10.0 From 1ca4fe4159084c8ceda360f418c73118c4d15002 Mon Sep 17 00:00:00 2001 From: suraj Date: Mon, 5 May 2025 11:14:35 -0400 Subject: [PATCH 512/544] Added vector datatype support in Oracle dialect Added new datatype :class:`_oracle.VECTOR` and accompanying DDL and DQL support to fully support this type for Oracle Database. This change includes the base :class:`_oracle.VECTOR` type that adds new type-specific methods ``l2_distance``, ``cosine_distance``, ``inner_product`` as well as new parameters ``oracle_vector`` for the :class:`.Index` construct, allowing vector indexes to be configured, and ``oracle_fetch_approximate`` for the :meth:`.Select.fetch` clause. Pull request courtesy Suraj Shaw. Fixes: #12317 Closes: #12321 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12321 Pull-request-sha: a72a18a45c85ae7fa50a34e97ac642e16b463b54 Change-Id: I6f3af4623ce439d0820c14582cd129df293f0ba8 (cherry picked from commit 1b780ce3d3f7e33e5cc9e49eafa316a514cdc324) --- doc/build/changelog/unreleased_20/12317.rst | 16 ++ doc/build/dialects/oracle.rst | 18 ++ lib/sqlalchemy/dialects/oracle/__init__.py | 10 + lib/sqlalchemy/dialects/oracle/base.py | 265 ++++++++++++++++++- lib/sqlalchemy/dialects/oracle/vector.py | 266 ++++++++++++++++++++ lib/sqlalchemy/sql/selectable.py | 20 +- test/dialect/oracle/test_compiler.py | 11 + test/dialect/oracle/test_reflection.py | 60 +++++ test/dialect/oracle/test_types.py | 195 ++++++++++++++ test/sql/test_compare.py | 9 +- 10 files changed, 863 insertions(+), 7 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12317.rst create mode 100644 lib/sqlalchemy/dialects/oracle/vector.py diff --git a/doc/build/changelog/unreleased_20/12317.rst b/doc/build/changelog/unreleased_20/12317.rst new file mode 100644 index 00000000000..13f69693e60 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12317.rst @@ -0,0 +1,16 @@ +.. change:: + :tags: usecase, oracle + :tickets: 12317, 12341 + + Added new datatype :class:`_oracle.VECTOR` and accompanying DDL and DQL + support to fully support this type for Oracle Database. This change + includes the base :class:`_oracle.VECTOR` type that adds new type-specific + methods ``l2_distance``, ``cosine_distance``, ``inner_product`` as well as + new parameters ``oracle_vector`` for the :class:`.Index` construct, + allowing vector indexes to be configured, and ``oracle_fetch_approximate`` + for the :meth:`.Select.fetch` clause. Pull request courtesy Suraj Shaw. + + .. seealso:: + + :ref:`oracle_vector_datatype` + diff --git a/doc/build/dialects/oracle.rst b/doc/build/dialects/oracle.rst index b3d44858ced..882f9266047 100644 --- a/doc/build/dialects/oracle.rst +++ b/doc/build/dialects/oracle.rst @@ -31,6 +31,7 @@ originate from :mod:`sqlalchemy.types` or from the local dialect:: TIMESTAMP, VARCHAR, VARCHAR2, + VECTOR, ) .. versionadded:: 1.2.19 Added :class:`_types.NCHAR` to the list of datatypes @@ -80,6 +81,23 @@ construction arguments, are as follows: .. autoclass:: TIMESTAMP :members: __init__ +.. autoclass:: VECTOR + :members: __init__ + +.. autoclass:: VectorIndexType + :members: + +.. autoclass:: VectorIndexConfig + :members: + :undoc-members: + +.. autoclass:: VectorStorageFormat + :members: + +.. 
autoclass:: VectorDistanceType + :members: + + .. _oracledb: python-oracledb diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py index 7ceb743d616..2265de033c9 100644 --- a/lib/sqlalchemy/dialects/oracle/__init__.py +++ b/lib/sqlalchemy/dialects/oracle/__init__.py @@ -32,6 +32,11 @@ from .base import TIMESTAMP from .base import VARCHAR from .base import VARCHAR2 +from .base import VECTOR +from .base import VectorIndexConfig +from .base import VectorIndexType +from .vector import VectorDistanceType +from .vector import VectorStorageFormat # Alias oracledb also as oracledb_async oracledb_async = type( @@ -64,4 +69,9 @@ "NVARCHAR2", "ROWID", "REAL", + "VECTOR", + "VectorDistanceType", + "VectorIndexType", + "VectorIndexConfig", + "VectorStorageFormat", ) diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 02aa4d53663..1d882def8d6 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -722,11 +722,177 @@ number of prefix columns to compress, or ``True`` to use the default (all columns for non-unique indexes, all but the last column for unique indexes). +.. _oracle_vector_datatype: + +VECTOR Datatype +--------------- + +Oracle Database 23ai introduced a new VECTOR datatype for artificial intelligence +and machine learning search operations. The VECTOR datatype is a homogeneous array +of 8-bit signed integers, 8-bit unsigned integers (binary), 32-bit floating-point numbers, +or 64-bit floating-point numbers. + +.. seealso:: + + `Using VECTOR Data + `_ - in the documentation + for the :ref:`oracledb` driver. + +.. versionadded:: 2.0.41 + +CREATE TABLE support for VECTOR +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +With the :class:`.VECTOR` datatype, you can specify the dimension for the data +and the storage format. Valid values for storage format are enum values from +:class:`.VectorStorageFormat`. To create a table that includes a +:class:`.VECTOR` column:: + + from sqlalchemy.dialects.oracle import VECTOR, VectorStorageFormat + + t = Table( + "t1", + metadata, + Column("id", Integer, primary_key=True), + Column( + "embedding", + VECTOR(dim=3, storage_format=VectorStorageFormat.FLOAT32), + ), + Column(...), + ..., + ) + +Vectors can also be defined with an arbitrary number of dimensions and formats. +This allows you to specify vectors of different dimensions with the various +storage formats mentioned above. + +**Examples** + +* In this case, the storage format is flexible, allowing any vector type data to be inserted, + such as INT8 or BINARY etc:: + + vector_col: Mapped[array.array] = mapped_column(VECTOR(dim=3)) + +* The dimension is flexible in this case, meaning that any dimension vector can be used:: + + vector_col: Mapped[array.array] = mapped_column( + VECTOR(storage_format=VectorStorageType.INT8) + ) + +* Both the dimensions and the storage format are flexible:: + + vector_col: Mapped[array.array] = mapped_column(VECTOR) + +Python Datatypes for VECTOR +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +VECTOR data can be inserted using Python list or Python ``array.array()`` objects. 
+Python arrays of type FLOAT (32-bit), DOUBLE (64-bit), or INT (8-bit signed integer) +are used as bind values when inserting VECTOR columns:: + + from sqlalchemy import insert, select + + with engine.begin() as conn: + conn.execute( + insert(t1), + {"id": 1, "embedding": [1, 2, 3]}, + ) + +VECTOR Indexes +~~~~~~~~~~~~~~ + +The VECTOR feature supports an Oracle-specific parameter ``oracle_vector`` +on the :class:`.Index` construct, which allows the construction of VECTOR +indexes. + +To utilize VECTOR indexing, set the ``oracle_vector`` parameter to True to use +the default values provided by Oracle. HNSW is the default indexing method:: + + from sqlalchemy import Index + + Index( + "vector_index", + t1.c.embedding, + oracle_vector=True, + ) + +The full range of parameters for vector indexes are available by using the +:class:`.VectorIndexConfig` dataclass in place of a boolean; this dataclass +allows full configuration of the index:: + + Index( + "hnsw_vector_index", + t1.c.embedding, + oracle_vector=VectorIndexConfig( + index_type=VectorIndexType.HNSW, + distance=VectorDistanceType.COSINE, + accuracy=90, + hnsw_neighbors=5, + hnsw_efconstruction=20, + parallel=10, + ), + ) + + Index( + "ivf_vector_index", + t1.c.embedding, + oracle_vector=VectorIndexConfig( + index_type=VectorIndexType.IVF, + distance=VectorDistanceType.DOT, + accuracy=90, + ivf_neighbor_partitions=5, + ), + ) + +For complete explanation of these parameters, see the Oracle documentation linked +below. + +.. seealso:: + + `CREATE VECTOR INDEX `_ - in the Oracle documentation + + + +Similarity Searching +~~~~~~~~~~~~~~~~~~~~ + +When using the :class:`_oracle.VECTOR` datatype with a :class:`.Column` or similar +ORM mapped construct, additional comparison functions are available, including: + +* ``l2_distance`` +* ``cosine_distance`` +* ``inner_product`` + +Example Usage:: + + result_vector = connection.scalars( + select(t1).order_by(t1.embedding.l2_distance([2, 3, 4])).limit(3) + ) + + for user in vector: + print(user.id, user.embedding) + +FETCH APPROXIMATE support +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Approximate vector search can only be performed when all syntax and semantic +rules are satisfied, the corresponding vector index is available, and the +query optimizer determines to perform it. If any of these conditions are +unmet, then an approximate search is not performed. In this case the query +returns exact results. + +To enable approximate searching during similarity searches on VECTORS, the +``oracle_fetch_approximate`` parameter may be used with the :meth:`.Select.fetch` +clause to add ``FETCH APPROX`` to the SELECT statement:: + + select(users_table).fetch(5, oracle_fetch_approximate=True) + """ # noqa from __future__ import annotations from collections import defaultdict +from dataclasses import fields from functools import lru_cache from functools import wraps import re @@ -749,6 +915,9 @@ from .types import ROWID # noqa from .types import TIMESTAMP from .types import VARCHAR2 # noqa +from .vector import VECTOR +from .vector import VectorIndexConfig +from .vector import VectorIndexType from ... import Computed from ... import exc from ... 
import schema as sa_schema @@ -767,6 +936,7 @@ from ...sql import null from ...sql import or_ from ...sql import select +from ...sql import selectable as sa_selectable from ...sql import sqltypes from ...sql import util as sql_util from ...sql import visitors @@ -828,6 +998,7 @@ "BINARY_DOUBLE": BINARY_DOUBLE, "BINARY_FLOAT": BINARY_FLOAT, "ROWID": ROWID, + "VECTOR": VECTOR, } @@ -985,6 +1156,16 @@ def visit_RAW(self, type_, **kw): def visit_ROWID(self, type_, **kw): return "ROWID" + def visit_VECTOR(self, type_, **kw): + if type_.dim is None and type_.storage_format is None: + return "VECTOR(*,*)" + elif type_.storage_format is None: + return f"VECTOR({type_.dim},*)" + elif type_.dim is None: + return f"VECTOR(*,{type_.storage_format.value})" + else: + return f"VECTOR({type_.dim},{type_.storage_format.value})" + class OracleCompiler(compiler.SQLCompiler): """Oracle compiler modifies the lexical structure of Select @@ -1223,6 +1404,29 @@ def _get_limit_or_fetch(self, select): else: return select._fetch_clause + def fetch_clause( + self, + select, + fetch_clause=None, + require_offset=False, + use_literal_execute_for_simple_int=False, + **kw, + ): + text = super().fetch_clause( + select, + fetch_clause=fetch_clause, + require_offset=require_offset, + use_literal_execute_for_simple_int=( + use_literal_execute_for_simple_int + ), + **kw, + ) + + if select.dialect_options["oracle"]["fetch_approximate"]: + text = re.sub("FETCH FIRST", "FETCH APPROX FIRST", text) + + return text + def translate_select_structure(self, select_stmt, **kwargs): select = select_stmt @@ -1471,6 +1675,48 @@ def visit_bitwise_not_op_unary_operator(self, element, operator, **kw): class OracleDDLCompiler(compiler.DDLCompiler): + + def _build_vector_index_config( + self, vector_index_config: VectorIndexConfig + ) -> str: + parts = [] + sql_param_name = { + "hnsw_neighbors": "neighbors", + "hnsw_efconstruction": "efconstruction", + "ivf_neighbor_partitions": "neighbor partitions", + "ivf_sample_per_partition": "sample_per_partition", + "ivf_min_vectors_per_partition": "min_vectors_per_partition", + } + if vector_index_config.index_type == VectorIndexType.HNSW: + parts.append("ORGANIZATION INMEMORY NEIGHBOR GRAPH") + elif vector_index_config.index_type == VectorIndexType.IVF: + parts.append("ORGANIZATION NEIGHBOR PARTITIONS") + if vector_index_config.distance is not None: + parts.append(f"DISTANCE {vector_index_config.distance.value}") + + if vector_index_config.accuracy is not None: + parts.append( + f"WITH TARGET ACCURACY {vector_index_config.accuracy}" + ) + + parameters_str = [f"type {vector_index_config.index_type.name}"] + prefix = vector_index_config.index_type.name.lower() + "_" + + for field in fields(vector_index_config): + if field.name.startswith(prefix): + key = sql_param_name.get(field.name) + value = getattr(vector_index_config, field.name) + if value is not None: + parameters_str.append(f"{key} {value}") + + parameters_str = ", ".join(parameters_str) + parts.append(f"PARAMETERS ({parameters_str})") + + if vector_index_config.parallel is not None: + parts.append(f"PARALLEL {vector_index_config.parallel}") + + return " ".join(parts) + def define_constraint_cascades(self, constraint): text = "" if constraint.ondelete is not None: @@ -1503,6 +1749,9 @@ def visit_create_index(self, create, **kw): text += "UNIQUE " if index.dialect_options["oracle"]["bitmap"]: text += "BITMAP " + vector_options = index.dialect_options["oracle"]["vector"] + if vector_options: + text += "VECTOR " text += "INDEX %s ON %s (%s)" % ( 
self._prepared_index_name(index, include_schema=True), preparer.format_table(index.table, use_schema=True), @@ -1520,6 +1769,11 @@ def visit_create_index(self, create, **kw): text += " COMPRESS %d" % ( index.dialect_options["oracle"]["compress"] ) + if vector_options: + if vector_options is True: + vector_options = VectorIndexConfig() + + text += " " + self._build_vector_index_config(vector_options) return text def post_create_table(self, table): @@ -1670,7 +1924,16 @@ class OracleDialect(default.DefaultDialect): "tablespace": None, }, ), - (sa_schema.Index, {"bitmap": False, "compress": False}), + ( + sa_schema.Index, + { + "bitmap": False, + "compress": False, + "vector": False, + }, + ), + (sa_selectable.Select, {"fetch_approximate": False}), + (sa_selectable.CompoundSelect, {"fetch_approximate": False}), ] @util.deprecated_params( diff --git a/lib/sqlalchemy/dialects/oracle/vector.py b/lib/sqlalchemy/dialects/oracle/vector.py new file mode 100644 index 00000000000..dae89d3418d --- /dev/null +++ b/lib/sqlalchemy/dialects/oracle/vector.py @@ -0,0 +1,266 @@ +# dialects/oracle/vector.py +# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +from __future__ import annotations + +import array +from dataclasses import dataclass +from enum import Enum +from typing import Optional + +import sqlalchemy.types as types +from sqlalchemy.types import Float + + +class VectorIndexType(Enum): + """Enum representing different types of VECTOR index structures. + + See :ref:`oracle_vector_datatype` for background. + + .. versionadded:: 2.0.41 + + """ + + HNSW = "HNSW" + """ + The HNSW (Hierarchical Navigable Small World) index type. + """ + IVF = "IVF" + """ + The IVF (Inverted File Index) index type + """ + + +class VectorDistanceType(Enum): + """Enum representing different types of vector distance metrics. + + See :ref:`oracle_vector_datatype` for background. + + .. versionadded:: 2.0.41 + + """ + + EUCLIDEAN = "EUCLIDEAN" + """Euclidean distance (L2 norm). + + Measures the straight-line distance between two vectors in space. + """ + DOT = "DOT" + """Dot product similarity. + + Measures the algebraic similarity between two vectors. + """ + COSINE = "COSINE" + """Cosine similarity. + + Measures the cosine of the angle between two vectors. + """ + MANHATTAN = "MANHATTAN" + """Manhattan distance (L1 norm). + + Calculates the sum of absolute differences across dimensions. + """ + + +class VectorStorageFormat(Enum): + """Enum representing the data format used to store vector components. + + See :ref:`oracle_vector_datatype` for background. + + .. versionadded:: 2.0.41 + + """ + + INT8 = "INT8" + """ + 8-bit integer format. + """ + BINARY = "BINARY" + """ + Binary format. + """ + FLOAT32 = "FLOAT32" + """ + 32-bit floating-point format. + """ + FLOAT64 = "FLOAT64" + """ + 64-bit floating-point format. + """ + + +@dataclass +class VectorIndexConfig: + """Define the configuration for Oracle VECTOR Index. + + See :ref:`oracle_vector_datatype` for background. + + .. versionadded:: 2.0.41 + + :param index_type: Enum value from :class:`.VectorIndexType` + Specifies the indexing method. For HNSW, this must be + :attr:`.VectorIndexType.HNSW`. + + :param distance: Enum value from :class:`.VectorDistanceType` + specifies the metric for calculating distance between VECTORS. + + :param accuracy: interger. 
Should be in the range 0 to 100 + Specifies the accuracy of the nearest neighbor search during + query execution. + + :param parallel: integer. Specifies degree of parallelism. + + :param hnsw_neighbors: interger. Should be in the range 0 to + 2048. Specifies the number of nearest neighbors considered + during the search. The attribute :attr:`.VectorIndexConfig.hnsw_neighbors` + is HNSW index specific. + + :param hnsw_efconstruction: integer. Should be in the range 0 + to 65535. Controls the trade-off between indexing speed and + recall quality during index construction. The attribute + :attr:`.VectorIndexConfig.hnsw_efconstruction` is HNSW index + specific. + + :param ivf_neighbor_partitions: integer. Should be in the range + 0 to 10,000,000. Specifies the number of partitions used to + divide the dataset. The attribute + :attr:`.VectorIndexConfig.ivf_neighbor_partitions` is IVF index + specific. + + :param ivf_sample_per_partition: integer. Should be between 1 + and ``num_vectors / neighbor partitions``. Specifies the + number of samples used per partition. The attribute + :attr:`.VectorIndexConfig.ivf_sample_per_partition` is IVF index + specific. + + :param ivf_min_vectors_per_partition: integer. From 0 (no trimming) + to the total number of vectors (results in 1 partition). Specifies + the minimum number of vectors per partition. The attribute + :attr:`.VectorIndexConfig.ivf_min_vectors_per_partition` + is IVF index specific. + + """ + + index_type: VectorIndexType = VectorIndexType.HNSW + distance: Optional[VectorDistanceType] = None + accuracy: Optional[int] = None + hnsw_neighbors: Optional[int] = None + hnsw_efconstruction: Optional[int] = None + ivf_neighbor_partitions: Optional[int] = None + ivf_sample_per_partition: Optional[int] = None + ivf_min_vectors_per_partition: Optional[int] = None + parallel: Optional[int] = None + + def __post_init__(self): + self.index_type = VectorIndexType(self.index_type) + for field in [ + "hnsw_neighbors", + "hnsw_efconstruction", + "ivf_neighbor_partitions", + "ivf_sample_per_partition", + "ivf_min_vectors_per_partition", + "parallel", + "accuracy", + ]: + value = getattr(self, field) + if value is not None and not isinstance(value, int): + raise TypeError( + f"{field} must be an integer if" + f"provided, got {type(value).__name__}" + ) + + +class VECTOR(types.TypeEngine): + """Oracle VECTOR datatype. + + For complete background on using this type, see + :ref:`oracle_vector_datatype`. + + .. versionadded:: 2.0.41 + + """ + + cache_ok = True + __visit_name__ = "VECTOR" + + _typecode_map = { + VectorStorageFormat.INT8: "b", # Signed int + VectorStorageFormat.BINARY: "B", # Unsigned int + VectorStorageFormat.FLOAT32: "f", # Float + VectorStorageFormat.FLOAT64: "d", # Double + } + + def __init__(self, dim=None, storage_format=None): + """Construct a VECTOR. + + :param dim: integer. The dimension of the VECTOR datatype. This + should be an integer value. + + :param storage_format: VectorStorageFormat. The VECTOR storage + type format. This may be Enum values form + :class:`.VectorStorageFormat` INT8, BINARY, FLOAT32, or FLOAT64. 
+ + """ + if dim is not None and not isinstance(dim, int): + raise TypeError("dim must be an interger") + if storage_format is not None and not isinstance( + storage_format, VectorStorageFormat + ): + raise TypeError( + "storage_format must be an enum of type VectorStorageFormat" + ) + self.dim = dim + self.storage_format = storage_format + + def _cached_bind_processor(self, dialect): + """ + Convert a list to a array.array before binding it to the database. + """ + + def process(value): + if value is None or isinstance(value, array.array): + return value + + # Convert list to a array.array + elif isinstance(value, list): + typecode = self._array_typecode(self.storage_format) + value = array.array(typecode, value) + return value + + else: + raise TypeError("VECTOR accepts list or array.array()") + + return process + + def _cached_result_processor(self, dialect, coltype): + """ + Convert a array.array to list before binding it to the database. + """ + + def process(value): + if isinstance(value, array.array): + return list(value) + + return process + + def _array_typecode(self, typecode): + """ + Map storage format to array typecode. + """ + return self._typecode_map.get(typecode, "d") + + class comparator_factory(types.TypeEngine.Comparator): + def l2_distance(self, other): + return self.op("<->", return_type=Float)(other) + + def inner_product(self, other): + return self.op("<#>", return_type=Float)(other) + + def cosine_distance(self, other): + return self.op("<=>", return_type=Float)(other) diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index d137ab504ea..ef7605a64b9 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -72,6 +72,7 @@ from .base import ColumnSet from .base import CompileState from .base import DedupeColumnCollection +from .base import DialectKWArgs from .base import Executable from .base import Generative from .base import HasCompileState @@ -3927,7 +3928,7 @@ def add_cte(self, *ctes: CTE, nest_here: bool = False) -> Self: raise NotImplementedError -class GenerativeSelect(SelectBase, Generative): +class GenerativeSelect(DialectKWArgs, SelectBase, Generative): """Base class for SELECT statements where additional elements can be added. @@ -4208,8 +4209,9 @@ def fetch( count: _LimitOffsetType, with_ties: bool = False, percent: bool = False, + **dialect_kw: Any, ) -> Self: - """Return a new selectable with the given FETCH FIRST criterion + r"""Return a new selectable with the given FETCH FIRST criterion applied. This is a numeric value which usually renders as ``FETCH {FIRST | NEXT} @@ -4239,6 +4241,11 @@ def fetch( :param percent: When ``True``, ``count`` represents the percentage of the total number of selected rows to return. Defaults to ``False`` + :param \**dialect_kw: Additional dialect-specific keyword arguments + may be accepted by dialects. + + .. versionadded:: 2.0.41 + .. 
seealso:: :meth:`_sql.GenerativeSelect.limit` @@ -4246,7 +4253,7 @@ def fetch( :meth:`_sql.GenerativeSelect.offset` """ - + self._validate_dialect_kwargs(dialect_kw) self._limit_clause = None if count is None: self._fetch_clause = self._fetch_clause_options = None @@ -4488,6 +4495,7 @@ class CompoundSelect(HasCompileState, GenerativeSelect, TypedReturnsRows[_TP]): ] + SupportsCloneAnnotations._clone_annotations_traverse_internals + HasCTE._has_ctes_traverse_internals + + DialectKWArgs._dialect_kwargs_traverse_internals ) selects: List[SelectBase] @@ -5309,6 +5317,7 @@ class Select( + HasHints._has_hints_traverse_internals + SupportsCloneAnnotations._clone_annotations_traverse_internals + Executable._executable_traverse_internals + + DialectKWArgs._dialect_kwargs_traverse_internals ) _cache_key_traversal: _CacheKeyTraversalType = _traverse_internals + [ @@ -5330,7 +5339,9 @@ def _create_raw_select(cls, **kw: Any) -> Select[Any]: stmt.__dict__.update(kw) return stmt - def __init__(self, *entities: _ColumnsClauseArgument[Any]): + def __init__( + self, *entities: _ColumnsClauseArgument[Any], **dialect_kw: Any + ): r"""Construct a new :class:`_expression.Select`. The public constructor for :class:`_expression.Select` is the @@ -5343,7 +5354,6 @@ def __init__(self, *entities: _ColumnsClauseArgument[Any]): ) for ent in entities ] - GenerativeSelect.__init__(self) def _scalar_type(self) -> TypeEngine[Any]: diff --git a/test/dialect/oracle/test_compiler.py b/test/dialect/oracle/test_compiler.py index 467edbe1832..7effcf3aa58 100644 --- a/test/dialect/oracle/test_compiler.py +++ b/test/dialect/oracle/test_compiler.py @@ -310,6 +310,17 @@ def test_simple_fetch_offset(self): checkparams={"param_1": 20, "param_2": 10}, ) + @testing.only_on("oracle>=23.4") + def test_fetch_type(self): + t = table("sometable", column("col1"), column("col2")) + s = select(t).fetch(2, oracle_fetch_approximate=True) + self.assert_compile( + s, + "SELECT sometable.col1, sometable.col2 FROM sometable " + "FETCH APPROX FIRST __[POSTCOMPILE_param_1] ROWS ONLY", + checkparams={"param_1": 2}, + ) + def test_limit_two(self): t = table("sometable", column("col1"), column("col2")) s = select(t).limit(10).offset(20).subquery() diff --git a/test/dialect/oracle/test_reflection.py b/test/dialect/oracle/test_reflection.py index a17b53895f1..35735889488 100644 --- a/test/dialect/oracle/test_reflection.py +++ b/test/dialect/oracle/test_reflection.py @@ -21,6 +21,11 @@ from sqlalchemy import Unicode from sqlalchemy import UniqueConstraint from sqlalchemy.dialects import oracle +from sqlalchemy.dialects.oracle import VECTOR +from sqlalchemy.dialects.oracle import VectorDistanceType +from sqlalchemy.dialects.oracle import VectorIndexConfig +from sqlalchemy.dialects.oracle import VectorIndexType +from sqlalchemy.dialects.oracle import VectorStorageFormat from sqlalchemy.dialects.oracle.base import BINARY_DOUBLE from sqlalchemy.dialects.oracle.base import BINARY_FLOAT from sqlalchemy.dialects.oracle.base import DOUBLE_PRECISION @@ -698,6 +703,25 @@ def test_tablespace(self, connection, metadata): tbl = Table("test_tablespace", m2, autoload_with=connection) assert tbl.dialect_options["oracle"]["tablespace"] == "TEMP" + @testing.only_on("oracle>=23.4") + def test_reflection_w_vector_column(self, connection, metadata): + tb1 = Table( + "test_vector", + metadata, + Column("id", Integer, primary_key=True), + Column("name", String(30)), + Column( + "embedding", + VECTOR(dim=3, storage_format=VectorStorageFormat.FLOAT32), + ), + ) + 
metadata.create_all(connection) + + m2 = MetaData() + + tb1 = Table("test_vector", m2, autoload_with=connection) + assert tb1.columns.keys() == ["id", "name", "embedding"] + class ViewReflectionTest(fixtures.TestBase): __only_on__ = "oracle" @@ -1180,6 +1204,42 @@ def obj_definition(obj): eq_(len(reflectedtable.constraints), 1) eq_(len(reflectedtable.indexes), 5) + @testing.only_on("oracle>=23.4") + def test_vector_index(self, metadata, connection): + tb1 = Table( + "test_vector", + metadata, + Column("id", Integer, primary_key=True), + Column("name", String(30)), + Column( + "embedding", + VECTOR(dim=3, storage_format=VectorStorageFormat.FLOAT32), + ), + ) + tb1.create(connection) + + ivf_index = Index( + "ivf_vector_index", + tb1.c.embedding, + oracle_vector=VectorIndexConfig( + index_type=VectorIndexType.IVF, + distance=VectorDistanceType.DOT, + accuracy=90, + ivf_neighbor_partitions=5, + ), + ) + ivf_index.create(connection) + + expected = [ + { + "name": "ivf_vector_index", + "column_names": ["embedding"], + "dialect_options": {}, + "unique": False, + }, + ] + eq_(inspect(connection).get_indexes("test_vector"), expected) + class DBLinkReflectionTest(fixtures.TestBase): __requires__ = ("oracle_test_dblink",) diff --git a/test/dialect/oracle/test_types.py b/test/dialect/oracle/test_types.py index 7b03de88c53..331103e8f25 100644 --- a/test/dialect/oracle/test_types.py +++ b/test/dialect/oracle/test_types.py @@ -1,3 +1,4 @@ +import array import datetime import decimal import os @@ -15,6 +16,7 @@ from sqlalchemy import exc from sqlalchemy import FLOAT from sqlalchemy import Float +from sqlalchemy import Index from sqlalchemy import Integer from sqlalchemy import LargeBinary from sqlalchemy import literal @@ -37,6 +39,11 @@ from sqlalchemy.dialects.oracle import base as oracle from sqlalchemy.dialects.oracle import cx_oracle from sqlalchemy.dialects.oracle import oracledb +from sqlalchemy.dialects.oracle import VECTOR +from sqlalchemy.dialects.oracle import VectorDistanceType +from sqlalchemy.dialects.oracle import VectorIndexConfig +from sqlalchemy.dialects.oracle import VectorIndexType +from sqlalchemy.dialects.oracle import VectorStorageFormat from sqlalchemy.sql import column from sqlalchemy.sql.sqltypes import NullType from sqlalchemy.testing import assert_raises_message @@ -952,6 +959,194 @@ def test_longstring(self, metadata, connection): finally: exec_sql(connection, "DROP TABLE Z_TEST") + @testing.only_on("oracle>=23.4") + def test_vector_dim(self, metadata, connection): + t1 = Table( + "t1", + metadata, + Column( + "c1", VECTOR(dim=3, storage_format=VectorStorageFormat.FLOAT32) + ), + ) + + t1.create(connection) + eq_(t1.c.c1.type.dim, 3) + + @testing.only_on("oracle>=23.4") + def test_vector_insert(self, metadata, connection): + t1 = Table( + "t1", + metadata, + Column("id", Integer, primary_key=True), + Column("c1", VECTOR(storage_format=VectorStorageFormat.INT8)), + ) + + t1.create(connection) + connection.execute( + t1.insert(), + dict(id=1, c1=[6, 7, 8, 5]), + ) + eq_( + connection.execute(t1.select()).first(), + (1, [6, 7, 8, 5]), + ) + connection.execute(t1.delete().where(t1.c.id == 1)) + connection.execute(t1.insert(), dict(id=1, c1=[6, 7])) + eq_( + connection.execute(t1.select()).first(), + (1, [6, 7]), + ) + + @testing.only_on("oracle>=23.4") + def test_vector_insert_array(self, metadata, connection): + t1 = Table( + "t1", + metadata, + Column("id", Integer, primary_key=True), + Column("c1", VECTOR), + ) + + t1.create(connection) + connection.execute( + t1.insert(), + 
dict(id=1, c1=array.array("b", [6, 7, 8, 5])), + ) + eq_( + connection.execute(t1.select()).first(), + (1, [6, 7, 8, 5]), + ) + + connection.execute(t1.delete().where(t1.c.id == 1)) + + connection.execute( + t1.insert(), dict(id=1, c1=array.array("b", [6, 7])) + ) + eq_( + connection.execute(t1.select()).first(), + (1, [6, 7]), + ) + + @testing.only_on("oracle>=23.4") + def test_vector_multiformat_insert(self, metadata, connection): + t1 = Table( + "t1", + metadata, + Column("id", Integer, primary_key=True), + Column("c1", VECTOR), + ) + + t1.create(connection) + connection.execute( + t1.insert(), + dict(id=1, c1=[6.12, 7.54, 8.33]), + ) + eq_( + connection.execute(t1.select()).first(), + (1, [6.12, 7.54, 8.33]), + ) + connection.execute(t1.delete().where(t1.c.id == 1)) + connection.execute(t1.insert(), dict(id=1, c1=[6, 7])) + eq_( + connection.execute(t1.select()).first(), + (1, [6, 7]), + ) + + @testing.only_on("oracle>=23.4") + def test_vector_format(self, metadata, connection): + t1 = Table( + "t1", + metadata, + Column( + "c1", VECTOR(dim=3, storage_format=VectorStorageFormat.FLOAT32) + ), + ) + + t1.create(connection) + eq_(t1.c.c1.type.storage_format, VectorStorageFormat.FLOAT32) + + @testing.only_on("oracle>=23.4") + def test_vector_hnsw_index(self, metadata, connection): + t1 = Table( + "t1", + metadata, + Column("id", Integer), + Column( + "embedding", + VECTOR(dim=3, storage_format=VectorStorageFormat.FLOAT32), + ), + ) + + t1.create(connection) + + hnsw_index = Index( + "hnsw_vector_index", t1.c.embedding, oracle_vector=True + ) + hnsw_index.create(connection) + + connection.execute(t1.insert(), dict(id=1, embedding=[6, 7, 8])) + eq_( + connection.execute(t1.select()).first(), + (1, [6.0, 7.0, 8.0]), + ) + + @testing.only_on("oracle>=23.4") + def test_vector_ivf_index(self, metadata, connection): + t1 = Table( + "t1", + metadata, + Column("id", Integer), + Column( + "embedding", + VECTOR(dim=3, storage_format=VectorStorageFormat.FLOAT32), + ), + ) + + t1.create(connection) + ivf_index = Index( + "ivf_vector_index", + t1.c.embedding, + oracle_vector=VectorIndexConfig( + index_type=VectorIndexType.IVF, + distance=VectorDistanceType.DOT, + accuracy=90, + ivf_neighbor_partitions=5, + ), + ) + ivf_index.create(connection) + + connection.execute(t1.insert(), dict(id=1, embedding=[6, 7, 8])) + eq_( + connection.execute(t1.select()).first(), + (1, [6.0, 7.0, 8.0]), + ) + + @testing.only_on("oracle>=23.4") + def test_vector_l2_distance(self, metadata, connection): + t1 = Table( + "t1", + metadata, + Column("id", Integer), + Column( + "embedding", + VECTOR(dim=3, storage_format=VectorStorageFormat.INT8), + ), + ) + + t1.create(connection) + + connection.execute(t1.insert(), dict(id=1, embedding=[8, 9, 10])) + connection.execute(t1.insert(), dict(id=2, embedding=[1, 2, 3])) + connection.execute( + t1.insert(), + dict(id=3, embedding=[15, 16, 17]), + ) + + query_vector = [2, 3, 4] + res = connection.execute( + t1.select().order_by((t1.c.embedding.l2_distance(query_vector))) + ).first() + eq_(res.embedding, [1, 2, 3]) + class LOBFetchTest(fixtures.TablesTest): __only_on__ = "oracle" diff --git a/test/sql/test_compare.py b/test/sql/test_compare.py index 04577704421..77743b9c924 100644 --- a/test/sql/test_compare.py +++ b/test/sql/test_compare.py @@ -44,6 +44,7 @@ from sqlalchemy.sql import type_coerce from sqlalchemy.sql import visitors from sqlalchemy.sql.annotation import Annotated +from sqlalchemy.sql.base import DialectKWArgs from sqlalchemy.sql.base import HasCacheKey from 
sqlalchemy.sql.base import SingletonConstant from sqlalchemy.sql.elements import _label_reference @@ -537,6 +538,7 @@ class CoreFixtures: select(table_a.c.a).fetch(2, percent=True), select(table_a.c.a).fetch(2, with_ties=True), select(table_a.c.a).fetch(2, with_ties=True, percent=True), + select(table_a.c.a).fetch(2, oracle_fetch_approximate=True), select(table_a.c.a).fetch(2).offset(3), select(table_a.c.a).fetch(2).offset(5), select(table_a.c.a).limit(2).offset(5), @@ -1635,7 +1637,12 @@ def test_traverse_internals(self, cls: type): @testing.combinations( *all_hascachekey_subclasses( - ignore_subclasses=[Annotated, NoInit, SingletonConstant] + ignore_subclasses=[ + Annotated, + NoInit, + SingletonConstant, + DialectKWArgs, + ] ) ) def test_init_args_in_traversal(self, cls: type): From bb46755ba67b08fa5176c6441dbfb825fb52ee86 Mon Sep 17 00:00:00 2001 From: Shamil Date: Mon, 5 May 2025 21:05:21 +0300 Subject: [PATCH 513/544] Remove unused typing imports (#12568) * Remove unused typing imports * remove unused per file ignores * Revert "remove unused per file ignores" --------- Co-authored-by: Pablo Estevez (cherry picked from commit 37c5b2e3e2cea552b5000df9281285b9f74c8166) --- lib/sqlalchemy/util/__init__.py | 1 - lib/sqlalchemy/util/typing.py | 1 - 2 files changed, 2 deletions(-) diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py index a9b4c3b1c0f..8353c9a7a35 100644 --- a/lib/sqlalchemy/util/__init__.py +++ b/lib/sqlalchemy/util/__init__.py @@ -9,7 +9,6 @@ from collections import defaultdict as defaultdict from functools import partial as partial from functools import update_wrapper as update_wrapper -from typing import TYPE_CHECKING from . import preloaded as preloaded from ._collections import coerce_generator_arg as coerce_generator_arg diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index e68b42b7ed0..e44c623d3dc 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -16,7 +16,6 @@ import typing from typing import Any from typing import Callable -from typing import cast from typing import Dict from typing import ForwardRef from typing import Generic From e80ecc9e08783ad6de0bc0fd5b8ae0f850631cb6 Mon Sep 17 00:00:00 2001 From: krave1986 Date: Tue, 6 May 2025 03:38:19 +0800 Subject: [PATCH 514/544] Fix issues in versioning.rst (#12567) (cherry picked from commit e1f2f204c1b2967486d160b19a8ddf21c0b698bf) --- doc/build/orm/versioning.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/build/orm/versioning.rst b/doc/build/orm/versioning.rst index 7f209e24b26..9c08acef682 100644 --- a/doc/build/orm/versioning.rst +++ b/doc/build/orm/versioning.rst @@ -233,14 +233,14 @@ at our choosing:: __mapper_args__ = {"version_id_col": version_uuid, "version_id_generator": False} - u1 = User(name="u1", version_uuid=uuid.uuid4()) + u1 = User(name="u1", version_uuid=uuid.uuid4().hex) session.add(u1) session.commit() u1.name = "u2" - u1.version_uuid = uuid.uuid4() + u1.version_uuid = uuid.uuid4().hex session.commit() From b6fb61ba28a85ebfaae729b107a5874145fdbf9a Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Mon, 5 May 2025 23:03:18 +0200 Subject: [PATCH 515/544] fix failing typing test fix failing test added in 4ac02007e030232f57226aafbb9313c8ff186a62 Change-Id: If0c62fac8744caa98bd04f808ef381ffb04afd7f (cherry picked from commit 46996843876a7635705686f67057fba9c795d787) --- test/typing/plain_files/engine/engine_result.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git 
a/test/typing/plain_files/engine/engine_result.py b/test/typing/plain_files/engine/engine_result.py index 7ff20b7846d..eedcc309474 100644 --- a/test/typing/plain_files/engine/engine_result.py +++ b/test/typing/plain_files/engine/engine_result.py @@ -1,4 +1,3 @@ -from typing import reveal_type from typing import Tuple from sqlalchemy import column @@ -25,7 +24,7 @@ def row_one(row: Row[Tuple[int, str, bool]]) -> None: # EXPECTED_TYPE: Any reveal_type(rm[column("bar")]) - # EXPECTED_MYPY: Invalid index type "int" for "RowMapping"; expected type "str | SQLCoreOperations[Any]" # noqa: E501 + # EXPECTED_MYPY_RE: Invalid index type "int" for "RowMapping"; expected type "(str \| SQLCoreOperations\[Any\]|Union\[str, SQLCoreOperations\[Any\]\])" # noqa: E501 rm[3] From ab247a08162f838776631c0662b29d4343206990 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 6 May 2025 18:06:15 -0400 Subject: [PATCH 516/544] reorganize ORM Annotated Declarative into its own section The ORM Annotated Declarative section is now very large but has been indented under the "Declarative Table with mapped_column()" section where it does not show up well on top level TOCs and is too deeply nested. Break it out into its own section following the entire "Declarative Table" section, but also maintain a short intro section inside of "Declarative Table" to ensure this use is still prominent. Change-Id: I42f4aff6ed54da249c94ddf50727f9fe3c3bd625 (cherry picked from commit bcc4af9e061074bfdf795403027c851df8bec777) --- doc/build/orm/declarative_tables.rst | 1903 +++++++++++++------------- 1 file changed, 978 insertions(+), 925 deletions(-) diff --git a/doc/build/orm/declarative_tables.rst b/doc/build/orm/declarative_tables.rst index a4b5cbfe66c..5bffe97b0a1 100644 --- a/doc/build/orm/declarative_tables.rst +++ b/doc/build/orm/declarative_tables.rst @@ -108,7 +108,7 @@ further at :ref:`orm_declarative_metadata`. The :func:`_orm.mapped_column` construct accepts all arguments that are accepted by the :class:`_schema.Column` construct, as well as additional -ORM-specific arguments. The :paramref:`_orm.mapped_column.__name` field, +ORM-specific arguments. The :paramref:`_orm.mapped_column.__name` positional parameter, indicating the name of the database column, is typically omitted, as the Declarative process will make use of the attribute name given to the construct and assign this as the name of the column (in the above example, this refers to @@ -133,22 +133,19 @@ itself (more on this at :ref:`mapper_column_distinct_names`). :ref:`mapping_columns_toplevel` - contains additional notes on affecting how :class:`_orm.Mapper` interprets incoming :class:`.Column` objects. -.. _orm_declarative_mapped_column: - -Using Annotated Declarative Table (Type Annotated Forms for ``mapped_column()``) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The :func:`_orm.mapped_column` construct is capable of deriving its column-configuration -information from :pep:`484` type annotations associated with the attribute -as declared in the Declarative mapped class. These type annotations, -if used, **must** -be present within a special SQLAlchemy type called :class:`_orm.Mapped`, which -is a generic_ type that then indicates a specific Python type within it. 
+ORM Annotated Declarative - Automated Mapping with Type Annotations +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Below illustrates the mapping from the previous section, adding the use of -:class:`_orm.Mapped`:: +The :func:`_orm.mapped_column` construct in modern Python is normally augmented +by the use of :pep:`484` Python type annotations, where it is capable of +deriving its column-configuration information from type annotations associated +with the attribute as declared in the Declarative mapped class. These type +annotations, if used, must be present within a special SQLAlchemy type called +:class:`.Mapped`, which is a generic type that indicates a specific Python type +within it. - from typing import Optional +Using this technique, the example in the previous section can be written +more succinctly as below:: from sqlalchemy import String from sqlalchemy.orm import DeclarativeBase @@ -165,886 +162,955 @@ Below illustrates the mapping from the previous section, adding the use of id: Mapped[int] = mapped_column(primary_key=True) name: Mapped[str] = mapped_column(String(50)) - fullname: Mapped[Optional[str]] - nickname: Mapped[Optional[str]] = mapped_column(String(30)) - -Above, when Declarative processes each class attribute, each -:func:`_orm.mapped_column` will derive additional arguments from the -corresponding :class:`_orm.Mapped` type annotation on the left side, if -present. Additionally, Declarative will generate an empty -:func:`_orm.mapped_column` directive implicitly, whenever a -:class:`_orm.Mapped` type annotation is encountered that does not have -a value assigned to the attribute (this form is inspired by the similar -style used in Python dataclasses_); this :func:`_orm.mapped_column` construct -proceeds to derive its configuration from the :class:`_orm.Mapped` -annotation present. + fullname: Mapped[str | None] + nickname: Mapped[str | None] = mapped_column(String(30)) -.. _orm_declarative_mapped_column_nullability: +The example above demonstrates that if a class attribute is type-hinted with +:class:`.Mapped` but doesn't have an explicit :func:`_orm.mapped_column` assigned +to it, SQLAlchemy will automatically create one. Furthermore, details like the +column's datatype and whether it can be null (nullability) are inferred from +the :class:`.Mapped` annotation. However, you can always explicitly provide these +arguments to :func:`_orm.mapped_column` to override these automatically-derived +settings. -``mapped_column()`` derives the datatype and nullability from the ``Mapped`` annotation -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +For complete details on using the ORM Annotated Declarative system, see +:ref:`orm_declarative_mapped_column` later in this chapter. -The two qualities that :func:`_orm.mapped_column` derives from the -:class:`_orm.Mapped` annotation are: +.. seealso:: -* **datatype** - the Python type given inside :class:`_orm.Mapped`, as contained - within the ``typing.Optional`` construct if present, is associated with a - :class:`_sqltypes.TypeEngine` subclass such as :class:`.Integer`, :class:`.String`, - :class:`.DateTime`, or :class:`.Uuid`, to name a few common types. + :ref:`orm_declarative_mapped_column` - complete reference for ORM Annotated Declarative - The datatype is determined based on a dictionary of Python type to - SQLAlchemy datatype. This dictionary is completely customizable, - as detailed in the next section :ref:`orm_declarative_mapped_column_type_map`. 
- The default type map is implemented as in the code example below:: +Dataclass features in ``mapped_column()`` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - from typing import Any - from typing import Dict - from typing import Type +The :func:`_orm.mapped_column` construct integrates with SQLAlchemy's +"native dataclasses" feature, discussed at +:ref:`orm_declarative_native_dataclasses`. See that section for current +background on additional directives supported by :func:`_orm.mapped_column`. - import datetime - import decimal - import uuid - from sqlalchemy import types - # default type mapping, deriving the type for mapped_column() - # from a Mapped[] annotation - type_map: Dict[Type[Any], TypeEngine[Any]] = { - bool: types.Boolean(), - bytes: types.LargeBinary(), - datetime.date: types.Date(), - datetime.datetime: types.DateTime(), - datetime.time: types.Time(), - datetime.timedelta: types.Interval(), - decimal.Decimal: types.Numeric(), - float: types.Float(), - int: types.Integer(), - str: types.String(), - uuid.UUID: types.Uuid(), - } - If the :func:`_orm.mapped_column` construct indicates an explicit type - as passed to the :paramref:`_orm.mapped_column.__type` argument, then - the given Python type is disregarded. +.. _orm_declarative_metadata: -* **nullability** - The :func:`_orm.mapped_column` construct will indicate - its :class:`_schema.Column` as ``NULL`` or ``NOT NULL`` first and foremost by - the presence of the :paramref:`_orm.mapped_column.nullable` parameter, passed - either as ``True`` or ``False``. Additionally , if the - :paramref:`_orm.mapped_column.primary_key` parameter is present and set to - ``True``, that will also imply that the column should be ``NOT NULL``. +Accessing Table and Metadata +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - In the absence of **both** of these parameters, the presence of - ``typing.Optional[]`` within the :class:`_orm.Mapped` type annotation will be - used to determine nullability, where ``typing.Optional[]`` means ``NULL``, - and the absence of ``typing.Optional[]`` means ``NOT NULL``. If there is no - ``Mapped[]`` annotation present at all, and there is no - :paramref:`_orm.mapped_column.nullable` or - :paramref:`_orm.mapped_column.primary_key` parameter, then SQLAlchemy's usual - default for :class:`_schema.Column` of ``NULL`` is used. +A declaratively mapped class will always include an attribute called +``__table__``; when the above configuration using ``__tablename__`` is +complete, the declarative process makes the :class:`_schema.Table` +available via the ``__table__`` attribute:: - In the example below, the ``id`` and ``data`` columns will be ``NOT NULL``, - and the ``additional_info`` column will be ``NULL``:: - from typing import Optional + # access the Table + user_table = User.__table__ - from sqlalchemy.orm import DeclarativeBase - from sqlalchemy.orm import Mapped - from sqlalchemy.orm import mapped_column +The above table is ultimately the same one that corresponds to the +:attr:`_orm.Mapper.local_table` attribute, which we can see through the +:ref:`runtime inspection system `:: + from sqlalchemy import inspect - class Base(DeclarativeBase): - pass + user_table = inspect(User).local_table +The :class:`_schema.MetaData` collection associated with both the declarative +:class:`_orm.registry` as well as the base class is frequently necessary in +order to run DDL operations such as CREATE, as well as in use with migration +tools such as Alembic. 
This object is available via the ``.metadata`` +attribute of :class:`_orm.registry` as well as the declarative base class. +Below, for a small script we may wish to emit a CREATE for all tables against a +SQLite database:: - class SomeClass(Base): - __tablename__ = "some_table" + engine = create_engine("sqlite://") - # primary_key=True, therefore will be NOT NULL - id: Mapped[int] = mapped_column(primary_key=True) + Base.metadata.create_all(engine) - # not Optional[], therefore will be NOT NULL - data: Mapped[str] +.. _orm_declarative_table_configuration: - # Optional[], therefore will be NULL - additional_info: Mapped[Optional[str]] +Declarative Table Configuration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - It is also perfectly valid to have a :func:`_orm.mapped_column` whose - nullability is **different** from what would be implied by the annotation. - For example, an ORM mapped attribute may be annotated as allowing ``None`` - within Python code that works with the object as it is first being created - and populated, however the value will ultimately be written to a database - column that is ``NOT NULL``. The :paramref:`_orm.mapped_column.nullable` - parameter, when present, will always take precedence:: +When using Declarative Table configuration with the ``__tablename__`` +declarative class attribute, additional arguments to be supplied to the +:class:`_schema.Table` constructor should be provided using the +``__table_args__`` declarative class attribute. - class SomeClass(Base): - # ... +This attribute accommodates both positional as well as keyword +arguments that are normally sent to the +:class:`_schema.Table` constructor. +The attribute can be specified in one of two forms. One is as a +dictionary:: - # will be String() NOT NULL, but can be None in Python - data: Mapped[Optional[str]] = mapped_column(nullable=False) + class MyClass(Base): + __tablename__ = "sometable" + __table_args__ = {"mysql_engine": "InnoDB"} - Similarly, a non-None attribute that's written to a database column that - for whatever reason needs to be NULL at the schema level, - :paramref:`_orm.mapped_column.nullable` may be set to ``True``:: +The other, a tuple, where each argument is positional +(usually constraints):: - class SomeClass(Base): - # ... + class MyClass(Base): + __tablename__ = "sometable" + __table_args__ = ( + ForeignKeyConstraint(["id"], ["remote_table.id"]), + UniqueConstraint("foo"), + ) - # will be String() NULL, but type checker will not expect - # the attribute to be None - data: Mapped[str] = mapped_column(nullable=True) +Keyword arguments can be specified with the above form by +specifying the last argument as a dictionary:: -.. _orm_declarative_mapped_column_type_map: + class MyClass(Base): + __tablename__ = "sometable" + __table_args__ = ( + ForeignKeyConstraint(["id"], ["remote_table.id"]), + UniqueConstraint("foo"), + {"autoload": True}, + ) -Customizing the Type Map -~~~~~~~~~~~~~~~~~~~~~~~~ +A class may also specify the ``__table_args__`` declarative attribute, +as well as the ``__tablename__`` attribute, in a dynamic style using the +:func:`_orm.declared_attr` method decorator. See +:ref:`orm_mixins_toplevel` for background. -The mapping of Python types to SQLAlchemy :class:`_types.TypeEngine` types -described in the previous section defaults to a hardcoded dictionary -present in the ``sqlalchemy.sql.sqltypes`` module. 
However, the :class:`_orm.registry` -object that coordinates the Declarative mapping process will first consult -a local, user defined dictionary of types which may be passed -as the :paramref:`_orm.registry.type_annotation_map` parameter when -constructing the :class:`_orm.registry`, which may be associated with -the :class:`_orm.DeclarativeBase` superclass when first used. +.. _orm_declarative_table_schema_name: -As an example, if we wish to make use of the :class:`_sqltypes.BIGINT` datatype for -``int``, the :class:`_sqltypes.TIMESTAMP` datatype with ``timezone=True`` for -``datetime.datetime``, and then only on Microsoft SQL Server we'd like to use -:class:`_sqltypes.NVARCHAR` datatype when Python ``str`` is used, -the registry and Declarative base could be configured as:: +Explicit Schema Name with Declarative Table +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - import datetime +The schema name for a :class:`_schema.Table` as documented at +:ref:`schema_table_schema_name` is applied to an individual :class:`_schema.Table` +using the :paramref:`_schema.Table.schema` argument. When using Declarative +tables, this option is passed like any other to the ``__table_args__`` +dictionary:: - from sqlalchemy import BIGINT, NVARCHAR, String, TIMESTAMP - from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column + from sqlalchemy.orm import DeclarativeBase class Base(DeclarativeBase): - type_annotation_map = { - int: BIGINT, - datetime.datetime: TIMESTAMP(timezone=True), - str: String().with_variant(NVARCHAR, "mssql"), - } + pass - class SomeClass(Base): - __tablename__ = "some_table" + class MyClass(Base): + __tablename__ = "sometable" + __table_args__ = {"schema": "some_schema"} - id: Mapped[int] = mapped_column(primary_key=True) - date: Mapped[datetime.datetime] - status: Mapped[str] +The schema name can also be applied to all :class:`_schema.Table` objects +globally by using the :paramref:`_schema.MetaData.schema` parameter documented +at :ref:`schema_metadata_schema_name`. The :class:`_schema.MetaData` object +may be constructed separately and associated with a :class:`_orm.DeclarativeBase` +subclass by assigning to the ``metadata`` attribute directly:: -Below illustrates the CREATE TABLE statement generated for the above mapping, -first on the Microsoft SQL Server backend, illustrating the ``NVARCHAR`` datatype: + from sqlalchemy import MetaData + from sqlalchemy.orm import DeclarativeBase -.. sourcecode:: pycon+sql + metadata_obj = MetaData(schema="some_schema") - >>> from sqlalchemy.schema import CreateTable - >>> from sqlalchemy.dialects import mssql, postgresql - >>> print(CreateTable(SomeClass.__table__).compile(dialect=mssql.dialect())) - {printsql}CREATE TABLE some_table ( - id BIGINT NOT NULL IDENTITY, - date TIMESTAMP NOT NULL, - status NVARCHAR(max) NOT NULL, - PRIMARY KEY (id) - ) -Then on the PostgreSQL backend, illustrating ``TIMESTAMP WITH TIME ZONE``: + class Base(DeclarativeBase): + metadata = metadata_obj -.. 
sourcecode:: pycon+sql - >>> print(CreateTable(SomeClass.__table__).compile(dialect=postgresql.dialect())) - {printsql}CREATE TABLE some_table ( - id BIGSERIAL NOT NULL, - date TIMESTAMP WITH TIME ZONE NOT NULL, - status VARCHAR NOT NULL, - PRIMARY KEY (id) - ) + class MyClass(Base): + # will use "some_schema" by default + __tablename__ = "sometable" -By making use of methods such as :meth:`.TypeEngine.with_variant`, we're able -to build up a type map that's customized to what we need for different backends, -while still being able to use succinct annotation-only :func:`_orm.mapped_column` -configurations. There are two more levels of Python-type configurability -available beyond this, described in the next two sections. +.. seealso:: -.. _orm_declarative_type_map_union_types: + :ref:`schema_table_schema_name` - in the :ref:`metadata_toplevel` documentation. -Union types inside the Type Map -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. _orm_declarative_column_options: -.. versionchanged:: 2.0.37 The features described in this section have been - repaired and enhanced to work consistently. Prior to this change, union - types were supported in ``type_annotation_map``, however the feature - exhibited inconsistent behaviors between union syntaxes as well as in how - ``None`` was handled. Please ensure SQLAlchemy is up to date before - attempting to use the features described in this section. +Setting Load and Persistence Options for Declarative Mapped Columns +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -SQLAlchemy supports mapping union types inside the ``type_annotation_map`` to -allow mapping database types that can support multiple Python types, such as -:class:`_types.JSON` or :class:`_postgresql.JSONB`:: +The :func:`_orm.mapped_column` construct accepts additional ORM-specific +arguments that affect how the generated :class:`_schema.Column` is +mapped, affecting its load and persistence-time behavior. Options +that are commonly used include: - from typing import Union - from sqlalchemy import JSON - from sqlalchemy.dialects import postgresql - from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column - from sqlalchemy.schema import CreateTable +* **deferred column loading** - The :paramref:`_orm.mapped_column.deferred` + boolean establishes the :class:`_schema.Column` using + :ref:`deferred column loading ` by default. In the example + below, the ``User.bio`` column will not be loaded by default, but only + when accessed:: - # new style Union using a pipe operator - json_list = list[int] | list[str] + class User(Base): + __tablename__ = "user" - # old style Union using Union explicitly - json_scalar = Union[float, str, bool] + id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[str] + bio: Mapped[str] = mapped_column(Text, deferred=True) + .. seealso:: - class Base(DeclarativeBase): - type_annotation_map = { - json_list: postgresql.JSONB, - json_scalar: JSON, - } + :ref:`orm_queryguide_column_deferral` - full description of deferred column loading +* **active history** - The :paramref:`_orm.mapped_column.active_history` + ensures that upon change of value for the attribute, the previous value + will have been loaded and made part of the :attr:`.AttributeState.history` + collection when inspecting the history of the attribute. 
This may incur + additional SQL statements:: - class SomeClass(Base): - __tablename__ = "some_table" + class User(Base): + __tablename__ = "user" id: Mapped[int] = mapped_column(primary_key=True) - list_col: Mapped[list[str] | list[int]] + important_identifier: Mapped[str] = mapped_column(active_history=True) - # uses JSON - scalar_col: Mapped[json_scalar] +See the docstring for :func:`_orm.mapped_column` for a list of supported +parameters. - # uses JSON and is also nullable=True - scalar_col_nullable: Mapped[json_scalar | None] +.. seealso:: - # these forms all use JSON as well due to the json_scalar entry - scalar_col_newstyle: Mapped[float | str | bool] - scalar_col_oldstyle: Mapped[Union[float, str, bool]] - scalar_col_mixedstyle: Mapped[Optional[float | str | bool]] + :ref:`orm_imperative_table_column_options` - describes using + :func:`_orm.column_property` and :func:`_orm.deferred` for use with + Imperative Table configuration -The above example maps the union of ``list[int]`` and ``list[str]`` to the Postgresql -:class:`_postgresql.JSONB` datatype, while naming a union of ``float, -str, bool`` will match to the :class:`_types.JSON` datatype. An equivalent -union, stated in the :class:`_orm.Mapped` construct, will match into the -corresponding entry in the type map. +.. _mapper_column_distinct_names: -The matching of a union type is based on the contents of the union regardless -of how the individual types are named, and additionally excluding the use of -the ``None`` type. That is, ``json_scalar`` will also match to ``str | bool | -float | None``. It will **not** match to a union that is a subset or superset -of this union; that is, ``str | bool`` would not match, nor would ``str | bool -| float | int``. The individual contents of the union excluding ``None`` must -be an exact match. +.. _orm_declarative_table_column_naming: -The ``None`` value is never significant as far as matching -from ``type_annotation_map`` to :class:`_orm.Mapped`, however is significant -as an indicator for nullability of the :class:`_schema.Column`. When ``None`` is present in the -union either as it is placed in the :class:`_orm.Mapped` construct. When -present in :class:`_orm.Mapped`, it indicates the :class:`_schema.Column` -would be nullable, in the absense of more specific indicators. This logic works -in the same way as indicating an ``Optional`` type as described at -:ref:`orm_declarative_mapped_column_nullability`. +Naming Declarative Mapped Columns Explicitly +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The CREATE TABLE statement for the above mapping will look as below: +All of the examples thus far feature the :func:`_orm.mapped_column` construct +linked to an ORM mapped attribute, where the Python attribute name given +to the :func:`_orm.mapped_column` is also that of the column as we see in +CREATE TABLE statements as well as queries. The name for a column as +expressed in SQL may be indicated by passing the string positional argument +:paramref:`_orm.mapped_column.__name` as the first positional argument. +In the example below, the ``User`` class is mapped with alternate names +given to the columns themselves:: + + class User(Base): + __tablename__ = "user" + + id: Mapped[int] = mapped_column("user_id", primary_key=True) + name: Mapped[str] = mapped_column("user_name") + +Where above ``User.id`` resolves to a column named ``user_id`` +and ``User.name`` resolves to a column named ``user_name``. 
We +may write a :func:`_sql.select` statement using our Python attribute names +and will see the SQL names generated: .. sourcecode:: pycon+sql - >>> print(CreateTable(SomeClass.__table__).compile(dialect=postgresql.dialect())) - {printsql}CREATE TABLE some_table ( - id SERIAL NOT NULL, - list_col JSONB NOT NULL, - scalar_col JSON, - scalar_col_not_null JSON NOT NULL, - PRIMARY KEY (id) - ) + >>> from sqlalchemy import select + >>> print(select(User.id, User.name).where(User.name == "x")) + {printsql}SELECT "user".user_id, "user".user_name + FROM "user" + WHERE "user".user_name = :user_name_1 -While union types use a "loose" matching approach that matches on any equivalent -set of subtypes, Python typing also features a way to create "type aliases" -that are treated as distinct types that are non-equivalent to another type that -includes the same composition. Integration of these types with ``type_annotation_map`` -is described in the next section, :ref:`orm_declarative_type_map_pep695_types`. -.. _orm_declarative_type_map_pep695_types: +.. seealso:: -Support for Type Alias Types (defined by PEP 695) and NewType -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + :ref:`orm_imperative_table_column_naming` - applies to Imperative Table -In contrast to the typing lookup described in -:ref:`orm_declarative_type_map_union_types`, Python typing also includes two -ways to create a composed type in a more formal way, using ``typing.NewType`` as -well as the ``type`` keyword introduced in :pep:`695`. These types behave -differently from ordinary type aliases (i.e. assigning a type to a variable -name), and this difference is honored in how SQLAlchemy resolves these -types from the type map. +.. _orm_declarative_table_adding_columns: -.. versionchanged:: 2.0.37 The behaviors described in this section for ``typing.NewType`` - as well as :pep:`695` ``type`` have been formalized and corrected. - Deprecation warnings are now emitted for "loose matching" patterns that have - worked in some 2.0 releases, but are to be removed in SQLAlchemy 2.1. - Please ensure SQLAlchemy is up to date before attempting to use the features - described in this section. +Appending additional columns to an existing Declarative mapped class +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The typing module allows the creation of "new types" using ``typing.NewType``:: +A declarative table configuration allows the addition of new +:class:`_schema.Column` objects to an existing mapping after the :class:`.Table` +metadata has already been generated. 
- from typing import NewType +For a declarative class that is declared using a declarative base class, +the underlying metaclass :class:`.DeclarativeMeta` includes a ``__setattr__()`` +method that will intercept additional :func:`_orm.mapped_column` or Core +:class:`.Column` objects and +add them to both the :class:`.Table` using :meth:`.Table.append_column` +as well as to the existing :class:`.Mapper` using :meth:`.Mapper.add_property`:: - nstr30 = NewType("nstr30", str) - nstr50 = NewType("nstr50", str) + MyClass.some_new_column = mapped_column(String) -Additionally, in Python 3.12, a new feature defined by :pep:`695` was introduced which -provides the ``type`` keyword to accomplish a similar task; using -``type`` produces an object that is similar in many ways to ``typing.NewType`` -which is internally referred to as ``typing.TypeAliasType``:: +Using core :class:`_schema.Column`:: - type SmallInt = int - type BigInt = int - type JsonScalar = str | float | bool | None + MyClass.some_new_column = Column(String) -For the purposes of how SQLAlchemy treats these type objects when used -for SQL type lookup inside of :class:`_orm.Mapped`, it's important to note -that Python does not consider two equivalent ``typing.TypeAliasType`` -or ``typing.NewType`` objects to be equal:: +All arguments are supported including an alternate name, such as +``MyClass.some_new_column = mapped_column("some_name", String)``. However, +the SQL type must be passed to the :func:`_orm.mapped_column` or +:class:`_schema.Column` object explicitly, as in the above examples where +the :class:`_sqltypes.String` type is passed. There's no capability for +the :class:`_orm.Mapped` annotation type to take part in the operation. - # two typing.NewType objects are not equal even if they are both str - >>> nstr50 == nstr30 - False +Additional :class:`_schema.Column` objects may also be added to a mapping +in the specific circumstance of using single table inheritance, where +additional columns are present on mapped subclasses that have +no :class:`.Table` of their own. This is illustrated in the section +:ref:`single_inheritance`. - # two TypeAliasType objects are not equal even if they are both int - >>> SmallInt == BigInt - False +.. seealso:: - # an equivalent union is not equal to JsonScalar - >>> JsonScalar == str | float | bool | None - False + :ref:`orm_declarative_table_adding_relationship` - similar examples for :func:`_orm.relationship` -This is the opposite behavior from how ordinary unions are compared, and -informs the correct behavior for SQLAlchemy's ``type_annotation_map``. When -using ``typing.NewType`` or :pep:`695` ``type`` objects, the type object is -expected to be explicit within the ``type_annotation_map`` for it to be matched -from a :class:`_orm.Mapped` type, where the same object must be stated in order -for a match to be made (excluding whether or not the type inside of -:class:`_orm.Mapped` also unions on ``None``). This is distinct from the -behavior described at :ref:`orm_declarative_type_map_union_types`, where a -plain ``Union`` that is referenced directly will match to other ``Unions`` -based on the composition, rather than the object identity, of a particular type -in ``type_annotation_map``. +.. 
note:: Assignment of mapped + properties to an already mapped class will only + function correctly if the "declarative base" class is used, meaning + the user-defined subclass of :class:`_orm.DeclarativeBase` or the + dynamically generated class returned by :func:`_orm.declarative_base` + or :meth:`_orm.registry.generate_base`. This "base" class includes + a Python metaclass which implements a special ``__setattr__()`` method + that intercepts these operations. + + Runtime assignment of class-mapped attributes to a mapped class will **not** work + if the class is mapped using decorators like :meth:`_orm.registry.mapped` + or imperative functions like :meth:`_orm.registry.map_imperatively`. + + +.. _orm_declarative_mapped_column: + +ORM Annotated Declarative - Complete Guide +------------------------------------------ + +The :func:`_orm.mapped_column` construct is capable of deriving its +column-configuration information from :pep:`484` type annotations associated +with the attribute as declared in the Declarative mapped class. These type +annotations, if used, must be present within a special SQLAlchemy type called +:class:`_orm.Mapped`, which is a generic_ type that then indicates a specific +Python type within it. + +Using this technique, the ``User`` example from previous sections may be +written as below:: + + from sqlalchemy import String + from sqlalchemy.orm import DeclarativeBase + from sqlalchemy.orm import Mapped + from sqlalchemy.orm import mapped_column + + + class Base(DeclarativeBase): + pass + + + class User(Base): + __tablename__ = "user" + + id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[str] = mapped_column(String(50)) + fullname: Mapped[str | None] + nickname: Mapped[str | None] = mapped_column(String(30)) + +Above, when Declarative processes each class attribute, each +:func:`_orm.mapped_column` will derive additional arguments from the +corresponding :class:`_orm.Mapped` type annotation on the left side, if +present. Additionally, Declarative will generate an empty +:func:`_orm.mapped_column` directive implicitly, whenever a +:class:`_orm.Mapped` type annotation is encountered that does not have +a value assigned to the attribute (this form is inspired by the similar +style used in Python dataclasses_); this :func:`_orm.mapped_column` construct +proceeds to derive its configuration from the :class:`_orm.Mapped` +annotation present. + +.. _orm_declarative_mapped_column_nullability: + +``mapped_column()`` derives the datatype and nullability from the ``Mapped`` annotation +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The two qualities that :func:`_orm.mapped_column` derives from the +:class:`_orm.Mapped` annotation are: + +* **datatype** - the Python type given inside :class:`_orm.Mapped`, as contained + within the ``typing.Optional`` construct if present, is associated with a + :class:`_sqltypes.TypeEngine` subclass such as :class:`.Integer`, :class:`.String`, + :class:`.DateTime`, or :class:`.Uuid`, to name a few common types. + + The datatype is determined based on a dictionary of Python type to + SQLAlchemy datatype. This dictionary is completely customizable, + as detailed in the next section :ref:`orm_declarative_mapped_column_type_map`. 
+ The default type map is implemented as in the code example below:: + + from typing import Any + from typing import Dict + from typing import Type + + import datetime + import decimal + import uuid + + from sqlalchemy import types + + # default type mapping, deriving the type for mapped_column() + # from a Mapped[] annotation + type_map: Dict[Type[Any], TypeEngine[Any]] = { + bool: types.Boolean(), + bytes: types.LargeBinary(), + datetime.date: types.Date(), + datetime.datetime: types.DateTime(), + datetime.time: types.Time(), + datetime.timedelta: types.Interval(), + decimal.Decimal: types.Numeric(), + float: types.Float(), + int: types.Integer(), + str: types.String(), + uuid.UUID: types.Uuid(), + } + + If the :func:`_orm.mapped_column` construct indicates an explicit type + as passed to the :paramref:`_orm.mapped_column.__type` argument, then + the given Python type is disregarded. + +* **nullability** - The :func:`_orm.mapped_column` construct will indicate + its :class:`_schema.Column` as ``NULL`` or ``NOT NULL`` first and foremost by + the presence of the :paramref:`_orm.mapped_column.nullable` parameter, passed + either as ``True`` or ``False``. Additionally , if the + :paramref:`_orm.mapped_column.primary_key` parameter is present and set to + ``True``, that will also imply that the column should be ``NOT NULL``. -In the example below, the composed types for ``nstr30``, ``nstr50``, -``SmallInt``, ``BigInt``, and ``JsonScalar`` have no overlap with each other -and can be named distinctly within each :class:`_orm.Mapped` construct, and -are also all explicit in ``type_annotation_map``. Any of these types may -also be unioned with ``None`` or declared as ``Optional[]`` without affecting -the lookup, only deriving column nullability:: + In the absence of **both** of these parameters, the presence of + ``typing.Optional[]`` within the :class:`_orm.Mapped` type annotation will be + used to determine nullability, where ``typing.Optional[]`` means ``NULL``, + and the absence of ``typing.Optional[]`` means ``NOT NULL``. If there is no + ``Mapped[]`` annotation present at all, and there is no + :paramref:`_orm.mapped_column.nullable` or + :paramref:`_orm.mapped_column.primary_key` parameter, then SQLAlchemy's usual + default for :class:`_schema.Column` of ``NULL`` is used. 
- from typing import NewType + In the example below, the ``id`` and ``data`` columns will be ``NOT NULL``, + and the ``additional_info`` column will be ``NULL``:: - from sqlalchemy import SmallInteger, BigInteger, JSON, String - from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column - from sqlalchemy.schema import CreateTable + from typing import Optional - nstr30 = NewType("nstr30", str) - nstr50 = NewType("nstr50", str) - type SmallInt = int - type BigInt = int - type JsonScalar = str | float | bool | None + from sqlalchemy.orm import DeclarativeBase + from sqlalchemy.orm import Mapped + from sqlalchemy.orm import mapped_column - class TABase(DeclarativeBase): - type_annotation_map = { - nstr30: String(30), - nstr50: String(50), - SmallInt: SmallInteger, - BigInteger: BigInteger, - JsonScalar: JSON, - } + class Base(DeclarativeBase): + pass - class SomeClass(TABase): - __tablename__ = "some_table" + class SomeClass(Base): + __tablename__ = "some_table" - id: Mapped[int] = mapped_column(primary_key=True) - normal_str: Mapped[str] + # primary_key=True, therefore will be NOT NULL + id: Mapped[int] = mapped_column(primary_key=True) - short_str: Mapped[nstr30] - long_str_nullable: Mapped[nstr50 | None] + # not Optional[], therefore will be NOT NULL + data: Mapped[str] - small_int: Mapped[SmallInt] - big_int: Mapped[BigInteger] - scalar_col: Mapped[JsonScalar] + # Optional[], therefore will be NULL + additional_info: Mapped[Optional[str]] -a CREATE TABLE for the above mapping will illustrate the different variants -of integer and string we've configured, and looks like: + It is also perfectly valid to have a :func:`_orm.mapped_column` whose + nullability is **different** from what would be implied by the annotation. + For example, an ORM mapped attribute may be annotated as allowing ``None`` + within Python code that works with the object as it is first being created + and populated, however the value will ultimately be written to a database + column that is ``NOT NULL``. The :paramref:`_orm.mapped_column.nullable` + parameter, when present, will always take precedence:: -.. sourcecode:: pycon+sql + class SomeClass(Base): + # ... - >>> print(CreateTable(SomeClass.__table__)) - {printsql}CREATE TABLE some_table ( - id INTEGER NOT NULL, - normal_str VARCHAR NOT NULL, - short_str VARCHAR(30) NOT NULL, - long_str_nullable VARCHAR(50), - small_int SMALLINT NOT NULL, - big_int BIGINT NOT NULL, - scalar_col JSON, - PRIMARY KEY (id) - ) + # will be String() NOT NULL, but can be None in Python + data: Mapped[Optional[str]] = mapped_column(nullable=False) -Regarding nullability, the ``JsonScalar`` type includes ``None`` in its -definition, which indicates a nullable column. Similarly the -``long_str_nullable`` column applies a union of ``None`` to ``nstr50``, -which matches to the ``nstr50`` type in the ``type_annotation_map`` while -also applying nullability to the mapped column. The other columns all remain -NOT NULL as they are not indicated as optional. + Similarly, a non-None attribute that's written to a database column that + for whatever reason needs to be NULL at the schema level, + :paramref:`_orm.mapped_column.nullable` may be set to ``True``:: + class SomeClass(Base): + # ... -.. _orm_declarative_mapped_column_type_map_pep593: + # will be String() NULL, but type checker will not expect + # the attribute to be None + data: Mapped[str] = mapped_column(nullable=True) -Mapping Multiple Type Configurations to Python Types -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. 
_orm_declarative_mapped_column_type_map: -As individual Python types may be associated with :class:`_types.TypeEngine` -configurations of any variety by using the :paramref:`_orm.registry.type_annotation_map` -parameter, an additional -capability is the ability to associate a single Python type with different -variants of a SQL type based on additional type qualifiers. One typical -example of this is mapping the Python ``str`` datatype to ``VARCHAR`` -SQL types of different lengths. Another is mapping different varieties of -``decimal.Decimal`` to differently sized ``NUMERIC`` columns. +Customizing the Type Map +^^^^^^^^^^^^^^^^^^^^^^^^ -Python's typing system provides a great way to add additional metadata to a -Python type which is by using the :pep:`593` ``Annotated`` generic type, which -allows additional information to be bundled along with a Python type. The -:func:`_orm.mapped_column` construct will correctly interpret an ``Annotated`` -object by identity when resolving it in the -:paramref:`_orm.registry.type_annotation_map`, as in the example below where we -declare two variants of :class:`.String` and :class:`.Numeric`:: - from decimal import Decimal +The mapping of Python types to SQLAlchemy :class:`_types.TypeEngine` types +described in the previous section defaults to a hardcoded dictionary +present in the ``sqlalchemy.sql.sqltypes`` module. However, the :class:`_orm.registry` +object that coordinates the Declarative mapping process will first consult +a local, user defined dictionary of types which may be passed +as the :paramref:`_orm.registry.type_annotation_map` parameter when +constructing the :class:`_orm.registry`, which may be associated with +the :class:`_orm.DeclarativeBase` superclass when first used. - from typing_extensions import Annotated +As an example, if we wish to make use of the :class:`_sqltypes.BIGINT` datatype for +``int``, the :class:`_sqltypes.TIMESTAMP` datatype with ``timezone=True`` for +``datetime.datetime``, and then only on Microsoft SQL Server we'd like to use +:class:`_sqltypes.NVARCHAR` datatype when Python ``str`` is used, +the registry and Declarative base could be configured as:: - from sqlalchemy import Numeric - from sqlalchemy import String - from sqlalchemy.orm import DeclarativeBase - from sqlalchemy.orm import Mapped - from sqlalchemy.orm import mapped_column - from sqlalchemy.orm import registry + import datetime - str_30 = Annotated[str, 30] - str_50 = Annotated[str, 50] - num_12_4 = Annotated[Decimal, 12] - num_6_2 = Annotated[Decimal, 6] + from sqlalchemy import BIGINT, NVARCHAR, String, TIMESTAMP + from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column class Base(DeclarativeBase): - registry = registry( - type_annotation_map={ - str_30: String(30), - str_50: String(50), - num_12_4: Numeric(12, 4), - num_6_2: Numeric(6, 2), - } - ) + type_annotation_map = { + int: BIGINT, + datetime.datetime: TIMESTAMP(timezone=True), + str: String().with_variant(NVARCHAR, "mssql"), + } -The Python type passed to the ``Annotated`` container, in the above example the -``str`` and ``Decimal`` types, is important only for the benefit of typing -tools; as far as the :func:`_orm.mapped_column` construct is concerned, it will only need -perform a lookup of each type object in the -:paramref:`_orm.registry.type_annotation_map` dictionary without actually -looking inside of the ``Annotated`` object, at least in this particular -context. 
Similarly, the arguments passed to ``Annotated`` beyond the underlying -Python type itself are also not important, it's only that at least one argument -must be present for the ``Annotated`` construct to be valid. We can then use -these augmented types directly in our mapping where they will be matched to the -more specific type constructions, as in the following example:: class SomeClass(Base): __tablename__ = "some_table" - short_name: Mapped[str_30] = mapped_column(primary_key=True) - long_name: Mapped[str_50] - num_value: Mapped[num_12_4] - short_num_value: Mapped[num_6_2] + id: Mapped[int] = mapped_column(primary_key=True) + date: Mapped[datetime.datetime] + status: Mapped[str] -a CREATE TABLE for the above mapping will illustrate the different variants -of ``VARCHAR`` and ``NUMERIC`` we've configured, and looks like: +Below illustrates the CREATE TABLE statement generated for the above mapping, +first on the Microsoft SQL Server backend, illustrating the ``NVARCHAR`` datatype: .. sourcecode:: pycon+sql >>> from sqlalchemy.schema import CreateTable - >>> print(CreateTable(SomeClass.__table__)) + >>> from sqlalchemy.dialects import mssql, postgresql + >>> print(CreateTable(SomeClass.__table__).compile(dialect=mssql.dialect())) {printsql}CREATE TABLE some_table ( - short_name VARCHAR(30) NOT NULL, - long_name VARCHAR(50) NOT NULL, - num_value NUMERIC(12, 4) NOT NULL, - short_num_value NUMERIC(6, 2) NOT NULL, - PRIMARY KEY (short_name) + id BIGINT NOT NULL IDENTITY, + date TIMESTAMP NOT NULL, + status NVARCHAR(max) NOT NULL, + PRIMARY KEY (id) ) -While variety in linking ``Annotated`` types to different SQL types grants -us a wide degree of flexibility, the next section illustrates a second -way in which ``Annotated`` may be used with Declarative that is even -more open ended. +Then on the PostgreSQL backend, illustrating ``TIMESTAMP WITH TIME ZONE``: +.. sourcecode:: pycon+sql -.. _orm_declarative_mapped_column_pep593: + >>> print(CreateTable(SomeClass.__table__).compile(dialect=postgresql.dialect())) + {printsql}CREATE TABLE some_table ( + id BIGSERIAL NOT NULL, + date TIMESTAMP WITH TIME ZONE NOT NULL, + status VARCHAR NOT NULL, + PRIMARY KEY (id) + ) -Mapping Whole Column Declarations to Python Types -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +By making use of methods such as :meth:`.TypeEngine.with_variant`, we're able +to build up a type map that's customized to what we need for different backends, +while still being able to use succinct annotation-only :func:`_orm.mapped_column` +configurations. There are two more levels of Python-type configurability +available beyond this, described in the next two sections. -The previous section illustrated using :pep:`593` ``Annotated`` type -instances as keys within the :paramref:`_orm.registry.type_annotation_map` -dictionary. In this form, the :func:`_orm.mapped_column` construct does not -actually look inside the ``Annotated`` object itself, it's instead -used only as a dictionary key. However, Declarative also has the ability to extract -an entire pre-established :func:`_orm.mapped_column` construct from -an ``Annotated`` object directly. Using this form, we can define not only -different varieties of SQL datatypes linked to Python types without using -the :paramref:`_orm.registry.type_annotation_map` dictionary, we can also -set up any number of arguments such as nullability, column defaults, -and constraints in a reusable fashion. +.. 
_orm_declarative_type_map_union_types: -A set of ORM models will usually have some kind of primary -key style that is common to all mapped classes. There also may be -common column configurations such as timestamps with defaults and other fields of -pre-established sizes and configurations. We can compose these configurations -into :func:`_orm.mapped_column` instances that we then bundle directly into -instances of ``Annotated``, which are then re-used in any number of class -declarations. Declarative will unpack an ``Annotated`` object -when provided in this manner, skipping over any other directives that don't -apply to SQLAlchemy and searching only for SQLAlchemy ORM constructs. +Union types inside the Type Map +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The example below illustrates a variety of pre-configured field types used -in this way, where we define ``intpk`` that represents an :class:`.Integer` primary -key column, ``timestamp`` that represents a :class:`.DateTime` type -which will use ``CURRENT_TIMESTAMP`` as a DDL level column default, -and ``required_name`` which is a :class:`.String` of length 30 that's -``NOT NULL``:: - import datetime +.. versionchanged:: 2.0.37 The features described in this section have been + repaired and enhanced to work consistently. Prior to this change, union + types were supported in ``type_annotation_map``, however the feature + exhibited inconsistent behaviors between union syntaxes as well as in how + ``None`` was handled. Please ensure SQLAlchemy is up to date before + attempting to use the features described in this section. - from typing_extensions import Annotated +SQLAlchemy supports mapping union types inside the ``type_annotation_map`` to +allow mapping database types that can support multiple Python types, such as +:class:`_types.JSON` or :class:`_postgresql.JSONB`:: - from sqlalchemy import func - from sqlalchemy import String - from sqlalchemy.orm import mapped_column + from typing import Union + from sqlalchemy import JSON + from sqlalchemy.dialects import postgresql + from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column + from sqlalchemy.schema import CreateTable + # new style Union using a pipe operator + json_list = list[int] | list[str] - intpk = Annotated[int, mapped_column(primary_key=True)] - timestamp = Annotated[ - datetime.datetime, - mapped_column(nullable=False, server_default=func.CURRENT_TIMESTAMP()), - ] - required_name = Annotated[str, mapped_column(String(30), nullable=False)] + # old style Union using Union explicitly + json_scalar = Union[float, str, bool] -The above ``Annotated`` objects can then be used directly within -:class:`_orm.Mapped`, where the pre-configured :func:`_orm.mapped_column` -constructs will be extracted and copied to a new instance that will be -specific to each attribute:: class Base(DeclarativeBase): - pass + type_annotation_map = { + json_list: postgresql.JSONB, + json_scalar: JSON, + } class SomeClass(Base): __tablename__ = "some_table" - id: Mapped[intpk] - name: Mapped[required_name] - created_at: Mapped[timestamp] - -``CREATE TABLE`` for our above mapping looks like: - -.. sourcecode:: pycon+sql - - >>> from sqlalchemy.schema import CreateTable - >>> print(CreateTable(SomeClass.__table__)) - {printsql}CREATE TABLE some_table ( - id INTEGER NOT NULL, - name VARCHAR(30) NOT NULL, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, - PRIMARY KEY (id) - ) - -When using ``Annotated`` types in this way, the configuration of the type -may also be affected on a per-attribute basis. 
For the types in the above -example that feature explicit use of :paramref:`_orm.mapped_column.nullable`, -we can apply the ``Optional[]`` generic modifier to any of our types so that -the field is optional or not at the Python level, which will be independent -of the ``NULL`` / ``NOT NULL`` setting that takes place in the database:: + id: Mapped[int] = mapped_column(primary_key=True) + list_col: Mapped[list[str] | list[int]] - from typing_extensions import Annotated + # uses JSON + scalar_col: Mapped[json_scalar] - import datetime - from typing import Optional + # uses JSON and is also nullable=True + scalar_col_nullable: Mapped[json_scalar | None] - from sqlalchemy.orm import DeclarativeBase + # these forms all use JSON as well due to the json_scalar entry + scalar_col_newstyle: Mapped[float | str | bool] + scalar_col_oldstyle: Mapped[Union[float, str, bool]] + scalar_col_mixedstyle: Mapped[Optional[float | str | bool]] - timestamp = Annotated[ - datetime.datetime, - mapped_column(nullable=False), - ] +The above example maps the union of ``list[int]`` and ``list[str]`` to the Postgresql +:class:`_postgresql.JSONB` datatype, while naming a union of ``float, +str, bool`` will match to the :class:`_types.JSON` datatype. An equivalent +union, stated in the :class:`_orm.Mapped` construct, will match into the +corresponding entry in the type map. +The matching of a union type is based on the contents of the union regardless +of how the individual types are named, and additionally excluding the use of +the ``None`` type. That is, ``json_scalar`` will also match to ``str | bool | +float | None``. It will **not** match to a union that is a subset or superset +of this union; that is, ``str | bool`` would not match, nor would ``str | bool +| float | int``. The individual contents of the union excluding ``None`` must +be an exact match. - class Base(DeclarativeBase): - pass +The ``None`` value is never significant as far as matching +from ``type_annotation_map`` to :class:`_orm.Mapped`, however is significant +as an indicator for nullability of the :class:`_schema.Column`. When ``None`` is present in the +union either as it is placed in the :class:`_orm.Mapped` construct. When +present in :class:`_orm.Mapped`, it indicates the :class:`_schema.Column` +would be nullable, in the absense of more specific indicators. This logic works +in the same way as indicating an ``Optional`` type as described at +:ref:`orm_declarative_mapped_column_nullability`. +The CREATE TABLE statement for the above mapping will look as below: - class SomeClass(Base): - # ... +.. sourcecode:: pycon+sql - # pep-484 type will be Optional, but column will be - # NOT NULL - created_at: Mapped[Optional[timestamp]] + >>> print(CreateTable(SomeClass.__table__).compile(dialect=postgresql.dialect())) + {printsql}CREATE TABLE some_table ( + id SERIAL NOT NULL, + list_col JSONB NOT NULL, + scalar_col JSON, + scalar_col_not_null JSON NOT NULL, + PRIMARY KEY (id) + ) -The :func:`_orm.mapped_column` construct is also reconciled with an explicitly -passed :func:`_orm.mapped_column` construct, whose arguments will take precedence -over those of the ``Annotated`` construct. 
Below we add a :class:`.ForeignKey` -constraint to our integer primary key and also use an alternate server -default for the ``created_at`` column:: +While union types use a "loose" matching approach that matches on any equivalent +set of subtypes, Python typing also features a way to create "type aliases" +that are treated as distinct types that are non-equivalent to another type that +includes the same composition. Integration of these types with ``type_annotation_map`` +is described in the next section, :ref:`orm_declarative_type_map_pep695_types`. - import datetime +.. _orm_declarative_type_map_pep695_types: - from typing_extensions import Annotated +Support for Type Alias Types (defined by PEP 695) and NewType +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - from sqlalchemy import ForeignKey - from sqlalchemy import func - from sqlalchemy.orm import DeclarativeBase - from sqlalchemy.orm import Mapped - from sqlalchemy.orm import mapped_column - from sqlalchemy.schema import CreateTable - intpk = Annotated[int, mapped_column(primary_key=True)] - timestamp = Annotated[ - datetime.datetime, - mapped_column(nullable=False, server_default=func.CURRENT_TIMESTAMP()), - ] +In contrast to the typing lookup described in +:ref:`orm_declarative_type_map_union_types`, Python typing also includes two +ways to create a composed type in a more formal way, using ``typing.NewType`` as +well as the ``type`` keyword introduced in :pep:`695`. These types behave +differently from ordinary type aliases (i.e. assigning a type to a variable +name), and this difference is honored in how SQLAlchemy resolves these +types from the type map. +.. versionchanged:: 2.0.37 The behaviors described in this section for ``typing.NewType`` + as well as :pep:`695` ``type`` have been formalized and corrected. + Deprecation warnings are now emitted for "loose matching" patterns that have + worked in some 2.0 releases, but are to be removed in SQLAlchemy 2.1. + Please ensure SQLAlchemy is up to date before attempting to use the features + described in this section. 
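As a minimal sketch of this distinction (hypothetical, not part of the patch; the
names ``Name60`` and ``Person`` are for demonstration only), a ``typing.NewType``
participates in the lookup only through the exact object registered in
``type_annotation_map``; the fuller walk-through below builds on the same idea::

    from typing import NewType, Optional

    from sqlalchemy import String
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    Name60 = NewType("Name60", str)


    class Base(DeclarativeBase):
        # the NewType object itself is the key; a structurally
        # equivalent but distinct NewType would not match this entry
        type_annotation_map = {Name60: String(60)}


    class Person(Base):
        __tablename__ = "person"

        id: Mapped[int] = mapped_column(primary_key=True)

        # looked up by the Name60 object itself; Optional[] only
        # affects nullability, not the lookup
        display_name: Mapped[Optional[Name60]]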
- class Base(DeclarativeBase): - pass +The typing module allows the creation of "new types" using ``typing.NewType``:: + from typing import NewType - class Parent(Base): - __tablename__ = "parent" + nstr30 = NewType("nstr30", str) + nstr50 = NewType("nstr50", str) - id: Mapped[intpk] +Additionally, in Python 3.12, a new feature defined by :pep:`695` was introduced which +provides the ``type`` keyword to accomplish a similar task; using +``type`` produces an object that is similar in many ways to ``typing.NewType`` +which is internally referred to as ``typing.TypeAliasType``:: + type SmallInt = int + type BigInt = int + type JsonScalar = str | float | bool | None - class SomeClass(Base): - __tablename__ = "some_table" +For the purposes of how SQLAlchemy treats these type objects when used +for SQL type lookup inside of :class:`_orm.Mapped`, it's important to note +that Python does not consider two equivalent ``typing.TypeAliasType`` +or ``typing.NewType`` objects to be equal:: - # add ForeignKey to mapped_column(Integer, primary_key=True) - id: Mapped[intpk] = mapped_column(ForeignKey("parent.id")) + # two typing.NewType objects are not equal even if they are both str + >>> nstr50 == nstr30 + False - # change server default from CURRENT_TIMESTAMP to UTC_TIMESTAMP - created_at: Mapped[timestamp] = mapped_column(server_default=func.UTC_TIMESTAMP()) + # two TypeAliasType objects are not equal even if they are both int + >>> SmallInt == BigInt + False -The CREATE TABLE statement illustrates these per-attribute settings, -adding a ``FOREIGN KEY`` constraint as well as substituting -``UTC_TIMESTAMP`` for ``CURRENT_TIMESTAMP``: + # an equivalent union is not equal to JsonScalar + >>> JsonScalar == str | float | bool | None + False -.. sourcecode:: pycon+sql +This is the opposite behavior from how ordinary unions are compared, and +informs the correct behavior for SQLAlchemy's ``type_annotation_map``. When +using ``typing.NewType`` or :pep:`695` ``type`` objects, the type object is +expected to be explicit within the ``type_annotation_map`` for it to be matched +from a :class:`_orm.Mapped` type, where the same object must be stated in order +for a match to be made (excluding whether or not the type inside of +:class:`_orm.Mapped` also unions on ``None``). This is distinct from the +behavior described at :ref:`orm_declarative_type_map_union_types`, where a +plain ``Union`` that is referenced directly will match to other ``Unions`` +based on the composition, rather than the object identity, of a particular type +in ``type_annotation_map``. - >>> from sqlalchemy.schema import CreateTable - >>> print(CreateTable(SomeClass.__table__)) - {printsql}CREATE TABLE some_table ( - id INTEGER NOT NULL, - created_at DATETIME DEFAULT UTC_TIMESTAMP() NOT NULL, - PRIMARY KEY (id), - FOREIGN KEY(id) REFERENCES parent (id) - ) +In the example below, the composed types for ``nstr30``, ``nstr50``, +``SmallInt``, ``BigInt``, and ``JsonScalar`` have no overlap with each other +and can be named distinctly within each :class:`_orm.Mapped` construct, and +are also all explicit in ``type_annotation_map``. Any of these types may +also be unioned with ``None`` or declared as ``Optional[]`` without affecting +the lookup, only deriving column nullability:: -.. 
note:: The feature of :func:`_orm.mapped_column` just described, where - a fully constructed set of column arguments may be indicated using - :pep:`593` ``Annotated`` objects that contain a "template" - :func:`_orm.mapped_column` object to be copied into the attribute, is - currently not implemented for other ORM constructs such as - :func:`_orm.relationship` and :func:`_orm.composite`. While this functionality - is in theory possible, for the moment attempting to use ``Annotated`` - to indicate further arguments for :func:`_orm.relationship` and similar - will raise a ``NotImplementedError`` exception at runtime, but - may be implemented in future releases. + from typing import NewType -.. _orm_declarative_mapped_column_enums: + from sqlalchemy import SmallInteger, BigInteger, JSON, String + from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column + from sqlalchemy.schema import CreateTable -Using Python ``Enum`` or pep-586 ``Literal`` types in the type map -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + nstr30 = NewType("nstr30", str) + nstr50 = NewType("nstr50", str) + type SmallInt = int + type BigInt = int + type JsonScalar = str | float | bool | None -.. versionadded:: 2.0.0b4 - Added ``Enum`` support -.. versionadded:: 2.0.1 - Added ``Literal`` support + class TABase(DeclarativeBase): + type_annotation_map = { + nstr30: String(30), + nstr50: String(50), + SmallInt: SmallInteger, + BigInteger: BigInteger, + JsonScalar: JSON, + } -User-defined Python types which derive from the Python built-in ``enum.Enum`` -as well as the ``typing.Literal`` -class are automatically linked to the SQLAlchemy :class:`.Enum` datatype -when used in an ORM declarative mapping. The example below uses -a custom ``enum.Enum`` within the ``Mapped[]`` constructor:: - import enum + class SomeClass(TABase): + __tablename__ = "some_table" - from sqlalchemy.orm import DeclarativeBase - from sqlalchemy.orm import Mapped - from sqlalchemy.orm import mapped_column + id: Mapped[int] = mapped_column(primary_key=True) + normal_str: Mapped[str] + short_str: Mapped[nstr30] + long_str_nullable: Mapped[nstr50 | None] - class Base(DeclarativeBase): - pass + small_int: Mapped[SmallInt] + big_int: Mapped[BigInteger] + scalar_col: Mapped[JsonScalar] +a CREATE TABLE for the above mapping will illustrate the different variants +of integer and string we've configured, and looks like: - class Status(enum.Enum): - PENDING = "pending" - RECEIVED = "received" - COMPLETED = "completed" +.. sourcecode:: pycon+sql + >>> print(CreateTable(SomeClass.__table__)) + {printsql}CREATE TABLE some_table ( + id INTEGER NOT NULL, + normal_str VARCHAR NOT NULL, + short_str VARCHAR(30) NOT NULL, + long_str_nullable VARCHAR(50), + small_int SMALLINT NOT NULL, + big_int BIGINT NOT NULL, + scalar_col JSON, + PRIMARY KEY (id) + ) - class SomeClass(Base): - __tablename__ = "some_table" +Regarding nullability, the ``JsonScalar`` type includes ``None`` in its +definition, which indicates a nullable column. Similarly the +``long_str_nullable`` column applies a union of ``None`` to ``nstr50``, +which matches to the ``nstr50`` type in the ``type_annotation_map`` while +also applying nullability to the mapped column. The other columns all remain +NOT NULL as they are not indicated as optional. - id: Mapped[int] = mapped_column(primary_key=True) - status: Mapped[Status] -In the above example, the mapped attribute ``SomeClass.status`` will be -linked to a :class:`.Column` with the datatype of ``Enum(Status)``. 
-We can see this for example in the CREATE TABLE output for the PostgreSQL -database: +.. _orm_declarative_mapped_column_type_map_pep593: -.. sourcecode:: sql +Mapping Multiple Type Configurations to Python Types +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - CREATE TYPE status AS ENUM ('PENDING', 'RECEIVED', 'COMPLETED') - CREATE TABLE some_table ( - id SERIAL NOT NULL, - status status NOT NULL, - PRIMARY KEY (id) - ) +As individual Python types may be associated with :class:`_types.TypeEngine` +configurations of any variety by using the :paramref:`_orm.registry.type_annotation_map` +parameter, an additional +capability is the ability to associate a single Python type with different +variants of a SQL type based on additional type qualifiers. One typical +example of this is mapping the Python ``str`` datatype to ``VARCHAR`` +SQL types of different lengths. Another is mapping different varieties of +``decimal.Decimal`` to differently sized ``NUMERIC`` columns. -In a similar way, ``typing.Literal`` may be used instead, using -a ``typing.Literal`` that consists of all strings:: +Python's typing system provides a great way to add additional metadata to a +Python type which is by using the :pep:`593` ``Annotated`` generic type, which +allows additional information to be bundled along with a Python type. The +:func:`_orm.mapped_column` construct will correctly interpret an ``Annotated`` +object by identity when resolving it in the +:paramref:`_orm.registry.type_annotation_map`, as in the example below where we +declare two variants of :class:`.String` and :class:`.Numeric`:: + from decimal import Decimal - from typing import Literal + from typing_extensions import Annotated + from sqlalchemy import Numeric + from sqlalchemy import String from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column + from sqlalchemy.orm import registry - - class Base(DeclarativeBase): - pass + str_30 = Annotated[str, 30] + str_50 = Annotated[str, 50] + num_12_4 = Annotated[Decimal, 12] + num_6_2 = Annotated[Decimal, 6] - Status = Literal["pending", "received", "completed"] + class Base(DeclarativeBase): + registry = registry( + type_annotation_map={ + str_30: String(30), + str_50: String(50), + num_12_4: Numeric(12, 4), + num_6_2: Numeric(6, 2), + } + ) +The Python type passed to the ``Annotated`` container, in the above example the +``str`` and ``Decimal`` types, is important only for the benefit of typing +tools; as far as the :func:`_orm.mapped_column` construct is concerned, it will only need +perform a lookup of each type object in the +:paramref:`_orm.registry.type_annotation_map` dictionary without actually +looking inside of the ``Annotated`` object, at least in this particular +context. Similarly, the arguments passed to ``Annotated`` beyond the underlying +Python type itself are also not important, it's only that at least one argument +must be present for the ``Annotated`` construct to be valid. 
We can then use +these augmented types directly in our mapping where they will be matched to the +more specific type constructions, as in the following example:: class SomeClass(Base): __tablename__ = "some_table" - id: Mapped[int] = mapped_column(primary_key=True) - status: Mapped[Status] + short_name: Mapped[str_30] = mapped_column(primary_key=True) + long_name: Mapped[str_50] + num_value: Mapped[num_12_4] + short_num_value: Mapped[num_6_2] -The entries used in :paramref:`_orm.registry.type_annotation_map` link the base -``enum.Enum`` Python type as well as the ``typing.Literal`` type to the -SQLAlchemy :class:`.Enum` SQL type, using a special form which indicates to the -:class:`.Enum` datatype that it should automatically configure itself against -an arbitrary enumerated type. This configuration, which is implicit by default, -would be indicated explicitly as:: +a CREATE TABLE for the above mapping will illustrate the different variants +of ``VARCHAR`` and ``NUMERIC`` we've configured, and looks like: - import enum - import typing +.. sourcecode:: pycon+sql - import sqlalchemy - from sqlalchemy.orm import DeclarativeBase + >>> from sqlalchemy.schema import CreateTable + >>> print(CreateTable(SomeClass.__table__)) + {printsql}CREATE TABLE some_table ( + short_name VARCHAR(30) NOT NULL, + long_name VARCHAR(50) NOT NULL, + num_value NUMERIC(12, 4) NOT NULL, + short_num_value NUMERIC(6, 2) NOT NULL, + PRIMARY KEY (short_name) + ) +While variety in linking ``Annotated`` types to different SQL types grants +us a wide degree of flexibility, the next section illustrates a second +way in which ``Annotated`` may be used with Declarative that is even +more open ended. - class Base(DeclarativeBase): - type_annotation_map = { - enum.Enum: sqlalchemy.Enum(enum.Enum), - typing.Literal: sqlalchemy.Enum(enum.Enum), - } -The resolution logic within Declarative is able to resolve subclasses -of ``enum.Enum`` as well as instances of ``typing.Literal`` to match the -``enum.Enum`` or ``typing.Literal`` entry in the -:paramref:`_orm.registry.type_annotation_map` dictionary. The :class:`.Enum` -SQL type then knows how to produce a configured version of itself with the -appropriate settings, including default string length. If a ``typing.Literal`` -that does not consist of only string values is passed, an informative -error is raised. +.. _orm_declarative_mapped_column_pep593: -``typing.TypeAliasType`` can also be used to create enums, by assigning them -to a ``typing.Literal`` of strings:: +Mapping Whole Column Declarations to Python Types +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - from typing import Literal - type Status = Literal["on", "off", "unknown"] +The previous section illustrated using :pep:`593` ``Annotated`` type +instances as keys within the :paramref:`_orm.registry.type_annotation_map` +dictionary. In this form, the :func:`_orm.mapped_column` construct does not +actually look inside the ``Annotated`` object itself, it's instead +used only as a dictionary key. However, Declarative also has the ability to extract +an entire pre-established :func:`_orm.mapped_column` construct from +an ``Annotated`` object directly. Using this form, we can define not only +different varieties of SQL datatypes linked to Python types without using +the :paramref:`_orm.registry.type_annotation_map` dictionary, we can also +set up any number of arguments such as nullability, column defaults, +and constraints in a reusable fashion. 
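In its smallest form, the idea looks like the following sketch (hypothetical and
not taken from the patch; ``text_not_null`` and ``Document`` are illustrative
names only), with a fuller, more realistic example following below::

    from typing_extensions import Annotated

    from sqlalchemy import Text
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    # a reusable "column recipe"; Declarative copies the embedded
    # mapped_column() into each attribute annotated with it
    text_not_null = Annotated[str, mapped_column(Text, nullable=False)]


    class Base(DeclarativeBase):
        pass


    class Document(Base):
        __tablename__ = "document"

        id: Mapped[int] = mapped_column(primary_key=True)
        body: Mapped[text_not_null]
        summary: Mapped[text_not_null]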
-Since this is a ``typing.TypeAliasType``, it represents a unique type object, -so it must be placed in the ``type_annotation_map`` for it to be looked up -successfully, keyed to the :class:`.Enum` type as follows:: +A set of ORM models will usually have some kind of primary +key style that is common to all mapped classes. There also may be +common column configurations such as timestamps with defaults and other fields of +pre-established sizes and configurations. We can compose these configurations +into :func:`_orm.mapped_column` instances that we then bundle directly into +instances of ``Annotated``, which are then re-used in any number of class +declarations. Declarative will unpack an ``Annotated`` object +when provided in this manner, skipping over any other directives that don't +apply to SQLAlchemy and searching only for SQLAlchemy ORM constructs. - import enum - import sqlalchemy +The example below illustrates a variety of pre-configured field types used +in this way, where we define ``intpk`` that represents an :class:`.Integer` primary +key column, ``timestamp`` that represents a :class:`.DateTime` type +which will use ``CURRENT_TIMESTAMP`` as a DDL level column default, +and ``required_name`` which is a :class:`.String` of length 30 that's +``NOT NULL``:: + import datetime - class Base(DeclarativeBase): - type_annotation_map = {Status: sqlalchemy.Enum(enum.Enum)} + from typing_extensions import Annotated -Since SQLAlchemy supports mapping different ``typing.TypeAliasType`` -objects that are otherwise structurally equivalent individually, -these must be present in ``type_annotation_map`` to avoid ambiguity. + from sqlalchemy import func + from sqlalchemy import String + from sqlalchemy.orm import mapped_column -Native Enums and Naming -+++++++++++++++++++++++ -The :paramref:`.sqltypes.Enum.native_enum` parameter refers to if the -:class:`.sqltypes.Enum` datatype should create a so-called "native" -enum, which on MySQL/MariaDB is the ``ENUM`` datatype and on PostgreSQL is -a new ``TYPE`` object created by ``CREATE TYPE``, or a "non-native" enum, -which means that ``VARCHAR`` will be used to create the datatype. For -backends other than MySQL/MariaDB or PostgreSQL, ``VARCHAR`` is used in -all cases (third party dialects may have their own behaviors). + intpk = Annotated[int, mapped_column(primary_key=True)] + timestamp = Annotated[ + datetime.datetime, + mapped_column(nullable=False, server_default=func.CURRENT_TIMESTAMP()), + ] + required_name = Annotated[str, mapped_column(String(30), nullable=False)] -Because PostgreSQL's ``CREATE TYPE`` requires that there's an explicit name -for the type to be created, special fallback logic exists when working -with implicitly generated :class:`.sqltypes.Enum` without specifying an -explicit :class:`.sqltypes.Enum` datatype within a mapping: +The above ``Annotated`` objects can then be used directly within +:class:`_orm.Mapped`, where the pre-configured :func:`_orm.mapped_column` +constructs will be extracted and copied to a new instance that will be +specific to each attribute:: -1. If the :class:`.sqltypes.Enum` is linked to an ``enum.Enum`` object, - the :paramref:`.sqltypes.Enum.native_enum` parameter defaults to - ``True`` and the name of the enum will be taken from the name of the - ``enum.Enum`` datatype. The PostgreSQL backend will assume ``CREATE TYPE`` - with this name. -2. 
If the :class:`.sqltypes.Enum` is linked to a ``typing.Literal`` object, - the :paramref:`.sqltypes.Enum.native_enum` parameter defaults to - ``False``; no name is generated and ``VARCHAR`` is assumed. + class Base(DeclarativeBase): + pass -To use ``typing.Literal`` with a PostgreSQL ``CREATE TYPE`` type, an -explicit :class:`.sqltypes.Enum` must be used, either within the -type map:: - import enum - import typing + class SomeClass(Base): + __tablename__ = "some_table" - import sqlalchemy - from sqlalchemy.orm import DeclarativeBase + id: Mapped[intpk] + name: Mapped[required_name] + created_at: Mapped[timestamp] - Status = Literal["pending", "received", "completed"] +``CREATE TABLE`` for our above mapping looks like: +.. sourcecode:: pycon+sql - class Base(DeclarativeBase): - type_annotation_map = { - Status: sqlalchemy.Enum("pending", "received", "completed", name="status_enum"), - } + >>> from sqlalchemy.schema import CreateTable + >>> print(CreateTable(SomeClass.__table__)) + {printsql}CREATE TABLE some_table ( + id INTEGER NOT NULL, + name VARCHAR(30) NOT NULL, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, + PRIMARY KEY (id) + ) -Or alternatively within :func:`_orm.mapped_column`:: +When using ``Annotated`` types in this way, the configuration of the type +may also be affected on a per-attribute basis. For the types in the above +example that feature explicit use of :paramref:`_orm.mapped_column.nullable`, +we can apply the ``Optional[]`` generic modifier to any of our types so that +the field is optional or not at the Python level, which will be independent +of the ``NULL`` / ``NOT NULL`` setting that takes place in the database:: - import enum - import typing + from typing_extensions import Annotated + + import datetime + from typing import Optional - import sqlalchemy from sqlalchemy.orm import DeclarativeBase - Status = Literal["pending", "received", "completed"] + timestamp = Annotated[ + datetime.datetime, + mapped_column(nullable=False), + ] class Base(DeclarativeBase): @@ -1052,378 +1118,365 @@ Or alternatively within :func:`_orm.mapped_column`:: class SomeClass(Base): - __tablename__ = "some_table" + # ... - id: Mapped[int] = mapped_column(primary_key=True) - status: Mapped[Status] = mapped_column( - sqlalchemy.Enum("pending", "received", "completed", name="status_enum") - ) + # pep-484 type will be Optional, but column will be + # NOT NULL + created_at: Mapped[Optional[timestamp]] -Altering the Configuration of the Default Enum -+++++++++++++++++++++++++++++++++++++++++++++++ +The :func:`_orm.mapped_column` construct is also reconciled with an explicitly +passed :func:`_orm.mapped_column` construct, whose arguments will take precedence +over those of the ``Annotated`` construct. Below we add a :class:`.ForeignKey` +constraint to our integer primary key and also use an alternate server +default for the ``created_at`` column:: -In order to modify the fixed configuration of the :class:`.enum.Enum` datatype -that's generated implicitly, specify new entries in the -:paramref:`_orm.registry.type_annotation_map`, indicating additional arguments. 
-For example, to use "non native enumerations" unconditionally, the -:paramref:`.Enum.native_enum` parameter may be set to False for all types:: + import datetime - import enum - import typing - import sqlalchemy + from typing_extensions import Annotated + + from sqlalchemy import ForeignKey + from sqlalchemy import func from sqlalchemy.orm import DeclarativeBase + from sqlalchemy.orm import Mapped + from sqlalchemy.orm import mapped_column + from sqlalchemy.schema import CreateTable + + intpk = Annotated[int, mapped_column(primary_key=True)] + timestamp = Annotated[ + datetime.datetime, + mapped_column(nullable=False, server_default=func.CURRENT_TIMESTAMP()), + ] class Base(DeclarativeBase): - type_annotation_map = { - enum.Enum: sqlalchemy.Enum(enum.Enum, native_enum=False), - typing.Literal: sqlalchemy.Enum(enum.Enum, native_enum=False), - } + pass -.. versionchanged:: 2.0.1 Implemented support for overriding parameters - such as :paramref:`_sqltypes.Enum.native_enum` within the - :class:`_sqltypes.Enum` datatype when establishing the - :paramref:`_orm.registry.type_annotation_map`. Previously, this - functionality was not working. -To use a specific configuration for a specific ``enum.Enum`` subtype, such -as setting the string length to 50 when using the example ``Status`` -datatype:: + class Parent(Base): + __tablename__ = "parent" - import enum - import sqlalchemy - from sqlalchemy.orm import DeclarativeBase + id: Mapped[intpk] - class Status(enum.Enum): - PENDING = "pending" - RECEIVED = "received" - COMPLETED = "completed" + class SomeClass(Base): + __tablename__ = "some_table" + # add ForeignKey to mapped_column(Integer, primary_key=True) + id: Mapped[intpk] = mapped_column(ForeignKey("parent.id")) - class Base(DeclarativeBase): - type_annotation_map = { - Status: sqlalchemy.Enum(Status, length=50, native_enum=False) - } + # change server default from CURRENT_TIMESTAMP to UTC_TIMESTAMP + created_at: Mapped[timestamp] = mapped_column(server_default=func.UTC_TIMESTAMP()) -By default :class:`_sqltypes.Enum` that are automatically generated are not -associated with the :class:`_sql.MetaData` instance used by the ``Base``, so if -the metadata defines a schema it will not be automatically associated with the -enum. To automatically associate the enum with the schema in the metadata or -table they belong to the :paramref:`_sqltypes.Enum.inherit_schema` can be set:: +The CREATE TABLE statement illustrates these per-attribute settings, +adding a ``FOREIGN KEY`` constraint as well as substituting +``UTC_TIMESTAMP`` for ``CURRENT_TIMESTAMP``: - from enum import Enum - import sqlalchemy as sa - from sqlalchemy.orm import DeclarativeBase +.. sourcecode:: pycon+sql + >>> from sqlalchemy.schema import CreateTable + >>> print(CreateTable(SomeClass.__table__)) + {printsql}CREATE TABLE some_table ( + id INTEGER NOT NULL, + created_at DATETIME DEFAULT UTC_TIMESTAMP() NOT NULL, + PRIMARY KEY (id), + FOREIGN KEY(id) REFERENCES parent (id) + ) - class Base(DeclarativeBase): - metadata = sa.MetaData(schema="my_schema") - type_annotation_map = {Enum: sa.Enum(Enum, inherit_schema=True)} +.. note:: The feature of :func:`_orm.mapped_column` just described, where + a fully constructed set of column arguments may be indicated using + :pep:`593` ``Annotated`` objects that contain a "template" + :func:`_orm.mapped_column` object to be copied into the attribute, is + currently not implemented for other ORM constructs such as + :func:`_orm.relationship` and :func:`_orm.composite`. 
While this functionality + is in theory possible, for the moment attempting to use ``Annotated`` + to indicate further arguments for :func:`_orm.relationship` and similar + will raise a ``NotImplementedError`` exception at runtime, but + may be implemented in future releases. -Linking Specific ``enum.Enum`` or ``typing.Literal`` to other datatypes -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. _orm_declarative_mapped_column_enums: -The above examples feature the use of an :class:`_sqltypes.Enum` that is -automatically configuring itself to the arguments / attributes present on -an ``enum.Enum`` or ``typing.Literal`` type object. For use cases where -specific kinds of ``enum.Enum`` or ``typing.Literal`` should be linked to -other types, these specific types may be placed in the type map also. -In the example below, an entry for ``Literal[]`` that contains non-string -types is linked to the :class:`_sqltypes.JSON` datatype:: +Using Python ``Enum`` or pep-586 ``Literal`` types in the type map +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - from typing import Literal +.. versionadded:: 2.0.0b4 - Added ``Enum`` support - from sqlalchemy import JSON - from sqlalchemy.orm import DeclarativeBase +.. versionadded:: 2.0.1 - Added ``Literal`` support - my_literal = Literal[0, 1, True, False, "true", "false"] +User-defined Python types which derive from the Python built-in ``enum.Enum`` +as well as the ``typing.Literal`` +class are automatically linked to the SQLAlchemy :class:`.Enum` datatype +when used in an ORM declarative mapping. The example below uses +a custom ``enum.Enum`` within the ``Mapped[]`` constructor:: + import enum - class Base(DeclarativeBase): - type_annotation_map = {my_literal: JSON} + from sqlalchemy.orm import DeclarativeBase + from sqlalchemy.orm import Mapped + from sqlalchemy.orm import mapped_column -In the above configuration, the ``my_literal`` datatype will resolve to a -:class:`._sqltypes.JSON` instance. Other ``Literal`` variants will continue -to resolve to :class:`_sqltypes.Enum` datatypes. + class Base(DeclarativeBase): + pass -Dataclass features in ``mapped_column()`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The :func:`_orm.mapped_column` construct integrates with SQLAlchemy's -"native dataclasses" feature, discussed at -:ref:`orm_declarative_native_dataclasses`. See that section for current -background on additional directives supported by :func:`_orm.mapped_column`. + class Status(enum.Enum): + PENDING = "pending" + RECEIVED = "received" + COMPLETED = "completed" + class SomeClass(Base): + __tablename__ = "some_table" -.. _orm_declarative_metadata: + id: Mapped[int] = mapped_column(primary_key=True) + status: Mapped[Status] -Accessing Table and Metadata -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +In the above example, the mapped attribute ``SomeClass.status`` will be +linked to a :class:`.Column` with the datatype of ``Enum(Status)``. +We can see this for example in the CREATE TABLE output for the PostgreSQL +database: -A declaratively mapped class will always include an attribute called -``__table__``; when the above configuration using ``__tablename__`` is -complete, the declarative process makes the :class:`_schema.Table` -available via the ``__table__`` attribute:: +.. 
sourcecode:: sql + CREATE TYPE status AS ENUM ('PENDING', 'RECEIVED', 'COMPLETED') - # access the Table - user_table = User.__table__ + CREATE TABLE some_table ( + id SERIAL NOT NULL, + status status NOT NULL, + PRIMARY KEY (id) + ) -The above table is ultimately the same one that corresponds to the -:attr:`_orm.Mapper.local_table` attribute, which we can see through the -:ref:`runtime inspection system `:: +In a similar way, ``typing.Literal`` may be used instead, using +a ``typing.Literal`` that consists of all strings:: - from sqlalchemy import inspect - user_table = inspect(User).local_table + from typing import Literal -The :class:`_schema.MetaData` collection associated with both the declarative -:class:`_orm.registry` as well as the base class is frequently necessary in -order to run DDL operations such as CREATE, as well as in use with migration -tools such as Alembic. This object is available via the ``.metadata`` -attribute of :class:`_orm.registry` as well as the declarative base class. -Below, for a small script we may wish to emit a CREATE for all tables against a -SQLite database:: + from sqlalchemy.orm import DeclarativeBase + from sqlalchemy.orm import Mapped + from sqlalchemy.orm import mapped_column - engine = create_engine("sqlite://") - Base.metadata.create_all(engine) + class Base(DeclarativeBase): + pass -.. _orm_declarative_table_configuration: -Declarative Table Configuration -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Status = Literal["pending", "received", "completed"] -When using Declarative Table configuration with the ``__tablename__`` -declarative class attribute, additional arguments to be supplied to the -:class:`_schema.Table` constructor should be provided using the -``__table_args__`` declarative class attribute. -This attribute accommodates both positional as well as keyword -arguments that are normally sent to the -:class:`_schema.Table` constructor. -The attribute can be specified in one of two forms. One is as a -dictionary:: + class SomeClass(Base): + __tablename__ = "some_table" - class MyClass(Base): - __tablename__ = "sometable" - __table_args__ = {"mysql_engine": "InnoDB"} + id: Mapped[int] = mapped_column(primary_key=True) + status: Mapped[Status] -The other, a tuple, where each argument is positional -(usually constraints):: +The entries used in :paramref:`_orm.registry.type_annotation_map` link the base +``enum.Enum`` Python type as well as the ``typing.Literal`` type to the +SQLAlchemy :class:`.Enum` SQL type, using a special form which indicates to the +:class:`.Enum` datatype that it should automatically configure itself against +an arbitrary enumerated type. This configuration, which is implicit by default, +would be indicated explicitly as:: - class MyClass(Base): - __tablename__ = "sometable" - __table_args__ = ( - ForeignKeyConstraint(["id"], ["remote_table.id"]), - UniqueConstraint("foo"), - ) + import enum + import typing -Keyword arguments can be specified with the above form by -specifying the last argument as a dictionary:: + import sqlalchemy + from sqlalchemy.orm import DeclarativeBase - class MyClass(Base): - __tablename__ = "sometable" - __table_args__ = ( - ForeignKeyConstraint(["id"], ["remote_table.id"]), - UniqueConstraint("foo"), - {"autoload": True}, - ) -A class may also specify the ``__table_args__`` declarative attribute, -as well as the ``__tablename__`` attribute, in a dynamic style using the -:func:`_orm.declared_attr` method decorator. See -:ref:`orm_mixins_toplevel` for background. 
+ class Base(DeclarativeBase): + type_annotation_map = { + enum.Enum: sqlalchemy.Enum(enum.Enum), + typing.Literal: sqlalchemy.Enum(enum.Enum), + } -.. _orm_declarative_table_schema_name: +The resolution logic within Declarative is able to resolve subclasses +of ``enum.Enum`` as well as instances of ``typing.Literal`` to match the +``enum.Enum`` or ``typing.Literal`` entry in the +:paramref:`_orm.registry.type_annotation_map` dictionary. The :class:`.Enum` +SQL type then knows how to produce a configured version of itself with the +appropriate settings, including default string length. If a ``typing.Literal`` +that does not consist of only string values is passed, an informative +error is raised. -Explicit Schema Name with Declarative Table -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +``typing.TypeAliasType`` can also be used to create enums, by assigning them +to a ``typing.Literal`` of strings:: -The schema name for a :class:`_schema.Table` as documented at -:ref:`schema_table_schema_name` is applied to an individual :class:`_schema.Table` -using the :paramref:`_schema.Table.schema` argument. When using Declarative -tables, this option is passed like any other to the ``__table_args__`` -dictionary:: + from typing import Literal - from sqlalchemy.orm import DeclarativeBase + type Status = Literal["on", "off", "unknown"] +Since this is a ``typing.TypeAliasType``, it represents a unique type object, +so it must be placed in the ``type_annotation_map`` for it to be looked up +successfully, keyed to the :class:`.Enum` type as follows:: - class Base(DeclarativeBase): - pass + import enum + import sqlalchemy - class MyClass(Base): - __tablename__ = "sometable" - __table_args__ = {"schema": "some_schema"} + class Base(DeclarativeBase): + type_annotation_map = {Status: sqlalchemy.Enum(enum.Enum)} -The schema name can also be applied to all :class:`_schema.Table` objects -globally by using the :paramref:`_schema.MetaData.schema` parameter documented -at :ref:`schema_metadata_schema_name`. The :class:`_schema.MetaData` object -may be constructed separately and associated with a :class:`_orm.DeclarativeBase` -subclass by assigning to the ``metadata`` attribute directly:: +Since SQLAlchemy supports mapping different ``typing.TypeAliasType`` +objects that are otherwise structurally equivalent individually, +these must be present in ``type_annotation_map`` to avoid ambiguity. - from sqlalchemy import MetaData - from sqlalchemy.orm import DeclarativeBase +Native Enums and Naming +~~~~~~~~~~~~~~~~~~~~~~~~ - metadata_obj = MetaData(schema="some_schema") +The :paramref:`.sqltypes.Enum.native_enum` parameter refers to if the +:class:`.sqltypes.Enum` datatype should create a so-called "native" +enum, which on MySQL/MariaDB is the ``ENUM`` datatype and on PostgreSQL is +a new ``TYPE`` object created by ``CREATE TYPE``, or a "non-native" enum, +which means that ``VARCHAR`` will be used to create the datatype. For +backends other than MySQL/MariaDB or PostgreSQL, ``VARCHAR`` is used in +all cases (third party dialects may have their own behaviors). +Because PostgreSQL's ``CREATE TYPE`` requires that there's an explicit name +for the type to be created, special fallback logic exists when working +with implicitly generated :class:`.sqltypes.Enum` without specifying an +explicit :class:`.sqltypes.Enum` datatype within a mapping: - class Base(DeclarativeBase): - metadata = metadata_obj +1. 
If the :class:`.sqltypes.Enum` is linked to an ``enum.Enum`` object, + the :paramref:`.sqltypes.Enum.native_enum` parameter defaults to + ``True`` and the name of the enum will be taken from the name of the + ``enum.Enum`` datatype. The PostgreSQL backend will assume ``CREATE TYPE`` + with this name. +2. If the :class:`.sqltypes.Enum` is linked to a ``typing.Literal`` object, + the :paramref:`.sqltypes.Enum.native_enum` parameter defaults to + ``False``; no name is generated and ``VARCHAR`` is assumed. +To use ``typing.Literal`` with a PostgreSQL ``CREATE TYPE`` type, an +explicit :class:`.sqltypes.Enum` must be used, either within the +type map:: - class MyClass(Base): - # will use "some_schema" by default - __tablename__ = "sometable" + import enum + import typing -.. seealso:: + import sqlalchemy + from sqlalchemy.orm import DeclarativeBase - :ref:`schema_table_schema_name` - in the :ref:`metadata_toplevel` documentation. + Status = Literal["pending", "received", "completed"] -.. _orm_declarative_column_options: -Setting Load and Persistence Options for Declarative Mapped Columns -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + class Base(DeclarativeBase): + type_annotation_map = { + Status: sqlalchemy.Enum("pending", "received", "completed", name="status_enum"), + } -The :func:`_orm.mapped_column` construct accepts additional ORM-specific -arguments that affect how the generated :class:`_schema.Column` is -mapped, affecting its load and persistence-time behavior. Options -that are commonly used include: +Or alternatively within :func:`_orm.mapped_column`:: -* **deferred column loading** - The :paramref:`_orm.mapped_column.deferred` - boolean establishes the :class:`_schema.Column` using - :ref:`deferred column loading ` by default. In the example - below, the ``User.bio`` column will not be loaded by default, but only - when accessed:: + import enum + import typing - class User(Base): - __tablename__ = "user" + import sqlalchemy + from sqlalchemy.orm import DeclarativeBase - id: Mapped[int] = mapped_column(primary_key=True) - name: Mapped[str] - bio: Mapped[str] = mapped_column(Text, deferred=True) + Status = Literal["pending", "received", "completed"] - .. seealso:: - :ref:`orm_queryguide_column_deferral` - full description of deferred column loading + class Base(DeclarativeBase): + pass -* **active history** - The :paramref:`_orm.mapped_column.active_history` - ensures that upon change of value for the attribute, the previous value - will have been loaded and made part of the :attr:`.AttributeState.history` - collection when inspecting the history of the attribute. This may incur - additional SQL statements:: - class User(Base): - __tablename__ = "user" + class SomeClass(Base): + __tablename__ = "some_table" id: Mapped[int] = mapped_column(primary_key=True) - important_identifier: Mapped[str] = mapped_column(active_history=True) - -See the docstring for :func:`_orm.mapped_column` for a list of supported -parameters. - -.. seealso:: + status: Mapped[Status] = mapped_column( + sqlalchemy.Enum("pending", "received", "completed", name="status_enum") + ) - :ref:`orm_imperative_table_column_options` - describes using - :func:`_orm.column_property` and :func:`_orm.deferred` for use with - Imperative Table configuration +Altering the Configuration of the Default Enum +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. 
_mapper_column_distinct_names: +In order to modify the fixed configuration of the :class:`.enum.Enum` datatype +that's generated implicitly, specify new entries in the +:paramref:`_orm.registry.type_annotation_map`, indicating additional arguments. +For example, to use "non native enumerations" unconditionally, the +:paramref:`.Enum.native_enum` parameter may be set to False for all types:: -.. _orm_declarative_table_column_naming: + import enum + import typing + import sqlalchemy + from sqlalchemy.orm import DeclarativeBase -Naming Declarative Mapped Columns Explicitly -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -All of the examples thus far feature the :func:`_orm.mapped_column` construct -linked to an ORM mapped attribute, where the Python attribute name given -to the :func:`_orm.mapped_column` is also that of the column as we see in -CREATE TABLE statements as well as queries. The name for a column as -expressed in SQL may be indicated by passing the string positional argument -:paramref:`_orm.mapped_column.__name` as the first positional argument. -In the example below, the ``User`` class is mapped with alternate names -given to the columns themselves:: + class Base(DeclarativeBase): + type_annotation_map = { + enum.Enum: sqlalchemy.Enum(enum.Enum, native_enum=False), + typing.Literal: sqlalchemy.Enum(enum.Enum, native_enum=False), + } - class User(Base): - __tablename__ = "user" +.. versionchanged:: 2.0.1 Implemented support for overriding parameters + such as :paramref:`_sqltypes.Enum.native_enum` within the + :class:`_sqltypes.Enum` datatype when establishing the + :paramref:`_orm.registry.type_annotation_map`. Previously, this + functionality was not working. - id: Mapped[int] = mapped_column("user_id", primary_key=True) - name: Mapped[str] = mapped_column("user_name") +To use a specific configuration for a specific ``enum.Enum`` subtype, such +as setting the string length to 50 when using the example ``Status`` +datatype:: -Where above ``User.id`` resolves to a column named ``user_id`` -and ``User.name`` resolves to a column named ``user_name``. We -may write a :func:`_sql.select` statement using our Python attribute names -and will see the SQL names generated: + import enum + import sqlalchemy + from sqlalchemy.orm import DeclarativeBase -.. sourcecode:: pycon+sql - >>> from sqlalchemy import select - >>> print(select(User.id, User.name).where(User.name == "x")) - {printsql}SELECT "user".user_id, "user".user_name - FROM "user" - WHERE "user".user_name = :user_name_1 + class Status(enum.Enum): + PENDING = "pending" + RECEIVED = "received" + COMPLETED = "completed" -.. seealso:: + class Base(DeclarativeBase): + type_annotation_map = { + Status: sqlalchemy.Enum(Status, length=50, native_enum=False) + } - :ref:`orm_imperative_table_column_naming` - applies to Imperative Table +By default :class:`_sqltypes.Enum` that are automatically generated are not +associated with the :class:`_sql.MetaData` instance used by the ``Base``, so if +the metadata defines a schema it will not be automatically associated with the +enum. To automatically associate the enum with the schema in the metadata or +table they belong to the :paramref:`_sqltypes.Enum.inherit_schema` can be set:: -.. 
_orm_declarative_table_adding_columns: + from enum import Enum + import sqlalchemy as sa + from sqlalchemy.orm import DeclarativeBase -Appending additional columns to an existing Declarative mapped class -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -A declarative table configuration allows the addition of new -:class:`_schema.Column` objects to an existing mapping after the :class:`.Table` -metadata has already been generated. + class Base(DeclarativeBase): + metadata = sa.MetaData(schema="my_schema") + type_annotation_map = {Enum: sa.Enum(Enum, inherit_schema=True)} -For a declarative class that is declared using a declarative base class, -the underlying metaclass :class:`.DeclarativeMeta` includes a ``__setattr__()`` -method that will intercept additional :func:`_orm.mapped_column` or Core -:class:`.Column` objects and -add them to both the :class:`.Table` using :meth:`.Table.append_column` -as well as to the existing :class:`.Mapper` using :meth:`.Mapper.add_property`:: +Linking Specific ``enum.Enum`` or ``typing.Literal`` to other datatypes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - MyClass.some_new_column = mapped_column(String) +The above examples feature the use of an :class:`_sqltypes.Enum` that is +automatically configuring itself to the arguments / attributes present on +an ``enum.Enum`` or ``typing.Literal`` type object. For use cases where +specific kinds of ``enum.Enum`` or ``typing.Literal`` should be linked to +other types, these specific types may be placed in the type map also. +In the example below, an entry for ``Literal[]`` that contains non-string +types is linked to the :class:`_sqltypes.JSON` datatype:: -Using core :class:`_schema.Column`:: - MyClass.some_new_column = Column(String) + from typing import Literal -All arguments are supported including an alternate name, such as -``MyClass.some_new_column = mapped_column("some_name", String)``. However, -the SQL type must be passed to the :func:`_orm.mapped_column` or -:class:`_schema.Column` object explicitly, as in the above examples where -the :class:`_sqltypes.String` type is passed. There's no capability for -the :class:`_orm.Mapped` annotation type to take part in the operation. + from sqlalchemy import JSON + from sqlalchemy.orm import DeclarativeBase -Additional :class:`_schema.Column` objects may also be added to a mapping -in the specific circumstance of using single table inheritance, where -additional columns are present on mapped subclasses that have -no :class:`.Table` of their own. This is illustrated in the section -:ref:`single_inheritance`. + my_literal = Literal[0, 1, True, False, "true", "false"] -.. seealso:: - :ref:`orm_declarative_table_adding_relationship` - similar examples for :func:`_orm.relationship` + class Base(DeclarativeBase): + type_annotation_map = {my_literal: JSON} -.. note:: Assignment of mapped - properties to an already mapped class will only - function correctly if the "declarative base" class is used, meaning - the user-defined subclass of :class:`_orm.DeclarativeBase` or the - dynamically generated class returned by :func:`_orm.declarative_base` - or :meth:`_orm.registry.generate_base`. This "base" class includes - a Python metaclass which implements a special ``__setattr__()`` method - that intercepts these operations. +In the above configuration, the ``my_literal`` datatype will resolve to a +:class:`._sqltypes.JSON` instance. Other ``Literal`` variants will continue +to resolve to :class:`_sqltypes.Enum` datatypes. 
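For example, a mapped class might consume the ``my_literal`` alias under the
above configuration; the ``Flag`` class and its columns shown here are a
hypothetical illustration, not part of the patch::

    from typing import Literal

    from sqlalchemy import JSON
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    my_literal = Literal[0, 1, True, False, "true", "false"]


    class Base(DeclarativeBase):
        type_annotation_map = {my_literal: JSON}


    class Flag(Base):
        __tablename__ = "flag"

        id: Mapped[int] = mapped_column(primary_key=True)

        # the annotation is looked up in type_annotation_map and
        # resolves to the JSON datatype rather than Enum
        value: Mapped[my_literal] = mapped_column()

With this mapping, ``Flag.__table__.c.value`` carries the JSON type, while other
``Literal`` annotations continue to resolve to ``Enum`` as described above.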
- Runtime assignment of class-mapped attributes to a mapped class will **not** work - if the class is mapped using decorators like :meth:`_orm.registry.mapped` - or imperative functions like :meth:`_orm.registry.map_imperatively`. .. _orm_imperative_table_configuration: From 7a91b6645109759a09736b7b298e3dbf11f5c139 Mon Sep 17 00:00:00 2001 From: Justine Krejcha Date: Tue, 6 May 2025 15:18:02 -0400 Subject: [PATCH 517/544] typing: pg: type NamedType create/drops (fixes #12557) Type the `create` and `drop` functions for `NamedType`s Also partially type the SchemaType create/drop functions more generally One change to this is that the default parameter of `None` is removed. It doesn't work and will fail with a `AttributeError` at runtime since it immediately tries to access a property of `None` which doesn't exist. Fixes #12557 This pull request is: - [X] A documentation / typographical / small typing error fix - Good to go, no issue or tests are needed - [X] A short code fix - please include the issue number, and create an issue if none exists, which must include a complete example of the issue. one line code fixes without an issue and demonstration will not be accepted. - Please include: `Fixes: #` in the commit message - please include tests. one line code fixes without tests will not be accepted. - [ ] A new feature implementation - please include the issue number, and create an issue if none exists, which must include a complete example of how the feature would look. - Please include: `Fixes: #` in the commit message - please include tests. **Have a nice day!** Closes: #12558 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12558 Pull-request-sha: 75c8d81bfb68f45299a9448d45dda446532205d3 Change-Id: I173771d365f34f54ab474b9661e1cdc70cc4de84 (cherry picked from commit b4d7bf7a2f74db73e12f47ca4cb45666bf08439e) --- .../dialects/postgresql/named_types.py | 55 +++++++++++++++---- lib/sqlalchemy/engine/base.py | 17 +++--- lib/sqlalchemy/engine/mock.py | 13 +++-- lib/sqlalchemy/schema.py | 1 + lib/sqlalchemy/sql/_typing.py | 5 ++ lib/sqlalchemy/sql/base.py | 13 ++++- lib/sqlalchemy/sql/ddl.py | 3 +- lib/sqlalchemy/sql/schema.py | 7 +-- lib/sqlalchemy/sql/sqltypes.py | 29 +++++++--- test/sql/test_types.py | 1 + 10 files changed, 105 insertions(+), 39 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/named_types.py b/lib/sqlalchemy/dialects/postgresql/named_types.py index e1b8e84ce85..c9d6e5844cf 100644 --- a/lib/sqlalchemy/dialects/postgresql/named_types.py +++ b/lib/sqlalchemy/dialects/postgresql/named_types.py @@ -7,7 +7,9 @@ # mypy: ignore-errors from __future__ import annotations +from types import ModuleType from typing import Any +from typing import Dict from typing import Optional from typing import Type from typing import TYPE_CHECKING @@ -25,10 +27,11 @@ from ...sql.ddl import InvokeDropDDLBase if TYPE_CHECKING: + from ...sql._typing import _CreateDropBind from ...sql._typing import _TypeEngineArgument -class NamedType(sqltypes.TypeEngine): +class NamedType(schema.SchemaVisitable, sqltypes.TypeEngine): """Base for named types.""" __abstract__ = True @@ -36,7 +39,9 @@ class NamedType(sqltypes.TypeEngine): DDLDropper: Type[NamedTypeDropper] create_type: bool - def create(self, bind, checkfirst=True, **kw): + def create( + self, bind: _CreateDropBind, checkfirst: bool = True, **kw: Any + ) -> None: """Emit ``CREATE`` DDL for this type. 
:param bind: a connectable :class:`_engine.Engine`, @@ -50,7 +55,9 @@ def create(self, bind, checkfirst=True, **kw): """ bind._run_ddl_visitor(self.DDLGenerator, self, checkfirst=checkfirst) - def drop(self, bind, checkfirst=True, **kw): + def drop( + self, bind: _CreateDropBind, checkfirst: bool = True, **kw: Any + ) -> None: """Emit ``DROP`` DDL for this type. :param bind: a connectable :class:`_engine.Engine`, @@ -63,7 +70,9 @@ def drop(self, bind, checkfirst=True, **kw): """ bind._run_ddl_visitor(self.DDLDropper, self, checkfirst=checkfirst) - def _check_for_name_in_memos(self, checkfirst, kw): + def _check_for_name_in_memos( + self, checkfirst: bool, kw: Dict[str, Any] + ) -> bool: """Look in the 'ddl runner' for 'memos', then note our name in that collection. @@ -87,7 +96,13 @@ def _check_for_name_in_memos(self, checkfirst, kw): else: return False - def _on_table_create(self, target, bind, checkfirst=False, **kw): + def _on_table_create( + self, + target: Any, + bind: _CreateDropBind, + checkfirst: bool = False, + **kw: Any, + ) -> None: if ( checkfirst or ( @@ -97,7 +112,13 @@ def _on_table_create(self, target, bind, checkfirst=False, **kw): ) and not self._check_for_name_in_memos(checkfirst, kw): self.create(bind=bind, checkfirst=checkfirst) - def _on_table_drop(self, target, bind, checkfirst=False, **kw): + def _on_table_drop( + self, + target: Any, + bind: _CreateDropBind, + checkfirst: bool = False, + **kw: Any, + ) -> None: if ( not self.metadata and not kw.get("_is_metadata_operation", False) @@ -105,11 +126,23 @@ def _on_table_drop(self, target, bind, checkfirst=False, **kw): ): self.drop(bind=bind, checkfirst=checkfirst) - def _on_metadata_create(self, target, bind, checkfirst=False, **kw): + def _on_metadata_create( + self, + target: Any, + bind: _CreateDropBind, + checkfirst: bool = False, + **kw: Any, + ) -> None: if not self._check_for_name_in_memos(checkfirst, kw): self.create(bind=bind, checkfirst=checkfirst) - def _on_metadata_drop(self, target, bind, checkfirst=False, **kw): + def _on_metadata_drop( + self, + target: Any, + bind: _CreateDropBind, + checkfirst: bool = False, + **kw: Any, + ) -> None: if not self._check_for_name_in_memos(checkfirst, kw): self.drop(bind=bind, checkfirst=checkfirst) @@ -314,7 +347,7 @@ def adapt_emulated_to_native(cls, impl, **kw): return cls(**kw) - def create(self, bind=None, checkfirst=True): + def create(self, bind: _CreateDropBind, checkfirst: bool = True) -> None: """Emit ``CREATE TYPE`` for this :class:`_postgresql.ENUM`. @@ -335,7 +368,7 @@ def create(self, bind=None, checkfirst=True): super().create(bind, checkfirst=checkfirst) - def drop(self, bind=None, checkfirst=True): + def drop(self, bind: _CreateDropBind, checkfirst: bool = True) -> None: """Emit ``DROP TYPE`` for this :class:`_postgresql.ENUM`. 
@@ -355,7 +388,7 @@ def drop(self, bind=None, checkfirst=True): super().drop(bind, checkfirst=checkfirst) - def get_dbapi_type(self, dbapi): + def get_dbapi_type(self, dbapi: ModuleType) -> None: """dont return dbapi.STRING for ENUM in PostgreSQL, since that's a different type""" diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index e9925eb9300..4292ed6d100 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -70,12 +70,11 @@ from ..sql._typing import _InfoType from ..sql.compiler import Compiled from ..sql.ddl import ExecutableDDLElement - from ..sql.ddl import SchemaDropper - from ..sql.ddl import SchemaGenerator + from ..sql.ddl import InvokeDDLBase from ..sql.functions import FunctionElement from ..sql.schema import DefaultGenerator from ..sql.schema import HasSchemaAttr - from ..sql.schema import SchemaItem + from ..sql.schema import SchemaVisitable from ..sql.selectable import TypedReturnsRows @@ -2441,8 +2440,8 @@ def _handle_dbapi_exception_noconnection( def _run_ddl_visitor( self, - visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]], - element: SchemaItem, + visitorcallable: Type[InvokeDDLBase], + element: SchemaVisitable, **kwargs: Any, ) -> None: """run a DDL visitor. @@ -2451,7 +2450,9 @@ def _run_ddl_visitor( options given to the visitor so that "checkfirst" is skipped. """ - visitorcallable(self.dialect, self, **kwargs).traverse_single(element) + visitorcallable( + dialect=self.dialect, connection=self, **kwargs + ).traverse_single(element) class ExceptionContextImpl(ExceptionContext): @@ -3239,8 +3240,8 @@ def begin(self) -> Iterator[Connection]: def _run_ddl_visitor( self, - visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]], - element: SchemaItem, + visitorcallable: Type[InvokeDDLBase], + element: SchemaVisitable, **kwargs: Any, ) -> None: with self.begin() as conn: diff --git a/lib/sqlalchemy/engine/mock.py b/lib/sqlalchemy/engine/mock.py index 08dba5a6456..a96af36ccda 100644 --- a/lib/sqlalchemy/engine/mock.py +++ b/lib/sqlalchemy/engine/mock.py @@ -27,10 +27,9 @@ from .interfaces import Dialect from .url import URL from ..sql.base import Executable - from ..sql.ddl import SchemaDropper - from ..sql.ddl import SchemaGenerator + from ..sql.ddl import InvokeDDLBase from ..sql.schema import HasSchemaAttr - from ..sql.schema import SchemaItem + from ..sql.visitors import Visitable class MockConnection: @@ -53,12 +52,14 @@ def execution_options(self, **kw: Any) -> MockConnection: def _run_ddl_visitor( self, - visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]], - element: SchemaItem, + visitorcallable: Type[InvokeDDLBase], + element: Visitable, **kwargs: Any, ) -> None: kwargs["checkfirst"] = False - visitorcallable(self.dialect, self, **kwargs).traverse_single(element) + visitorcallable( + dialect=self.dialect, connection=self, **kwargs + ).traverse_single(element) def execute( self, diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py index 32adc9bb218..16f7ec37b3c 100644 --- a/lib/sqlalchemy/schema.py +++ b/lib/sqlalchemy/schema.py @@ -65,6 +65,7 @@ from .sql.schema import PrimaryKeyConstraint as PrimaryKeyConstraint from .sql.schema import SchemaConst as SchemaConst from .sql.schema import SchemaItem as SchemaItem +from .sql.schema import SchemaVisitable as SchemaVisitable from .sql.schema import Sequence as Sequence from .sql.schema import Table as Table from .sql.schema import UniqueConstraint as UniqueConstraint diff --git a/lib/sqlalchemy/sql/_typing.py 
b/lib/sqlalchemy/sql/_typing.py index b1af53f7777..8fe86f63748 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -70,7 +70,10 @@ from .sqltypes import TableValueType from .sqltypes import TupleType from .type_api import TypeEngine + from ..engine import Connection from ..engine import Dialect + from ..engine import Engine + from ..engine.mock import MockConnection from ..util.typing import TypeGuard _T = TypeVar("_T", bound=Any) @@ -301,6 +304,8 @@ def dialect(self) -> Dialect: ... _AutoIncrementType = Union[bool, Literal["auto", "ignore_fk"]] +_CreateDropBind = Union["Engine", "Connection", "MockConnection"] + if TYPE_CHECKING: def is_sql_compiler(c: Compiled) -> TypeGuard[SQLCompiler]: ... diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 8b9883ff1c9..102fddd9447 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -1326,8 +1326,19 @@ def _set_parent_with_dispatch( self.dispatch.after_parent_attach(self, parent) +class SchemaVisitable(SchemaEventTarget, visitors.Visitable): + """Base class for elements that are targets of a :class:`.SchemaVisitor`. + + .. versionadded:: 2.0.41 + + """ + + class SchemaVisitor(ClauseVisitor): - """Define the visiting for ``SchemaItem`` objects.""" + """Define the visiting for ``SchemaItem`` and more + generally ``SchemaVisitable`` objects. + + """ __traverse_options__ = {"schema_visitor": True} diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index a81f5f9cbe1..69287d6f215 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -865,8 +865,9 @@ class DropConstraintComment(_CreateDropBase["Constraint"]): class InvokeDDLBase(SchemaVisitor): - def __init__(self, connection): + def __init__(self, connection, **kw): self.connection = connection + assert not kw, f"Unexpected keywords: {kw.keys()}" @contextlib.contextmanager def with_ddl_events(self, target, **kw): diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index d65b2de06ae..68f31de9386 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -68,6 +68,7 @@ from .base import DialectKWArgs from .base import Executable from .base import SchemaEventTarget as SchemaEventTarget +from .base import SchemaVisitable as SchemaVisitable from .coercions import _document_text_coercion from .elements import ClauseElement from .elements import ColumnClause @@ -91,6 +92,7 @@ if typing.TYPE_CHECKING: from ._typing import _AutoIncrementType + from ._typing import _CreateDropBind from ._typing import _DDLColumnArgument from ._typing import _InfoType from ._typing import _TextCoercedExpressionArgument @@ -108,7 +110,6 @@ from ..engine.interfaces import _CoreMultiExecuteParams from ..engine.interfaces import CoreExecuteOptionsParameter from ..engine.interfaces import ExecutionContext - from ..engine.mock import MockConnection from ..engine.reflection import _ReflectionInfo from ..sql.selectable import FromClause @@ -117,8 +118,6 @@ _TAB = TypeVar("_TAB", bound="Table") -_CreateDropBind = Union["Engine", "Connection", "MockConnection"] - _ConstraintNameArgument = Optional[Union[str, _NoneName]] _ServerDefaultArgument = Union[ @@ -212,7 +211,7 @@ def replace( @inspection._self_inspects -class SchemaItem(SchemaEventTarget, visitors.Visitable): +class SchemaItem(SchemaVisitable): """Base class for items that define a database schema.""" __visit_name__ = "schema_item" diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index 
d0d89e73168..5e68da263a2 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -67,6 +67,7 @@ if TYPE_CHECKING: from ._typing import _ColumnExpressionArgument + from ._typing import _CreateDropBind from ._typing import _TypeEngineArgument from .elements import ColumnElement from .operators import OperatorType @@ -1123,21 +1124,23 @@ def adapt( kw.setdefault("_adapted_from", self) return super().adapt(cls, **kw) - def create(self, bind, checkfirst=False): + def create(self, bind: _CreateDropBind, checkfirst: bool = False) -> None: """Issue CREATE DDL for this type, if applicable.""" t = self.dialect_impl(bind.dialect) if isinstance(t, SchemaType) and t.__class__ is not self.__class__: t.create(bind, checkfirst=checkfirst) - def drop(self, bind, checkfirst=False): + def drop(self, bind: _CreateDropBind, checkfirst: bool = False) -> None: """Issue DROP DDL for this type, if applicable.""" t = self.dialect_impl(bind.dialect) if isinstance(t, SchemaType) and t.__class__ is not self.__class__: t.drop(bind, checkfirst=checkfirst) - def _on_table_create(self, target, bind, **kw): + def _on_table_create( + self, target: Any, bind: _CreateDropBind, **kw: Any + ) -> None: if not self._is_impl_for_variant(bind.dialect, kw): return @@ -1145,7 +1148,9 @@ def _on_table_create(self, target, bind, **kw): if isinstance(t, SchemaType) and t.__class__ is not self.__class__: t._on_table_create(target, bind, **kw) - def _on_table_drop(self, target, bind, **kw): + def _on_table_drop( + self, target: Any, bind: _CreateDropBind, **kw: Any + ) -> None: if not self._is_impl_for_variant(bind.dialect, kw): return @@ -1153,7 +1158,9 @@ def _on_table_drop(self, target, bind, **kw): if isinstance(t, SchemaType) and t.__class__ is not self.__class__: t._on_table_drop(target, bind, **kw) - def _on_metadata_create(self, target, bind, **kw): + def _on_metadata_create( + self, target: Any, bind: _CreateDropBind, **kw: Any + ) -> None: if not self._is_impl_for_variant(bind.dialect, kw): return @@ -1161,7 +1168,9 @@ def _on_metadata_create(self, target, bind, **kw): if isinstance(t, SchemaType) and t.__class__ is not self.__class__: t._on_metadata_create(target, bind, **kw) - def _on_metadata_drop(self, target, bind, **kw): + def _on_metadata_drop( + self, target: Any, bind: _CreateDropBind, **kw: Any + ) -> None: if not self._is_impl_for_variant(bind.dialect, kw): return @@ -1169,7 +1178,9 @@ def _on_metadata_drop(self, target, bind, **kw): if isinstance(t, SchemaType) and t.__class__ is not self.__class__: t._on_metadata_drop(target, bind, **kw) - def _is_impl_for_variant(self, dialect, kw): + def _is_impl_for_variant( + self, dialect: Dialect, kw: Dict[str, Any] + ) -> Optional[bool]: variant_mapping = kw.pop("variant_mapping", None) if not variant_mapping: @@ -1186,7 +1197,7 @@ def _is_impl_for_variant(self, dialect, kw): # since PostgreSQL is the only DB that has ARRAY this can only # be integration tested by PG-specific tests - def _we_are_the_impl(typ): + def _we_are_the_impl(typ: SchemaType) -> bool: return ( typ is self or isinstance(typ, ARRAY) @@ -1199,6 +1210,8 @@ def _we_are_the_impl(typ): return True elif dialect.name not in variant_mapping: return _we_are_the_impl(variant_mapping["_default"]) + else: + return None _EnumTupleArg = Union[Sequence[enum.Enum], Sequence[str]] diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 5693ba70260..abea93418c4 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -297,6 +297,7 @@ def test_adapt_method(self, 
is_down_adaption, typ, target_adaptions): "schema", "metadata", "name", + "dispatch", ): continue # assert each value was copied, or that From 34fee7be2d66de7b6cc7355919cd4e3f83b91b88 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 8 May 2025 08:34:21 -0400 Subject: [PATCH 518/544] dont render URL in unparseable URL error message The error message that is emitted when a URL cannot be parsed no longer includes the URL itself within the error message. Fixes: #12579 Change-Id: Icd17bd4fe0930036662b6a4fe0264cb13df04ba7 (cherry picked from commit aaa28f457eaa3f98c417666b4d0ad4d70ccb1ac0) --- doc/build/changelog/unreleased_20/12579.rst | 7 +++++++ lib/sqlalchemy/engine/url.py | 2 +- test/engine/test_parseconnect.py | 7 +++++++ 3 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/12579.rst diff --git a/doc/build/changelog/unreleased_20/12579.rst b/doc/build/changelog/unreleased_20/12579.rst new file mode 100644 index 00000000000..70c619db09c --- /dev/null +++ b/doc/build/changelog/unreleased_20/12579.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, engine + :tickets: 12579 + + The error message that is emitted when a URL cannot be parsed no longer + includes the URL itself within the error message. + diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index bb004f11cd4..20079a6b535 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -920,5 +920,5 @@ def _parse_url(name: str) -> URL: else: raise exc.ArgumentError( - "Could not parse SQLAlchemy URL from string '%s'" % name + "Could not parse SQLAlchemy URL from given URL string" ) diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py index 16b129fd8a3..7c562bf39d1 100644 --- a/test/engine/test_parseconnect.py +++ b/test/engine/test_parseconnect.py @@ -791,6 +791,13 @@ def test_bad_args(self): module=mock_dbapi, ) + def test_cant_parse_str(self): + with expect_raises_message( + exc.ArgumentError, + r"^Could not parse SQLAlchemy URL from given URL string$", + ): + create_engine("notarealurl") + def test_urlattr(self): """test the url attribute on ``Engine``.""" From adef933f8d129382f92c2af81fdace9c93006ab0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 6 Mar 2025 09:12:43 -0500 Subject: [PATCH 519/544] implement pep-649 workarounds, test suite passing for python 3.14 Changes to the test suite to accommodate Python 3.14 as of version 3.14.0b1 Originally this included a major breaking change to how python 3.14 implemented :pep:`649`, however this was resolved by [1]. As of a7, greenlet is skipped due to issues in a7 and later b1 in [2]. 1. the change to rewrite all conditionals in annotation related tests is reverted. 2. test_memusage needed an explicit set_start_method() call so that it can continue to use plain fork 3. unfortunately at the moment greenlet has to be re-disabled for 3.14. 4. Changes to tox overall, remove pysqlcipher which hasn't worked in years, etc. 5. we need to support upcoming typing-extensions also, install the beta 6. 3.14.0a7 introduces major regressions to our runtime typing utilities, unfortunately, it's not clear if these can be resolved 7. 
for 3.14.0b1, we have to vendor get_annotations to work around [3] [1] https://github.com/python/cpython/issues/130881 [2] https://github.com/python-greenlet/greenlet/issues/440 [3] https://github.com/python/cpython/issues/133684 py314: yes Fixes: #12405 References: #12399 Change-Id: I8715d02fae599472dd64a2a46ccf8986239ecd99 --- doc/build/changelog/unreleased_20/12405.rst | 10 ++ lib/sqlalchemy/testing/requirements.py | 46 ++++++ lib/sqlalchemy/util/__init__.py | 1 + lib/sqlalchemy/util/compat.py | 2 + lib/sqlalchemy/util/langhelpers.py | 80 ++++++++++- lib/sqlalchemy/util/typing.py | 14 +- pyproject.toml | 7 + test/aaa_profiling/test_memusage.py | 14 +- test/base/test_concurrency_py3k.py | 2 + test/base/test_typing_utils.py | 150 +++++++++++++------- test/ext/asyncio/test_engine_py3k.py | 16 ++- test/typing/test_overloads.py | 10 +- tox.ini | 28 ++-- 13 files changed, 293 insertions(+), 87 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12405.rst diff --git a/doc/build/changelog/unreleased_20/12405.rst b/doc/build/changelog/unreleased_20/12405.rst new file mode 100644 index 00000000000..f90546ad5ae --- /dev/null +++ b/doc/build/changelog/unreleased_20/12405.rst @@ -0,0 +1,10 @@ +.. change:: + :tags: bug, orm + :tickets: 12405 + + Changes to the test suite to accommodate Python 3.14 and its new + implementation of :pep:`649`, which highly modifies how typing annotations + are interpreted at runtime. Use of the new + ``annotationlib.get_annotations()`` function is enabled when python 3.14 is + present, and many other changes to how pep-484 type objects are interpreted + at runtime are made. diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index a2c3aa531dc..fd64b1ffd43 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -19,6 +19,7 @@ from __future__ import annotations +import os import platform from . import asyncio as _test_asyncio @@ -1498,6 +1499,10 @@ def timing_intensive(self): return config.add_to_marker.timing_intensive + @property + def posix(self): + return exclusions.skip_if(lambda: os.name != "posix") + @property def memory_intensive(self): from . import config @@ -1539,6 +1544,27 @@ def check(config): return exclusions.skip_if(check) + @property + def up_to_date_typealias_type(self): + # this checks a particular quirk found in typing_extensions <=4.12.0 + # using older python versions like 3.10 or 3.9, we use TypeAliasType + # from typing_extensions which does not provide for sufficient + # introspection prior to 4.13.0 + def check(config): + import typing + import typing_extensions + + TypeAliasType = getattr( + typing, "TypeAliasType", typing_extensions.TypeAliasType + ) + TV = typing.TypeVar("TV") + TA_generic = TypeAliasType( # type: ignore + "TA_generic", typing.List[TV], type_params=(TV,) + ) + return hasattr(TA_generic[int], "__value__") + + return exclusions.only_if(check) + @property def python38(self): return exclusions.only_if( @@ -1569,6 +1595,26 @@ def python312(self): lambda: util.py312, "Python 3.12 or above required" ) + @property + def fail_python314b1(self): + return exclusions.fails_if( + lambda: util.compat.py314b1, "Fails as of python 3.14.0b1" + ) + + @property + def not_python314(self): + """This requirement is interim to assist with backporting of + issue #12405. + + SQLAlchemy 2.0 still includes the ``await_fallback()`` method that + makes use of ``asyncio.get_event_loop_policy()``. This is removed + in SQLAlchemy 2.1. 
+ + """ + return exclusions.skip_if( + lambda: util.py314, "Python 3.14 or above not supported" + ) + @property def cpython(self): return exclusions.only_if( diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py index 8353c9a7a35..1ccebc47fce 100644 --- a/lib/sqlalchemy/util/__init__.py +++ b/lib/sqlalchemy/util/__init__.py @@ -65,6 +65,7 @@ from .compat import py311 as py311 from .compat import py312 as py312 from .compat import py313 as py313 +from .compat import py314 as py314 from .compat import py38 as py38 from .compat import py39 as py39 from .compat import pypy as pypy diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py index c8b5e7a2203..2ee47031184 100644 --- a/lib/sqlalchemy/util/compat.py +++ b/lib/sqlalchemy/util/compat.py @@ -32,6 +32,8 @@ from typing import TypeVar +py314b1 = sys.version_info >= (3, 14, 0, "beta", 1) +py314 = sys.version_info >= (3, 14) py313 = sys.version_info >= (3, 13) py312 = sys.version_info >= (3, 12) py311 = sys.version_info >= (3, 11) diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index ae640c5ec28..ebdd8ffa045 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -60,7 +60,85 @@ _HM = TypeVar("_HM", bound="hybridmethod[Any]") -if compat.py310: +if compat.py314: + # vendor a minimal form of get_annotations per + # https://github.com/python/cpython/issues/133684#issuecomment-2863841891 + + from annotationlib import call_annotate_function # type: ignore + from annotationlib import Format + + def _get_and_call_annotate(obj, format): # noqa: A002 + annotate = getattr(obj, "__annotate__", None) + if annotate is not None: + ann = call_annotate_function(annotate, format, owner=obj) + if not isinstance(ann, dict): + raise ValueError(f"{obj!r}.__annotate__ returned a non-dict") + return ann + return None + + # this is ported from py3.13.0a7 + _BASE_GET_ANNOTATIONS = type.__dict__["__annotations__"].__get__ # type: ignore # noqa: E501 + + def _get_dunder_annotations(obj): + if isinstance(obj, type): + try: + ann = _BASE_GET_ANNOTATIONS(obj) + except AttributeError: + # For static types, the descriptor raises AttributeError. + return {} + else: + ann = getattr(obj, "__annotations__", None) + if ann is None: + return {} + + if not isinstance(ann, dict): + raise ValueError( + f"{obj!r}.__annotations__ is neither a dict nor None" + ) + return dict(ann) + + def _vendored_get_annotations( + obj: Any, *, format: Format # noqa: A002 + ) -> Mapping[str, Any]: + """A sparse implementation of annotationlib.get_annotations()""" + + try: + ann = _get_dunder_annotations(obj) + except Exception: + pass + else: + if ann is not None: + return dict(ann) + + # But if __annotations__ threw a NameError, we try calling __annotate__ + ann = _get_and_call_annotate(obj, format) + if ann is None: + # If that didn't work either, we have a very weird object: + # evaluating + # __annotations__ threw NameError and there is no __annotate__. + # In that case, + # we fall back to trying __annotations__ again. + ann = _get_dunder_annotations(obj) + + if ann is None: + if isinstance(obj, type) or callable(obj): + return {} + raise TypeError(f"{obj!r} does not have annotations") + + if not ann: + return {} + + return dict(ann) + + def get_annotations(obj: Any) -> Mapping[str, Any]: + # FORWARDREF has the effect of giving us ForwardRefs and not + # actually trying to evaluate the annotations. 
We need this so + # that the annotations act as much like + # "from __future__ import annotations" as possible, which is going + # away in future python as a separate mode + return _vendored_get_annotations(obj, format=Format.FORWARDREF) + +elif compat.py310: def get_annotations(obj: Any) -> Mapping[str, Any]: return inspect.get_annotations(obj) diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index e44c623d3dc..794dd18591c 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -73,7 +73,9 @@ else: NoneType = type(None) # type: ignore -NoneFwd = ForwardRef("None") + +def is_fwd_none(typ: Any) -> bool: + return isinstance(typ, ForwardRef) and typ.__forward_arg__ == "None" _AnnotationScanType = Union[ @@ -397,7 +399,7 @@ def recursive_value(inner_type): if isinstance(t, list): stack.extend(t) else: - types.add(None if t in {NoneType, NoneFwd} else t) + types.add(None if t is NoneType or is_fwd_none(t) else t) return types else: return {res} @@ -469,8 +471,7 @@ def de_optionalize_union_types( typ.discard(None) # type: ignore - typ.discard(NoneType) - typ.discard(NoneFwd) + typ = {t for t in typ if t is not NoneType and not is_fwd_none(t)} return make_union_type(*typ) @@ -546,7 +547,8 @@ def _de_optionalize_fwd_ref_union_types( def make_union_type(*types: _AnnotationScanType) -> Type[Any]: """Make a Union type.""" - return Union.__getitem__(types) # type: ignore + + return Union[types] # type: ignore def includes_none(type_: Any) -> bool: @@ -572,7 +574,7 @@ def includes_none(type_: Any) -> bool: if is_newtype(type_): return includes_none(type_.__supertype__) try: - return type_ in (NoneFwd, NoneType, None) + return type_ in (NoneType, None) or is_fwd_none(type_) except TypeError: # if type_ is Column, mapped_column(), etc. the use of "in" # resolves to ``__eq__()`` which then gives us an expression object diff --git a/pyproject.toml b/pyproject.toml index 59107eb23cb..31863651faf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,13 @@ filterwarnings = [ # sqlite3 warnings due to test/dialect/test_sqlite.py->test_native_datetime, # which is asserting that these deprecated-in-py312 handlers are functional "ignore:The default (date)?(time)?(stamp)? 
(adapter|converter):DeprecationWarning", + + # warning regarding using "fork" mode for multiprocessing when the parent + # has threads; using pytest-xdist introduces threads in the parent + # and we use multiprocessing in test/aaa_profiling/test_memusage.py where + # we require "fork" mode + # https://github.com/python/cpython/pull/100229#issuecomment-2704616288 + "ignore:This process .* is multi-threaded:DeprecationWarning", ] markers = [ "memory_intensive: memory / CPU intensive suite tests", diff --git a/test/aaa_profiling/test_memusage.py b/test/aaa_profiling/test_memusage.py index 69952ac1890..b6745e8b0b3 100644 --- a/test/aaa_profiling/test_memusage.py +++ b/test/aaa_profiling/test_memusage.py @@ -223,10 +223,14 @@ def run_plain(*func_args): # return run_plain def run_in_process(*func_args): - queue = multiprocessing.Queue() - proc = multiprocessing.Process( - target=profile, args=(queue, func_args) - ) + # see + # https://docs.python.org/3.14/whatsnew/3.14.html + # #incompatible-changes - the default run type is no longer + # "fork", but since we are running closures in the process + # we need forked mode + ctx = multiprocessing.get_context("fork") + queue = ctx.Queue() + proc = ctx.Process(target=profile, args=(queue, func_args)) proc.start() while True: row = queue.get() @@ -394,7 +398,7 @@ def go(): @testing.add_to_marker.memory_intensive class MemUsageWBackendTest(fixtures.MappedTest, EnsureZeroed): - __requires__ = "cpython", "memory_process_intensive", "no_asyncio" + __requires__ = "cpython", "posix", "memory_process_intensive", "no_asyncio" __sparse_backend__ = True # ensure a pure growing test trips the assertion diff --git a/test/base/test_concurrency_py3k.py b/test/base/test_concurrency_py3k.py index 63a9f850800..6cfa0383d6d 100644 --- a/test/base/test_concurrency_py3k.py +++ b/test/base/test_concurrency_py3k.py @@ -4,6 +4,7 @@ import threading from sqlalchemy import exc +from sqlalchemy import testing from sqlalchemy.testing import async_test from sqlalchemy.testing import eq_ from sqlalchemy.testing import expect_raises @@ -80,6 +81,7 @@ def go(): with expect_raises_message(ValueError, "sync error"): await greenlet_spawn(go) + @testing.requires.not_python314 def test_await_fallback_no_greenlet(self): to_await = run1() await_fallback(to_await) diff --git a/test/base/test_typing_utils.py b/test/base/test_typing_utils.py index f6afed47eed..51f5e13c418 100644 --- a/test/base/test_typing_utils.py +++ b/test/base/test_typing_utils.py @@ -10,8 +10,8 @@ from sqlalchemy.testing.assertions import eq_ from sqlalchemy.testing.assertions import is_ from sqlalchemy.util import py310 -from sqlalchemy.util import py311 from sqlalchemy.util import py312 +from sqlalchemy.util import py314 from sqlalchemy.util import py38 from sqlalchemy.util import typing as sa_typing @@ -42,7 +42,7 @@ def null_union_types(): def generic_unions(): # remove new-style unions `int | str` that are not generic res = union_types() + null_union_types() - if py310: + if py310 and not py314: new_ut = type(int | str) res = [t for t in res if not isinstance(t, new_ut)] return res @@ -200,6 +200,29 @@ def new_types(): ] +def compare_type_by_string(a, b): + """python 3.14 has made ForwardRefs not really comparable or reliably + hashable. 
+ + As we need to compare types here, including structures like + `Union["str", "int"]`, without having to dive into cpython's source code + each time a new release comes out, compare based on stringification, + which still presents changing rules but at least are easy to diagnose + and correct for different python versions. + + See discussion at https://github.com/python/cpython/issues/129463 + for background + + """ + + if isinstance(a, (set, list)): + a = sorted(a, key=lambda x: str(x)) + if isinstance(b, (set, list)): + b = sorted(b, key=lambda x: str(x)) + + eq_(str(a), str(b)) + + def annotated_l(): return [A_str, A_null_str, A_union, A_null_union] @@ -234,14 +257,6 @@ def test_unions_are_the_same(self): is_(typing.Union, typing_extensions.Union) is_(typing.Optional, typing_extensions.Optional) - def test_make_union(self): - v = int, str - eq_(typing.Union[int, str], typing.Union.__getitem__(v)) - if py311: - # need eval since it's a syntax error in python < 3.11 - eq_(typing.Union[int, str], eval("typing.Union[*(int, str)]")) - eq_(typing.Union[int, str], eval("typing.Union[*v]")) - @requires.python312 def test_make_type_alias_type(self): # verify that TypeAliasType('foo', int) it the same as 'type foo = int' @@ -253,9 +268,11 @@ def test_make_type_alias_type(self): eq_(x_type.__value__, x.__value__) def test_make_fw_ref(self): - eq_(make_fw_ref("str"), typing.ForwardRef("str")) - eq_(make_fw_ref("str|int"), typing.ForwardRef("str|int")) - eq_( + compare_type_by_string(make_fw_ref("str"), typing.ForwardRef("str")) + compare_type_by_string( + make_fw_ref("str|int"), typing.ForwardRef("str|int") + ) + compare_type_by_string( make_fw_ref("Optional[Union[str, int]]"), typing.ForwardRef("Optional[Union[str, int]]"), ) @@ -317,8 +334,11 @@ class W(typing.Generic[TV]): ] for t in all_types(): - # use is since union compare equal between new/old style - exp = any(t is k for k in generics) + if py314: + exp = any(t == k for k in generics) + else: + # use is since union compare equal between new/old style + exp = any(t is k for k in generics) eq_(sa_typing.is_generic(t), exp, t) def test_is_pep695(self): @@ -360,70 +380,82 @@ def test_pep695_value(self): eq_(sa_typing.pep695_values(TAext_null_union), {int, str, None}) eq_(sa_typing.pep695_values(TA_null_union2), {int, str, None}) eq_(sa_typing.pep695_values(TAext_null_union2), {int, str, None}) - eq_( + + compare_type_by_string( sa_typing.pep695_values(TA_null_union3), - {int, typing.ForwardRef("typing.Union[None, bool]")}, + [int, typing.ForwardRef("typing.Union[None, bool]")], ) - eq_( + + compare_type_by_string( sa_typing.pep695_values(TAext_null_union3), {int, typing.ForwardRef("typing.Union[None, bool]")}, ) - eq_( + + compare_type_by_string( sa_typing.pep695_values(TA_null_union4), - {int, typing.ForwardRef("TA_null_union2")}, + [int, typing.ForwardRef("TA_null_union2")], ) - eq_( + compare_type_by_string( sa_typing.pep695_values(TAext_null_union4), {int, typing.ForwardRef("TAext_null_union2")}, ) + eq_(sa_typing.pep695_values(TA_union_ta), {int, str}) eq_(sa_typing.pep695_values(TAext_union_ta), {int, str}) eq_(sa_typing.pep695_values(TA_null_union_ta), {int, str, None, float}) - eq_( + + compare_type_by_string( sa_typing.pep695_values(TAext_null_union_ta), {int, str, None, float}, ) - eq_( + + compare_type_by_string( sa_typing.pep695_values(TA_list), - {int, str, typing.List[typing.ForwardRef("TA_list")]}, + [int, str, typing.List[typing.ForwardRef("TA_list")]], ) - eq_( + + compare_type_by_string( sa_typing.pep695_values(TAext_list), 
{int, str, typing.List[typing.ForwardRef("TAext_list")]}, ) - eq_( + + compare_type_by_string( sa_typing.pep695_values(TA_recursive), - {typing.ForwardRef("TA_recursive"), str}, + [str, typing.ForwardRef("TA_recursive")], ) - eq_( + compare_type_by_string( sa_typing.pep695_values(TAext_recursive), {typing.ForwardRef("TAext_recursive"), str}, ) - eq_( + compare_type_by_string( sa_typing.pep695_values(TA_null_recursive), - {typing.ForwardRef("TA_recursive"), str, None}, + [str, typing.ForwardRef("TA_recursive"), None], ) - eq_( + compare_type_by_string( sa_typing.pep695_values(TAext_null_recursive), {typing.ForwardRef("TAext_recursive"), str, None}, ) - eq_( + compare_type_by_string( sa_typing.pep695_values(TA_recursive_a), - {typing.ForwardRef("TA_recursive_b"), int}, + [int, typing.ForwardRef("TA_recursive_b")], ) - eq_( + compare_type_by_string( sa_typing.pep695_values(TAext_recursive_a), {typing.ForwardRef("TAext_recursive_b"), int}, ) - eq_( + compare_type_by_string( sa_typing.pep695_values(TA_recursive_b), - {typing.ForwardRef("TA_recursive_a"), str}, + [str, typing.ForwardRef("TA_recursive_a")], ) - eq_( + compare_type_by_string( sa_typing.pep695_values(TAext_recursive_b), {typing.ForwardRef("TAext_recursive_a"), str}, ) + + @requires.up_to_date_typealias_type + def test_pep695_value_generics(self): # generics + eq_(sa_typing.pep695_values(TA_generic), {typing.List[TV]}) eq_(sa_typing.pep695_values(TAext_generic), {typing.List[TV]}) eq_(sa_typing.pep695_values(TA_generic_typed), {typing.List[TV]}) @@ -459,17 +491,23 @@ def test_de_optionalize_union_types(self): fn(typing.Optional[typing.Union[int, str]]), typing.Union[int, str] ) eq_(fn(typing.Union[int, str, None]), typing.Union[int, str]) + eq_(fn(typing.Union[int, str, "None"]), typing.Union[int, str]) eq_(fn(make_fw_ref("None")), typing_extensions.Never) eq_(fn(make_fw_ref("typing.Union[None]")), typing_extensions.Never) eq_(fn(make_fw_ref("Union[None, str]")), typing.ForwardRef("str")) - eq_( + + compare_type_by_string( fn(make_fw_ref("Union[None, str, int]")), typing.Union["str", "int"], ) - eq_(fn(make_fw_ref("Optional[int]")), typing.ForwardRef("int")) - eq_( + + compare_type_by_string( + fn(make_fw_ref("Optional[int]")), typing.ForwardRef("int") + ) + + compare_type_by_string( fn(make_fw_ref("typing.Optional[Union[int | str]]")), typing.ForwardRef("Union[int | str]"), ) @@ -482,9 +520,12 @@ def test_de_optionalize_union_types(self): for t in union_types() + type_aliases() + new_types() + annotated_l(): eq_(fn(t), t) - eq_( + compare_type_by_string( fn(make_fw_ref("Union[typing.Dict[str, int], int, None]")), - typing.Union["typing.Dict[str, int]", "int"], + typing.Union[ + "typing.Dict[str, int]", + "int", + ], ) def test_make_union_type(self): @@ -508,18 +549,9 @@ def test_make_union_type(self): typing.Union[bool, TAext_int, NT_str], ) + @requires.up_to_date_typealias_type @requires.python38 - def test_includes_none(self): - eq_(sa_typing.includes_none(None), True) - eq_(sa_typing.includes_none(type(None)), True) - eq_(sa_typing.includes_none(typing.ForwardRef("None")), True) - eq_(sa_typing.includes_none(int), False) - for t in union_types(): - eq_(sa_typing.includes_none(t), False) - - for t in null_union_types(): - eq_(sa_typing.includes_none(t), True, str(t)) - + def test_includes_none_generics(self): # TODO: these are false negatives false_negatives = { TA_null_union4, # does not evaluate FW ref @@ -532,6 +564,18 @@ def test_includes_none(self): exp = "null" in t.__name__ eq_(sa_typing.includes_none(t), exp, str(t)) + 
@requires.python38 + def test_includes_none(self): + eq_(sa_typing.includes_none(None), True) + eq_(sa_typing.includes_none(type(None)), True) + eq_(sa_typing.includes_none(typing.ForwardRef("None")), True) + eq_(sa_typing.includes_none(int), False) + for t in union_types(): + eq_(sa_typing.includes_none(t), False) + + for t in null_union_types(): + eq_(sa_typing.includes_none(t), True, str(t)) + for t in annotated_l(): eq_( sa_typing.includes_none(t), diff --git a/test/ext/asyncio/test_engine_py3k.py b/test/ext/asyncio/test_engine_py3k.py index 231d32cbe18..05941a79a2a 100644 --- a/test/ext/asyncio/test_engine_py3k.py +++ b/test/ext/asyncio/test_engine_py3k.py @@ -269,9 +269,16 @@ async def test_engine_eq_ne(self, async_engine): is_false(async_engine == None) - @async_test - async def test_no_attach_to_event_loop(self, testing_engine): - """test #6409""" + def test_no_attach_to_event_loop(self, testing_engine): + """test #6409 + + note this test does not seem to trigger the bug that was originally + fixed in #6409, when using python 3.10 and higher (the original issue + can repro in 3.8 at least, based on my testing). It's been simplified + to no longer explicitly create a new loop, asyncio.run() already + creates a new loop. + + """ import asyncio import threading @@ -279,9 +286,6 @@ async def test_no_attach_to_event_loop(self, testing_engine): errs = [] def go(): - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - async def main(): tasks = [task() for _ in range(2)] diff --git a/test/typing/test_overloads.py b/test/typing/test_overloads.py index 66209f50365..2a99915748e 100644 --- a/test/typing/test_overloads.py +++ b/test/typing/test_overloads.py @@ -9,6 +9,7 @@ from sqlalchemy.sql.base import Executable from sqlalchemy.testing import fixtures from sqlalchemy.testing.assertions import eq_ +from sqlalchemy.util.typing import is_fwd_ref engine_execution_options = { "compiled_cache": "Optional[CompiledCacheType]", @@ -78,7 +79,12 @@ def test_methods(self, class_, expected): @testing.combinations( (CoreExecuteOptionsParameter, core_execution_options), - (OrmExecuteOptionsParameter, orm_execution_options), + # https://github.com/python/cpython/issues/133701 + ( + OrmExecuteOptionsParameter, + orm_execution_options, + testing.requires.fail_python314b1, + ), ) def test_typed_dicts(self, typ, expected): # we currently expect these to be union types with first entry @@ -90,7 +96,7 @@ def test_typed_dicts(self, typ, expected): expected.pop("opt") assert_annotations = { - key: fwd_ref.__forward_arg__ + key: fwd_ref.__forward_arg__ if is_fwd_ref(fwd_ref) else fwd_ref for key, fwd_ref in typed_dict.__annotations__.items() } eq_(assert_annotations, expected) diff --git a/tox.ini b/tox.ini index 76469ebef7f..200c6b6bf0e 100644 --- a/tox.ini +++ b/tox.ini @@ -28,9 +28,11 @@ usedevelop= cov: True extras= - py{3,37,38,39,310,311,312,313}: {[greenletextras]extras} + # this can be limited to specific python versions IF there is no + # greenlet available for the most recent python. 
otherwise + # keep this present in all cases + py{38,39,310,311,312,313}: {[greenletextras]extras} - py{37,38,39,310}-sqlite_file: sqlcipher postgresql: postgresql postgresql: postgresql_pg8000 postgresql: postgresql_psycopg @@ -50,14 +52,13 @@ install_command= python -I -m pip install --only-binary=pymssql {opts} {packages} deps= + typing-extensions>=4.13.0rc1; python_version > '3.7' + pytest>=7.0.0,<8.4 # tracked by https://github.com/pytest-dev/pytest-xdist/issues/907 pytest-xdist!=3.3.0 - py313: git+https://github.com/python-greenlet/greenlet.git\#egg=greenlet - dbapimain-sqlite: git+https://github.com/omnilib/aiosqlite.git\#egg=aiosqlite - dbapimain-sqlite: git+https://github.com/coleifer/sqlcipher3.git\#egg=sqlcipher3 dbapimain-postgresql: git+https://github.com/psycopg/psycopg2.git\#egg=psycopg2 dbapimain-postgresql: git+https://github.com/MagicStack/asyncpg.git\#egg=asyncpg @@ -118,20 +119,19 @@ setenv= oracle: ORACLE={env:TOX_ORACLE:--db oracle} oracle: EXTRA_ORACLE_DRIVERS={env:EXTRA_ORACLE_DRIVERS:--dbdriver cx_oracle --dbdriver oracledb --dbdriver oracledb_async} - py{313,314}-oracle: EXTRA_ORACLE_DRIVERS={env:EXTRA_ORACLE_DRIVERS:--dbdriver cx_oracle --dbdriver oracledb} sqlite: SQLITE={env:TOX_SQLITE:--db sqlite} sqlite_file: SQLITE={env:TOX_SQLITE_FILE:--db sqlite_file} - sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric --dbdriver aiosqlite} - py{313,314}-sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric} - + py{38,39,310,311,312,313}-sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric --dbdriver aiosqlite} + py{314}-sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric} sqlite-nogreenlet: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric} - py{37,38,39}-sqlite_file: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite --dbdriver pysqlcipher} + # note all of these would need limiting for py314 if we want tests to run until + # greenlet is available. 
I just dont see any clean way to do this in tox without writing + # all the versions out every time and it's ridiculous - # omit pysqlcipher for Python 3.10 - py{3,310,311,312}-sqlite_file: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite} + sqlite_file: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite} postgresql: POSTGRESQL={env:TOX_POSTGRESQL:--db postgresql} @@ -150,10 +150,10 @@ setenv= mssql: MSSQL={env:TOX_MSSQL:--db mssql} mssql: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver aioodbc --dbdriver pymssql} - py{313,314}-mssql: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver aioodbc} + py{314}-mssql: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver aioodbc} mssql-nogreenlet: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc --dbdriver pymssql} - py{313,314}-mssql-nogreenlet: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc} + py{314}-mssql-nogreenlet: EXTRA_MSSQL_DRIVERS={env:EXTRA_MSSQL_DRIVERS:--dbdriver pyodbc} oracle,mssql,sqlite_file: IDENTS=--write-idents db_idents.txt From 39b2cf52170b806ef324241aa5fe3e086111e6ce Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 12 May 2025 15:25:07 -0400 Subject: [PATCH 520/544] rewrite the docs on SQLite transaction handling SQLite has added the new "connection.autocommit" mode and associated fixes for pep-249 as of python 3.12. they plan to default to using this attribute as of python 3.16. Get on top of things by rewriting the whole doc section here, removing old cruft about sqlalchemy isolation levels that was not correct in any case, update recipes in a more succinct and unified way. References: #12585 Change-Id: I9d1de8dcc27f1731ecd3c723718942148dcd0a1a (cherry picked from commit 10ff201db40e069e8f90bb0883a916ba3d9cc96e) --- lib/sqlalchemy/dialects/sqlite/aiosqlite.py | 29 +- lib/sqlalchemy/dialects/sqlite/base.py | 304 ++++++++++++-------- lib/sqlalchemy/dialects/sqlite/pysqlite.py | 72 +---- 3 files changed, 192 insertions(+), 213 deletions(-) diff --git a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py index 828022454d4..b8cb8c3819b 100644 --- a/lib/sqlalchemy/dialects/sqlite/aiosqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/aiosqlite.py @@ -50,33 +50,10 @@ Serializable isolation / Savepoints / Transactional DDL (asyncio version) ------------------------------------------------------------------------- -Similarly to pysqlite, aiosqlite does not support SAVEPOINT feature. +A newly revised version of this important section is now available +at the top level of the SQLAlchemy SQLite documentation, in the section +:ref:`sqlite_transactions`. -The solution is similar to :ref:`pysqlite_serializable`. This is achieved by the event listeners in async:: - - from sqlalchemy import create_engine, event - from sqlalchemy.ext.asyncio import create_async_engine - - engine = create_async_engine("sqlite+aiosqlite:///myfile.db") - - - @event.listens_for(engine.sync_engine, "connect") - def do_connect(dbapi_connection, connection_record): - # disable aiosqlite's emitting of the BEGIN statement entirely. - # also stops it from emitting COMMIT before any DDL. - dbapi_connection.isolation_level = None - - - @event.listens_for(engine.sync_engine, "begin") - def do_begin(conn): - # emit our own BEGIN - conn.exec_driver_sql("BEGIN") - -.. 
warning:: When using the above recipe, it is advised to not use the - :paramref:`.Connection.execution_options.isolation_level` setting on - :class:`_engine.Connection` and :func:`_sa.create_engine` - with the SQLite driver, - as this function necessarily will also alter the ".isolation_level" setting. .. _aiosqlite_pooling: diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 719c0860b4f..cc43a826f5a 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -136,99 +136,199 @@ def bi_c(element, compiler, **kw): `Datatypes In SQLite Version 3 `_ -.. _sqlite_concurrency: - -Database Locking Behavior / Concurrency ---------------------------------------- - -SQLite is not designed for a high level of write concurrency. The database -itself, being a file, is locked completely during write operations within -transactions, meaning exactly one "connection" (in reality a file handle) -has exclusive access to the database during this period - all other -"connections" will be blocked during this time. - -The Python DBAPI specification also calls for a connection model that is -always in a transaction; there is no ``connection.begin()`` method, -only ``connection.commit()`` and ``connection.rollback()``, upon which a -new transaction is to be begun immediately. This may seem to imply -that the SQLite driver would in theory allow only a single filehandle on a -particular database file at any time; however, there are several -factors both within SQLite itself as well as within the pysqlite driver -which loosen this restriction significantly. - -However, no matter what locking modes are used, SQLite will still always -lock the database file once a transaction is started and DML (e.g. INSERT, -UPDATE, DELETE) has at least been emitted, and this will block -other transactions at least at the point that they also attempt to emit DML. -By default, the length of time on this block is very short before it times out -with an error. - -This behavior becomes more critical when used in conjunction with the -SQLAlchemy ORM. SQLAlchemy's :class:`.Session` object by default runs -within a transaction, and with its autoflush model, may emit DML preceding -any SELECT statement. This may lead to a SQLite database that locks -more quickly than is expected. The locking mode of SQLite and the pysqlite -driver can be manipulated to some degree, however it should be noted that -achieving a high degree of write-concurrency with SQLite is a losing battle. - -For more information on SQLite's lack of write concurrency by design, please -see -`Situations Where Another RDBMS May Work Better - High Concurrency -`_ near the bottom of the page. - -The following subsections introduce areas that are impacted by SQLite's -file-based architecture and additionally will usually require workarounds to -work when using the pysqlite driver. +.. _sqlite_transactions: + +Transactions with SQLite and the sqlite3 driver +----------------------------------------------- + +As a file-based database, SQLite's approach to transactions differs from +traditional databases in many ways. Additionally, the ``sqlite3`` driver +standard with Python (as well as the async version ``aiosqlite`` which builds +on top of it) has several quirks, workarounds, and API features in the +area of transaction control, all of which generally need to be addressed when +constructing a SQLAlchemy application that uses SQLite. 
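The driver-level quirks at issue here, in particular the "legacy transaction
mode" described in the next subsection, can be observed directly against the
standalone ``sqlite3`` module; the short sketch below (illustrative only, using
an in-memory database) shows that only DML implicitly begins a transaction
under the legacy default::

    import sqlite3

    # a plain connection; legacy transaction control is the default
    conn = sqlite3.connect(":memory:")

    # DDL does not implicitly BEGIN a transaction in legacy mode
    conn.execute("CREATE TABLE t (x INTEGER)")
    print(conn.in_transaction)  # False

    # nor does a SELECT
    conn.execute("SELECT x FROM t").fetchall()
    print(conn.in_transaction)  # False

    # DML does; a transaction remains open until commit()/rollback()
    conn.execute("INSERT INTO t (x) VALUES (1)")
    print(conn.in_transaction)  # True
    conn.commit()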
+ +Legacy Transaction Mode with the sqlite3 driver +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The most important aspect of transaction handling with the sqlite3 driver is +that it defaults (which will continue through Python 3.15 before being +removed in Python 3.16) to legacy transactional behavior which does +not strictly follow :pep:`249`. The way in which the driver diverges from the +PEP is that it does not "begin" a transaction automatically as dictated by +:pep:`249` except in the case of DML statements, e.g. INSERT, UPDATE, and +DELETE. Normally, :pep:`249` dictates that a BEGIN must be emitted upon +the first SQL statement of any kind, so that all subsequent operations will +be established within a transaction until ``connection.commit()`` has been +called. The ``sqlite3`` driver, in an effort to be easier to use in +highly concurrent environments, skips this step for DQL (e.g. SELECT) statements, +and also skips it for DDL (e.g. CREATE TABLE etc.) statements for more legacy +reasons. Statements such as SAVEPOINT are also skipped. + +In modern versions of the ``sqlite3`` driver as of Python 3.12, this legacy +mode of operation is referred to as +`"legacy transaction control" `_, and is in +effect by default due to the ``Connection.autocommit`` parameter being set to +the constant ``sqlite3.LEGACY_TRANSACTION_CONTROL``. Prior to Python 3.12, +the ``Connection.autocommit`` attribute did not exist. + +The implications of legacy transaction mode include: + +* **Incorrect support for transactional DDL** - statements like CREATE TABLE, ALTER TABLE, + CREATE INDEX etc. will not automatically BEGIN a transaction if one were not + started already, leading to the changes by each statement being + "autocommitted" immediately unless BEGIN were otherwise emitted first. Very + old (pre Python 3.6) versions of SQLite would also force a COMMIT for these + operations even if a transaction were present, however this is no longer the + case. +* **SERIALIZABLE behavior not fully functional** - SQLite's transaction isolation + behavior is normally consistent with SERIALIZABLE isolation, as it is a file- + based system that locks the database file entirely for write operations, + preventing COMMIT until all reader transactions (and associated file locks) + have completed. However, sqlite3's legacy transaction mode fails to emit BEGIN for SELECT + statements, which causes these SELECT statements to no longer be "repeatable", + failing one of the consistency guarantees of SERIALIZABLE. +* **Incorrect behavior for SAVEPOINT** - as the SAVEPOINT statement does not + imply a BEGIN, a new SAVEPOINT emitted before a BEGIN will function on its + own but fails to participate in the enclosing transaction, meaning a ROLLBACK + of the transaction will not rollback elements that were part of a released + savepoint. + +Legacy transaction mode first existed in order to faciliate working around +SQLite's file locks. Because SQLite relies upon whole-file locks, it is easy to +get "database is locked" errors, particularly when newer features like "write +ahead logging" are disabled. This is a key reason why ``sqlite3``'s legacy +transaction mode is still the default mode of operation; disabling it will +produce behavior that is more susceptible to locked database errors. However +note that **legacy transaction mode will no longer be the default** in a future +Python version (3.16 as of this writing). + +.. 
+
+Enabling Non-Legacy SQLite Transactional Modes with the sqlite3 or aiosqlite driver
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Current SQLAlchemy support allows either setting the
+``Connection.autocommit`` attribute, most directly by using a
+:func:`_sa.create_engine` parameter, or, if on an older version of Python where
+the attribute is not available, using event hooks to control the behavior of
+BEGIN.
+
+* **Enabling modern sqlite3 transaction control via the autocommit connect parameter** (Python 3.12 and above)
+
+  To use SQLite in the mode described at `Transaction control via the autocommit attribute `_,
+  the most straightforward approach is to set the attribute to its recommended value
+  of ``False`` at the connect level using :paramref:`_sa.create_engine.connect_args`::
+
+    from sqlalchemy import create_engine
+
+    engine = create_engine(
+        "sqlite:///myfile.db", connect_args={"autocommit": False}
+    )
+
+  This parameter is also passed through when using the aiosqlite driver::
+
+    from sqlalchemy.ext.asyncio import create_async_engine
+
+    engine = create_async_engine(
+        "sqlite+aiosqlite:///myfile.db", connect_args={"autocommit": False}
+    )
+
+  The parameter can also be set at the attribute level using the :meth:`.PoolEvents.connect`
+  event hook, however this will only work for sqlite3, as aiosqlite does not yet expose this
+  attribute on its ``Connection`` object::
+
+    from sqlalchemy import create_engine, event
+
+    engine = create_engine("sqlite:///myfile.db")
+
+
+    @event.listens_for(engine, "connect")
+    def do_connect(dbapi_connection, connection_record):
+        # enable autocommit=False mode
+        dbapi_connection.autocommit = False
+
+* **Using SQLAlchemy to emit BEGIN in lieu of SQLite's transaction control** (all Python versions, sqlite3 and aiosqlite)
+
+  For older versions of ``sqlite3`` or for cross-compatibility with older and
+  newer versions, SQLAlchemy can also take over the job of transaction control.
+  This is achieved by using the :meth:`.ConnectionEvents.begin` hook
+  to emit the "BEGIN" command directly, while also disabling SQLite's control
+  of this command using the :meth:`.PoolEvents.connect` event hook to set the
+  ``Connection.isolation_level`` attribute to ``None``::
+
+
+    from sqlalchemy import create_engine, event
+
+    engine = create_engine("sqlite:///myfile.db")
+
+
+    @event.listens_for(engine, "connect")
+    def do_connect(dbapi_connection, connection_record):
+        # disable sqlite3's emitting of the BEGIN statement entirely.
+        dbapi_connection.isolation_level = None
+
+
+    @event.listens_for(engine, "begin")
+    def do_begin(conn):
+        # emit our own BEGIN. sqlite3 still emits COMMIT/ROLLBACK correctly
+        conn.exec_driver_sql("BEGIN")
+
+  When using the asyncio variant ``aiosqlite``, refer to ``engine.sync_engine``
+  as in the example below::
+
+    from sqlalchemy import create_engine, event
+    from sqlalchemy.ext.asyncio import create_async_engine
+
+    engine = create_async_engine("sqlite+aiosqlite:///myfile.db")
+
+
+    @event.listens_for(engine.sync_engine, "connect")
+    def do_connect(dbapi_connection, connection_record):
+        # disable aiosqlite's emitting of the BEGIN statement entirely.
+        dbapi_connection.isolation_level = None
+
+
+    @event.listens_for(engine.sync_engine, "begin")
+    def do_begin(conn):
+        # emit our own BEGIN. aiosqlite still emits COMMIT/ROLLBACK correctly
+        conn.exec_driver_sql("BEGIN")
+
..
_sqlite_isolation_level: -Transaction Isolation Level / Autocommit ----------------------------------------- - -SQLite supports "transaction isolation" in a non-standard way, along two -axes. One is that of the -`PRAGMA read_uncommitted `_ -instruction. This setting can essentially switch SQLite between its -default mode of ``SERIALIZABLE`` isolation, and a "dirty read" isolation -mode normally referred to as ``READ UNCOMMITTED``. - -SQLAlchemy ties into this PRAGMA statement using the -:paramref:`_sa.create_engine.isolation_level` parameter of -:func:`_sa.create_engine`. -Valid values for this parameter when used with SQLite are ``"SERIALIZABLE"`` -and ``"READ UNCOMMITTED"`` corresponding to a value of 0 and 1, respectively. -SQLite defaults to ``SERIALIZABLE``, however its behavior is impacted by -the pysqlite driver's default behavior. - -When using the pysqlite driver, the ``"AUTOCOMMIT"`` isolation level is also -available, which will alter the pysqlite connection using the ``.isolation_level`` -attribute on the DBAPI connection and set it to None for the duration -of the setting. - -.. versionadded:: 1.3.16 added support for SQLite AUTOCOMMIT isolation level - when using the pysqlite / sqlite3 SQLite driver. - - -The other axis along which SQLite's transactional locking is impacted is -via the nature of the ``BEGIN`` statement used. The three varieties -are "deferred", "immediate", and "exclusive", as described at -`BEGIN TRANSACTION `_. A straight -``BEGIN`` statement uses the "deferred" mode, where the database file is -not locked until the first read or write operation, and read access remains -open to other transactions until the first write operation. But again, -it is critical to note that the pysqlite driver interferes with this behavior -by *not even emitting BEGIN* until the first write operation. +Using SQLAlchemy's Driver Level AUTOCOMMIT Feature with SQLite +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. warning:: +SQLAlchemy has a comprehensive database isolation feature with optional +autocommit support that is introduced in the section :ref:`dbapi_autocommit`. - SQLite's transactional scope is impacted by unresolved - issues in the pysqlite driver, which defers BEGIN statements to a greater - degree than is often feasible. See the section :ref:`pysqlite_serializable` - or :ref:`aiosqlite_serializable` for techniques to work around this behavior. +For the ``sqlite3`` and ``aiosqlite`` drivers, SQLAlchemy only includes +built-in support for "AUTOCOMMIT". Note that this mode is currently incompatible +with the non-legacy isolation mode hooks documented in the previous +section at :ref:`sqlite_enabling_transactions`. -.. seealso:: +To use the ``sqlite3`` driver with SQLAlchemy driver-level autocommit, +create an engine setting the :paramref:`_sa.create_engine.isolation_level` +parameter to "AUTOCOMMIT":: + + eng = create_engine("sqlite:///myfile.db", isolation_level="AUTOCOMMIT") + +When using the above mode, any event hooks that set the sqlite3 ``Connection.autocommit`` +parameter away from its default of ``sqlite3.LEGACY_TRANSACTION_CONTROL`` +as well as hooks that emit ``BEGIN`` should be disabled. 
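+
+Driver-level autocommit can also be applied to a single
+:class:`_engine.Connection` rather than engine-wide, using
+:meth:`_engine.Connection.execution_options` as described in
+:ref:`dbapi_autocommit`. A brief sketch, assuming an engine ``eng`` created
+without the engine-wide setting above (VACUUM is used here only as an example
+of a statement that cannot run inside a transaction)::
+
+    with eng.connect() as conn:
+        conn = conn.execution_options(isolation_level="AUTOCOMMIT")
+        conn.exec_driver_sql("VACUUM")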
+ +Additional Reading for SQLite / sqlite3 transaction control +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Links with important information on SQLite, the sqlite3 driver, +as well as long historical conversations on how things got to their current state: + +* `Isolation in SQLite `_ - on the SQLite website +* `Transaction control `_ - describes the sqlite3 autocommit attribute as well + as the legacy isolation_level attribute. +* `sqlite3 SELECT does not BEGIN a transaction, but should according to spec `_ - imported Python standard library issue on github +* `sqlite3 module breaks transactions and potentially corrupts data `_ - imported Python standard library issue on github - :ref:`dbapi_autocommit` INSERT/UPDATE/DELETE...RETURNING --------------------------------- @@ -268,38 +368,6 @@ def bi_c(element, compiler, **kw): .. versionadded:: 2.0 Added support for SQLite RETURNING -SAVEPOINT Support ----------------------------- - -SQLite supports SAVEPOINTs, which only function once a transaction is -begun. SQLAlchemy's SAVEPOINT support is available using the -:meth:`_engine.Connection.begin_nested` method at the Core level, and -:meth:`.Session.begin_nested` at the ORM level. However, SAVEPOINTs -won't work at all with pysqlite unless workarounds are taken. - -.. warning:: - - SQLite's SAVEPOINT feature is impacted by unresolved - issues in the pysqlite and aiosqlite drivers, which defer BEGIN statements - to a greater degree than is often feasible. See the sections - :ref:`pysqlite_serializable` and :ref:`aiosqlite_serializable` - for techniques to work around this behavior. - -Transactional DDL ----------------------------- - -The SQLite database supports transactional :term:`DDL` as well. -In this case, the pysqlite driver is not only failing to start transactions, -it also is ending any existing transaction when DDL is detected, so again, -workarounds are required. - -.. warning:: - - SQLite's transactional DDL is impacted by unresolved issues - in the pysqlite driver, which fails to emit BEGIN and additionally - forces a COMMIT to cancel any transaction when DDL is encountered. - See the section :ref:`pysqlite_serializable` - for techniques to work around this behavior. .. _sqlite_foreign_keys: diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index 9dafda6d9df..4a777e3b81d 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -354,76 +354,10 @@ def process_result_value(self, value, dialect): Serializable isolation / Savepoints / Transactional DDL ------------------------------------------------------- -In the section :ref:`sqlite_concurrency`, we refer to the pysqlite -driver's assortment of issues that prevent several features of SQLite -from working correctly. The pysqlite DBAPI driver has several -long-standing bugs which impact the correctness of its transactional -behavior. In its default mode of operation, SQLite features such as -SERIALIZABLE isolation, transactional DDL, and SAVEPOINT support are -non-functional, and in order to use these features, workarounds must -be taken. +A newly revised version of this important section is now available +at the top level of the SQLAlchemy SQLite documentation, in the section +:ref:`sqlite_transactions`. 
-The issue is essentially that the driver attempts to second-guess the user's -intent, failing to start transactions and sometimes ending them prematurely, in -an effort to minimize the SQLite databases's file locking behavior, even -though SQLite itself uses "shared" locks for read-only activities. - -SQLAlchemy chooses to not alter this behavior by default, as it is the -long-expected behavior of the pysqlite driver; if and when the pysqlite -driver attempts to repair these issues, that will be more of a driver towards -defaults for SQLAlchemy. - -The good news is that with a few events, we can implement transactional -support fully, by disabling pysqlite's feature entirely and emitting BEGIN -ourselves. This is achieved using two event listeners:: - - from sqlalchemy import create_engine, event - - engine = create_engine("sqlite:///myfile.db") - - - @event.listens_for(engine, "connect") - def do_connect(dbapi_connection, connection_record): - # disable pysqlite's emitting of the BEGIN statement entirely. - # also stops it from emitting COMMIT before any DDL. - dbapi_connection.isolation_level = None - - - @event.listens_for(engine, "begin") - def do_begin(conn): - # emit our own BEGIN - conn.exec_driver_sql("BEGIN") - -.. warning:: When using the above recipe, it is advised to not use the - :paramref:`.Connection.execution_options.isolation_level` setting on - :class:`_engine.Connection` and :func:`_sa.create_engine` - with the SQLite driver, - as this function necessarily will also alter the ".isolation_level" setting. - - -Above, we intercept a new pysqlite connection and disable any transactional -integration. Then, at the point at which SQLAlchemy knows that transaction -scope is to begin, we emit ``"BEGIN"`` ourselves. - -When we take control of ``"BEGIN"``, we can also control directly SQLite's -locking modes, introduced at -`BEGIN TRANSACTION `_, -by adding the desired locking mode to our ``"BEGIN"``:: - - @event.listens_for(engine, "begin") - def do_begin(conn): - conn.exec_driver_sql("BEGIN EXCLUSIVE") - -.. seealso:: - - `BEGIN TRANSACTION `_ - - on the SQLite site - - `sqlite3 SELECT does not BEGIN a transaction `_ - - on the Python bug tracker - - `sqlite3 module breaks transactions and potentially corrupts data `_ - - on the Python bug tracker .. _pysqlite_udfs: From 64e765215d01f9f5eea34f569f143ebad3d0865d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 13 May 2025 11:28:25 -0400 Subject: [PATCH 521/544] remove __getattr__ from root Removed ``__getattr__()`` rule from ``sqlalchemy/__init__.py`` that appeared to be trying to correct for a previous typographical error in the imports. This rule interferes with type checking and is removed. Fixes: #12588 Change-Id: I682b1f3c13b842d6f43ed02d28d9774b55477516 (cherry picked from commit c3f1ea62286a0b038482437923c4d1c53d668dcb) --- doc/build/changelog/unreleased_20/12588.rst | 8 ++++++++ lib/sqlalchemy/__init__.py | 11 ----------- 2 files changed, 8 insertions(+), 11 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12588.rst diff --git a/doc/build/changelog/unreleased_20/12588.rst b/doc/build/changelog/unreleased_20/12588.rst new file mode 100644 index 00000000000..2d30a768f75 --- /dev/null +++ b/doc/build/changelog/unreleased_20/12588.rst @@ -0,0 +1,8 @@ +.. change:: + :tags: bug, typing + :tickets: 12588 + + Removed ``__getattr__()`` rule from ``sqlalchemy/__init__.py`` that + appeared to be trying to correct for a previous typographical error in the + imports. 
This rule interferes with type checking and is removed. + diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 71b701c920c..748879e160f 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -281,14 +281,3 @@ def __go(lcls: Any) -> None: __go(locals()) - - -def __getattr__(name: str) -> Any: - if name == "SingleonThreadPool": - _util.warn_deprecated( - "SingleonThreadPool was a typo in the v2 series. " - "Please use the correct SingletonThreadPool name.", - "2.0.24", - ) - return SingletonThreadPool - raise AttributeError(f"module {__name__!r} has no attribute {name!r}") From 0f77bf10acf108578335089bfb387fe8cb69ed99 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 14 May 2025 08:20:03 -0400 Subject: [PATCH 522/544] changelog edits Change-Id: Ib2bb33698f58a62c945d147c39d3ac6af908b802 (cherry picked from commit c7d5c2ab5a7c5c97f80a904fcd3d5dcc9ebe954d) --- doc/build/changelog/unreleased_20/12405.rst | 16 +++++++++------- doc/build/changelog/unreleased_20/12488.rst | 6 +++--- doc/build/changelog/unreleased_20/12566.rst | 6 +++--- 3 files changed, 15 insertions(+), 13 deletions(-) diff --git a/doc/build/changelog/unreleased_20/12405.rst b/doc/build/changelog/unreleased_20/12405.rst index f90546ad5ae..f05d714bbad 100644 --- a/doc/build/changelog/unreleased_20/12405.rst +++ b/doc/build/changelog/unreleased_20/12405.rst @@ -1,10 +1,12 @@ .. change:: - :tags: bug, orm + :tags: bug, platform :tickets: 12405 - Changes to the test suite to accommodate Python 3.14 and its new - implementation of :pep:`649`, which highly modifies how typing annotations - are interpreted at runtime. Use of the new - ``annotationlib.get_annotations()`` function is enabled when python 3.14 is - present, and many other changes to how pep-484 type objects are interpreted - at runtime are made. + Adjusted the test suite as well as the ORM's method of scanning classes for + annotations to work under current beta releases of Python 3.14 (currently + 3.14.0b1) as part of an ongoing effort to support the production release of + this Python release. Further changes to Python's means of working with + annotations is expected in subsequent beta releases for which SQLAlchemy's + test suite will need further adjustments. + + diff --git a/doc/build/changelog/unreleased_20/12488.rst b/doc/build/changelog/unreleased_20/12488.rst index d81d025bdd8..55c6e7b6556 100644 --- a/doc/build/changelog/unreleased_20/12488.rst +++ b/doc/build/changelog/unreleased_20/12488.rst @@ -2,7 +2,7 @@ :tags: bug, mysql :tickets: 12488 - Fixed regression caused by the DEFAULT rendering changes in 2.0.40 - :ticket:`12425` where using lowercase `on update` in a MySQL server default - would incorrectly apply parenthesis, leading to errors when MySQL + Fixed regression caused by the DEFAULT rendering changes in version 2.0.40 + via :ticket:`12425` where using lowercase ``on update`` in a MySQL server + default would incorrectly apply parenthesis, leading to errors when MySQL interpreted the rendered DDL. Pull request courtesy Alexander Ruehe. diff --git a/doc/build/changelog/unreleased_20/12566.rst b/doc/build/changelog/unreleased_20/12566.rst index 194936f9675..42d5eed1752 100644 --- a/doc/build/changelog/unreleased_20/12566.rst +++ b/doc/build/changelog/unreleased_20/12566.rst @@ -2,6 +2,6 @@ :tags: bug, sqlite :tickets: 12566 - Fixed and added test support for a few SQLite SQL functions hardcoded into - the compiler most notably the "localtimestamp" function which rendered with - incorrect internal quoting. 
+ Fixed and added test support for some SQLite SQL functions hardcoded into + the compiler, most notably the ``localtimestamp`` function which rendered + with incorrect internal quoting. From ec0604322a42165014e0040128f73d79534b06d3 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 14 May 2025 08:24:44 -0400 Subject: [PATCH 523/544] use pep639 license Removed the "license classifier" from setup.cfg for SQLAlchemy 2.0, which eliminates loud deprecation warnings when building the package. SQLAlchemy 2.1 will use a full :pep:`639` configuration in pyproject.toml while SQLAlchemy 2.0 remains using ``setup.cfg`` for setup. Change-Id: If732dca7f9b57a4c6a789a68ecc77f0293be4786 (cherry picked from commit c93f50421ba5e96079cc89db80282aaaf6e09a6e) --- doc/build/changelog/unreleased_20/use_pep639.rst | 9 +++++++++ setup.cfg | 1 - 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/use_pep639.rst diff --git a/doc/build/changelog/unreleased_20/use_pep639.rst b/doc/build/changelog/unreleased_20/use_pep639.rst new file mode 100644 index 00000000000..ff73d877288 --- /dev/null +++ b/doc/build/changelog/unreleased_20/use_pep639.rst @@ -0,0 +1,9 @@ +.. change:: + :tags: bug, installation + + Removed the "license classifier" from setup.cfg for SQLAlchemy 2.0, which + eliminates loud deprecation warnings when building the package. SQLAlchemy + 2.1 will use a full :pep:`639` configuration in pyproject.toml while + SQLAlchemy 2.0 remains using ``setup.cfg`` for setup. + + diff --git a/setup.cfg b/setup.cfg index 9b42a19a037..de35dd2e158 100644 --- a/setup.cfg +++ b/setup.cfg @@ -12,7 +12,6 @@ license_files = LICENSE classifiers = Development Status :: 5 - Production/Stable Intended Audience :: Developers - License :: OSI Approved :: MIT License Operating System :: OS Independent Programming Language :: Python Programming Language :: Python :: 3 From 152aa55fbba7c7ef11df4f009af8eddc3ed92eea Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 14 May 2025 13:04:02 -0400 Subject: [PATCH 524/544] - 2.0.41 --- doc/build/changelog/changelog_20.rst | 88 ++++++++++++++++++- doc/build/changelog/unreleased_20/10665.rst | 11 --- doc/build/changelog/unreleased_20/12317.rst | 16 ---- doc/build/changelog/unreleased_20/12405.rst | 12 --- doc/build/changelog/unreleased_20/12488.rst | 8 -- doc/build/changelog/unreleased_20/12566.rst | 7 -- doc/build/changelog/unreleased_20/12579.rst | 7 -- doc/build/changelog/unreleased_20/12588.rst | 8 -- .../changelog/unreleased_20/use_pep639.rst | 9 -- doc/build/conf.py | 4 +- 10 files changed, 89 insertions(+), 81 deletions(-) delete mode 100644 doc/build/changelog/unreleased_20/10665.rst delete mode 100644 doc/build/changelog/unreleased_20/12317.rst delete mode 100644 doc/build/changelog/unreleased_20/12405.rst delete mode 100644 doc/build/changelog/unreleased_20/12488.rst delete mode 100644 doc/build/changelog/unreleased_20/12566.rst delete mode 100644 doc/build/changelog/unreleased_20/12579.rst delete mode 100644 doc/build/changelog/unreleased_20/12588.rst delete mode 100644 doc/build/changelog/unreleased_20/use_pep639.rst diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index b87bce8e239..4d9dca6d65f 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -10,7 +10,93 @@ .. changelog:: :version: 2.0.41 - :include_notes_from: unreleased_20 + :released: May 14, 2025 + + .. 
change:: + :tags: usecase, postgresql + :tickets: 10665 + + Added support for ``postgresql_include`` keyword argument to + :class:`_schema.UniqueConstraint` and :class:`_schema.PrimaryKeyConstraint`. + Pull request courtesy Denis Laxalde. + + .. seealso:: + + :ref:`postgresql_constraint_options` + + .. change:: + :tags: usecase, oracle + :tickets: 12317, 12341 + + Added new datatype :class:`_oracle.VECTOR` and accompanying DDL and DQL + support to fully support this type for Oracle Database. This change + includes the base :class:`_oracle.VECTOR` type that adds new type-specific + methods ``l2_distance``, ``cosine_distance``, ``inner_product`` as well as + new parameters ``oracle_vector`` for the :class:`.Index` construct, + allowing vector indexes to be configured, and ``oracle_fetch_approximate`` + for the :meth:`.Select.fetch` clause. Pull request courtesy Suraj Shaw. + + .. seealso:: + + :ref:`oracle_vector_datatype` + + + .. change:: + :tags: bug, platform + :tickets: 12405 + + Adjusted the test suite as well as the ORM's method of scanning classes for + annotations to work under current beta releases of Python 3.14 (currently + 3.14.0b1) as part of an ongoing effort to support the production release of + this Python release. Further changes to Python's means of working with + annotations is expected in subsequent beta releases for which SQLAlchemy's + test suite will need further adjustments. + + + + .. change:: + :tags: bug, mysql + :tickets: 12488 + + Fixed regression caused by the DEFAULT rendering changes in version 2.0.40 + via :ticket:`12425` where using lowercase ``on update`` in a MySQL server + default would incorrectly apply parenthesis, leading to errors when MySQL + interpreted the rendered DDL. Pull request courtesy Alexander Ruehe. + + .. change:: + :tags: bug, sqlite + :tickets: 12566 + + Fixed and added test support for some SQLite SQL functions hardcoded into + the compiler, most notably the ``localtimestamp`` function which rendered + with incorrect internal quoting. + + .. change:: + :tags: bug, engine + :tickets: 12579 + + The error message that is emitted when a URL cannot be parsed no longer + includes the URL itself within the error message. + + + .. change:: + :tags: bug, typing + :tickets: 12588 + + Removed ``__getattr__()`` rule from ``sqlalchemy/__init__.py`` that + appeared to be trying to correct for a previous typographical error in the + imports. This rule interferes with type checking and is removed. + + + .. change:: + :tags: bug, installation + + Removed the "license classifier" from setup.cfg for SQLAlchemy 2.0, which + eliminates loud deprecation warnings when building the package. SQLAlchemy + 2.1 will use a full :pep:`639` configuration in pyproject.toml while + SQLAlchemy 2.0 remains using ``setup.cfg`` for setup. + + .. changelog:: :version: 2.0.40 diff --git a/doc/build/changelog/unreleased_20/10665.rst b/doc/build/changelog/unreleased_20/10665.rst deleted file mode 100644 index 967dda14b1d..00000000000 --- a/doc/build/changelog/unreleased_20/10665.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. change:: - :tags: usecase, postgresql - :tickets: 10665 - - Added support for ``postgresql_include`` keyword argument to - :class:`_schema.UniqueConstraint` and :class:`_schema.PrimaryKeyConstraint`. - Pull request courtesy Denis Laxalde. - - .. 
seealso:: - - :ref:`postgresql_constraint_options` diff --git a/doc/build/changelog/unreleased_20/12317.rst b/doc/build/changelog/unreleased_20/12317.rst deleted file mode 100644 index 13f69693e60..00000000000 --- a/doc/build/changelog/unreleased_20/12317.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. change:: - :tags: usecase, oracle - :tickets: 12317, 12341 - - Added new datatype :class:`_oracle.VECTOR` and accompanying DDL and DQL - support to fully support this type for Oracle Database. This change - includes the base :class:`_oracle.VECTOR` type that adds new type-specific - methods ``l2_distance``, ``cosine_distance``, ``inner_product`` as well as - new parameters ``oracle_vector`` for the :class:`.Index` construct, - allowing vector indexes to be configured, and ``oracle_fetch_approximate`` - for the :meth:`.Select.fetch` clause. Pull request courtesy Suraj Shaw. - - .. seealso:: - - :ref:`oracle_vector_datatype` - diff --git a/doc/build/changelog/unreleased_20/12405.rst b/doc/build/changelog/unreleased_20/12405.rst deleted file mode 100644 index f05d714bbad..00000000000 --- a/doc/build/changelog/unreleased_20/12405.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. change:: - :tags: bug, platform - :tickets: 12405 - - Adjusted the test suite as well as the ORM's method of scanning classes for - annotations to work under current beta releases of Python 3.14 (currently - 3.14.0b1) as part of an ongoing effort to support the production release of - this Python release. Further changes to Python's means of working with - annotations is expected in subsequent beta releases for which SQLAlchemy's - test suite will need further adjustments. - - diff --git a/doc/build/changelog/unreleased_20/12488.rst b/doc/build/changelog/unreleased_20/12488.rst deleted file mode 100644 index 55c6e7b6556..00000000000 --- a/doc/build/changelog/unreleased_20/12488.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. change:: - :tags: bug, mysql - :tickets: 12488 - - Fixed regression caused by the DEFAULT rendering changes in version 2.0.40 - via :ticket:`12425` where using lowercase ``on update`` in a MySQL server - default would incorrectly apply parenthesis, leading to errors when MySQL - interpreted the rendered DDL. Pull request courtesy Alexander Ruehe. diff --git a/doc/build/changelog/unreleased_20/12566.rst b/doc/build/changelog/unreleased_20/12566.rst deleted file mode 100644 index 42d5eed1752..00000000000 --- a/doc/build/changelog/unreleased_20/12566.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, sqlite - :tickets: 12566 - - Fixed and added test support for some SQLite SQL functions hardcoded into - the compiler, most notably the ``localtimestamp`` function which rendered - with incorrect internal quoting. diff --git a/doc/build/changelog/unreleased_20/12579.rst b/doc/build/changelog/unreleased_20/12579.rst deleted file mode 100644 index 70c619db09c..00000000000 --- a/doc/build/changelog/unreleased_20/12579.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. change:: - :tags: bug, engine - :tickets: 12579 - - The error message that is emitted when a URL cannot be parsed no longer - includes the URL itself within the error message. - diff --git a/doc/build/changelog/unreleased_20/12588.rst b/doc/build/changelog/unreleased_20/12588.rst deleted file mode 100644 index 2d30a768f75..00000000000 --- a/doc/build/changelog/unreleased_20/12588.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
change:: - :tags: bug, typing - :tickets: 12588 - - Removed ``__getattr__()`` rule from ``sqlalchemy/__init__.py`` that - appeared to be trying to correct for a previous typographical error in the - imports. This rule interferes with type checking and is removed. - diff --git a/doc/build/changelog/unreleased_20/use_pep639.rst b/doc/build/changelog/unreleased_20/use_pep639.rst deleted file mode 100644 index ff73d877288..00000000000 --- a/doc/build/changelog/unreleased_20/use_pep639.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, installation - - Removed the "license classifier" from setup.cfg for SQLAlchemy 2.0, which - eliminates loud deprecation warnings when building the package. SQLAlchemy - 2.1 will use a full :pep:`639` configuration in pyproject.toml while - SQLAlchemy 2.0 remains using ``setup.cfg`` for setup. - - diff --git a/doc/build/conf.py b/doc/build/conf.py index 846f1bd3304..b91d5cd9c8c 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -244,9 +244,9 @@ # The short X.Y version. version = "2.0" # The full version, including alpha/beta/rc tags. -release = "2.0.40" +release = "2.0.41" -release_date = "March 27, 2025" +release_date = "May 14, 2025" site_base = os.environ.get("RTD_SITE_BASE", "https://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" From 78746059e14b11833fa705249c326a481fe4093e Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 14 May 2025 13:11:05 -0400 Subject: [PATCH 525/544] Version 2.0.42 placeholder --- doc/build/changelog/changelog_20.rst | 4 ++++ lib/sqlalchemy/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_20.rst b/doc/build/changelog/changelog_20.rst index 4d9dca6d65f..4c607422b8e 100644 --- a/doc/build/changelog/changelog_20.rst +++ b/doc/build/changelog/changelog_20.rst @@ -8,6 +8,10 @@ :start-line: 5 +.. changelog:: + :version: 2.0.42 + :include_notes_from: unreleased_20 + .. changelog:: :version: 2.0.41 :released: May 14, 2025 diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 748879e160f..ec8060bccd0 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -269,7 +269,7 @@ from .types import VARBINARY as VARBINARY from .types import VARCHAR as VARCHAR -__version__ = "2.0.41" +__version__ = "2.0.42" def __go(lcls: Any) -> None: From 92a13fe0db614ffa68720e938d3d699b39170faf Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 15 May 2025 13:39:36 -0400 Subject: [PATCH 526/544] expand column options for composites up front at the attribute level Implemented the :func:`_orm.defer`, :func:`_orm.undefer` and :func:`_orm.load_only` loader options to work for composite attributes, a use case that had never been supported previously. Fixes: #12593 Change-Id: Ie7892a710f30b69c83f586f7492174a3b8198f80 (cherry picked from commit b25ce03c8d0d2a9d4f186b9b2b2c82b02b9645b7) --- doc/build/changelog/unreleased_20/12593.rst | 7 + lib/sqlalchemy/orm/attributes.py | 26 ++-- lib/sqlalchemy/orm/descriptor_props.py | 11 ++ lib/sqlalchemy/orm/strategy_options.py | 31 ++++- test/orm/test_composites.py | 140 +++++++++++++++++++- 5 files changed, 196 insertions(+), 19 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12593.rst diff --git a/doc/build/changelog/unreleased_20/12593.rst b/doc/build/changelog/unreleased_20/12593.rst new file mode 100644 index 00000000000..945e0d65f5b --- /dev/null +++ b/doc/build/changelog/unreleased_20/12593.rst @@ -0,0 +1,7 @@ +.. 
change:: + :tags: bug, orm + :tickets: 12593 + + Implemented the :func:`_orm.defer`, :func:`_orm.undefer` and + :func:`_orm.load_only` loader options to work for composite attributes, a + use case that had never been supported previously. diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index 8207b4cace2..283cbc60484 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -462,6 +462,9 @@ def hasparent( ) -> bool: return self.impl.hasparent(state, optimistic=optimistic) is not False + def _column_strategy_attrs(self) -> Sequence[QueryableAttribute[Any]]: + return (self,) + def __getattr__(self, key: str) -> Any: try: return util.MemoizedSlots.__getattr__(self, key) @@ -595,7 +598,7 @@ def create_proxied_attribute( # TODO: can move this to descriptor_props if the need for this # function is removed from ext/hybrid.py - class Proxy(QueryableAttribute[Any]): + class Proxy(QueryableAttribute[_T_co]): """Presents the :class:`.QueryableAttribute` interface as a proxy on top of a Python descriptor / :class:`.PropComparator` combination. @@ -610,13 +613,13 @@ class Proxy(QueryableAttribute[Any]): def __init__( self, - class_, - key, - descriptor, - comparator, - adapt_to_entity=None, - doc=None, - original_property=None, + class_: _ExternalEntityType[Any], + key: str, + descriptor: Any, + comparator: interfaces.PropComparator[_T_co], + adapt_to_entity: Optional[AliasedInsp[Any]] = None, + doc: Optional[str] = None, + original_property: Optional[QueryableAttribute[_T_co]] = None, ): self.class_ = class_ self.key = key @@ -641,6 +644,13 @@ def parent(self): ("_parententity", visitors.ExtendedInternalTraversal.dp_multi), ] + def _column_strategy_attrs(self) -> Sequence[QueryableAttribute[Any]]: + prop = self.original_property + if prop is None: + return () + else: + return prop._column_strategy_attrs() + @property def _impl_uses_objects(self): return ( diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py index f01cc1788b3..2d1ec13f19e 100644 --- a/lib/sqlalchemy/orm/descriptor_props.py +++ b/lib/sqlalchemy/orm/descriptor_props.py @@ -99,6 +99,11 @@ class DescriptorProperty(MapperProperty[_T]): descriptor: DescriptorReference[Any] + def _column_strategy_attrs(self) -> Sequence[QueryableAttribute[Any]]: + raise NotImplementedError( + "This MapperProperty does not implement column loader strategies" + ) + def get_history( self, state: InstanceState[Any], @@ -500,6 +505,9 @@ def props(self) -> Sequence[MapperProperty[Any]]: props.append(prop) return props + def _column_strategy_attrs(self) -> Sequence[QueryableAttribute[Any]]: + return self._comparable_elements + @util.non_memoized_property @util.preload_module("orm.properties") def columns(self) -> Sequence[Column[Any]]: @@ -999,6 +1007,9 @@ def _proxied_object( ) return attr.property + def _column_strategy_attrs(self) -> Sequence[QueryableAttribute[Any]]: + return (getattr(self.parent.class_, self.name),) + def _comparator_factory(self, mapper: Mapper[Any]) -> SQLORMOperations[_T]: prop = self._proxied_object diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index f2e6948a7ba..17bbe353495 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -6,9 +6,7 @@ # the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: allow-untyped-defs, allow-untyped-calls -""" - -""" +""" """ from __future__ import annotations @@ -224,7 +222,7 @@ def 
load_only(self, *attrs: _AttrType, raiseload: bool = False) -> Self: """ cloned = self._set_column_strategy( - attrs, + _expand_column_strategy_attrs(attrs), {"deferred": False, "instrument": True}, ) @@ -638,7 +636,9 @@ def defer(self, key: _AttrType, raiseload: bool = False) -> Self: strategy = {"deferred": True, "instrument": True} if raiseload: strategy["raiseload"] = True - return self._set_column_strategy((key,), strategy) + return self._set_column_strategy( + _expand_column_strategy_attrs((key,)), strategy + ) def undefer(self, key: _AttrType) -> Self: r"""Indicate that the given column-oriented attribute should be @@ -677,7 +677,8 @@ def undefer(self, key: _AttrType) -> Self: """ # noqa: E501 return self._set_column_strategy( - (key,), {"deferred": False, "instrument": True} + _expand_column_strategy_attrs((key,)), + {"deferred": False, "instrument": True}, ) def undefer_group(self, name: str) -> Self: @@ -2394,6 +2395,23 @@ def loader_unbound_fn(fn: _FN) -> _FN: return fn +def _expand_column_strategy_attrs( + attrs: Tuple[_AttrType, ...], +) -> Tuple[_AttrType, ...]: + return cast( + "Tuple[_AttrType, ...]", + tuple( + a + for attr in attrs + for a in ( + cast("QueryableAttribute[Any]", attr)._column_strategy_attrs() + if hasattr(attr, "_column_strategy_attrs") + else (attr,) + ) + ), + ) + + # standalone functions follow. docstrings are filled in # by the ``@loader_unbound_fn`` decorator. @@ -2407,6 +2425,7 @@ def contains_eager(*keys: _AttrType, **kw: Any) -> _AbstractLoad: def load_only(*attrs: _AttrType, raiseload: bool = False) -> _AbstractLoad: # TODO: attrs against different classes. we likely have to # add some extra state to Load of some kind + attrs = _expand_column_strategy_attrs(attrs) _, lead_element, _ = _parse_attr_argument(attrs[0]) return Load(lead_element).load_only(*attrs, raiseload=raiseload) diff --git a/test/orm/test_composites.py b/test/orm/test_composites.py index f9a1ba38659..cd205be5b48 100644 --- a/test/orm/test_composites.py +++ b/test/orm/test_composites.py @@ -16,9 +16,13 @@ from sqlalchemy.orm import Composite from sqlalchemy.orm import composite from sqlalchemy.orm import configure_mappers +from sqlalchemy.orm import defer +from sqlalchemy.orm import load_only from sqlalchemy.orm import mapped_column from sqlalchemy.orm import relationship from sqlalchemy.orm import Session +from sqlalchemy.orm import undefer +from sqlalchemy.orm import undefer_group from sqlalchemy.orm.attributes import LoaderCallableStatus from sqlalchemy.testing import assert_raises_message from sqlalchemy.testing import eq_ @@ -1470,7 +1474,7 @@ def test_query_aliased(self): eq_(sess.query(ae).filter(ae.c == C("a2b1", b2)).one(), a2) -class ConfigurationTest(fixtures.MappedTest): +class ConfigAndDeferralTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table( @@ -1508,7 +1512,7 @@ def __ne__(self, other): class Edge(cls.Comparable): pass - def _test_roundtrip(self): + def _test_roundtrip(self, *, assert_deferred=False, options=()): Edge, Point = self.classes.Edge, self.classes.Point e1 = Edge(start=Point(3, 4), end=Point(5, 6)) @@ -1516,7 +1520,19 @@ def _test_roundtrip(self): sess.add(e1) sess.commit() - eq_(sess.query(Edge).one(), Edge(start=Point(3, 4), end=Point(5, 6))) + stmt = select(Edge) + if options: + stmt = stmt.options(*options) + e1 = sess.execute(stmt).scalar_one() + + names = ["start", "end", "x1", "x2", "y1", "y2"] + for name in names: + if assert_deferred: + assert name not in e1.__dict__ + else: + assert name in e1.__dict__ + + eq_(e1, 
Edge(start=Point(3, 4), end=Point(5, 6))) def test_columns(self): edge, Edge, Point = ( @@ -1562,7 +1578,7 @@ def test_strings(self): self._test_roundtrip() - def test_deferred(self): + def test_deferred_config(self): edge, Edge, Point = ( self.tables.edge, self.classes.Edge, @@ -1580,7 +1596,121 @@ def test_deferred(self): ), }, ) - self._test_roundtrip() + self._test_roundtrip(assert_deferred=True) + + def test_defer_option_on_cols(self): + edge, Edge, Point = ( + self.tables.edge, + self.classes.Edge, + self.classes.Point, + ) + self.mapper_registry.map_imperatively( + Edge, + edge, + properties={ + "start": sa.orm.composite( + Point, + edge.c.x1, + edge.c.y1, + ), + "end": sa.orm.composite( + Point, + edge.c.x2, + edge.c.y2, + ), + }, + ) + self._test_roundtrip( + assert_deferred=True, + options=( + defer(Edge.x1), + defer(Edge.x2), + defer(Edge.y1), + defer(Edge.y2), + ), + ) + + def test_defer_option_on_composite(self): + edge, Edge, Point = ( + self.tables.edge, + self.classes.Edge, + self.classes.Point, + ) + self.mapper_registry.map_imperatively( + Edge, + edge, + properties={ + "start": sa.orm.composite( + Point, + edge.c.x1, + edge.c.y1, + ), + "end": sa.orm.composite( + Point, + edge.c.x2, + edge.c.y2, + ), + }, + ) + self._test_roundtrip( + assert_deferred=True, options=(defer(Edge.start), defer(Edge.end)) + ) + + @testing.variation("composite_only", [True, False]) + def test_load_only_option_on_composite(self, composite_only): + edge, Edge, Point = ( + self.tables.edge, + self.classes.Edge, + self.classes.Point, + ) + self.mapper_registry.map_imperatively( + Edge, + edge, + properties={ + "start": sa.orm.composite( + Point, edge.c.x1, edge.c.y1, deferred=True + ), + "end": sa.orm.composite( + Point, + edge.c.x2, + edge.c.y2, + ), + }, + ) + + if composite_only: + self._test_roundtrip( + assert_deferred=False, + options=(load_only(Edge.start, Edge.end),), + ) + else: + self._test_roundtrip( + assert_deferred=False, + options=(load_only(Edge.start, Edge.x2, Edge.y2),), + ) + + def test_defer_option_on_composite_via_group(self): + edge, Edge, Point = ( + self.tables.edge, + self.classes.Edge, + self.classes.Point, + ) + self.mapper_registry.map_imperatively( + Edge, + edge, + properties={ + "start": sa.orm.composite( + Point, edge.c.x1, edge.c.y1, deferred=True, group="s" + ), + "end": sa.orm.composite( + Point, edge.c.x2, edge.c.y2, deferred=True + ), + }, + ) + self._test_roundtrip( + assert_deferred=False, + options=(undefer_group("s"), undefer(Edge.end)), + ) def test_check_prop_type(self): edge, Edge, Point = ( From beab61a926dbcce1ec4db3c514602af2d589b11c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 16 May 2025 10:33:03 -0400 Subject: [PATCH 527/544] i think we dont need DOMAIN.adapt() this seems to be redundant vs. what constructor copy does. 
Issues are afoot w/ domain in any case see multiple issues at [1] [1] https://github.com/sqlalchemy/sqlalchemy/discussions/12592 Change-Id: I49879df6b78170435f021889f8f56ec43abc75c7 Change-Id: Id8fba884d47f3a494764262e23b3cc889f2cd033 (cherry picked from commit 37e1654bff3415856fc217f687bb0fbfac6666ba) --- lib/sqlalchemy/dialects/postgresql/named_types.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/named_types.py b/lib/sqlalchemy/dialects/postgresql/named_types.py index c9d6e5844cf..5807041ead3 100644 --- a/lib/sqlalchemy/dialects/postgresql/named_types.py +++ b/lib/sqlalchemy/dialects/postgresql/named_types.py @@ -503,20 +503,6 @@ def __init__( def __test_init__(cls): return cls("name", sqltypes.Integer) - def adapt(self, impl, **kw): - if self.default: - kw["default"] = self.default - if self.constraint_name is not None: - kw["constraint_name"] = self.constraint_name - if self.not_null: - kw["not_null"] = self.not_null - if self.check is not None: - kw["check"] = str(self.check) - if self.create_type: - kw["create_type"] = self.create_type - - return super().adapt(impl, **kw) - class CreateEnumType(schema._CreateDropBase): __visit_name__ = "create_enum_type" From eca7a97de36ceb5ef58652a486dc6125aa5fa21a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 18 May 2025 13:54:09 -0400 Subject: [PATCH 528/544] backport mysql / base portions of #10415 to 2.0 For a backport of the mysql typing change in [1] to be useful, we need to have most of the asyncio typing installed as well. To make this easier include that we will backport aiomysql / asyncmy over to the connectors/asyncio.py connector which is already in use. [1] https://gerrit.sqlalchemy.org/c/sqlalchemy/sqlalchemy/+/5829 Change-Id: I0414ed1f736a329ecdc9a662dbee71d621a463ae --- lib/sqlalchemy/connectors/aioodbc.py | 10 + lib/sqlalchemy/connectors/asyncio.py | 263 ++++++++++++++++------ lib/sqlalchemy/dialects/mysql/aiomysql.py | 172 ++------------ lib/sqlalchemy/dialects/mysql/asyncmy.py | 192 +++------------- 4 files changed, 247 insertions(+), 390 deletions(-) diff --git a/lib/sqlalchemy/connectors/aioodbc.py b/lib/sqlalchemy/connectors/aioodbc.py index 39b2a8a2382..6e4b864e7dc 100644 --- a/lib/sqlalchemy/connectors/aioodbc.py +++ b/lib/sqlalchemy/connectors/aioodbc.py @@ -20,6 +20,7 @@ from ..util.concurrency import await_fallback from ..util.concurrency import await_only + if TYPE_CHECKING: from ..engine.interfaces import ConnectArgsType from ..engine.url import URL @@ -58,6 +59,15 @@ def autocommit(self, value): self._connection._conn.autocommit = value + def ping(self, reconnect): + return self.await_(self._connection.ping(reconnect)) + + def add_output_converter(self, *arg, **kw): + self._connection.add_output_converter(*arg, **kw) + + def character_set_name(self): + return self._connection.character_set_name() + def cursor(self, server_side=False): # aioodbc sets connection=None when closed and just fails with # AttributeError here. 
Here we use the same ProgrammingError + diff --git a/lib/sqlalchemy/connectors/asyncio.py b/lib/sqlalchemy/connectors/asyncio.py index c4f0d715413..c036d3fc7e6 100644 --- a/lib/sqlalchemy/connectors/asyncio.py +++ b/lib/sqlalchemy/connectors/asyncio.py @@ -4,18 +4,102 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors """generic asyncio-adapted versions of DBAPI connection and cursor""" from __future__ import annotations +import asyncio import collections +import sys +from typing import Any +from typing import AsyncIterator +from typing import Deque +from typing import Iterator +from typing import NoReturn +from typing import Optional +from typing import Sequence from ..engine import AdaptedConnection -from ..util.concurrency import asyncio +from ..engine.interfaces import _DBAPICursorDescription +from ..engine.interfaces import _DBAPIMultiExecuteParams +from ..engine.interfaces import _DBAPISingleExecuteParams from ..util.concurrency import await_fallback from ..util.concurrency import await_only +from ..util.typing import Protocol +from ..util.typing import Self + + +class AsyncIODBAPIConnection(Protocol): + """protocol representing an async adapted version of a + :pep:`249` database connection. + + + """ + + async def close(self) -> None: ... + + async def commit(self) -> None: ... + + def cursor(self, *args: Any, **kwargs: Any) -> AsyncIODBAPICursor: ... + + async def rollback(self) -> None: ... + + +class AsyncIODBAPICursor(Protocol): + """protocol representing an async adapted version + of a :pep:`249` database cursor. + + + """ + + def __aenter__(self) -> Any: ... + + @property + def description( + self, + ) -> _DBAPICursorDescription: + """The description attribute of the Cursor.""" + ... + + @property + def rowcount(self) -> int: ... + + arraysize: int + + lastrowid: int + + async def close(self) -> None: ... + + async def execute( + self, + operation: Any, + parameters: Optional[_DBAPISingleExecuteParams] = None, + ) -> Any: ... + + async def executemany( + self, + operation: Any, + parameters: _DBAPIMultiExecuteParams, + ) -> Any: ... + + async def fetchone(self) -> Optional[Any]: ... + + async def fetchmany(self, size: Optional[int] = ...) -> Sequence[Any]: ... + + async def fetchall(self) -> Sequence[Any]: ... + + async def setinputsizes(self, sizes: Sequence[Any]) -> None: ... + + def setoutputsize(self, size: Any, column: Any) -> None: ... + + async def callproc( + self, procname: str, parameters: Sequence[Any] = ... + ) -> Any: ... + + async def nextset(self) -> Optional[bool]: ... + + def __aiter__(self) -> AsyncIterator[Any]: ... 
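+
+# Illustrative sketch only (the driver and module names below are
+# placeholders, not taken from any particular dialect): an async driver
+# connection satisfying AsyncIODBAPIConnection is wrapped once at connect
+# time and then used through the blocking pep-249 style facade defined
+# below::
+#
+#     raw_conn = await_only(some_async_driver.connect(...))
+#     adapted = AsyncAdapt_dbapi_connection(driver_dbapi, raw_conn)
+#     cursor = adapted.cursor()
+#     cursor.execute("SELECT 1")
+#     rows = cursor.fetchall()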
class AsyncAdapt_dbapi_cursor: @@ -28,96 +112,136 @@ class AsyncAdapt_dbapi_cursor: "_rows", ) - def __init__(self, adapt_connection): + _cursor: AsyncIODBAPICursor + _adapt_connection: AsyncAdapt_dbapi_connection + _connection: AsyncIODBAPIConnection + _rows: Deque[Any] + + def __init__(self, adapt_connection: AsyncAdapt_dbapi_connection): self._adapt_connection = adapt_connection self._connection = adapt_connection._connection + self.await_ = adapt_connection.await_ - cursor = self._connection.cursor() + cursor = self._make_new_cursor(self._connection) self._cursor = self._aenter_cursor(cursor) if not self.server_side: self._rows = collections.deque() - def _aenter_cursor(self, cursor): - return self.await_(cursor.__aenter__()) + def _aenter_cursor(self, cursor: AsyncIODBAPICursor) -> AsyncIODBAPICursor: + return self.await_(cursor.__aenter__()) # type: ignore[no-any-return] + + def _make_new_cursor( + self, connection: AsyncIODBAPIConnection + ) -> AsyncIODBAPICursor: + return connection.cursor() @property - def description(self): + def description(self) -> Optional[_DBAPICursorDescription]: return self._cursor.description @property - def rowcount(self): + def rowcount(self) -> int: return self._cursor.rowcount @property - def arraysize(self): + def arraysize(self) -> int: return self._cursor.arraysize @arraysize.setter - def arraysize(self, value): + def arraysize(self, value: int) -> None: self._cursor.arraysize = value @property - def lastrowid(self): + def lastrowid(self) -> int: return self._cursor.lastrowid - def close(self): + def close(self) -> None: # note we aren't actually closing the cursor here, # we are just letting GC do it. see notes in aiomysql dialect self._rows.clear() - def execute(self, operation, parameters=None): - return self.await_(self._execute_async(operation, parameters)) - - def executemany(self, operation, seq_of_parameters): - return self.await_( - self._executemany_async(operation, seq_of_parameters) - ) + def execute( + self, + operation: Any, + parameters: Optional[_DBAPISingleExecuteParams] = None, + ) -> Any: + try: + return self.await_(self._execute_async(operation, parameters)) + except Exception as error: + self._adapt_connection._handle_exception(error) + + def executemany( + self, + operation: Any, + seq_of_parameters: _DBAPIMultiExecuteParams, + ) -> Any: + try: + return self.await_( + self._executemany_async(operation, seq_of_parameters) + ) + except Exception as error: + self._adapt_connection._handle_exception(error) - async def _execute_async(self, operation, parameters): + async def _execute_async( + self, operation: Any, parameters: Optional[_DBAPISingleExecuteParams] + ) -> Any: async with self._adapt_connection._execute_mutex: - result = await self._cursor.execute(operation, parameters or ()) + if parameters is None: + result = await self._cursor.execute(operation) + else: + result = await self._cursor.execute(operation, parameters) if self._cursor.description and not self.server_side: self._rows = collections.deque(await self._cursor.fetchall()) return result - async def _executemany_async(self, operation, seq_of_parameters): + async def _executemany_async( + self, + operation: Any, + seq_of_parameters: _DBAPIMultiExecuteParams, + ) -> Any: async with self._adapt_connection._execute_mutex: return await self._cursor.executemany(operation, seq_of_parameters) - def nextset(self): + def nextset(self) -> None: self.await_(self._cursor.nextset()) if self._cursor.description and not self.server_side: self._rows = collections.deque( 
self.await_(self._cursor.fetchall()) ) - def setinputsizes(self, *inputsizes): + def setinputsizes(self, *inputsizes: Any) -> None: # NOTE: this is overrridden in aioodbc due to # see https://github.com/aio-libs/aioodbc/issues/451 # right now return self.await_(self._cursor.setinputsizes(*inputsizes)) - def __iter__(self): + def __enter__(self) -> Self: + return self + + def __exit__(self, type_: Any, value: Any, traceback: Any) -> None: + self.close() + + def __iter__(self) -> Iterator[Any]: while self._rows: yield self._rows.popleft() - def fetchone(self): + def fetchone(self) -> Optional[Any]: if self._rows: return self._rows.popleft() else: return None - def fetchmany(self, size=None): + def fetchmany(self, size: Optional[int] = None) -> Sequence[Any]: if size is None: size = self.arraysize rr = self._rows return [rr.popleft() for _ in range(min(size, len(rr)))] - def fetchall(self): + def fetchall(self) -> Sequence[Any]: retval = list(self._rows) self._rows.clear() return retval @@ -127,30 +251,21 @@ class AsyncAdapt_dbapi_ss_cursor(AsyncAdapt_dbapi_cursor): __slots__ = () server_side = True - def __init__(self, adapt_connection): - self._adapt_connection = adapt_connection - self._connection = adapt_connection._connection - self.await_ = adapt_connection.await_ - - cursor = self._connection.cursor() - - self._cursor = self.await_(cursor.__aenter__()) - - def close(self): + def close(self) -> None: if self._cursor is not None: self.await_(self._cursor.close()) - self._cursor = None + self._cursor = None # type: ignore - def fetchone(self): + def fetchone(self) -> Optional[Any]: return self.await_(self._cursor.fetchone()) - def fetchmany(self, size=None): + def fetchmany(self, size: Optional[int] = None) -> Any: return self.await_(self._cursor.fetchmany(size=size)) - def fetchall(self): + def fetchall(self) -> Sequence[Any]: return self.await_(self._cursor.fetchall()) - def __iter__(self): + def __iter__(self) -> Iterator[Any]: iterator = self._cursor.__aiter__() while True: try: @@ -164,46 +279,50 @@ class AsyncAdapt_dbapi_connection(AdaptedConnection): _ss_cursor_cls = AsyncAdapt_dbapi_ss_cursor await_ = staticmethod(await_only) + __slots__ = ("dbapi", "_execute_mutex") - def __init__(self, dbapi, connection): + _connection: AsyncIODBAPIConnection + + def __init__(self, dbapi: Any, connection: AsyncIODBAPIConnection): self.dbapi = dbapi self._connection = connection self._execute_mutex = asyncio.Lock() - def ping(self, reconnect): - return self.await_(self._connection.ping(reconnect)) - - def add_output_converter(self, *arg, **kw): - self._connection.add_output_converter(*arg, **kw) - - def character_set_name(self): - return self._connection.character_set_name() - - @property - def autocommit(self): - return self._connection.autocommit - - @autocommit.setter - def autocommit(self, value): - # https://github.com/aio-libs/aioodbc/issues/448 - # self._connection.autocommit = value - - self._connection._conn.autocommit = value - - def cursor(self, server_side=False): + def cursor(self, server_side: bool = False) -> AsyncAdapt_dbapi_cursor: if server_side: return self._ss_cursor_cls(self) else: return self._cursor_cls(self) - def rollback(self): - self.await_(self._connection.rollback()) - - def commit(self): - self.await_(self._connection.commit()) - - def close(self): + def execute( + self, + operation: Any, + parameters: Optional[_DBAPISingleExecuteParams] = None, + ) -> Any: + """lots of DBAPIs seem to provide this, so include it""" + cursor = self.cursor() + cursor.execute(operation, 
parameters) + return cursor + + def _handle_exception(self, error: Exception) -> NoReturn: + exc_info = sys.exc_info() + + raise error.with_traceback(exc_info[2]) + + def rollback(self) -> None: + try: + self.await_(self._connection.rollback()) + except Exception as error: + self._handle_exception(error) + + def commit(self) -> None: + try: + self.await_(self._connection.commit()) + except Exception as error: + self._handle_exception(error) + + def close(self) -> None: self.await_(self._connection.close()) diff --git a/lib/sqlalchemy/dialects/mysql/aiomysql.py b/lib/sqlalchemy/dialects/mysql/aiomysql.py index bd5e7de6b4f..ea11f3bc87d 100644 --- a/lib/sqlalchemy/dialects/mysql/aiomysql.py +++ b/lib/sqlalchemy/dialects/mysql/aiomysql.py @@ -29,162 +29,42 @@ ) """ # noqa -from collections import deque - from .pymysql import MySQLDialect_pymysql from ... import pool from ... import util -from ...engine import AdaptedConnection -from ...util.concurrency import asyncio +from ...connectors.asyncio import AsyncAdapt_dbapi_connection +from ...connectors.asyncio import AsyncAdapt_dbapi_cursor +from ...connectors.asyncio import AsyncAdapt_dbapi_ss_cursor from ...util.concurrency import await_fallback from ...util.concurrency import await_only -class AsyncAdapt_aiomysql_cursor: - # TODO: base on connectors/asyncio.py - # see #10415 - server_side = False - __slots__ = ( - "_adapt_connection", - "_connection", - "await_", - "_cursor", - "_rows", - ) - - def __init__(self, adapt_connection): - self._adapt_connection = adapt_connection - self._connection = adapt_connection._connection - self.await_ = adapt_connection.await_ - - cursor = self._connection.cursor(adapt_connection.dbapi.Cursor) - - # see https://github.com/aio-libs/aiomysql/issues/543 - self._cursor = self.await_(cursor.__aenter__()) - self._rows = deque() - - @property - def description(self): - return self._cursor.description - - @property - def rowcount(self): - return self._cursor.rowcount - - @property - def arraysize(self): - return self._cursor.arraysize - - @arraysize.setter - def arraysize(self, value): - self._cursor.arraysize = value - - @property - def lastrowid(self): - return self._cursor.lastrowid - - def close(self): - # note we aren't actually closing the cursor here, - # we are just letting GC do it. to allow this to be async - # we would need the Result to change how it does "Safe close cursor". - # MySQL "cursors" don't actually have state to be "closed" besides - # exhausting rows, which we already have done for sync cursor. - # another option would be to emulate aiosqlite dialect and assign - # cursor only if we are doing server side cursor operation. - self._rows.clear() - - def execute(self, operation, parameters=None): - return self.await_(self._execute_async(operation, parameters)) - - def executemany(self, operation, seq_of_parameters): - return self.await_( - self._executemany_async(operation, seq_of_parameters) - ) - - async def _execute_async(self, operation, parameters): - async with self._adapt_connection._execute_mutex: - result = await self._cursor.execute(operation, parameters) - - if not self.server_side: - # aiomysql has a "fake" async result, so we have to pull it out - # of that here since our default result is not async. - # we could just as easily grab "_rows" here and be done with it - # but this is safer. 
- self._rows = deque(await self._cursor.fetchall()) - return result - - async def _executemany_async(self, operation, seq_of_parameters): - async with self._adapt_connection._execute_mutex: - return await self._cursor.executemany(operation, seq_of_parameters) - - def setinputsizes(self, *inputsizes): - pass - - def __iter__(self): - while self._rows: - yield self._rows.popleft() - - def fetchone(self): - if self._rows: - return self._rows.popleft() - else: - return None - - def fetchmany(self, size=None): - if size is None: - size = self.arraysize - - rr = self._rows - return [rr.popleft() for _ in range(min(size, len(rr)))] - - def fetchall(self): - retval = list(self._rows) - self._rows.clear() - return retval - - -class AsyncAdapt_aiomysql_ss_cursor(AsyncAdapt_aiomysql_cursor): - # TODO: base on connectors/asyncio.py - # see #10415 +class AsyncAdapt_aiomysql_cursor(AsyncAdapt_dbapi_cursor): __slots__ = () - server_side = True - - def __init__(self, adapt_connection): - self._adapt_connection = adapt_connection - self._connection = adapt_connection._connection - self.await_ = adapt_connection.await_ - cursor = self._connection.cursor(adapt_connection.dbapi.SSCursor) + def _make_new_cursor(self, connection): + return connection.cursor(self._adapt_connection.dbapi.Cursor) - self._cursor = self.await_(cursor.__aenter__()) - def close(self): - if self._cursor is not None: - self.await_(self._cursor.close()) - self._cursor = None - - def fetchone(self): - return self.await_(self._cursor.fetchone()) - - def fetchmany(self, size=None): - return self.await_(self._cursor.fetchmany(size=size)) +class AsyncAdapt_aiomysql_ss_cursor( + AsyncAdapt_dbapi_ss_cursor, AsyncAdapt_aiomysql_cursor +): + __slots__ = () - def fetchall(self): - return self.await_(self._cursor.fetchall()) + def _make_new_cursor(self, connection): + return connection.cursor( + self._adapt_connection.dbapi.aiomysql.cursors.SSCursor + ) -class AsyncAdapt_aiomysql_connection(AdaptedConnection): - # TODO: base on connectors/asyncio.py - # see #10415 - await_ = staticmethod(await_only) - __slots__ = ("dbapi", "_execute_mutex") +class AsyncAdapt_aiomysql_connection(AsyncAdapt_dbapi_connection): + __slots__ = () - def __init__(self, dbapi, connection): - self.dbapi = dbapi - self._connection = connection - self._execute_mutex = asyncio.Lock() + _cursor_cls = AsyncAdapt_aiomysql_cursor + _ss_cursor_cls = AsyncAdapt_aiomysql_ss_cursor def ping(self, reconnect): + assert not reconnect return self.await_(self._connection.ping(reconnect)) def character_set_name(self): @@ -193,18 +73,6 @@ def character_set_name(self): def autocommit(self, value): self.await_(self._connection.autocommit(value)) - def cursor(self, server_side=False): - if server_side: - return AsyncAdapt_aiomysql_ss_cursor(self) - else: - return AsyncAdapt_aiomysql_cursor(self) - - def rollback(self): - self.await_(self._connection.rollback()) - - def commit(self): - self.await_(self._connection.commit()) - def terminate(self): # it's not awaitable. 
self._connection.close() @@ -214,8 +82,6 @@ def close(self) -> None: class AsyncAdaptFallback_aiomysql_connection(AsyncAdapt_aiomysql_connection): - # TODO: base on connectors/asyncio.py - # see #10415 __slots__ = () await_ = staticmethod(await_fallback) diff --git a/lib/sqlalchemy/dialects/mysql/asyncmy.py b/lib/sqlalchemy/dialects/mysql/asyncmy.py index 9ec54e694da..179d6c2035b 100644 --- a/lib/sqlalchemy/dialects/mysql/asyncmy.py +++ b/lib/sqlalchemy/dialects/mysql/asyncmy.py @@ -27,183 +27,57 @@ ) """ # noqa -from collections import deque -from contextlib import asynccontextmanager +from __future__ import annotations from .pymysql import MySQLDialect_pymysql from ... import pool from ... import util -from ...engine import AdaptedConnection -from ...util.concurrency import asyncio +from ...connectors.asyncio import AsyncAdapt_dbapi_connection +from ...connectors.asyncio import AsyncAdapt_dbapi_cursor +from ...connectors.asyncio import AsyncAdapt_dbapi_ss_cursor from ...util.concurrency import await_fallback from ...util.concurrency import await_only -class AsyncAdapt_asyncmy_cursor: - # TODO: base on connectors/asyncio.py - # see #10415 - server_side = False - __slots__ = ( - "_adapt_connection", - "_connection", - "await_", - "_cursor", - "_rows", - ) - - def __init__(self, adapt_connection): - self._adapt_connection = adapt_connection - self._connection = adapt_connection._connection - self.await_ = adapt_connection.await_ - - cursor = self._connection.cursor() - - self._cursor = self.await_(cursor.__aenter__()) - self._rows = deque() - - @property - def description(self): - return self._cursor.description - - @property - def rowcount(self): - return self._cursor.rowcount - - @property - def arraysize(self): - return self._cursor.arraysize - - @arraysize.setter - def arraysize(self, value): - self._cursor.arraysize = value - - @property - def lastrowid(self): - return self._cursor.lastrowid - - def close(self): - # note we aren't actually closing the cursor here, - # we are just letting GC do it. to allow this to be async - # we would need the Result to change how it does "Safe close cursor". - # MySQL "cursors" don't actually have state to be "closed" besides - # exhausting rows, which we already have done for sync cursor. - # another option would be to emulate aiosqlite dialect and assign - # cursor only if we are doing server side cursor operation. - self._rows.clear() - - def execute(self, operation, parameters=None): - return self.await_(self._execute_async(operation, parameters)) - - def executemany(self, operation, seq_of_parameters): - return self.await_( - self._executemany_async(operation, seq_of_parameters) - ) - - async def _execute_async(self, operation, parameters): - async with self._adapt_connection._mutex_and_adapt_errors(): - if parameters is None: - result = await self._cursor.execute(operation) - else: - result = await self._cursor.execute(operation, parameters) - - if not self.server_side: - # asyncmy has a "fake" async result, so we have to pull it out - # of that here since our default result is not async. - # we could just as easily grab "_rows" here and be done with it - # but this is safer. 
- self._rows = deque(await self._cursor.fetchall()) - return result - - async def _executemany_async(self, operation, seq_of_parameters): - async with self._adapt_connection._mutex_and_adapt_errors(): - return await self._cursor.executemany(operation, seq_of_parameters) - - def setinputsizes(self, *inputsizes): - pass - - def __iter__(self): - while self._rows: - yield self._rows.popleft() - - def fetchone(self): - if self._rows: - return self._rows.popleft() - else: - return None - - def fetchmany(self, size=None): - if size is None: - size = self.arraysize - - rr = self._rows - return [rr.popleft() for _ in range(min(size, len(rr)))] - - def fetchall(self): - retval = list(self._rows) - self._rows.clear() - return retval +class AsyncAdapt_asyncmy_cursor(AsyncAdapt_dbapi_cursor): + __slots__ = () -class AsyncAdapt_asyncmy_ss_cursor(AsyncAdapt_asyncmy_cursor): - # TODO: base on connectors/asyncio.py - # see #10415 +class AsyncAdapt_asyncmy_ss_cursor( + AsyncAdapt_dbapi_ss_cursor, AsyncAdapt_asyncmy_cursor +): __slots__ = () - server_side = True - def __init__(self, adapt_connection): - self._adapt_connection = adapt_connection - self._connection = adapt_connection._connection - self.await_ = adapt_connection.await_ - - cursor = self._connection.cursor( - adapt_connection.dbapi.asyncmy.cursors.SSCursor + def _make_new_cursor(self, connection): + return connection.cursor( + self._adapt_connection.dbapi.asyncmy.cursors.SSCursor ) - self._cursor = self.await_(cursor.__aenter__()) - - def close(self): - if self._cursor is not None: - self.await_(self._cursor.close()) - self._cursor = None - - def fetchone(self): - return self.await_(self._cursor.fetchone()) - - def fetchmany(self, size=None): - return self.await_(self._cursor.fetchmany(size=size)) - - def fetchall(self): - return self.await_(self._cursor.fetchall()) +class AsyncAdapt_asyncmy_connection(AsyncAdapt_dbapi_connection): + __slots__ = () -class AsyncAdapt_asyncmy_connection(AdaptedConnection): - # TODO: base on connectors/asyncio.py - # see #10415 - await_ = staticmethod(await_only) - __slots__ = ("dbapi", "_execute_mutex") + _cursor_cls = AsyncAdapt_asyncmy_cursor + _ss_cursor_cls = AsyncAdapt_asyncmy_ss_cursor - def __init__(self, dbapi, connection): - self.dbapi = dbapi - self._connection = connection - self._execute_mutex = asyncio.Lock() + def _handle_exception(self, error): + if isinstance(error, AttributeError): + raise self.dbapi.InternalError( + "network operation failed due to asyncmy attribute error" + ) - @asynccontextmanager - async def _mutex_and_adapt_errors(self): - async with self._execute_mutex: - try: - yield - except AttributeError: - raise self.dbapi.InternalError( - "network operation failed due to asyncmy attribute error" - ) + raise error def ping(self, reconnect): assert not reconnect return self.await_(self._do_ping()) async def _do_ping(self): - async with self._mutex_and_adapt_errors(): - return await self._connection.ping(False) + try: + async with self._execute_mutex: + return await self._connection.ping(False) + except Exception as error: + self._handle_exception(error) def character_set_name(self): return self._connection.character_set_name() @@ -211,18 +85,6 @@ def character_set_name(self): def autocommit(self, value): self.await_(self._connection.autocommit(value)) - def cursor(self, server_side=False): - if server_side: - return AsyncAdapt_asyncmy_ss_cursor(self) - else: - return AsyncAdapt_asyncmy_cursor(self) - - def rollback(self): - self.await_(self._connection.rollback()) - - def commit(self): 
- self.await_(self._connection.commit()) - def terminate(self): # it's not awaitable. self._connection.close() From c1dc687472422eba82dfbdc117aa194623aa03d7 Mon Sep 17 00:00:00 2001 From: Denodo Research Labs <65558872+denodo-research-labs@users.noreply.github.com> Date: Mon, 19 May 2025 22:19:34 +0200 Subject: [PATCH 529/544] Update index.rst in dialects docs to include Denodo (#12604) (cherry picked from commit 279cd787ca12792d401bf9b45f2895c7b5dc0c77) --- doc/build/dialects/index.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 535b13552a4..bca807355c6 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -86,6 +86,8 @@ Currently maintained external dialect projects for SQLAlchemy include: +------------------------------------------------+---------------------------------------+ | Databricks | databricks_ | +------------------------------------------------+---------------------------------------+ +| Denodo | denodo-sqlalchemy_ | ++------------------------------------------------+---------------------------------------+ | EXASolution | sqlalchemy_exasol_ | +------------------------------------------------+---------------------------------------+ | Elasticsearch (readonly) | elasticsearch-dbapi_ | @@ -179,3 +181,4 @@ Currently maintained external dialect projects for SQLAlchemy include: .. _sqlalchemy-kinetica: https://github.com/kineticadb/sqlalchemy-kinetica/ .. _sqlalchemy-tidb: https://github.com/pingcap/sqlalchemy-tidb .. _ydb-sqlalchemy: https://github.com/ydb-platform/ydb-sqlalchemy/ +.. _denodo-sqlalchemy: https://pypi.org/project/denodo-sqlalchemy/ From f0ed77355b9f64ce426bc921a134ad958f989a1f Mon Sep 17 00:00:00 2001 From: Pablo Estevez Date: Tue, 13 May 2025 09:39:19 -0400 Subject: [PATCH 530/544] Type mysql dialect Closes: #12164 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12164 Pull-request-sha: 545e2c39d5ee4f3938111b26e098fa2aa2b6e800 Co-authored-by: Mike Bayer Change-Id: I37bd98049ff1a64d58e9490b0e5e2ea764dd1f73 (cherry picked from commit d89db542e419ac83ce1a43a5c2bf3c8225d6d2e9) --- lib/sqlalchemy/connectors/asyncio.py | 29 +- lib/sqlalchemy/connectors/pyodbc.py | 8 +- lib/sqlalchemy/dialects/__init__.py | 3 +- lib/sqlalchemy/dialects/mysql/aiomysql.py | 100 +- lib/sqlalchemy/dialects/mysql/asyncmy.py | 83 +- lib/sqlalchemy/dialects/mysql/base.py | 883 ++++++++++++------ lib/sqlalchemy/dialects/mysql/cymysql.py | 46 +- lib/sqlalchemy/dialects/mysql/enumerated.py | 91 +- lib/sqlalchemy/dialects/mysql/expression.py | 9 +- lib/sqlalchemy/dialects/mysql/json.py | 38 +- lib/sqlalchemy/dialects/mysql/mariadb.py | 18 +- .../dialects/mysql/mariadbconnector.py | 104 ++- .../dialects/mysql/mysqlconnector.py | 120 ++- lib/sqlalchemy/dialects/mysql/mysqldb.py | 106 ++- lib/sqlalchemy/dialects/mysql/provision.py | 1 - lib/sqlalchemy/dialects/mysql/pymysql.py | 42 +- lib/sqlalchemy/dialects/mysql/pyodbc.py | 46 +- lib/sqlalchemy/dialects/mysql/reflection.py | 124 ++- .../dialects/mysql/reserved_words.py | 1 - lib/sqlalchemy/dialects/mysql/types.py | 171 ++-- lib/sqlalchemy/engine/default.py | 9 +- lib/sqlalchemy/engine/interfaces.py | 39 +- lib/sqlalchemy/pool/base.py | 2 + lib/sqlalchemy/sql/compiler.py | 2 +- lib/sqlalchemy/sql/ddl.py | 2 + lib/sqlalchemy/sql/elements.py | 6 +- lib/sqlalchemy/sql/functions.py | 2 +- lib/sqlalchemy/sql/sqltypes.py | 5 +- lib/sqlalchemy/sql/type_api.py | 6 +- 29 files changed, 1429 insertions(+), 667 deletions(-) diff --git 
a/lib/sqlalchemy/connectors/asyncio.py b/lib/sqlalchemy/connectors/asyncio.py index c036d3fc7e6..fda21b6d6f0 100644 --- a/lib/sqlalchemy/connectors/asyncio.py +++ b/lib/sqlalchemy/connectors/asyncio.py @@ -19,15 +19,19 @@ from typing import NoReturn from typing import Optional from typing import Sequence +from typing import TYPE_CHECKING from ..engine import AdaptedConnection -from ..engine.interfaces import _DBAPICursorDescription -from ..engine.interfaces import _DBAPIMultiExecuteParams -from ..engine.interfaces import _DBAPISingleExecuteParams from ..util.concurrency import await_fallback from ..util.concurrency import await_only from ..util.typing import Protocol -from ..util.typing import Self + +if TYPE_CHECKING: + from ..engine.interfaces import _DBAPICursorDescription + from ..engine.interfaces import _DBAPIMultiExecuteParams + from ..engine.interfaces import _DBAPISingleExecuteParams + from ..engine.interfaces import DBAPIModule + from ..util.typing import Self class AsyncIODBAPIConnection(Protocol): @@ -37,7 +41,8 @@ class AsyncIODBAPIConnection(Protocol): """ - async def close(self) -> None: ... + # note that async DBAPIs dont agree if close() should be awaitable, + # so it is omitted here and picked up by the __getattr__ hook below async def commit(self) -> None: ... @@ -45,6 +50,10 @@ def cursor(self, *args: Any, **kwargs: Any) -> AsyncIODBAPICursor: ... async def rollback(self) -> None: ... + def __getattr__(self, key: str) -> Any: ... + + def __setattr__(self, key: str, value: Any) -> None: ... + class AsyncIODBAPICursor(Protocol): """protocol representing an async adapted version @@ -102,6 +111,16 @@ async def nextset(self) -> Optional[bool]: ... def __aiter__(self) -> AsyncIterator[Any]: ... +class AsyncAdapt_dbapi_module: + if TYPE_CHECKING: + Error = DBAPIModule.Error + OperationalError = DBAPIModule.OperationalError + InterfaceError = DBAPIModule.InterfaceError + IntegrityError = DBAPIModule.IntegrityError + + def __getattr__(self, key: str) -> Any: ... + + class AsyncAdapt_dbapi_cursor: server_side = False __slots__ = ( diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py index 091ff2042f1..766493e2e0c 100644 --- a/lib/sqlalchemy/connectors/pyodbc.py +++ b/lib/sqlalchemy/connectors/pyodbc.py @@ -8,7 +8,6 @@ from __future__ import annotations import re -from types import ModuleType import typing from typing import Any from typing import Dict @@ -29,6 +28,7 @@ from ..sql.type_api import TypeEngine if typing.TYPE_CHECKING: + from ..engine.interfaces import DBAPIModule from ..engine.interfaces import IsolationLevel @@ -48,15 +48,13 @@ class PyODBCConnector(Connector): # hold the desired driver name pyodbc_driver_name: Optional[str] = None - dbapi: ModuleType - def __init__(self, use_setinputsizes: bool = False, **kw: Any): super().__init__(**kw) if use_setinputsizes: self.bind_typing = interfaces.BindTyping.SETINPUTSIZES @classmethod - def import_dbapi(cls) -> ModuleType: + def import_dbapi(cls) -> DBAPIModule: return __import__("pyodbc") def create_connect_args(self, url: URL) -> ConnectArgsType: @@ -150,7 +148,7 @@ def is_disconnect( ], cursor: Optional[interfaces.DBAPICursor], ) -> bool: - if isinstance(e, self.dbapi.ProgrammingError): + if isinstance(e, self.loaded_dbapi.ProgrammingError): return "The cursor's connection has been closed." in str( e ) or "Attempt to use a closed connection." 
in str(e) diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py index 31ce6d64b52..30928a98455 100644 --- a/lib/sqlalchemy/dialects/__init__.py +++ b/lib/sqlalchemy/dialects/__init__.py @@ -7,6 +7,7 @@ from __future__ import annotations +from typing import Any from typing import Callable from typing import Optional from typing import Type @@ -39,7 +40,7 @@ def _auto_fn(name: str) -> Optional[Callable[[], Type[Dialect]]]: # hardcoded. if mysql / mariadb etc were third party dialects # they would just publish all the entrypoints, which would actually # look much nicer. - module = __import__( + module: Any = __import__( "sqlalchemy.dialects.mysql.mariadb" ).dialects.mysql.mariadb return module.loader(driver) # type: ignore diff --git a/lib/sqlalchemy/dialects/mysql/aiomysql.py b/lib/sqlalchemy/dialects/mysql/aiomysql.py index ea11f3bc87d..314c78adee8 100644 --- a/lib/sqlalchemy/dialects/mysql/aiomysql.py +++ b/lib/sqlalchemy/dialects/mysql/aiomysql.py @@ -4,7 +4,6 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors r""" .. dialect:: mysql+aiomysql @@ -29,20 +28,44 @@ ) """ # noqa +from __future__ import annotations + +from types import ModuleType +from typing import Any +from typing import Dict +from typing import Optional +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + from .pymysql import MySQLDialect_pymysql from ... import pool from ... import util from ...connectors.asyncio import AsyncAdapt_dbapi_connection from ...connectors.asyncio import AsyncAdapt_dbapi_cursor +from ...connectors.asyncio import AsyncAdapt_dbapi_module from ...connectors.asyncio import AsyncAdapt_dbapi_ss_cursor from ...util.concurrency import await_fallback from ...util.concurrency import await_only +if TYPE_CHECKING: + + from ...connectors.asyncio import AsyncIODBAPIConnection + from ...connectors.asyncio import AsyncIODBAPICursor + from ...engine.interfaces import ConnectArgsType + from ...engine.interfaces import DBAPIConnection + from ...engine.interfaces import DBAPICursor + from ...engine.interfaces import DBAPIModule + from ...engine.interfaces import PoolProxiedConnection + from ...engine.url import URL + class AsyncAdapt_aiomysql_cursor(AsyncAdapt_dbapi_cursor): __slots__ = () - def _make_new_cursor(self, connection): + def _make_new_cursor( + self, connection: AsyncIODBAPIConnection + ) -> AsyncIODBAPICursor: return connection.cursor(self._adapt_connection.dbapi.Cursor) @@ -51,7 +74,9 @@ class AsyncAdapt_aiomysql_ss_cursor( ): __slots__ = () - def _make_new_cursor(self, connection): + def _make_new_cursor( + self, connection: AsyncIODBAPIConnection + ) -> AsyncIODBAPICursor: return connection.cursor( self._adapt_connection.dbapi.aiomysql.cursors.SSCursor ) @@ -63,17 +88,17 @@ class AsyncAdapt_aiomysql_connection(AsyncAdapt_dbapi_connection): _cursor_cls = AsyncAdapt_aiomysql_cursor _ss_cursor_cls = AsyncAdapt_aiomysql_ss_cursor - def ping(self, reconnect): + def ping(self, reconnect: bool) -> None: assert not reconnect - return self.await_(self._connection.ping(reconnect)) + self.await_(self._connection.ping(reconnect)) - def character_set_name(self): - return self._connection.character_set_name() + def character_set_name(self) -> Optional[str]: + return self._connection.character_set_name() # type: ignore[no-any-return] # noqa: E501 - def autocommit(self, value): + def autocommit(self, value: Any) -> None: 
self.await_(self._connection.autocommit(value)) - def terminate(self): + def terminate(self) -> None: # it's not awaitable. self._connection.close() @@ -87,15 +112,15 @@ class AsyncAdaptFallback_aiomysql_connection(AsyncAdapt_aiomysql_connection): await_ = staticmethod(await_fallback) -class AsyncAdapt_aiomysql_dbapi: - def __init__(self, aiomysql, pymysql): +class AsyncAdapt_aiomysql_dbapi(AsyncAdapt_dbapi_module): + def __init__(self, aiomysql: ModuleType, pymysql: ModuleType): self.aiomysql = aiomysql self.pymysql = pymysql self.paramstyle = "format" self._init_dbapi_attributes() self.Cursor, self.SSCursor = self._init_cursors_subclasses() - def _init_dbapi_attributes(self): + def _init_dbapi_attributes(self) -> None: for name in ( "Warning", "Error", @@ -121,7 +146,7 @@ def _init_dbapi_attributes(self): ): setattr(self, name, getattr(self.pymysql, name)) - def connect(self, *arg, **kw): + def connect(self, *arg: Any, **kw: Any) -> AsyncAdapt_aiomysql_connection: async_fallback = kw.pop("async_fallback", False) creator_fn = kw.pop("async_creator_fn", self.aiomysql.connect) @@ -136,37 +161,43 @@ def connect(self, *arg, **kw): await_only(creator_fn(*arg, **kw)), ) - def _init_cursors_subclasses(self): + def _init_cursors_subclasses( + self, + ) -> Tuple[AsyncIODBAPICursor, AsyncIODBAPICursor]: # suppress unconditional warning emitted by aiomysql - class Cursor(self.aiomysql.Cursor): - async def _show_warnings(self, conn): + class Cursor(self.aiomysql.Cursor): # type: ignore[misc, name-defined] + async def _show_warnings( + self, conn: AsyncIODBAPIConnection + ) -> None: pass - class SSCursor(self.aiomysql.SSCursor): - async def _show_warnings(self, conn): + class SSCursor(self.aiomysql.SSCursor): # type: ignore[misc, name-defined] # noqa: E501 + async def _show_warnings( + self, conn: AsyncIODBAPIConnection + ) -> None: pass - return Cursor, SSCursor + return Cursor, SSCursor # type: ignore[return-value] class MySQLDialect_aiomysql(MySQLDialect_pymysql): driver = "aiomysql" supports_statement_cache = True - supports_server_side_cursors = True + supports_server_side_cursors = True # type: ignore[assignment] _sscursor = AsyncAdapt_aiomysql_ss_cursor is_async = True has_terminate = True @classmethod - def import_dbapi(cls): + def import_dbapi(cls) -> AsyncAdapt_aiomysql_dbapi: return AsyncAdapt_aiomysql_dbapi( __import__("aiomysql"), __import__("pymysql") ) @classmethod - def get_pool_class(cls, url): + def get_pool_class(cls, url: URL) -> type: async_fallback = url.query.get("async_fallback", False) if util.asbool(async_fallback): @@ -174,28 +205,37 @@ def get_pool_class(cls, url): else: return pool.AsyncAdaptedQueuePool - def do_terminate(self, dbapi_connection) -> None: + def do_terminate(self, dbapi_connection: DBAPIConnection) -> None: dbapi_connection.terminate() - def create_connect_args(self, url): + def create_connect_args( + self, url: URL, _translate_args: Optional[Dict[str, Any]] = None + ) -> ConnectArgsType: return super().create_connect_args( url, _translate_args=dict(username="user", database="db") ) - def is_disconnect(self, e, connection, cursor): + def is_disconnect( + self, + e: DBAPIModule.Error, + connection: Optional[Union[PoolProxiedConnection, DBAPIConnection]], + cursor: Optional[DBAPICursor], + ) -> bool: if super().is_disconnect(e, connection, cursor): return True else: str_e = str(e).lower() return "not connected" in str_e - def _found_rows_client_flag(self): - from pymysql.constants import CLIENT + def _found_rows_client_flag(self) -> int: + from 
pymysql.constants import CLIENT # type: ignore - return CLIENT.FOUND_ROWS + return CLIENT.FOUND_ROWS # type: ignore[no-any-return] - def get_driver_connection(self, connection): - return connection._connection + def get_driver_connection( + self, connection: DBAPIConnection + ) -> AsyncIODBAPIConnection: + return connection._connection # type: ignore[no-any-return] dialect = MySQLDialect_aiomysql diff --git a/lib/sqlalchemy/dialects/mysql/asyncmy.py b/lib/sqlalchemy/dialects/mysql/asyncmy.py index 179d6c2035b..32a45c0d35d 100644 --- a/lib/sqlalchemy/dialects/mysql/asyncmy.py +++ b/lib/sqlalchemy/dialects/mysql/asyncmy.py @@ -4,7 +4,6 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors r""" .. dialect:: mysql+asyncmy @@ -29,15 +28,33 @@ """ # noqa from __future__ import annotations +from types import ModuleType +from typing import Any +from typing import NoReturn +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union + from .pymysql import MySQLDialect_pymysql from ... import pool from ... import util from ...connectors.asyncio import AsyncAdapt_dbapi_connection from ...connectors.asyncio import AsyncAdapt_dbapi_cursor +from ...connectors.asyncio import AsyncAdapt_dbapi_module from ...connectors.asyncio import AsyncAdapt_dbapi_ss_cursor from ...util.concurrency import await_fallback from ...util.concurrency import await_only +if TYPE_CHECKING: + from ...connectors.asyncio import AsyncIODBAPIConnection + from ...connectors.asyncio import AsyncIODBAPICursor + from ...engine.interfaces import ConnectArgsType + from ...engine.interfaces import DBAPIConnection + from ...engine.interfaces import DBAPICursor + from ...engine.interfaces import DBAPIModule + from ...engine.interfaces import PoolProxiedConnection + from ...engine.url import URL + class AsyncAdapt_asyncmy_cursor(AsyncAdapt_dbapi_cursor): __slots__ = () @@ -48,7 +65,9 @@ class AsyncAdapt_asyncmy_ss_cursor( ): __slots__ = () - def _make_new_cursor(self, connection): + def _make_new_cursor( + self, connection: AsyncIODBAPIConnection + ) -> AsyncIODBAPICursor: return connection.cursor( self._adapt_connection.dbapi.asyncmy.cursors.SSCursor ) @@ -60,7 +79,7 @@ class AsyncAdapt_asyncmy_connection(AsyncAdapt_dbapi_connection): _cursor_cls = AsyncAdapt_asyncmy_cursor _ss_cursor_cls = AsyncAdapt_asyncmy_ss_cursor - def _handle_exception(self, error): + def _handle_exception(self, error: Exception) -> NoReturn: if isinstance(error, AttributeError): raise self.dbapi.InternalError( "network operation failed due to asyncmy attribute error" @@ -68,24 +87,24 @@ def _handle_exception(self, error): raise error - def ping(self, reconnect): + def ping(self, reconnect: bool) -> None: assert not reconnect return self.await_(self._do_ping()) - async def _do_ping(self): + async def _do_ping(self) -> None: try: async with self._execute_mutex: - return await self._connection.ping(False) + await self._connection.ping(False) except Exception as error: self._handle_exception(error) - def character_set_name(self): - return self._connection.character_set_name() + def character_set_name(self) -> Optional[str]: + return self._connection.character_set_name() # type: ignore[no-any-return] # noqa: E501 - def autocommit(self, value): + def autocommit(self, value: Any) -> None: self.await_(self._connection.autocommit(value)) - def terminate(self): + def terminate(self) -> None: # it's not awaitable. 
self._connection.close() @@ -99,18 +118,13 @@ class AsyncAdaptFallback_asyncmy_connection(AsyncAdapt_asyncmy_connection): await_ = staticmethod(await_fallback) -def _Binary(x): - """Return x as a binary type.""" - return bytes(x) - - -class AsyncAdapt_asyncmy_dbapi: - def __init__(self, asyncmy): +class AsyncAdapt_asyncmy_dbapi(AsyncAdapt_dbapi_module): + def __init__(self, asyncmy: ModuleType): self.asyncmy = asyncmy self.paramstyle = "format" self._init_dbapi_attributes() - def _init_dbapi_attributes(self): + def _init_dbapi_attributes(self) -> None: for name in ( "Warning", "Error", @@ -131,9 +145,9 @@ def _init_dbapi_attributes(self): BINARY = util.symbol("BINARY") DATETIME = util.symbol("DATETIME") TIMESTAMP = util.symbol("TIMESTAMP") - Binary = staticmethod(_Binary) + Binary = staticmethod(bytes) - def connect(self, *arg, **kw): + def connect(self, *arg: Any, **kw: Any) -> AsyncAdapt_asyncmy_connection: async_fallback = kw.pop("async_fallback", False) creator_fn = kw.pop("async_creator_fn", self.asyncmy.connect) @@ -153,18 +167,18 @@ class MySQLDialect_asyncmy(MySQLDialect_pymysql): driver = "asyncmy" supports_statement_cache = True - supports_server_side_cursors = True + supports_server_side_cursors = True # type: ignore[assignment] _sscursor = AsyncAdapt_asyncmy_ss_cursor is_async = True has_terminate = True @classmethod - def import_dbapi(cls): + def import_dbapi(cls) -> DBAPIModule: return AsyncAdapt_asyncmy_dbapi(__import__("asyncmy")) @classmethod - def get_pool_class(cls, url): + def get_pool_class(cls, url: URL) -> type: async_fallback = url.query.get("async_fallback", False) if util.asbool(async_fallback): @@ -172,15 +186,20 @@ def get_pool_class(cls, url): else: return pool.AsyncAdaptedQueuePool - def do_terminate(self, dbapi_connection) -> None: + def do_terminate(self, dbapi_connection: DBAPIConnection) -> None: dbapi_connection.terminate() - def create_connect_args(self, url): + def create_connect_args(self, url: URL) -> ConnectArgsType: # type: ignore[override] # noqa: E501 return super().create_connect_args( url, _translate_args=dict(username="user", database="db") ) - def is_disconnect(self, e, connection, cursor): + def is_disconnect( + self, + e: DBAPIModule.Error, + connection: Optional[Union[PoolProxiedConnection, DBAPIConnection]], + cursor: Optional[DBAPICursor], + ) -> bool: if super().is_disconnect(e, connection, cursor): return True else: @@ -189,13 +208,15 @@ def is_disconnect(self, e, connection, cursor): "not connected" in str_e or "network operation failed" in str_e ) - def _found_rows_client_flag(self): - from asyncmy.constants import CLIENT + def _found_rows_client_flag(self) -> int: + from asyncmy.constants import CLIENT # type: ignore - return CLIENT.FOUND_ROWS + return CLIENT.FOUND_ROWS # type: ignore[no-any-return] - def get_driver_connection(self, connection): - return connection._connection + def get_driver_connection( + self, connection: DBAPIConnection + ) -> AsyncIODBAPIConnection: + return connection._connection # type: ignore[no-any-return] dialect = MySQLDialect_asyncmy diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index a78c4e0f747..def897edf9c 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -4,7 +4,6 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors r""" @@ -1066,11 +1065,22 @@ class MyClass(Base): """ # noqa from __future__ import 
annotations -from array import array as _array from collections import defaultdict from itertools import compress import re +from typing import Any +from typing import Callable from typing import cast +from typing import DefaultDict +from typing import Dict +from typing import List +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union from . import reflection as _reflection from .enumerated import ENUM @@ -1113,7 +1123,6 @@ class MyClass(Base): from .types import YEAR from ... import exc from ... import literal_column -from ... import log from ... import schema as sa_schema from ... import sql from ... import util @@ -1137,10 +1146,46 @@ class MyClass(Base): from ...types import BLOB from ...types import BOOLEAN from ...types import DATE +from ...types import LargeBinary from ...types import UUID from ...types import VARBINARY from ...util import topological +if TYPE_CHECKING: + + from ...dialects.mysql import expression + from ...dialects.mysql.dml import OnDuplicateClause + from ...engine.base import Connection + from ...engine.cursor import CursorResult + from ...engine.interfaces import DBAPIConnection + from ...engine.interfaces import DBAPICursor + from ...engine.interfaces import DBAPIModule + from ...engine.interfaces import IsolationLevel + from ...engine.interfaces import PoolProxiedConnection + from ...engine.interfaces import ReflectedCheckConstraint + from ...engine.interfaces import ReflectedColumn + from ...engine.interfaces import ReflectedForeignKeyConstraint + from ...engine.interfaces import ReflectedIndex + from ...engine.interfaces import ReflectedPrimaryKeyConstraint + from ...engine.interfaces import ReflectedTableComment + from ...engine.interfaces import ReflectedUniqueConstraint + from ...engine.row import Row + from ...engine.url import URL + from ...schema import Table + from ...sql import ddl + from ...sql import selectable + from ...sql.dml import _DMLTableElement + from ...sql.dml import Delete + from ...sql.dml import Update + from ...sql.dml import ValuesBase + from ...sql.functions import aggregate_strings + from ...sql.functions import random + from ...sql.functions import rollup + from ...sql.functions import sysdate + from ...sql.schema import Sequence as Sequence_SchemaItem + from ...sql.type_api import TypeEngine + from ...sql.visitors import ExternallyTraversible + SET_RE = re.compile( r"\s*SET\s+(?:(?:GLOBAL|SESSION)\s+)?\w", re.I | re.UNICODE @@ -1235,7 +1280,7 @@ class MyClass(Base): class MySQLExecutionContext(default.DefaultExecutionContext): - def post_exec(self): + def post_exec(self) -> None: if ( self.isdelete and cast(SQLCompiler, self.compiled).effective_returning @@ -1252,7 +1297,7 @@ def post_exec(self): _cursor.FullyBufferedCursorFetchStrategy( self.cursor, [ - (entry.keyname, None) + (entry.keyname, None) # type: ignore[misc] for entry in cast( SQLCompiler, self.compiled )._result_columns @@ -1261,14 +1306,18 @@ def post_exec(self): ) ) - def create_server_side_cursor(self): + def create_server_side_cursor(self) -> DBAPICursor: if self.dialect.supports_server_side_cursors: - return self._dbapi_connection.cursor(self.dialect._sscursor) + return self._dbapi_connection.cursor( + self.dialect._sscursor # type: ignore[attr-defined] + ) else: raise NotImplementedError() - def fire_sequence(self, seq, type_): - return self._execute_scalar( + def fire_sequence( + self, seq: Sequence_SchemaItem, 
type_: sqltypes.Integer + ) -> int: + return self._execute_scalar( # type: ignore[no-any-return] ( "select nextval(%s)" % self.identifier_preparer.format_sequence(seq) @@ -1278,46 +1327,51 @@ def fire_sequence(self, seq, type_): class MySQLCompiler(compiler.SQLCompiler): + dialect: MySQLDialect render_table_with_column_in_update_from = True """Overridden from base SQLCompiler value""" extract_map = compiler.SQLCompiler.extract_map.copy() extract_map.update({"milliseconds": "millisecond"}) - def default_from(self): + def default_from(self) -> str: """Called when a ``SELECT`` statement has no froms, and no ``FROM`` clause is to be appended. """ if self.stack: stmt = self.stack[-1]["selectable"] - if stmt._where_criteria: + if stmt._where_criteria: # type: ignore[attr-defined] return " FROM DUAL" return "" - def visit_random_func(self, fn, **kw): + def visit_random_func(self, fn: random, **kw: Any) -> str: return "rand%s" % self.function_argspec(fn) - def visit_rollup_func(self, fn, **kw): + def visit_rollup_func(self, fn: rollup[Any], **kw: Any) -> str: clause = ", ".join( elem._compiler_dispatch(self, **kw) for elem in fn.clauses ) return f"{clause} WITH ROLLUP" - def visit_aggregate_strings_func(self, fn, **kw): + def visit_aggregate_strings_func( + self, fn: aggregate_strings, **kw: Any + ) -> str: expr, delimeter = ( elem._compiler_dispatch(self, **kw) for elem in fn.clauses ) return f"group_concat({expr} SEPARATOR {delimeter})" - def visit_sequence(self, seq, **kw): - return "nextval(%s)" % self.preparer.format_sequence(seq) + def visit_sequence(self, sequence: sa_schema.Sequence, **kw: Any) -> str: + return "nextval(%s)" % self.preparer.format_sequence(sequence) - def visit_sysdate_func(self, fn, **kw): + def visit_sysdate_func(self, fn: sysdate, **kw: Any) -> str: return "SYSDATE()" - def _render_json_extract_from_binary(self, binary, operator, **kw): + def _render_json_extract_from_binary( + self, binary: elements.BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: # note we are intentionally calling upon the process() calls in the # order in which they appear in the SQL String as this is used # by positional parameter rendering @@ -1344,9 +1398,10 @@ def _render_json_extract_from_binary(self, binary, operator, **kw): ) ) elif binary.type._type_affinity is sqltypes.Numeric: + binary_type = cast(sqltypes.Numeric[Any], binary.type) if ( - binary.type.scale is not None - and binary.type.precision is not None + binary_type.scale is not None + and binary_type.precision is not None ): # using DECIMAL here because MySQL does not recognize NUMERIC type_expression = ( @@ -1354,8 +1409,8 @@ def _render_json_extract_from_binary(self, binary, operator, **kw): % ( self.process(binary.left, **kw), self.process(binary.right, **kw), - binary.type.precision, - binary.type.scale, + binary_type.precision, + binary_type.scale, ) ) else: @@ -1389,15 +1444,22 @@ def _render_json_extract_from_binary(self, binary, operator, **kw): return case_expression + " " + type_expression + " END" - def visit_json_getitem_op_binary(self, binary, operator, **kw): + def visit_json_getitem_op_binary( + self, binary: elements.BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: return self._render_json_extract_from_binary(binary, operator, **kw) - def visit_json_path_getitem_op_binary(self, binary, operator, **kw): + def visit_json_path_getitem_op_binary( + self, binary: elements.BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: return self._render_json_extract_from_binary(binary, operator, **kw) - 
def visit_on_duplicate_key_update(self, on_duplicate, **kw): - statement = self.current_executable + def visit_on_duplicate_key_update( + self, on_duplicate: OnDuplicateClause, **kw: Any + ) -> str: + statement: ValuesBase = self.current_executable + cols: List[elements.KeyedColumnElement[Any]] if on_duplicate._parameter_ordering: parameter_ordering = [ coercions.expect(roles.DMLColumnRole, key) @@ -1410,7 +1472,7 @@ def visit_on_duplicate_key_update(self, on_duplicate, **kw): if key in statement.table.c ] + [c for c in statement.table.c if c.key not in ordered_keys] else: - cols = statement.table.c + cols = list(statement.table.c) clauses = [] @@ -1419,7 +1481,7 @@ def visit_on_duplicate_key_update(self, on_duplicate, **kw): ) if requires_mysql8_alias: - if statement.table.name.lower() == "new": + if statement.table.name.lower() == "new": # type: ignore[union-attr] # noqa: E501 _on_dup_alias_name = "new_1" else: _on_dup_alias_name = "new" @@ -1440,26 +1502,26 @@ def visit_on_duplicate_key_update(self, on_duplicate, **kw): value_text = self.process(val.self_group(), use_schema=False) else: - def replace(obj): + def replace( + element: ExternallyTraversible, **kw: Any + ) -> Optional[ExternallyTraversible]: if ( - isinstance(obj, elements.BindParameter) - and obj.type._isnull + isinstance(element, elements.BindParameter) + and element.type._isnull ): - obj = obj._clone() - obj.type = column.type - return obj + return element._with_binary_element_type(column.type) elif ( - isinstance(obj, elements.ColumnClause) - and obj.table is on_duplicate.inserted_alias + isinstance(element, elements.ColumnClause) + and element.table is on_duplicate.inserted_alias ): if requires_mysql8_alias: column_literal_clause = ( f"{_on_dup_alias_name}." - f"{self.preparer.quote(obj.name)}" + f"{self.preparer.quote(element.name)}" ) else: column_literal_clause = ( - f"VALUES({self.preparer.quote(obj.name)})" + f"VALUES({self.preparer.quote(element.name)})" ) return literal_column(column_literal_clause) else: @@ -1478,7 +1540,7 @@ def replace(obj): "Additional column names not matching " "any column keys in table '%s': %s" % ( - self.statement.table.name, + self.statement.table.name, # type: ignore[union-attr] (", ".join("'%s'" % c for c in non_matching)), ) ) @@ -1492,13 +1554,15 @@ def replace(obj): return f"ON DUPLICATE KEY UPDATE {', '.join(clauses)}" def visit_concat_op_expression_clauselist( - self, clauselist, operator, **kw - ): + self, clauselist: elements.ClauseList, operator: Any, **kw: Any + ) -> str: return "concat(%s)" % ( ", ".join(self.process(elem, **kw) for elem in clauselist.clauses) ) - def visit_concat_op_binary(self, binary, operator, **kw): + def visit_concat_op_binary( + self, binary: elements.BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: return "concat(%s, %s)" % ( self.process(binary.left, **kw), self.process(binary.right, **kw), @@ -1521,10 +1585,12 @@ def visit_concat_op_binary(self, binary, operator, **kw): "WITH QUERY EXPANSION", ) - def visit_mysql_match(self, element, **kw): + def visit_mysql_match(self, element: expression.match, **kw: Any) -> str: return self.visit_match_op_binary(element, element.operator, **kw) - def visit_match_op_binary(self, binary, operator, **kw): + def visit_match_op_binary( + self, binary: expression.match, operator: Any, **kw: Any + ) -> str: """ Note that `mysql_boolean_mode` is enabled by default because of backward compatibility @@ -1545,12 +1611,11 @@ def visit_match_op_binary(self, binary, operator, **kw): "with_query_expansion=%s" % 
query_expansion, ) - flags = ", ".join(flags) + flags_str = ", ".join(flags) - raise exc.CompileError("Invalid MySQL match flags: %s" % flags) + raise exc.CompileError("Invalid MySQL match flags: %s" % flags_str) - match_clause = binary.left - match_clause = self.process(match_clause, **kw) + match_clause = self.process(binary.left, **kw) against_clause = self.process(binary.right, **kw) if any(flag_combination): @@ -1559,21 +1624,25 @@ def visit_match_op_binary(self, binary, operator, **kw): flag_combination, ) - against_clause = [against_clause] - against_clause.extend(flag_expressions) - - against_clause = " ".join(against_clause) + against_clause = " ".join([against_clause, *flag_expressions]) return "MATCH (%s) AGAINST (%s)" % (match_clause, against_clause) - def get_from_hint_text(self, table, text): + def get_from_hint_text( + self, table: selectable.FromClause, text: Optional[str] + ) -> Optional[str]: return text - def visit_typeclause(self, typeclause, type_=None, **kw): + def visit_typeclause( + self, + typeclause: elements.TypeClause, + type_: Optional[TypeEngine[Any]] = None, + **kw: Any, + ) -> Optional[str]: if type_ is None: type_ = typeclause.type.dialect_impl(self.dialect) if isinstance(type_, sqltypes.TypeDecorator): - return self.visit_typeclause(typeclause, type_.impl, **kw) + return self.visit_typeclause(typeclause, type_.impl, **kw) # type: ignore[arg-type] # noqa: E501 elif isinstance(type_, sqltypes.Integer): if getattr(type_, "unsigned", False): return "UNSIGNED INTEGER" @@ -1612,7 +1681,7 @@ def visit_typeclause(self, typeclause, type_=None, **kw): else: return None - def visit_cast(self, cast, **kw): + def visit_cast(self, cast: elements.Cast[Any], **kw: Any) -> str: type_ = self.process(cast.typeclause) if type_ is None: util.warn( @@ -1626,7 +1695,9 @@ def visit_cast(self, cast, **kw): return "CAST(%s AS %s)" % (self.process(cast.clause, **kw), type_) - def render_literal_value(self, value, type_): + def render_literal_value( + self, value: Optional[str], type_: TypeEngine[Any] + ) -> str: value = super().render_literal_value(value, type_) if self.dialect._backslash_escapes: value = value.replace("\\", "\\\\") @@ -1634,13 +1705,15 @@ def render_literal_value(self, value, type_): # override native_boolean=False behavior here, as # MySQL still supports native boolean - def visit_true(self, element, **kw): + def visit_true(self, expr: elements.True_, **kw: Any) -> str: return "true" - def visit_false(self, element, **kw): + def visit_false(self, expr: elements.False_, **kw: Any) -> str: return "false" - def get_select_precolumns(self, select, **kw): + def get_select_precolumns( + self, select: selectable.Select[Any], **kw: Any + ) -> str: """Add special MySQL keywords in place of DISTINCT. .. deprecated:: 1.4 This usage is deprecated. 
@@ -1660,7 +1733,13 @@ def get_select_precolumns(self, select, **kw): return super().get_select_precolumns(select, **kw) - def visit_join(self, join, asfrom=False, from_linter=None, **kwargs): + def visit_join( + self, + join: selectable.Join, + asfrom: bool = False, + from_linter: Optional[compiler.FromLinter] = None, + **kwargs: Any, + ) -> str: if from_linter: from_linter.edges.add((join.left, join.right)) @@ -1681,18 +1760,21 @@ def visit_join(self, join, asfrom=False, from_linter=None, **kwargs): join.right, asfrom=True, from_linter=from_linter, **kwargs ), " ON ", - self.process(join.onclause, from_linter=from_linter, **kwargs), + self.process(join.onclause, from_linter=from_linter, **kwargs), # type: ignore[arg-type] # noqa: E501 ) ) - def for_update_clause(self, select, **kw): + def for_update_clause( + self, select: selectable.GenerativeSelect, **kw: Any + ) -> str: + assert select._for_update_arg is not None if select._for_update_arg.read: tmp = " LOCK IN SHARE MODE" else: tmp = " FOR UPDATE" if select._for_update_arg.of and self.dialect.supports_for_update_of: - tables = util.OrderedSet() + tables: util.OrderedSet[elements.ClauseElement] = util.OrderedSet() for c in select._for_update_arg.of: tables.update(sql_util.surface_selectables_only(c)) @@ -1709,7 +1791,9 @@ def for_update_clause(self, select, **kw): return tmp - def limit_clause(self, select, **kw): + def limit_clause( + self, select: selectable.GenerativeSelect, **kw: Any + ) -> str: # MySQL supports: # LIMIT # LIMIT , @@ -1745,24 +1829,31 @@ def limit_clause(self, select, **kw): self.process(limit_clause, **kw), ) else: + assert limit_clause is not None # No offset provided, so just use the limit return " \n LIMIT %s" % (self.process(limit_clause, **kw),) - def update_limit_clause(self, update_stmt): + def update_limit_clause(self, update_stmt: Update) -> Optional[str]: limit = update_stmt.kwargs.get("%s_limit" % self.dialect.name, None) if limit is not None: return f"LIMIT {int(limit)}" else: return None - def delete_limit_clause(self, delete_stmt): + def delete_limit_clause(self, delete_stmt: Delete) -> Optional[str]: limit = delete_stmt.kwargs.get("%s_limit" % self.dialect.name, None) if limit is not None: return f"LIMIT {int(limit)}" else: return None - def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): + def update_tables_clause( + self, + update_stmt: Update, + from_table: _DMLTableElement, + extra_froms: List[selectable.FromClause], + **kw: Any, + ) -> str: kw["asfrom"] = True return ", ".join( t._compiler_dispatch(self, **kw) @@ -1770,11 +1861,22 @@ def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): ) def update_from_clause( - self, update_stmt, from_table, extra_froms, from_hints, **kw - ): + self, + update_stmt: Update, + from_table: _DMLTableElement, + extra_froms: List[selectable.FromClause], + from_hints: Any, + **kw: Any, + ) -> None: return None - def delete_table_clause(self, delete_stmt, from_table, extra_froms, **kw): + def delete_table_clause( + self, + delete_stmt: Delete, + from_table: _DMLTableElement, + extra_froms: List[selectable.FromClause], + **kw: Any, + ) -> str: """If we have extra froms make sure we render any alias as hint.""" ashint = False if extra_froms: @@ -1784,8 +1886,13 @@ def delete_table_clause(self, delete_stmt, from_table, extra_froms, **kw): ) def delete_extra_from_clause( - self, delete_stmt, from_table, extra_froms, from_hints, **kw - ): + self, + delete_stmt: Delete, + from_table: _DMLTableElement, + extra_froms: 
List[selectable.FromClause], + from_hints: Any, + **kw: Any, + ) -> str: """Render the DELETE .. USING clause specific to MySQL.""" kw["asfrom"] = True return "USING " + ", ".join( @@ -1793,7 +1900,9 @@ def delete_extra_from_clause( for t in [from_table] + extra_froms ) - def visit_empty_set_expr(self, element_types, **kw): + def visit_empty_set_expr( + self, element_types: List[TypeEngine[Any]], **kw: Any + ) -> str: return ( "SELECT %(outer)s FROM (SELECT %(inner)s) " "as _empty_set WHERE 1!=1" @@ -1808,25 +1917,38 @@ def visit_empty_set_expr(self, element_types, **kw): } ) - def visit_is_distinct_from_binary(self, binary, operator, **kw): + def visit_is_distinct_from_binary( + self, binary: elements.BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: return "NOT (%s <=> %s)" % ( self.process(binary.left), self.process(binary.right), ) - def visit_is_not_distinct_from_binary(self, binary, operator, **kw): + def visit_is_not_distinct_from_binary( + self, binary: elements.BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: return "%s <=> %s" % ( self.process(binary.left), self.process(binary.right), ) - def _mariadb_regexp_flags(self, flags, pattern, **kw): + def _mariadb_regexp_flags( + self, flags: str, pattern: elements.ColumnElement[Any], **kw: Any + ) -> str: return "CONCAT('(?', %s, ')', %s)" % ( self.render_literal_value(flags, sqltypes.STRINGTYPE), self.process(pattern, **kw), ) - def _regexp_match(self, op_string, binary, operator, **kw): + def _regexp_match( + self, + op_string: str, + binary: elements.BinaryExpression[Any], + operator: Any, + **kw: Any, + ) -> str: + assert binary.modifiers is not None flags = binary.modifiers["flags"] if flags is None: return self._generate_generic_binary(binary, op_string, **kw) @@ -1847,13 +1969,20 @@ def _regexp_match(self, op_string, binary, operator, **kw): else: return text - def visit_regexp_match_op_binary(self, binary, operator, **kw): + def visit_regexp_match_op_binary( + self, binary: elements.BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: return self._regexp_match(" REGEXP ", binary, operator, **kw) - def visit_not_regexp_match_op_binary(self, binary, operator, **kw): + def visit_not_regexp_match_op_binary( + self, binary: elements.BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: return self._regexp_match(" NOT REGEXP ", binary, operator, **kw) - def visit_regexp_replace_op_binary(self, binary, operator, **kw): + def visit_regexp_replace_op_binary( + self, binary: elements.BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: + assert binary.modifiers is not None flags = binary.modifiers["flags"] if flags is None: return "REGEXP_REPLACE(%s, %s)" % ( @@ -1875,7 +2004,11 @@ def visit_regexp_replace_op_binary(self, binary, operator, **kw): class MySQLDDLCompiler(compiler.DDLCompiler): - def get_column_specification(self, column, **kw): + dialect: MySQLDialect + + def get_column_specification( + self, column: sa_schema.Column[Any], **kw: Any + ) -> str: """Builds column DDL.""" if ( self.dialect.is_mariadb is True @@ -1941,7 +2074,7 @@ def get_column_specification(self, column, **kw): colspec.append("DEFAULT " + default) return " ".join(colspec) - def post_create_table(self, table): + def post_create_table(self, table: sa_schema.Table) -> str: """Build table-level CREATE options like ENGINE and COLLATE.""" table_opts = [] @@ -2025,16 +2158,16 @@ def post_create_table(self, table): return " ".join(table_opts) - def visit_create_index(self, create, **kw): + def visit_create_index(self, create: 
ddl.CreateIndex, **kw: Any) -> str: # type: ignore[override] # noqa: E501 index = create.element self._verify_index_table(index) preparer = self.preparer - table = preparer.format_table(index.table) + table = preparer.format_table(index.table) # type: ignore[arg-type] columns = [ self.sql_compiler.process( ( - elements.Grouping(expr) + elements.Grouping(expr) # type: ignore[arg-type] if ( isinstance(expr, elements.BinaryExpression) or ( @@ -2073,10 +2206,10 @@ def visit_create_index(self, create, **kw): # length value can be a (column_name --> integer value) # mapping specifying the prefix length for each column of the # index - columns = ", ".join( + columns_str = ", ".join( ( - "%s(%d)" % (expr, length[col.name]) - if col.name in length + "%s(%d)" % (expr, length[col.name]) # type: ignore[union-attr] # noqa: E501 + if col.name in length # type: ignore[union-attr] else ( "%s(%d)" % (expr, length[expr]) if expr in length @@ -2088,12 +2221,12 @@ def visit_create_index(self, create, **kw): else: # or can be an integer value specifying the same # prefix length for all columns of the index - columns = ", ".join( + columns_str = ", ".join( "%s(%d)" % (col, length) for col in columns ) else: - columns = ", ".join(columns) - text += "(%s)" % columns + columns_str = ", ".join(columns) + text += "(%s)" % columns_str parser = index.dialect_options["mysql"]["with_parser"] if parser is not None: @@ -2105,14 +2238,16 @@ def visit_create_index(self, create, **kw): return text - def visit_primary_key_constraint(self, constraint, **kw): + def visit_primary_key_constraint( + self, constraint: sa_schema.PrimaryKeyConstraint, **kw: Any + ) -> str: text = super().visit_primary_key_constraint(constraint) using = constraint.dialect_options["mysql"]["using"] if using: text += " USING %s" % (self.preparer.quote(using)) return text - def visit_drop_index(self, drop, **kw): + def visit_drop_index(self, drop: ddl.DropIndex, **kw: Any) -> str: index = drop.element text = "\nDROP INDEX " if drop.if_exists: @@ -2120,10 +2255,12 @@ def visit_drop_index(self, drop, **kw): return text + "%s ON %s" % ( self._prepared_index_name(index, include_schema=False), - self.preparer.format_table(index.table), + self.preparer.format_table(index.table), # type: ignore[arg-type] ) - def visit_drop_constraint(self, drop, **kw): + def visit_drop_constraint( + self, drop: ddl.DropConstraint, **kw: Any + ) -> str: constraint = drop.element if isinstance(constraint, sa_schema.ForeignKeyConstraint): qual = "FOREIGN KEY " @@ -2149,7 +2286,9 @@ def visit_drop_constraint(self, drop, **kw): const, ) - def define_constraint_match(self, constraint): + def define_constraint_match( + self, constraint: sa_schema.ForeignKeyConstraint + ) -> str: if constraint.match is not None: raise exc.CompileError( "MySQL ignores the 'MATCH' keyword while at the same time " @@ -2157,7 +2296,9 @@ def define_constraint_match(self, constraint): ) return "" - def visit_set_table_comment(self, create, **kw): + def visit_set_table_comment( + self, create: ddl.SetTableComment, **kw: Any + ) -> str: return "ALTER TABLE %s COMMENT %s" % ( self.preparer.format_table(create.element), self.sql_compiler.render_literal_value( @@ -2165,12 +2306,16 @@ def visit_set_table_comment(self, create, **kw): ), ) - def visit_drop_table_comment(self, create, **kw): + def visit_drop_table_comment( + self, drop: ddl.DropTableComment, **kw: Any + ) -> str: return "ALTER TABLE %s COMMENT ''" % ( - self.preparer.format_table(create.element) + self.preparer.format_table(drop.element) ) - def 
visit_set_column_comment(self, create, **kw): + def visit_set_column_comment( + self, create: ddl.SetColumnComment, **kw: Any + ) -> str: return "ALTER TABLE %s CHANGE %s %s" % ( self.preparer.format_table(create.element.table), self.preparer.format_column(create.element), @@ -2179,7 +2324,7 @@ def visit_set_column_comment(self, create, **kw): class MySQLTypeCompiler(compiler.GenericTypeCompiler): - def _extend_numeric(self, type_, spec): + def _extend_numeric(self, type_: _NumericType, spec: str) -> str: "Extend a numeric-type declaration with MySQL specific extensions." if not self._mysql_type(type_): @@ -2191,13 +2336,15 @@ def _extend_numeric(self, type_, spec): spec += " ZEROFILL" return spec - def _extend_string(self, type_, defaults, spec): + def _extend_string( + self, type_: _StringType, defaults: Dict[str, Any], spec: str + ) -> str: """Extend a string-type declaration with standard SQL CHARACTER SET / COLLATE annotations and MySQL specific extensions. """ - def attr(name): + def attr(name: str) -> Any: return getattr(type_, name, defaults.get(name)) if attr("charset"): @@ -2207,6 +2354,7 @@ def attr(name): elif attr("unicode"): charset = "UNICODE" else: + charset = None if attr("collation"): @@ -2225,10 +2373,10 @@ def attr(name): [c for c in (spec, charset, collation) if c is not None] ) - def _mysql_type(self, type_): + def _mysql_type(self, type_: Any) -> bool: return isinstance(type_, (_StringType, _NumericType)) - def visit_NUMERIC(self, type_, **kw): + def visit_NUMERIC(self, type_: NUMERIC, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if type_.precision is None: return self._extend_numeric(type_, "NUMERIC") elif type_.scale is None: @@ -2243,7 +2391,7 @@ def visit_NUMERIC(self, type_, **kw): % {"precision": type_.precision, "scale": type_.scale}, ) - def visit_DECIMAL(self, type_, **kw): + def visit_DECIMAL(self, type_: DECIMAL, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if type_.precision is None: return self._extend_numeric(type_, "DECIMAL") elif type_.scale is None: @@ -2258,7 +2406,7 @@ def visit_DECIMAL(self, type_, **kw): % {"precision": type_.precision, "scale": type_.scale}, ) - def visit_DOUBLE(self, type_, **kw): + def visit_DOUBLE(self, type_: DOUBLE, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if type_.precision is not None and type_.scale is not None: return self._extend_numeric( type_, @@ -2268,7 +2416,7 @@ def visit_DOUBLE(self, type_, **kw): else: return self._extend_numeric(type_, "DOUBLE") - def visit_REAL(self, type_, **kw): + def visit_REAL(self, type_: REAL, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if type_.precision is not None and type_.scale is not None: return self._extend_numeric( type_, @@ -2278,7 +2426,7 @@ def visit_REAL(self, type_, **kw): else: return self._extend_numeric(type_, "REAL") - def visit_FLOAT(self, type_, **kw): + def visit_FLOAT(self, type_: FLOAT, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if ( self._mysql_type(type_) and type_.scale is not None @@ -2294,7 +2442,7 @@ def visit_FLOAT(self, type_, **kw): else: return self._extend_numeric(type_, "FLOAT") - def visit_INTEGER(self, type_, **kw): + def visit_INTEGER(self, type_: INTEGER, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric( type_, @@ -2304,7 +2452,7 @@ def visit_INTEGER(self, type_, **kw): else: return self._extend_numeric(type_, "INTEGER") - def visit_BIGINT(self, type_, **kw): + def visit_BIGINT(self, 
type_: BIGINT, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric( type_, @@ -2314,7 +2462,7 @@ def visit_BIGINT(self, type_, **kw): else: return self._extend_numeric(type_, "BIGINT") - def visit_MEDIUMINT(self, type_, **kw): + def visit_MEDIUMINT(self, type_: MEDIUMINT, **kw: Any) -> str: if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric( type_, @@ -2324,7 +2472,7 @@ def visit_MEDIUMINT(self, type_, **kw): else: return self._extend_numeric(type_, "MEDIUMINT") - def visit_TINYINT(self, type_, **kw): + def visit_TINYINT(self, type_: TINYINT, **kw: Any) -> str: if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric( type_, "TINYINT(%s)" % type_.display_width @@ -2332,7 +2480,7 @@ def visit_TINYINT(self, type_, **kw): else: return self._extend_numeric(type_, "TINYINT") - def visit_SMALLINT(self, type_, **kw): + def visit_SMALLINT(self, type_: SMALLINT, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric( type_, @@ -2342,55 +2490,55 @@ def visit_SMALLINT(self, type_, **kw): else: return self._extend_numeric(type_, "SMALLINT") - def visit_BIT(self, type_, **kw): + def visit_BIT(self, type_: BIT, **kw: Any) -> str: if type_.length is not None: return "BIT(%s)" % type_.length else: return "BIT" - def visit_DATETIME(self, type_, **kw): + def visit_DATETIME(self, type_: DATETIME, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if getattr(type_, "fsp", None): - return "DATETIME(%d)" % type_.fsp + return "DATETIME(%d)" % type_.fsp # type: ignore[str-format] else: return "DATETIME" - def visit_DATE(self, type_, **kw): + def visit_DATE(self, type_: DATE, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 return "DATE" - def visit_TIME(self, type_, **kw): + def visit_TIME(self, type_: TIME, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if getattr(type_, "fsp", None): - return "TIME(%d)" % type_.fsp + return "TIME(%d)" % type_.fsp # type: ignore[str-format] else: return "TIME" - def visit_TIMESTAMP(self, type_, **kw): + def visit_TIMESTAMP(self, type_: TIMESTAMP, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if getattr(type_, "fsp", None): - return "TIMESTAMP(%d)" % type_.fsp + return "TIMESTAMP(%d)" % type_.fsp # type: ignore[str-format] else: return "TIMESTAMP" - def visit_YEAR(self, type_, **kw): + def visit_YEAR(self, type_: YEAR, **kw: Any) -> str: if type_.display_width is None: return "YEAR" else: return "YEAR(%s)" % type_.display_width - def visit_TEXT(self, type_, **kw): + def visit_TEXT(self, type_: TEXT, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if type_.length is not None: return self._extend_string(type_, {}, "TEXT(%d)" % type_.length) else: return self._extend_string(type_, {}, "TEXT") - def visit_TINYTEXT(self, type_, **kw): + def visit_TINYTEXT(self, type_: TINYTEXT, **kw: Any) -> str: return self._extend_string(type_, {}, "TINYTEXT") - def visit_MEDIUMTEXT(self, type_, **kw): + def visit_MEDIUMTEXT(self, type_: MEDIUMTEXT, **kw: Any) -> str: return self._extend_string(type_, {}, "MEDIUMTEXT") - def visit_LONGTEXT(self, type_, **kw): + def visit_LONGTEXT(self, type_: LONGTEXT, **kw: Any) -> str: return self._extend_string(type_, {}, "LONGTEXT") - def visit_VARCHAR(self, type_, **kw): + def visit_VARCHAR(self, type_: VARCHAR, **kw: Any) -> str: # type: ignore[override] 
# NOQA: E501 if type_.length is not None: return self._extend_string(type_, {}, "VARCHAR(%d)" % type_.length) else: @@ -2398,7 +2546,7 @@ def visit_VARCHAR(self, type_, **kw): "VARCHAR requires a length on dialect %s" % self.dialect.name ) - def visit_CHAR(self, type_, **kw): + def visit_CHAR(self, type_: CHAR, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if type_.length is not None: return self._extend_string( type_, {}, "CHAR(%(length)s)" % {"length": type_.length} @@ -2406,7 +2554,7 @@ def visit_CHAR(self, type_, **kw): else: return self._extend_string(type_, {}, "CHAR") - def visit_NVARCHAR(self, type_, **kw): + def visit_NVARCHAR(self, type_: NVARCHAR, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 # We'll actually generate the equiv. "NATIONAL VARCHAR" instead # of "NVARCHAR". if type_.length is not None: @@ -2420,7 +2568,7 @@ def visit_NVARCHAR(self, type_, **kw): "NVARCHAR requires a length on dialect %s" % self.dialect.name ) - def visit_NCHAR(self, type_, **kw): + def visit_NCHAR(self, type_: NCHAR, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 # We'll actually generate the equiv. # "NATIONAL CHAR" instead of "NCHAR". if type_.length is not None: @@ -2432,40 +2580,42 @@ def visit_NCHAR(self, type_, **kw): else: return self._extend_string(type_, {"national": True}, "CHAR") - def visit_UUID(self, type_, **kw): + def visit_UUID(self, type_: UUID[Any], **kw: Any) -> str: # type: ignore[override] # NOQA: E501 return "UUID" - def visit_VARBINARY(self, type_, **kw): - return "VARBINARY(%d)" % type_.length + def visit_VARBINARY(self, type_: VARBINARY, **kw: Any) -> str: + return "VARBINARY(%d)" % type_.length # type: ignore[str-format] - def visit_JSON(self, type_, **kw): + def visit_JSON(self, type_: JSON, **kw: Any) -> str: return "JSON" - def visit_large_binary(self, type_, **kw): + def visit_large_binary(self, type_: LargeBinary, **kw: Any) -> str: return self.visit_BLOB(type_) - def visit_enum(self, type_, **kw): + def visit_enum(self, type_: ENUM, **kw: Any) -> str: # type: ignore[override] # NOQA: E501 if not type_.native_enum: return super().visit_enum(type_) else: return self._visit_enumerated_values("ENUM", type_, type_.enums) - def visit_BLOB(self, type_, **kw): + def visit_BLOB(self, type_: LargeBinary, **kw: Any) -> str: if type_.length is not None: return "BLOB(%d)" % type_.length else: return "BLOB" - def visit_TINYBLOB(self, type_, **kw): + def visit_TINYBLOB(self, type_: TINYBLOB, **kw: Any) -> str: return "TINYBLOB" - def visit_MEDIUMBLOB(self, type_, **kw): + def visit_MEDIUMBLOB(self, type_: MEDIUMBLOB, **kw: Any) -> str: return "MEDIUMBLOB" - def visit_LONGBLOB(self, type_, **kw): + def visit_LONGBLOB(self, type_: LONGBLOB, **kw: Any) -> str: return "LONGBLOB" - def _visit_enumerated_values(self, name, type_, enumerated_values): + def _visit_enumerated_values( + self, name: str, type_: _StringType, enumerated_values: Sequence[str] + ) -> str: quoted_enums = [] for e in enumerated_values: if self.dialect.identifier_preparer._double_percents: @@ -2475,20 +2625,25 @@ def _visit_enumerated_values(self, name, type_, enumerated_values): type_, {}, "%s(%s)" % (name, ",".join(quoted_enums)) ) - def visit_ENUM(self, type_, **kw): + def visit_ENUM(self, type_: ENUM, **kw: Any) -> str: return self._visit_enumerated_values("ENUM", type_, type_.enums) - def visit_SET(self, type_, **kw): + def visit_SET(self, type_: SET, **kw: Any) -> str: return self._visit_enumerated_values("SET", type_, type_.values) - def visit_BOOLEAN(self, type_, **kw): + def 
visit_BOOLEAN(self, type_: sqltypes.Boolean, **kw: Any) -> str: return "BOOL" class MySQLIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = RESERVED_WORDS_MYSQL - def __init__(self, dialect, server_ansiquotes=False, **kw): + def __init__( + self, + dialect: default.DefaultDialect, + server_ansiquotes: bool = False, + **kw: Any, + ): if not server_ansiquotes: quote = "`" else: @@ -2496,7 +2651,7 @@ def __init__(self, dialect, server_ansiquotes=False, **kw): super().__init__(dialect, initial_quote=quote, escape_quote=quote) - def _quote_free_identifiers(self, *ids): + def _quote_free_identifiers(self, *ids: Optional[str]) -> Tuple[str, ...]: """Unilaterally identifier-quote any number of strings.""" return tuple([self.quote_identifier(i) for i in ids if i is not None]) @@ -2506,7 +2661,6 @@ class MariaDBIdentifierPreparer(MySQLIdentifierPreparer): reserved_words = RESERVED_WORDS_MARIADB -@log.class_logger class MySQLDialect(default.DefaultDialect): """Details of the MySQL dialect. Not used directly in application code. @@ -2573,9 +2727,9 @@ class MySQLDialect(default.DefaultDialect): ddl_compiler = MySQLDDLCompiler type_compiler_cls = MySQLTypeCompiler ischema_names = ischema_names - preparer = MySQLIdentifierPreparer + preparer: type[MySQLIdentifierPreparer] = MySQLIdentifierPreparer - is_mariadb = False + is_mariadb: bool = False _mariadb_normalized_version_info = None # default SQL compilation settings - @@ -2584,6 +2738,9 @@ class MySQLDialect(default.DefaultDialect): _backslash_escapes = True _server_ansiquotes = False + server_version_info: Tuple[int, ...] + identifier_preparer: MySQLIdentifierPreparer + construct_arguments = [ (sa_schema.Table, {"*": None}), (sql.Update, {"limit": None}), @@ -2602,18 +2759,20 @@ class MySQLDialect(default.DefaultDialect): def __init__( self, - json_serializer=None, - json_deserializer=None, - is_mariadb=None, - **kwargs, - ): + json_serializer: Optional[Callable[..., Any]] = None, + json_deserializer: Optional[Callable[..., Any]] = None, + is_mariadb: Optional[bool] = None, + **kwargs: Any, + ) -> None: kwargs.pop("use_ansiquotes", None) # legacy default.DefaultDialect.__init__(self, **kwargs) self._json_serializer = json_serializer self._json_deserializer = json_deserializer - self._set_mariadb(is_mariadb, None) + self._set_mariadb(is_mariadb, ()) - def get_isolation_level_values(self, dbapi_conn): + def get_isolation_level_values( + self, dbapi_conn: DBAPIConnection + ) -> Sequence[IsolationLevel]: return ( "SERIALIZABLE", "READ UNCOMMITTED", @@ -2621,13 +2780,17 @@ def get_isolation_level_values(self, dbapi_conn): "REPEATABLE READ", ) - def set_isolation_level(self, dbapi_connection, level): + def set_isolation_level( + self, dbapi_connection: DBAPIConnection, level: IsolationLevel + ) -> None: cursor = dbapi_connection.cursor() cursor.execute(f"SET SESSION TRANSACTION ISOLATION LEVEL {level}") cursor.execute("COMMIT") cursor.close() - def get_isolation_level(self, dbapi_connection): + def get_isolation_level( + self, dbapi_connection: DBAPIConnection + ) -> IsolationLevel: cursor = dbapi_connection.cursor() if self._is_mysql and self.server_version_info >= (5, 7, 20): cursor.execute("SELECT @@transaction_isolation") @@ -2644,10 +2807,10 @@ def get_isolation_level(self, dbapi_connection): cursor.close() if isinstance(val, bytes): val = val.decode() - return val.upper().replace("-", " ") + return val.upper().replace("-", " ") # type: ignore[no-any-return] @classmethod - def _is_mariadb_from_url(cls, url): + def 
_is_mariadb_from_url(cls, url: URL) -> bool: dbapi = cls.import_dbapi() dialect = cls(dbapi=dbapi) @@ -2656,7 +2819,7 @@ def _is_mariadb_from_url(cls, url): try: cursor = conn.cursor() cursor.execute("SELECT VERSION() LIKE '%MariaDB%'") - val = cursor.fetchone()[0] + val = cursor.fetchone()[0] # type: ignore[index] except: raise else: @@ -2664,22 +2827,25 @@ def _is_mariadb_from_url(cls, url): finally: conn.close() - def _get_server_version_info(self, connection): + def _get_server_version_info( + self, connection: Connection + ) -> Tuple[int, ...]: # get database server version info explicitly over the wire # to avoid proxy servers like MaxScale getting in the # way with their own values, see #4205 dbapi_con = connection.connection cursor = dbapi_con.cursor() cursor.execute("SELECT VERSION()") - val = cursor.fetchone()[0] + + val = cursor.fetchone()[0] # type: ignore[index] cursor.close() if isinstance(val, bytes): val = val.decode() return self._parse_server_version(val) - def _parse_server_version(self, val): - version = [] + def _parse_server_version(self, val: str) -> Tuple[int, ...]: + version: List[int] = [] is_mariadb = False r = re.compile(r"[.\-+]") @@ -2700,7 +2866,7 @@ def _parse_server_version(self, val): server_version_info = tuple(version) self._set_mariadb( - server_version_info and is_mariadb, server_version_info + bool(server_version_info and is_mariadb), server_version_info ) if not is_mariadb: @@ -2716,7 +2882,9 @@ def _parse_server_version(self, val): self.server_version_info = server_version_info return server_version_info - def _set_mariadb(self, is_mariadb, server_version_info): + def _set_mariadb( + self, is_mariadb: Optional[bool], server_version_info: Tuple[int, ...] + ) -> None: if is_mariadb is None: return @@ -2740,38 +2908,54 @@ def _set_mariadb(self, is_mariadb, server_version_info): self.is_mariadb = is_mariadb - def do_begin_twophase(self, connection, xid): + def do_begin_twophase(self, connection: Connection, xid: Any) -> None: connection.execute(sql.text("XA BEGIN :xid"), dict(xid=xid)) - def do_prepare_twophase(self, connection, xid): + def do_prepare_twophase(self, connection: Connection, xid: Any) -> None: connection.execute(sql.text("XA END :xid"), dict(xid=xid)) connection.execute(sql.text("XA PREPARE :xid"), dict(xid=xid)) def do_rollback_twophase( - self, connection, xid, is_prepared=True, recover=False - ): + self, + connection: Connection, + xid: Any, + is_prepared: bool = True, + recover: bool = False, + ) -> None: if not is_prepared: connection.execute(sql.text("XA END :xid"), dict(xid=xid)) connection.execute(sql.text("XA ROLLBACK :xid"), dict(xid=xid)) def do_commit_twophase( - self, connection, xid, is_prepared=True, recover=False - ): + self, + connection: Connection, + xid: Any, + is_prepared: bool = True, + recover: bool = False, + ) -> None: if not is_prepared: self.do_prepare_twophase(connection, xid) connection.execute(sql.text("XA COMMIT :xid"), dict(xid=xid)) - def do_recover_twophase(self, connection): + def do_recover_twophase(self, connection: Connection) -> List[Any]: resultset = connection.exec_driver_sql("XA RECOVER") - return [row["data"][0 : row["gtrid_length"]] for row in resultset] + return [ + row["data"][0 : row["gtrid_length"]] + for row in resultset.mappings() + ] - def is_disconnect(self, e, connection, cursor): + def is_disconnect( + self, + e: DBAPIModule.Error, + connection: Optional[Union[PoolProxiedConnection, DBAPIConnection]], + cursor: Optional[DBAPICursor], + ) -> bool: if isinstance( e, ( - 
self.dbapi.OperationalError, - self.dbapi.ProgrammingError, - self.dbapi.InterfaceError, + self.dbapi.OperationalError, # type: ignore + self.dbapi.ProgrammingError, # type: ignore + self.dbapi.InterfaceError, # type: ignore ), ) and self._extract_error_code(e) in ( 1927, @@ -2784,7 +2968,7 @@ def is_disconnect(self, e, connection, cursor): ): return True elif isinstance( - e, (self.dbapi.InterfaceError, self.dbapi.InternalError) + e, (self.dbapi.InterfaceError, self.dbapi.InternalError) # type: ignore # noqa: E501 ): # if underlying connection is closed, # this is the error you get @@ -2792,13 +2976,17 @@ def is_disconnect(self, e, connection, cursor): else: return False - def _compat_fetchall(self, rp, charset=None): + def _compat_fetchall( + self, rp: CursorResult[Any], charset: Optional[str] = None + ) -> Union[Sequence[Row[Any]], Sequence[_DecodingRow]]: """Proxy result rows to smooth over MySQL-Python driver inconsistencies.""" return [_DecodingRow(row, charset) for row in rp.fetchall()] - def _compat_fetchone(self, rp, charset=None): + def _compat_fetchone( + self, rp: CursorResult[Any], charset: Optional[str] = None + ) -> Union[Row[Any], None, _DecodingRow]: """Proxy a result row to smooth over MySQL-Python driver inconsistencies.""" @@ -2808,7 +2996,9 @@ def _compat_fetchone(self, rp, charset=None): else: return None - def _compat_first(self, rp, charset=None): + def _compat_first( + self, rp: CursorResult[Any], charset: Optional[str] = None + ) -> Optional[_DecodingRow]: """Proxy a result row to smooth over MySQL-Python driver inconsistencies.""" @@ -2818,14 +3008,22 @@ def _compat_first(self, rp, charset=None): else: return None - def _extract_error_code(self, exception): + def _extract_error_code( + self, exception: DBAPIModule.Error + ) -> Optional[int]: raise NotImplementedError() - def _get_default_schema_name(self, connection): - return connection.exec_driver_sql("SELECT DATABASE()").scalar() + def _get_default_schema_name(self, connection: Connection) -> str: + return connection.exec_driver_sql("SELECT DATABASE()").scalar() # type: ignore[return-value] # noqa: E501 @reflection.cache - def has_table(self, connection, table_name, schema=None, **kw): + def has_table( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> bool: self._ensure_has_table_connection(connection) if schema is None: @@ -2866,12 +3064,18 @@ def has_table(self, connection, table_name, schema=None, **kw): # # there's more "doesn't exist" kinds of messages but they are # less clear if mysql 8 would suddenly start using one of those - if self._extract_error_code(e.orig) in (1146, 1049, 1051): + if self._extract_error_code(e.orig) in (1146, 1049, 1051): # type: ignore # noqa: E501 return False raise @reflection.cache - def has_sequence(self, connection, sequence_name, schema=None, **kw): + def has_sequence( + self, + connection: Connection, + sequence_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> bool: if not self.supports_sequences: self._sequences_not_supported() if not schema: @@ -2891,14 +3095,16 @@ def has_sequence(self, connection, sequence_name, schema=None, **kw): ) return cursor.first() is not None - def _sequences_not_supported(self): + def _sequences_not_supported(self) -> NoReturn: raise NotImplementedError( "Sequences are supported only by the " "MariaDB series 10.3 or greater" ) @reflection.cache - def get_sequence_names(self, connection, schema=None, **kw): + def get_sequence_names( + self, connection: Connection, schema: 
Optional[str] = None, **kw: Any + ) -> List[str]: if not self.supports_sequences: self._sequences_not_supported() if not schema: @@ -2918,10 +3124,12 @@ def get_sequence_names(self, connection, schema=None, **kw): ) ] - def initialize(self, connection): + def initialize(self, connection: Connection) -> None: # this is driver-based, does not need server version info # and is fairly critical for even basic SQL operations - self._connection_charset = self._detect_charset(connection) + self._connection_charset: Optional[str] = self._detect_charset( + connection + ) # call super().initialize() because we need to have # server_version_info set up. in 1.4 under python 2 only this does the @@ -2965,9 +3173,10 @@ def initialize(self, connection): self._warn_for_known_db_issues() - def _warn_for_known_db_issues(self): + def _warn_for_known_db_issues(self) -> None: if self.is_mariadb: mdb_version = self._mariadb_normalized_version_info + assert mdb_version is not None if mdb_version > (10, 2) and mdb_version < (10, 2, 9): util.warn( "MariaDB %r before 10.2.9 has known issues regarding " @@ -2980,7 +3189,7 @@ def _warn_for_known_db_issues(self): ) @property - def _support_float_cast(self): + def _support_float_cast(self) -> bool: if not self.server_version_info: return False elif self.is_mariadb: @@ -2991,7 +3200,7 @@ def _support_float_cast(self): return self.server_version_info >= (8, 0, 17) @property - def _support_default_function(self): + def _support_default_function(self) -> bool: if not self.server_version_info: return False elif self.is_mariadb: @@ -3002,32 +3211,38 @@ def _support_default_function(self): return self.server_version_info >= (8, 0, 13) @property - def _is_mariadb(self): + def _is_mariadb(self) -> bool: return self.is_mariadb @property - def _is_mysql(self): + def _is_mysql(self) -> bool: return not self.is_mariadb @property - def _is_mariadb_102(self): - return self.is_mariadb and self._mariadb_normalized_version_info > ( - 10, - 2, + def _is_mariadb_102(self) -> bool: + return ( + self.is_mariadb + and self._mariadb_normalized_version_info # type:ignore[operator] + > ( + 10, + 2, + ) ) @reflection.cache - def get_schema_names(self, connection, **kw): + def get_schema_names(self, connection: Connection, **kw: Any) -> List[str]: rp = connection.exec_driver_sql("SHOW schemas") return [r[0] for r in rp] @reflection.cache - def get_table_names(self, connection, schema=None, **kw): + def get_table_names( + self, connection: Connection, schema: Optional[str] = None, **kw: Any + ) -> List[str]: """Return a Unicode SHOW TABLES from a given schema.""" if schema is not None: - current_schema = schema + current_schema: str = schema else: - current_schema = self.default_schema_name + current_schema = self.default_schema_name # type: ignore charset = self._connection_charset @@ -3043,9 +3258,12 @@ def get_table_names(self, connection, schema=None, **kw): ] @reflection.cache - def get_view_names(self, connection, schema=None, **kw): + def get_view_names( + self, connection: Connection, schema: Optional[str] = None, **kw: Any + ) -> List[str]: if schema is None: schema = self.default_schema_name + assert schema is not None charset = self._connection_charset rp = connection.exec_driver_sql( "SHOW FULL TABLES FROM %s" @@ -3058,7 +3276,13 @@ def get_view_names(self, connection, schema=None, **kw): ] @reflection.cache - def get_table_options(self, connection, table_name, schema=None, **kw): + def get_table_options( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = 
None, + **kw: Any, + ) -> Dict[str, Any]: parsed_state = self._parsed_state_or_create( connection, table_name, schema, **kw ) @@ -3068,7 +3292,13 @@ def get_table_options(self, connection, table_name, schema=None, **kw): return ReflectionDefaults.table_options() @reflection.cache - def get_columns(self, connection, table_name, schema=None, **kw): + def get_columns( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> List[ReflectedColumn]: parsed_state = self._parsed_state_or_create( connection, table_name, schema, **kw ) @@ -3078,7 +3308,13 @@ def get_columns(self, connection, table_name, schema=None, **kw): return ReflectionDefaults.columns() @reflection.cache - def get_pk_constraint(self, connection, table_name, schema=None, **kw): + def get_pk_constraint( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> ReflectedPrimaryKeyConstraint: parsed_state = self._parsed_state_or_create( connection, table_name, schema, **kw ) @@ -3090,13 +3326,19 @@ def get_pk_constraint(self, connection, table_name, schema=None, **kw): return ReflectionDefaults.pk_constraint() @reflection.cache - def get_foreign_keys(self, connection, table_name, schema=None, **kw): + def get_foreign_keys( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> List[ReflectedForeignKeyConstraint]: parsed_state = self._parsed_state_or_create( connection, table_name, schema, **kw ) default_schema = None - fkeys = [] + fkeys: List[ReflectedForeignKeyConstraint] = [] for spec in parsed_state.fk_constraints: ref_name = spec["table"][-1] @@ -3116,7 +3358,7 @@ def get_foreign_keys(self, connection, table_name, schema=None, **kw): if spec.get(opt, False) not in ("NO ACTION", None): con_kw[opt] = spec[opt] - fkey_d = { + fkey_d: ReflectedForeignKeyConstraint = { "name": spec["name"], "constrained_columns": loc_names, "referred_schema": ref_schema, @@ -3131,7 +3373,11 @@ def get_foreign_keys(self, connection, table_name, schema=None, **kw): return fkeys if fkeys else ReflectionDefaults.foreign_keys() - def _correct_for_mysql_bugs_88718_96365(self, fkeys, connection): + def _correct_for_mysql_bugs_88718_96365( + self, + fkeys: List[ReflectedForeignKeyConstraint], + connection: Connection, + ) -> None: # Foreign key is always in lower case (MySQL 8.0) # https://bugs.mysql.com/bug.php?id=88718 # issue #4344 for SQLAlchemy @@ -3147,22 +3393,24 @@ def _correct_for_mysql_bugs_88718_96365(self, fkeys, connection): if self._casing in (1, 2): - def lower(s): + def lower(s: str) -> str: return s.lower() else: # if on case sensitive, there can be two tables referenced # with the same name different casing, so we need to use # case-sensitive matching. - def lower(s): + def lower(s: str) -> str: return s - default_schema_name = connection.dialect.default_schema_name + default_schema_name: str = connection.dialect.default_schema_name # type: ignore # noqa: E501 # NOTE: using (table_schema, table_name, lower(column_name)) in (...) # is very slow since mysql does not seem able to properly use indexse. # Unpack the where condition instead. 
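As a rough sketch of the query shape the NOTE above motivates (hypothetical schema/table/column data and names, not the dialect's actual information_schema statement), the per-table filters are ANDed individually and then ORed together instead of using one composite IN:

    from collections import defaultdict
    from sqlalchemy import and_, column, func, or_

    # hypothetical reflected foreign-key targets: schema -> table -> columns
    targets = defaultdict(lambda: defaultdict(list))
    targets["mydb"]["parent"] = ["id", "uid"]

    table_schema = column("table_schema")
    table_name = column("table_name")
    column_name = column("column_name")

    # one AND-clause per referred table, ORed together, rather than a single
    # (schema, table, lower(column)) IN (...) that MySQL serves poorly
    condition = or_(*[
        and_(
            table_schema == schema,
            table_name == tname,
            func.lower(column_name).in_(cols),
        )
        for schema, tables in targets.items()
        for tname, cols in tables.items()
    ])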
- schema_by_table_by_column = defaultdict(lambda: defaultdict(list)) + schema_by_table_by_column: DefaultDict[ + str, DefaultDict[str, List[str]] + ] = DefaultDict(lambda: DefaultDict(list)) for rec in fkeys: sch = lower(rec["referred_schema"] or default_schema_name) tbl = lower(rec["referred_table"]) @@ -3197,7 +3445,9 @@ def lower(s): _info_columns.c.column_name, ).where(condition) - correct_for_wrong_fk_case = connection.execute(select) + correct_for_wrong_fk_case: CursorResult[Tuple[str, str, str]] = ( + connection.execute(select) + ) # in casing=0, table name and schema name come back in their # exact case. @@ -3209,35 +3459,41 @@ def lower(s): # SHOW CREATE TABLE converts them to *lower case*, therefore # not matching. So for this case, case-insensitive lookup # is necessary - d = defaultdict(dict) + d: DefaultDict[Tuple[str, str], Dict[str, str]] = defaultdict(dict) for schema, tname, cname in correct_for_wrong_fk_case: d[(lower(schema), lower(tname))]["SCHEMANAME"] = schema d[(lower(schema), lower(tname))]["TABLENAME"] = tname d[(lower(schema), lower(tname))][cname.lower()] = cname for fkey in fkeys: - rec = d[ + rec_b = d[ ( lower(fkey["referred_schema"] or default_schema_name), lower(fkey["referred_table"]), ) ] - fkey["referred_table"] = rec["TABLENAME"] + fkey["referred_table"] = rec_b["TABLENAME"] if fkey["referred_schema"] is not None: - fkey["referred_schema"] = rec["SCHEMANAME"] + fkey["referred_schema"] = rec_b["SCHEMANAME"] fkey["referred_columns"] = [ - rec[col.lower()] for col in fkey["referred_columns"] + rec_b[col.lower()] for col in fkey["referred_columns"] ] @reflection.cache - def get_check_constraints(self, connection, table_name, schema=None, **kw): + def get_check_constraints( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> List[ReflectedCheckConstraint]: parsed_state = self._parsed_state_or_create( connection, table_name, schema, **kw ) - cks = [ + cks: List[ReflectedCheckConstraint] = [ {"name": spec["name"], "sqltext": spec["sqltext"]} for spec in parsed_state.ck_constraints ] @@ -3245,7 +3501,13 @@ def get_check_constraints(self, connection, table_name, schema=None, **kw): return cks if cks else ReflectionDefaults.check_constraints() @reflection.cache - def get_table_comment(self, connection, table_name, schema=None, **kw): + def get_table_comment( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> ReflectedTableComment: parsed_state = self._parsed_state_or_create( connection, table_name, schema, **kw ) @@ -3256,12 +3518,18 @@ def get_table_comment(self, connection, table_name, schema=None, **kw): return ReflectionDefaults.table_comment() @reflection.cache - def get_indexes(self, connection, table_name, schema=None, **kw): + def get_indexes( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> List[ReflectedIndex]: parsed_state = self._parsed_state_or_create( connection, table_name, schema, **kw ) - indexes = [] + indexes: List[ReflectedIndex] = [] for spec in parsed_state.keys: dialect_options = {} @@ -3273,32 +3541,30 @@ def get_indexes(self, connection, table_name, schema=None, **kw): unique = True elif flavor in ("FULLTEXT", "SPATIAL"): dialect_options["%s_prefix" % self.name] = flavor - elif flavor is None: - pass - else: - self.logger.info( + elif flavor is not None: + util.warn( "Converting unknown KEY type %s to a plain KEY", flavor ) - pass if spec["parser"]: 
dialect_options["%s_with_parser" % (self.name)] = spec[ "parser" ] - index_d = {} + index_d: ReflectedIndex = { + "name": spec["name"], + "column_names": [s[0] for s in spec["columns"]], + "unique": unique, + } - index_d["name"] = spec["name"] - index_d["column_names"] = [s[0] for s in spec["columns"]] mysql_length = { s[0]: s[1] for s in spec["columns"] if s[1] is not None } if mysql_length: dialect_options["%s_length" % self.name] = mysql_length - index_d["unique"] = unique if flavor: - index_d["type"] = flavor + index_d["type"] = flavor # type: ignore[typeddict-unknown-key] if dialect_options: index_d["dialect_options"] = dialect_options @@ -3309,13 +3575,17 @@ def get_indexes(self, connection, table_name, schema=None, **kw): @reflection.cache def get_unique_constraints( - self, connection, table_name, schema=None, **kw - ): + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> List[ReflectedUniqueConstraint]: parsed_state = self._parsed_state_or_create( connection, table_name, schema, **kw ) - ucs = [ + ucs: List[ReflectedUniqueConstraint] = [ { "name": key["name"], "column_names": [col[0] for col in key["columns"]], @@ -3331,7 +3601,13 @@ def get_unique_constraints( return ReflectionDefaults.unique_constraints() @reflection.cache - def get_view_definition(self, connection, view_name, schema=None, **kw): + def get_view_definition( + self, + connection: Connection, + view_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> str: charset = self._connection_charset full_name = ".".join( self.identifier_preparer._quote_free_identifiers(schema, view_name) @@ -3345,8 +3621,12 @@ def get_view_definition(self, connection, view_name, schema=None, **kw): return sql def _parsed_state_or_create( - self, connection, table_name, schema=None, **kw - ): + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> _reflection.ReflectedState: return self._setup_parser( connection, table_name, @@ -3355,7 +3635,7 @@ def _parsed_state_or_create( ) @util.memoized_property - def _tabledef_parser(self): + def _tabledef_parser(self) -> _reflection.MySQLTableDefinitionParser: """return the MySQLTableDefinitionParser, generate if needed. 
The deferred creation ensures that the dialect has @@ -3366,7 +3646,13 @@ def _tabledef_parser(self): return _reflection.MySQLTableDefinitionParser(self, preparer) @reflection.cache - def _setup_parser(self, connection, table_name, schema=None, **kw): + def _setup_parser( + self, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: Any, + ) -> _reflection.ReflectedState: charset = self._connection_charset parser = self._tabledef_parser full_name = ".".join( @@ -3382,10 +3668,14 @@ def _setup_parser(self, connection, table_name, schema=None, **kw): columns = self._describe_table( connection, None, charset, full_name=full_name ) - sql = parser._describe_to_create(table_name, columns) + sql = parser._describe_to_create( + table_name, columns # type: ignore[arg-type] + ) return parser.parse(sql, charset) - def _fetch_setting(self, connection, setting_name): + def _fetch_setting( + self, connection: Connection, setting_name: str + ) -> Optional[str]: charset = self._connection_charset if self.server_version_info and self.server_version_info < (5, 6): @@ -3400,12 +3690,12 @@ def _fetch_setting(self, connection, setting_name): if not row: return None else: - return row[fetch_col] + return cast("Optional[str]", row[fetch_col]) - def _detect_charset(self, connection): + def _detect_charset(self, connection: Connection) -> str: raise NotImplementedError() - def _detect_casing(self, connection): + def _detect_casing(self, connection: Connection) -> int: """Sniff out identifier case sensitivity. Cached per-connection. This value can not change without a server @@ -3429,7 +3719,7 @@ def _detect_casing(self, connection): self._casing = cs return cs - def _detect_collations(self, connection): + def _detect_collations(self, connection: Connection) -> Dict[str, str]: """Pull the active COLLATIONS list from the server. Cached per-connection. @@ -3442,7 +3732,7 @@ def _detect_collations(self, connection): collations[row[0]] = row[1] return collations - def _detect_sql_mode(self, connection): + def _detect_sql_mode(self, connection: Connection) -> None: setting = self._fetch_setting(connection, "sql_mode") if setting is None: @@ -3454,7 +3744,7 @@ def _detect_sql_mode(self, connection): else: self._sql_mode = setting or "" - def _detect_ansiquotes(self, connection): + def _detect_ansiquotes(self, connection: Connection) -> None: """Detect and adjust for the ANSI_QUOTES sql mode.""" mode = self._sql_mode @@ -3469,12 +3759,35 @@ def _detect_ansiquotes(self, connection): # as of MySQL 5.0.1 self._backslash_escapes = "NO_BACKSLASH_ESCAPES" not in mode + @overload def _show_create_table( - self, connection, table, charset=None, full_name=None - ): + self, + connection: Connection, + table: Optional[Table], + charset: Optional[str], + full_name: str, + ) -> str: ... + + @overload + def _show_create_table( + self, + connection: Connection, + table: Table, + charset: Optional[str] = None, + full_name: None = None, + ) -> str: ... 
+ + def _show_create_table( + self, + connection: Connection, + table: Optional[Table], + charset: Optional[str] = None, + full_name: Optional[str] = None, + ) -> str: """Run SHOW CREATE TABLE for a ``Table``.""" if full_name is None: + assert table is not None full_name = self.identifier_preparer.format_table(table) st = "SHOW CREATE TABLE %s" % full_name @@ -3483,19 +3796,44 @@ def _show_create_table( skip_user_error_events=True ).exec_driver_sql(st) except exc.DBAPIError as e: - if self._extract_error_code(e.orig) == 1146: + if self._extract_error_code(e.orig) == 1146: # type: ignore[arg-type] # noqa: E501 raise exc.NoSuchTableError(full_name) from e else: raise row = self._compat_first(rp, charset=charset) if not row: raise exc.NoSuchTableError(full_name) - return row[1].strip() + return cast("str", row[1]).strip() + + @overload + def _describe_table( + self, + connection: Connection, + table: Optional[Table], + charset: Optional[str], + full_name: str, + ) -> Union[Sequence[Row[Any]], Sequence[_DecodingRow]]: ... + + @overload + def _describe_table( + self, + connection: Connection, + table: Table, + charset: Optional[str] = None, + full_name: None = None, + ) -> Union[Sequence[Row[Any]], Sequence[_DecodingRow]]: ... - def _describe_table(self, connection, table, charset=None, full_name=None): + def _describe_table( + self, + connection: Connection, + table: Optional[Table], + charset: Optional[str] = None, + full_name: Optional[str] = None, + ) -> Union[Sequence[Row[Any]], Sequence[_DecodingRow]]: """Run DESCRIBE for a ``Table`` and return processed rows.""" if full_name is None: + assert table is not None full_name = self.identifier_preparer.format_table(table) st = "DESCRIBE %s" % full_name @@ -3506,7 +3844,7 @@ def _describe_table(self, connection, table, charset=None, full_name=None): skip_user_error_events=True ).exec_driver_sql(st) except exc.DBAPIError as e: - code = self._extract_error_code(e.orig) + code = self._extract_error_code(e.orig) # type: ignore[arg-type] # noqa: E501 if code == 1146: raise exc.NoSuchTableError(full_name) from e @@ -3538,7 +3876,7 @@ class _DecodingRow: # sets.Set(['value']) (seriously) but thankfully that doesn't # seem to come up in DDL queries. 
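A rough standalone sketch of the charset-translation-and-decode step this proxy performs per value (a hypothetical helper with an abbreviated charset map, not the class itself):

    _MYSQL_TO_PYTHON_CODEC = {"koi8r": "koi8_r", "koi8u": "koi8_u", "utf16": "utf-16-be"}

    def decode_value(value, mysql_charset):
        # map the MySQL charset name to a Python codec and decode bytes;
        # anything that is not bytes passes through unchanged
        if mysql_charset is None or not isinstance(value, bytes):
            return value
        codec = _MYSQL_TO_PYTHON_CODEC.get(mysql_charset, mysql_charset)
        return value.decode(codec)

    print(decode_value(b"caf\xc3\xa9", "utf8"))   # -> 'café'
    print(decode_value(42, "utf8"))               # -> 42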
- _encoding_compat = { + _encoding_compat: Dict[str, str] = { "koi8r": "koi8_r", "koi8u": "koi8_u", "utf16": "utf-16-be", # MySQL's uft16 is always bigendian @@ -3548,24 +3886,23 @@ class _DecodingRow: "eucjpms": "ujis", } - def __init__(self, rowproxy, charset): + def __init__(self, rowproxy: Row[Any], charset: Optional[str]): self.rowproxy = rowproxy - self.charset = self._encoding_compat.get(charset, charset) + self.charset = ( + self._encoding_compat.get(charset, charset) + if charset is not None + else None + ) - def __getitem__(self, index): + def __getitem__(self, index: int) -> Any: item = self.rowproxy[index] - if isinstance(item, _array): - item = item.tostring() - if self.charset and isinstance(item, bytes): return item.decode(self.charset) else: return item - def __getattr__(self, attr): + def __getattr__(self, attr: str) -> Any: item = getattr(self.rowproxy, attr) - if isinstance(item, _array): - item = item.tostring() if self.charset and isinstance(item, bytes): return item.decode(self.charset) else: diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py index 5c00ada9f94..1d48c4e88bc 100644 --- a/lib/sqlalchemy/dialects/mysql/cymysql.py +++ b/lib/sqlalchemy/dialects/mysql/cymysql.py @@ -4,7 +4,6 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors r""" @@ -21,18 +20,36 @@ dialects are mysqlclient and PyMySQL. """ # noqa +from __future__ import annotations + +from typing import Any +from typing import Iterable +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union -from .base import BIT from .base import MySQLDialect from .mysqldb import MySQLDialect_mysqldb +from .types import BIT from ... 
import util +if TYPE_CHECKING: + from ...engine.base import Connection + from ...engine.interfaces import DBAPIConnection + from ...engine.interfaces import DBAPICursor + from ...engine.interfaces import DBAPIModule + from ...engine.interfaces import Dialect + from ...engine.interfaces import PoolProxiedConnection + from ...sql.type_api import _ResultProcessorType + class _cymysqlBIT(BIT): - def result_processor(self, dialect, coltype): + def result_processor( + self, dialect: Dialect, coltype: object + ) -> Optional[_ResultProcessorType[Any]]: """Convert MySQL's 64 bit, variable length binary string to a long.""" - def process(value): + def process(value: Optional[Iterable[int]]) -> Optional[int]: if value is not None: v = 0 for i in iter(value): @@ -55,17 +72,22 @@ class MySQLDialect_cymysql(MySQLDialect_mysqldb): colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _cymysqlBIT}) @classmethod - def import_dbapi(cls): + def import_dbapi(cls) -> DBAPIModule: return __import__("cymysql") - def _detect_charset(self, connection): - return connection.connection.charset + def _detect_charset(self, connection: Connection) -> str: + return connection.connection.charset # type: ignore[no-any-return] - def _extract_error_code(self, exception): - return exception.errno + def _extract_error_code(self, exception: DBAPIModule.Error) -> int: + return exception.errno # type: ignore[no-any-return] - def is_disconnect(self, e, connection, cursor): - if isinstance(e, self.dbapi.OperationalError): + def is_disconnect( + self, + e: DBAPIModule.Error, + connection: Optional[Union[PoolProxiedConnection, DBAPIConnection]], + cursor: Optional[DBAPICursor], + ) -> bool: + if isinstance(e, self.loaded_dbapi.OperationalError): return self._extract_error_code(e) in ( 2006, 2013, @@ -73,7 +95,7 @@ def is_disconnect(self, e, connection, cursor): 2045, 2055, ) - elif isinstance(e, self.dbapi.InterfaceError): + elif isinstance(e, self.loaded_dbapi.InterfaceError): # if underlying connection is closed, # this is the error you get return True diff --git a/lib/sqlalchemy/dialects/mysql/enumerated.py b/lib/sqlalchemy/dialects/mysql/enumerated.py index 6745cae55e7..ab305207cc6 100644 --- a/lib/sqlalchemy/dialects/mysql/enumerated.py +++ b/lib/sqlalchemy/dialects/mysql/enumerated.py @@ -4,26 +4,43 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors +from __future__ import annotations +import enum import re +from typing import Any +from typing import Dict +from typing import Optional +from typing import Set +from typing import Type +from typing import TYPE_CHECKING +from typing import Union from .types import _StringType from ... import exc from ... import sql from ... import util from ...sql import sqltypes +from ...sql import type_api +if TYPE_CHECKING: + from ...engine.interfaces import Dialect + from ...sql.elements import ColumnElement + from ...sql.type_api import _BindProcessorType + from ...sql.type_api import _ResultProcessorType + from ...sql.type_api import TypeEngine + from ...sql.type_api import TypeEngineMixin -class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum, _StringType): + +class ENUM(type_api.NativeForEmulated, sqltypes.Enum, _StringType): """MySQL ENUM type.""" __visit_name__ = "ENUM" native_enum = True - def __init__(self, *enums, **kw): + def __init__(self, *enums: Union[str, Type[enum.Enum]], **kw: Any) -> None: """Construct an ENUM. 
E.g.:: @@ -62,21 +79,27 @@ def __init__(self, *enums, **kw): """ kw.pop("strict", None) - self._enum_init(enums, kw) + self._enum_init(enums, kw) # type: ignore[arg-type] _StringType.__init__(self, length=self.length, **kw) @classmethod - def adapt_emulated_to_native(cls, impl, **kw): + def adapt_emulated_to_native( + cls, + impl: Union[TypeEngine[Any], TypeEngineMixin], + **kw: Any, + ) -> ENUM: """Produce a MySQL native :class:`.mysql.ENUM` from plain :class:`.Enum`. """ + if TYPE_CHECKING: + assert isinstance(impl, ENUM) kw.setdefault("validate_strings", impl.validate_strings) kw.setdefault("values_callable", impl.values_callable) kw.setdefault("omit_aliases", impl._omit_aliases) return cls(**kw) - def _object_value_for_elem(self, elem): + def _object_value_for_elem(self, elem: str) -> Union[str, enum.Enum]: # mysql sends back a blank string for any value that # was persisted that was not in the enums; that is, it does no # validation on the incoming data, it "truncates" it to be @@ -86,18 +109,22 @@ def _object_value_for_elem(self, elem): else: return super()._object_value_for_elem(elem) - def __repr__(self): + def __repr__(self) -> str: return util.generic_repr( self, to_inspect=[ENUM, _StringType, sqltypes.Enum] ) +# TODO: SET is a string as far as configuration but does not act like +# a string at the python level. We either need to make a py-type agnostic +# version of String as a base to be used for this, make this some kind of +# TypeDecorator, or just vendor it out as its own type. class SET(_StringType): """MySQL SET type.""" __visit_name__ = "SET" - def __init__(self, *values, **kw): + def __init__(self, *values: str, **kw: Any): """Construct a SET. E.g.:: @@ -150,17 +177,19 @@ def __init__(self, *values, **kw): "setting retrieve_as_bitwise=True" ) if self.retrieve_as_bitwise: - self._bitmap = { + self._inversed_bitmap: Dict[str, int] = { value: 2**idx for idx, value in enumerate(self.values) } - self._bitmap.update( - (2**idx, value) for idx, value in enumerate(self.values) - ) + self._bitmap: Dict[int, str] = { + 2**idx: value for idx, value in enumerate(self.values) + } length = max([len(v) for v in values] + [0]) kw.setdefault("length", length) super().__init__(**kw) - def column_expression(self, colexpr): + def column_expression( + self, colexpr: ColumnElement[Any] + ) -> ColumnElement[Any]: if self.retrieve_as_bitwise: return sql.type_coerce( sql.type_coerce(colexpr, sqltypes.Integer) + 0, self @@ -168,10 +197,12 @@ def column_expression(self, colexpr): else: return colexpr - def result_processor(self, dialect, coltype): + def result_processor( + self, dialect: Dialect, coltype: Any + ) -> Optional[_ResultProcessorType[Any]]: if self.retrieve_as_bitwise: - def process(value): + def process(value: Union[str, int, None]) -> Optional[Set[str]]: if value is not None: value = int(value) @@ -182,11 +213,14 @@ def process(value): else: super_convert = super().result_processor(dialect, coltype) - def process(value): + def process(value: Union[str, Set[str], None]) -> Optional[Set[str]]: # type: ignore[misc] # noqa: E501 if isinstance(value, str): # MySQLdb returns a string, let's parse if super_convert: value = super_convert(value) + assert value is not None + if TYPE_CHECKING: + assert isinstance(value, str) return set(re.findall(r"[^,]+", value)) else: # mysql-connector-python does a naive @@ -197,43 +231,48 @@ def process(value): return process - def bind_processor(self, dialect): + def bind_processor( + self, dialect: Dialect + ) -> _BindProcessorType[Union[str, int]]: 
super_convert = super().bind_processor(dialect) if self.retrieve_as_bitwise: - def process(value): + def process( + value: Union[str, int, set[str], None], + ) -> Union[str, int, None]: if value is None: return None elif isinstance(value, (int, str)): if super_convert: - return super_convert(value) + return super_convert(value) # type: ignore[arg-type, no-any-return] # noqa: E501 else: return value else: int_value = 0 for v in value: - int_value |= self._bitmap[v] + int_value |= self._inversed_bitmap[v] return int_value else: - def process(value): + def process( + value: Union[str, int, set[str], None], + ) -> Union[str, int, None]: # accept strings and int (actually bitflag) values directly if value is not None and not isinstance(value, (int, str)): value = ",".join(value) - if super_convert: - return super_convert(value) + return super_convert(value) # type: ignore else: return value return process - def adapt(self, impltype, **kw): + def adapt(self, cls: type, **kw: Any) -> Any: kw["retrieve_as_bitwise"] = self.retrieve_as_bitwise - return util.constructor_copy(self, impltype, *self.values, **kw) + return util.constructor_copy(self, cls, *self.values, **kw) - def __repr__(self): + def __repr__(self) -> str: return util.generic_repr( self, to_inspect=[SET, _StringType], diff --git a/lib/sqlalchemy/dialects/mysql/expression.py b/lib/sqlalchemy/dialects/mysql/expression.py index b60a0888517..9d19d52de5e 100644 --- a/lib/sqlalchemy/dialects/mysql/expression.py +++ b/lib/sqlalchemy/dialects/mysql/expression.py @@ -4,8 +4,10 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors +from __future__ import annotations + +from typing import Any from ... import exc from ... import util @@ -18,7 +20,7 @@ from ...util.typing import Self -class match(Generative, elements.BinaryExpression): +class match(Generative, elements.BinaryExpression[Any]): """Produce a ``MATCH (X, Y) AGAINST ('TEXT')`` clause. E.g.:: @@ -73,8 +75,9 @@ class match(Generative, elements.BinaryExpression): __visit_name__ = "mysql_match" inherit_cache = True + modifiers: util.immutabledict[str, Any] - def __init__(self, *cols, **kw): + def __init__(self, *cols: elements.ColumnElement[Any], **kw: Any): if not cols: raise exc.ArgumentError("columns are required") diff --git a/lib/sqlalchemy/dialects/mysql/json.py b/lib/sqlalchemy/dialects/mysql/json.py index 8912af36631..e654a61941d 100644 --- a/lib/sqlalchemy/dialects/mysql/json.py +++ b/lib/sqlalchemy/dialects/mysql/json.py @@ -4,10 +4,18 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors +from __future__ import annotations + +from typing import Any +from typing import TYPE_CHECKING from ... import types as sqltypes +if TYPE_CHECKING: + from ...engine.interfaces import Dialect + from ...sql.type_api import _BindProcessorType + from ...sql.type_api import _LiteralProcessorType + class JSON(sqltypes.JSON): """MySQL JSON type. 
@@ -34,13 +42,13 @@ class JSON(sqltypes.JSON): class _FormatTypeMixin: - def _format_value(self, value): + def _format_value(self, value: Any) -> str: raise NotImplementedError() - def bind_processor(self, dialect): - super_proc = self.string_bind_processor(dialect) + def bind_processor(self, dialect: Dialect) -> _BindProcessorType[Any]: + super_proc = self.string_bind_processor(dialect) # type: ignore[attr-defined] # noqa: E501 - def process(value): + def process(value: Any) -> Any: value = self._format_value(value) if super_proc: value = super_proc(value) @@ -48,29 +56,31 @@ def process(value): return process - def literal_processor(self, dialect): - super_proc = self.string_literal_processor(dialect) + def literal_processor( + self, dialect: Dialect + ) -> _LiteralProcessorType[Any]: + super_proc = self.string_literal_processor(dialect) # type: ignore[attr-defined] # noqa: E501 - def process(value): + def process(value: Any) -> str: value = self._format_value(value) if super_proc: value = super_proc(value) - return value + return value # type: ignore[no-any-return] return process class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType): - def _format_value(self, value): + def _format_value(self, value: Any) -> str: if isinstance(value, int): - value = "$[%s]" % value + formatted_value = "$[%s]" % value else: - value = '$."%s"' % value - return value + formatted_value = '$."%s"' % value + return formatted_value class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType): - def _format_value(self, value): + def _format_value(self, value: Any) -> str: return "$%s" % ( "".join( [ diff --git a/lib/sqlalchemy/dialects/mysql/mariadb.py b/lib/sqlalchemy/dialects/mysql/mariadb.py index b84dee37a7b..508820e67ce 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadb.py +++ b/lib/sqlalchemy/dialects/mysql/mariadb.py @@ -4,9 +4,15 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors + +from __future__ import annotations + +from typing import Any +from typing import Callable + from .base import MariaDBIdentifierPreparer from .base import MySQLDialect +from .base import MySQLIdentifierPreparer from .base import MySQLTypeCompiler from ...sql import sqltypes @@ -30,10 +36,10 @@ class INET6(sqltypes.TypeEngine[str]): class MariaDBTypeCompiler(MySQLTypeCompiler): - def visit_INET4(self, type_, **kwargs) -> str: + def visit_INET4(self, type_: INET4, **kwargs: Any) -> str: return "INET4" - def visit_INET6(self, type_, **kwargs) -> str: + def visit_INET6(self, type_: INET6, **kwargs: Any) -> str: return "INET6" @@ -41,11 +47,11 @@ class MariaDBDialect(MySQLDialect): is_mariadb = True supports_statement_cache = True name = "mariadb" - preparer = MariaDBIdentifierPreparer + preparer: type[MySQLIdentifierPreparer] = MariaDBIdentifierPreparer type_compiler_cls = MariaDBTypeCompiler -def loader(driver): +def loader(driver: str) -> Callable[[], type[MariaDBDialect]]: dialect_mod = __import__( "sqlalchemy.dialects.mysql.%s" % driver ).dialects.mysql @@ -53,7 +59,7 @@ def loader(driver): driver_mod = getattr(dialect_mod, driver) if hasattr(driver_mod, "mariadb_dialect"): driver_cls = driver_mod.mariadb_dialect - return driver_cls + return driver_cls # type: ignore[no-any-return] else: driver_cls = driver_mod.dialect diff --git a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py index 2d2ad199710..c6bb58a8d93 100644 --- 
a/lib/sqlalchemy/dialects/mysql/mariadbconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mariadbconnector.py @@ -4,8 +4,6 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors - """ @@ -29,7 +27,15 @@ .. mariadb: https://github.com/mariadb-corporation/mariadb-connector-python """ # noqa +from __future__ import annotations + import re +from typing import Any +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union from uuid import UUID as _python_UUID from .base import MySQLCompiler @@ -39,6 +45,19 @@ from ... import util from ...sql import sqltypes +if TYPE_CHECKING: + from ...engine.base import Connection + from ...engine.interfaces import ConnectArgsType + from ...engine.interfaces import DBAPIConnection + from ...engine.interfaces import DBAPICursor + from ...engine.interfaces import DBAPIModule + from ...engine.interfaces import Dialect + from ...engine.interfaces import IsolationLevel + from ...engine.interfaces import PoolProxiedConnection + from ...engine.url import URL + from ...sql.compiler import SQLCompiler + from ...sql.type_api import _ResultProcessorType + mariadb_cpy_minimum_version = (1, 0, 1) @@ -47,10 +66,12 @@ class _MariaDBUUID(sqltypes.UUID[sqltypes._UUID_RETURN]): # work around JIRA issue # https://jira.mariadb.org/browse/CONPY-270. When that issue is fixed, # this type can be removed. - def result_processor(self, dialect, coltype): + def result_processor( + self, dialect: Dialect, coltype: object + ) -> Optional[_ResultProcessorType[Any]]: if self.as_uuid: - def process(value): + def process(value: Any) -> Any: if value is not None: if hasattr(value, "decode"): value = value.decode("ascii") @@ -60,7 +81,7 @@ def process(value): return process else: - def process(value): + def process(value: Any) -> Any: if value is not None: if hasattr(value, "decode"): value = value.decode("ascii") @@ -71,23 +92,27 @@ def process(value): class MySQLExecutionContext_mariadbconnector(MySQLExecutionContext): - _lastrowid = None + _lastrowid: Optional[int] = None - def create_server_side_cursor(self): + def create_server_side_cursor(self) -> DBAPICursor: return self._dbapi_connection.cursor(buffered=False) - def create_default_cursor(self): + def create_default_cursor(self) -> DBAPICursor: return self._dbapi_connection.cursor(buffered=True) - def post_exec(self): + def post_exec(self) -> None: super().post_exec() self._rowcount = self.cursor.rowcount + if TYPE_CHECKING: + assert isinstance(self.compiled, SQLCompiler) if self.isinsert and self.compiled.postfetch_lastrowid: self._lastrowid = self.cursor.lastrowid - def get_lastrowid(self): + def get_lastrowid(self) -> int: + if TYPE_CHECKING: + assert self._lastrowid is not None return self._lastrowid @@ -126,7 +151,7 @@ class MySQLDialect_mariadbconnector(MySQLDialect): ) @util.memoized_property - def _dbapi_version(self): + def _dbapi_version(self) -> Tuple[int, ...]: if self.dbapi and hasattr(self.dbapi, "__version__"): return tuple( [ @@ -139,7 +164,7 @@ def _dbapi_version(self): else: return (99, 99, 99) - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: super().__init__(**kwargs) self.paramstyle = "qmark" if self.dbapi is not None: @@ -151,19 +176,24 @@ def __init__(self, **kwargs): ) @classmethod - def import_dbapi(cls): + def import_dbapi(cls) -> DBAPIModule: return __import__("mariadb") - def is_disconnect(self, e, 
connection, cursor): + def is_disconnect( + self, + e: DBAPIModule.Error, + connection: Optional[Union[PoolProxiedConnection, DBAPIConnection]], + cursor: Optional[DBAPICursor], + ) -> bool: if super().is_disconnect(e, connection, cursor): return True - elif isinstance(e, self.dbapi.Error): + elif isinstance(e, self.loaded_dbapi.Error): str_e = str(e).lower() return "not connected" in str_e or "isn't valid" in str_e else: return False - def create_connect_args(self, url): + def create_connect_args(self, url: URL) -> ConnectArgsType: opts = url.translate_connect_args() opts.update(url.query) @@ -200,19 +230,21 @@ def create_connect_args(self, url): except (AttributeError, ImportError): self.supports_sane_rowcount = False opts["client_flag"] = client_flag - return [[], opts] + return [], opts - def _extract_error_code(self, exception): + def _extract_error_code(self, exception: DBAPIModule.Error) -> int: try: - rc = exception.errno + rc: int = exception.errno except: rc = -1 return rc - def _detect_charset(self, connection): + def _detect_charset(self, connection: Connection) -> str: return "utf8mb4" - def get_isolation_level_values(self, dbapi_connection): + def get_isolation_level_values( + self, dbapi_conn: DBAPIConnection + ) -> Sequence[IsolationLevel]: return ( "SERIALIZABLE", "READ UNCOMMITTED", @@ -221,21 +253,23 @@ def get_isolation_level_values(self, dbapi_connection): "AUTOCOMMIT", ) - def set_isolation_level(self, connection, level): + def set_isolation_level( + self, dbapi_connection: DBAPIConnection, level: IsolationLevel + ) -> None: if level == "AUTOCOMMIT": - connection.autocommit = True + dbapi_connection.autocommit = True else: - connection.autocommit = False - super().set_isolation_level(connection, level) + dbapi_connection.autocommit = False + super().set_isolation_level(dbapi_connection, level) - def do_begin_twophase(self, connection, xid): + def do_begin_twophase(self, connection: Connection, xid: Any) -> None: connection.execute( sql.text("XA BEGIN :xid").bindparams( sql.bindparam("xid", xid, literal_execute=True) ) ) - def do_prepare_twophase(self, connection, xid): + def do_prepare_twophase(self, connection: Connection, xid: Any) -> None: connection.execute( sql.text("XA END :xid").bindparams( sql.bindparam("xid", xid, literal_execute=True) @@ -248,8 +282,12 @@ def do_prepare_twophase(self, connection, xid): ) def do_rollback_twophase( - self, connection, xid, is_prepared=True, recover=False - ): + self, + connection: Connection, + xid: Any, + is_prepared: bool = True, + recover: bool = False, + ) -> None: if not is_prepared: connection.execute( sql.text("XA END :xid").bindparams( @@ -263,8 +301,12 @@ def do_rollback_twophase( ) def do_commit_twophase( - self, connection, xid, is_prepared=True, recover=False - ): + self, + connection: Connection, + xid: Any, + is_prepared: bool = True, + recover: bool = False, + ) -> None: if not is_prepared: self.do_prepare_twophase(connection, xid) connection.execute( diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index faeae16abd5..ad2e4856e1d 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -4,7 +4,6 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors r""" @@ -46,29 +45,53 @@ """ # noqa +from __future__ import annotations import re +from typing import Any +from typing import cast +from typing 
import Optional +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union -from .base import BIT from .base import MariaDBIdentifierPreparer from .base import MySQLCompiler from .base import MySQLDialect from .base import MySQLExecutionContext from .base import MySQLIdentifierPreparer from .mariadb import MariaDBDialect +from .types import BIT from ... import util +if TYPE_CHECKING: + + from ...engine.base import Connection + from ...engine.cursor import CursorResult + from ...engine.interfaces import ConnectArgsType + from ...engine.interfaces import DBAPIConnection + from ...engine.interfaces import DBAPICursor + from ...engine.interfaces import DBAPIModule + from ...engine.interfaces import IsolationLevel + from ...engine.interfaces import PoolProxiedConnection + from ...engine.row import Row + from ...engine.url import URL + from ...sql.elements import BinaryExpression + class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext): - def create_server_side_cursor(self): + def create_server_side_cursor(self) -> DBAPICursor: return self._dbapi_connection.cursor(buffered=False) - def create_default_cursor(self): + def create_default_cursor(self) -> DBAPICursor: return self._dbapi_connection.cursor(buffered=True) class MySQLCompiler_mysqlconnector(MySQLCompiler): - def visit_mod_binary(self, binary, operator, **kw): + def visit_mod_binary( + self, binary: BinaryExpression[Any], operator: Any, **kw: Any + ) -> str: return ( self.process(binary.left, **kw) + " % " @@ -78,32 +101,35 @@ def visit_mod_binary(self, binary, operator, **kw): class IdentifierPreparerCommon_mysqlconnector: @property - def _double_percents(self): + def _double_percents(self) -> bool: return False @_double_percents.setter - def _double_percents(self, value): + def _double_percents(self, value: Any) -> None: pass - def _escape_identifier(self, value): - value = value.replace(self.escape_quote, self.escape_to_quote) + def _escape_identifier(self, value: str) -> str: + value = value.replace( + self.escape_quote, # type:ignore[attr-defined] + self.escape_to_quote, # type:ignore[attr-defined] + ) return value -class MySQLIdentifierPreparer_mysqlconnector( +class MySQLIdentifierPreparer_mysqlconnector( # type:ignore[misc] IdentifierPreparerCommon_mysqlconnector, MySQLIdentifierPreparer ): pass -class MariaDBIdentifierPreparer_mysqlconnector( +class MariaDBIdentifierPreparer_mysqlconnector( # type:ignore[misc] IdentifierPreparerCommon_mysqlconnector, MariaDBIdentifierPreparer ): pass class _myconnpyBIT(BIT): - def result_processor(self, dialect, coltype): + def result_processor(self, dialect: Any, coltype: Any) -> None: """MySQL-connector already converts mysql bits, so.""" return None @@ -128,21 +154,21 @@ class MySQLDialect_mysqlconnector(MySQLDialect): execution_ctx_cls = MySQLExecutionContext_mysqlconnector - preparer = MySQLIdentifierPreparer_mysqlconnector + preparer: type[MySQLIdentifierPreparer] = ( + MySQLIdentifierPreparer_mysqlconnector + ) colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _myconnpyBIT}) @classmethod - def import_dbapi(cls): - from mysql import connector + def import_dbapi(cls) -> DBAPIModule: + return cast(DBAPIModule, __import__("mysql.connector").connector) - return connector - - def do_ping(self, dbapi_connection): + def do_ping(self, dbapi_connection: DBAPIConnection) -> bool: dbapi_connection.ping(False) return True - def create_connect_args(self, url): + def create_connect_args(self, url: URL) -> ConnectArgsType: opts = 
url.translate_connect_args(username="user") opts.update(url.query) @@ -177,7 +203,9 @@ def create_connect_args(self, url): # supports_sane_rowcount. if self.dbapi is not None: try: - from mysql.connector.constants import ClientFlag + from mysql.connector import constants # type: ignore + + ClientFlag = constants.ClientFlag client_flags = opts.get( "client_flags", ClientFlag.get_default() @@ -187,27 +215,33 @@ def create_connect_args(self, url): except Exception: pass - return [[], opts] + return [], opts @util.memoized_property - def _mysqlconnector_version_info(self): + def _mysqlconnector_version_info(self) -> Optional[Tuple[int, ...]]: if self.dbapi and hasattr(self.dbapi, "__version__"): m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", self.dbapi.__version__) if m: return tuple(int(x) for x in m.group(1, 2, 3) if x is not None) + return None - def _detect_charset(self, connection): - return connection.connection.charset + def _detect_charset(self, connection: Connection) -> str: + return connection.connection.charset # type: ignore - def _extract_error_code(self, exception): - return exception.errno + def _extract_error_code(self, exception: BaseException) -> int: + return exception.errno # type: ignore - def is_disconnect(self, e, connection, cursor): + def is_disconnect( + self, + e: Exception, + connection: Optional[Union[PoolProxiedConnection, DBAPIConnection]], + cursor: Optional[DBAPICursor], + ) -> bool: errnos = (2006, 2013, 2014, 2045, 2055, 2048) exceptions = ( - self.dbapi.OperationalError, - self.dbapi.InterfaceError, - self.dbapi.ProgrammingError, + self.loaded_dbapi.OperationalError, # + self.loaded_dbapi.InterfaceError, + self.loaded_dbapi.ProgrammingError, ) if isinstance(e, exceptions): return ( @@ -218,13 +252,23 @@ def is_disconnect(self, e, connection, cursor): else: return False - def _compat_fetchall(self, rp, charset=None): + def _compat_fetchall( + self, + rp: CursorResult[Tuple[Any, ...]], + charset: Optional[str] = None, + ) -> Sequence[Row[Tuple[Any, ...]]]: return rp.fetchall() - def _compat_fetchone(self, rp, charset=None): + def _compat_fetchone( + self, + rp: CursorResult[Tuple[Any, ...]], + charset: Optional[str] = None, + ) -> Optional[Row[Tuple[Any, ...]]]: return rp.fetchone() - def get_isolation_level_values(self, dbapi_connection): + def get_isolation_level_values( + self, dbapi_conn: DBAPIConnection + ) -> Sequence[IsolationLevel]: return ( "SERIALIZABLE", "READ UNCOMMITTED", @@ -233,12 +277,14 @@ def get_isolation_level_values(self, dbapi_connection): "AUTOCOMMIT", ) - def set_isolation_level(self, connection, level): + def set_isolation_level( + self, dbapi_connection: DBAPIConnection, level: IsolationLevel + ) -> None: if level == "AUTOCOMMIT": - connection.autocommit = True + dbapi_connection.autocommit = True else: - connection.autocommit = False - super().set_isolation_level(connection, level) + dbapi_connection.autocommit = False + super().set_isolation_level(dbapi_connection, level) class MariaDBDialect_mysqlconnector( diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py index 3cf56c1fd09..52adaefab76 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py @@ -4,8 +4,6 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors - """ @@ -86,16 +84,35 @@ The mysqldb dialect supports server-side cursors. See :ref:`mysql_ss_cursors`. 
""" +from __future__ import annotations import re +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Optional +from typing import Tuple +from typing import TYPE_CHECKING from .base import MySQLCompiler from .base import MySQLDialect from .base import MySQLExecutionContext from .base import MySQLIdentifierPreparer -from .base import TEXT -from ... import sql from ... import util +from ...util.typing import Literal + +if TYPE_CHECKING: + + from ...engine.base import Connection + from ...engine.interfaces import _DBAPIMultiExecuteParams + from ...engine.interfaces import ConnectArgsType + from ...engine.interfaces import DBAPIConnection + from ...engine.interfaces import DBAPICursor + from ...engine.interfaces import DBAPIModule + from ...engine.interfaces import ExecutionContext + from ...engine.interfaces import IsolationLevel + from ...engine.url import URL class MySQLExecutionContext_mysqldb(MySQLExecutionContext): @@ -119,8 +136,9 @@ class MySQLDialect_mysqldb(MySQLDialect): execution_ctx_cls = MySQLExecutionContext_mysqldb statement_compiler = MySQLCompiler_mysqldb preparer = MySQLIdentifierPreparer + server_version_info: Tuple[int, ...] - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any): super().__init__(**kwargs) self._mysql_dbapi_version = ( self._parse_dbapi_version(self.dbapi.__version__) @@ -128,7 +146,7 @@ def __init__(self, **kwargs): else (0, 0, 0) ) - def _parse_dbapi_version(self, version): + def _parse_dbapi_version(self, version: str) -> Tuple[int, ...]: m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", version) if m: return tuple(int(x) for x in m.group(1, 2, 3) if x is not None) @@ -136,7 +154,7 @@ def _parse_dbapi_version(self, version): return (0, 0, 0) @util.langhelpers.memoized_property - def supports_server_side_cursors(self): + def supports_server_side_cursors(self) -> bool: # type: ignore[override] try: cursors = __import__("MySQLdb.cursors").cursors self._sscursor = cursors.SSCursor @@ -145,13 +163,13 @@ def supports_server_side_cursors(self): return False @classmethod - def import_dbapi(cls): + def import_dbapi(cls) -> DBAPIModule: return __import__("MySQLdb") - def on_connect(self): + def on_connect(self) -> Callable[[DBAPIConnection], None]: super_ = super().on_connect() - def on_connect(conn): + def on_connect(conn: DBAPIConnection) -> None: if super_ is not None: super_(conn) @@ -164,43 +182,24 @@ def on_connect(conn): return on_connect - def do_ping(self, dbapi_connection): + def do_ping(self, dbapi_connection: DBAPIConnection) -> Literal[True]: dbapi_connection.ping() return True - def do_executemany(self, cursor, statement, parameters, context=None): + def do_executemany( + self, + cursor: DBAPICursor, + statement: str, + parameters: _DBAPIMultiExecuteParams, + context: Optional[ExecutionContext] = None, + ) -> None: rowcount = cursor.executemany(statement, parameters) if context is not None: - context._rowcount = rowcount - - def _check_unicode_returns(self, connection): - # work around issue fixed in - # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8 - # specific issue w/ the utf8mb4_bin collation and unicode returns - - collation = connection.exec_driver_sql( - "show collation where %s = 'utf8mb4' and %s = 'utf8mb4_bin'" - % ( - self.identifier_preparer.quote("Charset"), - self.identifier_preparer.quote("Collation"), - ) - ).scalar() - has_utf8mb4_bin = self.server_version_info > (5,) and collation - if has_utf8mb4_bin: - 
additional_tests = [ - sql.collate( - sql.cast( - sql.literal_column("'test collated returns'"), - TEXT(charset="utf8mb4"), - ), - "utf8mb4_bin", - ) - ] - else: - additional_tests = [] - return super()._check_unicode_returns(connection, additional_tests) + cast(MySQLExecutionContext, context)._rowcount = rowcount - def create_connect_args(self, url, _translate_args=None): + def create_connect_args( + self, url: URL, _translate_args: Optional[Dict[str, Any]] = None + ) -> ConnectArgsType: if _translate_args is None: _translate_args = dict( database="db", username="user", password="passwd" @@ -249,9 +248,9 @@ def create_connect_args(self, url, _translate_args=None): if client_flag_found_rows is not None: client_flag |= client_flag_found_rows opts["client_flag"] = client_flag - return [[], opts] + return [], opts - def _found_rows_client_flag(self): + def _found_rows_client_flag(self) -> Optional[int]: if self.dbapi is not None: try: CLIENT_FLAGS = __import__( @@ -260,20 +259,23 @@ def _found_rows_client_flag(self): except (AttributeError, ImportError): return None else: - return CLIENT_FLAGS.FOUND_ROWS + return CLIENT_FLAGS.FOUND_ROWS # type: ignore else: return None - def _extract_error_code(self, exception): - return exception.args[0] + def _extract_error_code(self, exception: DBAPIModule.Error) -> int: + return exception.args[0] # type: ignore[no-any-return] - def _detect_charset(self, connection): + def _detect_charset(self, connection: Connection) -> str: """Sniff out the character set in use for connection results.""" try: # note: the SQL here would be # "SHOW VARIABLES LIKE 'character_set%%'" - cset_name = connection.connection.character_set_name + + cset_name: Callable[[], str] = ( + connection.connection.character_set_name + ) except AttributeError: util.warn( "No 'character_set_name' can be detected with " @@ -285,7 +287,9 @@ def _detect_charset(self, connection): else: return cset_name() - def get_isolation_level_values(self, dbapi_connection): + def get_isolation_level_values( + self, dbapi_conn: DBAPIConnection + ) -> Tuple[IsolationLevel, ...]: return ( "SERIALIZABLE", "READ UNCOMMITTED", @@ -294,7 +298,9 @@ def get_isolation_level_values(self, dbapi_connection): "AUTOCOMMIT", ) - def set_isolation_level(self, dbapi_connection, level): + def set_isolation_level( + self, dbapi_connection: DBAPIConnection, level: IsolationLevel + ) -> None: if level == "AUTOCOMMIT": dbapi_connection.autocommit(True) else: diff --git a/lib/sqlalchemy/dialects/mysql/provision.py b/lib/sqlalchemy/dialects/mysql/provision.py index 46070848cb1..fe97672ad85 100644 --- a/lib/sqlalchemy/dialects/mysql/provision.py +++ b/lib/sqlalchemy/dialects/mysql/provision.py @@ -5,7 +5,6 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: ignore-errors - from ... import exc from ...testing.provision import configure_follower from ...testing.provision import create_db diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py index 67cb4cdd766..26fd9b038bd 100644 --- a/lib/sqlalchemy/dialects/mysql/pymysql.py +++ b/lib/sqlalchemy/dialects/mysql/pymysql.py @@ -4,8 +4,6 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors - r""" @@ -49,9 +47,26 @@ to the pymysql driver as well. 
""" # noqa +from __future__ import annotations + +from typing import Any +from typing import Dict +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union from .mysqldb import MySQLDialect_mysqldb from ...util import langhelpers +from ...util.typing import Literal + +if TYPE_CHECKING: + + from ...engine.interfaces import ConnectArgsType + from ...engine.interfaces import DBAPIConnection + from ...engine.interfaces import DBAPICursor + from ...engine.interfaces import DBAPIModule + from ...engine.interfaces import PoolProxiedConnection + from ...engine.url import URL class MySQLDialect_pymysql(MySQLDialect_mysqldb): @@ -61,7 +76,7 @@ class MySQLDialect_pymysql(MySQLDialect_mysqldb): description_encoding = None @langhelpers.memoized_property - def supports_server_side_cursors(self): + def supports_server_side_cursors(self) -> bool: # type: ignore[override] try: cursors = __import__("pymysql.cursors").cursors self._sscursor = cursors.SSCursor @@ -70,11 +85,11 @@ def supports_server_side_cursors(self): return False @classmethod - def import_dbapi(cls): + def import_dbapi(cls) -> DBAPIModule: return __import__("pymysql") @langhelpers.memoized_property - def _send_false_to_ping(self): + def _send_false_to_ping(self) -> bool: """determine if pymysql has deprecated, changed the default of, or removed the 'reconnect' argument of connection.ping(). @@ -101,7 +116,7 @@ def _send_false_to_ping(self): not insp.defaults or insp.defaults[0] is not False ) - def do_ping(self, dbapi_connection): + def do_ping(self, dbapi_connection: DBAPIConnection) -> Literal[True]: # type: ignore # noqa: E501 if self._send_false_to_ping: dbapi_connection.ping(False) else: @@ -109,17 +124,24 @@ def do_ping(self, dbapi_connection): return True - def create_connect_args(self, url, _translate_args=None): + def create_connect_args( + self, url: URL, _translate_args: Optional[Dict[str, Any]] = None + ) -> ConnectArgsType: if _translate_args is None: _translate_args = dict(username="user") return super().create_connect_args( url, _translate_args=_translate_args ) - def is_disconnect(self, e, connection, cursor): + def is_disconnect( + self, + e: DBAPIModule.Error, + connection: Optional[Union[PoolProxiedConnection, DBAPIConnection]], + cursor: Optional[DBAPICursor], + ) -> bool: if super().is_disconnect(e, connection, cursor): return True - elif isinstance(e, self.dbapi.Error): + elif isinstance(e, self.loaded_dbapi.Error): str_e = str(e).lower() return ( "already closed" in str_e or "connection was killed" in str_e @@ -127,7 +149,7 @@ def is_disconnect(self, e, connection, cursor): else: return False - def _extract_error_code(self, exception): + def _extract_error_code(self, exception: BaseException) -> Any: if isinstance(exception.args[0], Exception): exception = exception.args[0] return exception.args[0] diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py index 6d44bd38370..86f1b3c89ad 100644 --- a/lib/sqlalchemy/dialects/mysql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py @@ -4,12 +4,10 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors r""" - .. 
dialect:: mysql+pyodbc :name: PyODBC :dbapi: pyodbc @@ -44,8 +42,16 @@ connection_uri = "mysql+pyodbc:///?odbc_connect=%s" % params """ # noqa +from __future__ import annotations +import datetime import re +from typing import Any +from typing import Callable +from typing import Optional +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union from .base import MySQLDialect from .base import MySQLExecutionContext @@ -55,23 +61,31 @@ from ...connectors.pyodbc import PyODBCConnector from ...sql.sqltypes import Time +if TYPE_CHECKING: + from ...engine import Connection + from ...engine.interfaces import DBAPIConnection + from ...engine.interfaces import Dialect + from ...sql.type_api import _ResultProcessorType + class _pyodbcTIME(TIME): - def result_processor(self, dialect, coltype): - def process(value): + def result_processor( + self, dialect: Dialect, coltype: object + ) -> _ResultProcessorType[datetime.time]: + def process(value: Any) -> Union[datetime.time, None]: # pyodbc returns a datetime.time object; no need to convert - return value + return value # type: ignore[no-any-return] return process class MySQLExecutionContext_pyodbc(MySQLExecutionContext): - def get_lastrowid(self): + def get_lastrowid(self) -> int: cursor = self.create_cursor() cursor.execute("SELECT LAST_INSERT_ID()") - lastrowid = cursor.fetchone()[0] + lastrowid = cursor.fetchone()[0] # type: ignore[index] cursor.close() - return lastrowid + return lastrowid # type: ignore[no-any-return] class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect): @@ -82,7 +96,7 @@ class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect): pyodbc_driver_name = "MySQL" - def _detect_charset(self, connection): + def _detect_charset(self, connection: Connection) -> str: """Sniff out the character set in use for connection results.""" # Prefer 'character_set_results' for the current connection over the @@ -107,21 +121,25 @@ def _detect_charset(self, connection): ) return "latin1" - def _get_server_version_info(self, connection): + def _get_server_version_info( + self, connection: Connection + ) -> Tuple[int, ...]: return MySQLDialect._get_server_version_info(self, connection) - def _extract_error_code(self, exception): + def _extract_error_code(self, exception: BaseException) -> Optional[int]: m = re.compile(r"\((\d+)\)").search(str(exception.args)) - c = m.group(1) + if m is None: + return None + c: Optional[str] = m.group(1) if c: return int(c) else: return None - def on_connect(self): + def on_connect(self) -> Callable[[DBAPIConnection], None]: super_ = super().on_connect() - def on_connect(conn): + def on_connect(conn: DBAPIConnection) -> None: if super_ is not None: super_(conn) diff --git a/lib/sqlalchemy/dialects/mysql/reflection.py b/lib/sqlalchemy/dialects/mysql/reflection.py index d62390bb845..71bd8c45494 100644 --- a/lib/sqlalchemy/dialects/mysql/reflection.py +++ b/lib/sqlalchemy/dialects/mysql/reflection.py @@ -4,43 +4,62 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors - +from __future__ import annotations import re +from typing import Any +from typing import Callable +from typing import Dict +from typing import List +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union from .enumerated import ENUM from .enumerated import SET from .types import DATETIME from .types import TIME 
from .types import TIMESTAMP -from ... import log from ... import types as sqltypes from ... import util +from ...util.typing import Literal + +if TYPE_CHECKING: + from .base import MySQLDialect + from .base import MySQLIdentifierPreparer + from ...engine.interfaces import ReflectedColumn class ReflectedState: """Stores raw information about a SHOW CREATE TABLE statement.""" - def __init__(self): - self.columns = [] - self.table_options = {} - self.table_name = None - self.keys = [] - self.fk_constraints = [] - self.ck_constraints = [] + charset: Optional[str] + + def __init__(self) -> None: + self.columns: List[ReflectedColumn] = [] + self.table_options: Dict[str, str] = {} + self.table_name: Optional[str] = None + self.keys: List[Dict[str, Any]] = [] + self.fk_constraints: List[Dict[str, Any]] = [] + self.ck_constraints: List[Dict[str, Any]] = [] -@log.class_logger class MySQLTableDefinitionParser: """Parses the results of a SHOW CREATE TABLE statement.""" - def __init__(self, dialect, preparer): + def __init__( + self, dialect: MySQLDialect, preparer: MySQLIdentifierPreparer + ): self.dialect = dialect self.preparer = preparer self._prep_regexes() - def parse(self, show_create, charset): + def parse( + self, show_create: str, charset: Optional[str] + ) -> ReflectedState: state = ReflectedState() state.charset = charset for line in re.split(r"\r?\n", show_create): @@ -65,11 +84,11 @@ def parse(self, show_create, charset): if type_ is None: util.warn("Unknown schema content: %r" % line) elif type_ == "key": - state.keys.append(spec) + state.keys.append(spec) # type: ignore[arg-type] elif type_ == "fk_constraint": - state.fk_constraints.append(spec) + state.fk_constraints.append(spec) # type: ignore[arg-type] elif type_ == "ck_constraint": - state.ck_constraints.append(spec) + state.ck_constraints.append(spec) # type: ignore[arg-type] else: pass return state @@ -77,7 +96,13 @@ def parse(self, show_create, charset): def _check_view(self, sql: str) -> bool: return bool(self._re_is_view.match(sql)) - def _parse_constraints(self, line): + def _parse_constraints(self, line: str) -> Union[ + Tuple[None, str], + Tuple[Literal["partition"], str], + Tuple[ + Literal["ck_constraint", "fk_constraint", "key"], Dict[str, str] + ], + ]: """Parse a KEY or CONSTRAINT line. :param line: A line of SHOW CREATE TABLE output @@ -127,7 +152,7 @@ def _parse_constraints(self, line): # No match. return (None, line) - def _parse_table_name(self, line, state): + def _parse_table_name(self, line: str, state: ReflectedState) -> None: """Extract the table name. :param line: The first line of SHOW CREATE TABLE @@ -138,7 +163,7 @@ def _parse_table_name(self, line, state): if m: state.table_name = cleanup(m.group("name")) - def _parse_table_options(self, line, state): + def _parse_table_options(self, line: str, state: ReflectedState) -> None: """Build a dictionary of all reflected table-level options. :param line: The final line of SHOW CREATE TABLE output. 
@@ -164,7 +189,9 @@ def _parse_table_options(self, line, state): for opt, val in options.items(): state.table_options["%s_%s" % (self.dialect.name, opt)] = val - def _parse_partition_options(self, line, state): + def _parse_partition_options( + self, line: str, state: ReflectedState + ) -> None: options = {} new_line = line[:] @@ -220,7 +247,7 @@ def _parse_partition_options(self, line, state): else: state.table_options["%s_%s" % (self.dialect.name, opt)] = val - def _parse_column(self, line, state): + def _parse_column(self, line: str, state: ReflectedState) -> None: """Extract column details. Falls back to a 'minimal support' variant if full parse fails. @@ -283,7 +310,7 @@ def _parse_column(self, line, state): type_instance = col_type(*type_args, **type_kw) - col_kw = {} + col_kw: Dict[str, Any] = {} # NOT NULL col_kw["nullable"] = True @@ -324,9 +351,13 @@ def _parse_column(self, line, state): name=name, type=type_instance, default=default, comment=comment ) col_d.update(col_kw) - state.columns.append(col_d) + state.columns.append(col_d) # type: ignore[arg-type] - def _describe_to_create(self, table_name, columns): + def _describe_to_create( + self, + table_name: str, + columns: Sequence[Tuple[str, str, str, str, str, str]], + ) -> str: """Re-format DESCRIBE output as a SHOW CREATE TABLE string. DESCRIBE is a much simpler reflection and is sufficient for @@ -379,7 +410,9 @@ def _describe_to_create(self, table_name, columns): ] ) - def _parse_keyexprs(self, identifiers): + def _parse_keyexprs( + self, identifiers: str + ) -> List[Tuple[str, Optional[int], str]]: """Unpack '"col"(2),"col" ASC'-ish strings into components.""" return [ @@ -389,11 +422,12 @@ def _parse_keyexprs(self, identifiers): ) ] - def _prep_regexes(self): + def _prep_regexes(self) -> None: """Pre-compile regular expressions.""" - self._re_columns = [] - self._pr_options = [] + self._pr_options: List[ + Tuple[re.Pattern[Any], Optional[Callable[[str], str]]] + ] = [] _final = self.preparer.final_quote @@ -582,21 +616,21 @@ def _prep_regexes(self): _optional_equals = r"(?:\s*(?:=\s*)|\s+)" - def _add_option_string(self, directive): + def _add_option_string(self, directive: str) -> None: regex = r"(?P%s)%s" r"'(?P(?:[^']|'')*?)'(?!')" % ( re.escape(directive), self._optional_equals, ) self._pr_options.append(_pr_compile(regex, cleanup_text)) - def _add_option_word(self, directive): + def _add_option_word(self, directive: str) -> None: regex = r"(?P%s)%s" r"(?P\w+)" % ( re.escape(directive), self._optional_equals, ) self._pr_options.append(_pr_compile(regex)) - def _add_partition_option_word(self, directive): + def _add_partition_option_word(self, directive: str) -> None: if directive == "PARTITION BY" or directive == "SUBPARTITION BY": regex = r"(?%s)%s" r"(?P\w+.*)" % ( re.escape(directive), @@ -611,7 +645,7 @@ def _add_partition_option_word(self, directive): regex = r"(?%s)(?!\S)" % (re.escape(directive),) self._pr_options.append(_pr_compile(regex)) - def _add_option_regex(self, directive, regex): + def _add_option_regex(self, directive: str, regex: str) -> None: regex = r"(?P%s)%s" r"(?P%s)" % ( re.escape(directive), self._optional_equals, @@ -629,21 +663,35 @@ def _add_option_regex(self, directive, regex): ) -def _pr_compile(regex, cleanup=None): +@overload +def _pr_compile( + regex: str, cleanup: Callable[[str], str] +) -> Tuple[re.Pattern[Any], Callable[[str], str]]: ... + + +@overload +def _pr_compile( + regex: str, cleanup: None = None +) -> Tuple[re.Pattern[Any], None]: ... 
+ + +def _pr_compile( + regex: str, cleanup: Optional[Callable[[str], str]] = None +) -> Tuple[re.Pattern[Any], Optional[Callable[[str], str]]]: """Prepare a 2-tuple of compiled regex and callable.""" return (_re_compile(regex), cleanup) -def _re_compile(regex): +def _re_compile(regex: str) -> re.Pattern[Any]: """Compile a string to regex, I and UNICODE.""" return re.compile(regex, re.I | re.UNICODE) -def _strip_values(values): +def _strip_values(values: Sequence[str]) -> List[str]: "Strip reflected values quotes" - strip_values = [] + strip_values: List[str] = [] for a in values: if a[0:1] == '"' or a[0:1] == "'": # strip enclosing quotes and unquote interior @@ -655,7 +703,9 @@ def _strip_values(values): def cleanup_text(raw_text: str) -> str: if "\\" in raw_text: raw_text = re.sub( - _control_char_regexp, lambda s: _control_char_map[s[0]], raw_text + _control_char_regexp, + lambda s: _control_char_map[s[0]], # type: ignore[index] + raw_text, ) return raw_text.replace("''", "'") diff --git a/lib/sqlalchemy/dialects/mysql/reserved_words.py b/lib/sqlalchemy/dialects/mysql/reserved_words.py index 34fecf42724..ff526394a69 100644 --- a/lib/sqlalchemy/dialects/mysql/reserved_words.py +++ b/lib/sqlalchemy/dialects/mysql/reserved_words.py @@ -11,7 +11,6 @@ # https://mariadb.com/kb/en/reserved-words/ # includes: Reserved Words, Oracle Mode (separate set unioned) # excludes: Exceptions, Function Names -# mypy: ignore-errors RESERVED_WORDS_MARIADB = { "accessible", diff --git a/lib/sqlalchemy/dialects/mysql/types.py b/lib/sqlalchemy/dialects/mysql/types.py index ace6824a740..455b0b6629e 100644 --- a/lib/sqlalchemy/dialects/mysql/types.py +++ b/lib/sqlalchemy/dialects/mysql/types.py @@ -4,15 +4,26 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -# mypy: ignore-errors - +from __future__ import annotations import datetime +import decimal +from typing import Any +from typing import Iterable +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union from ... import exc from ... import util from ...sql import sqltypes +if TYPE_CHECKING: + from .base import MySQLDialect + from ...engine.interfaces import Dialect + from ...sql.type_api import _BindProcessorType + from ...sql.type_api import _ResultProcessorType + class _NumericType: """Base for MySQL numeric types. 
@@ -22,19 +33,27 @@ class _NumericType: """ - def __init__(self, unsigned=False, zerofill=False, **kw): + def __init__( + self, unsigned: bool = False, zerofill: bool = False, **kw: Any + ): self.unsigned = unsigned self.zerofill = zerofill super().__init__(**kw) - def __repr__(self): + def __repr__(self) -> str: return util.generic_repr( self, to_inspect=[_NumericType, sqltypes.Numeric] ) -class _FloatType(_NumericType, sqltypes.Float): - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): +class _FloatType(_NumericType, sqltypes.Float[Union[decimal.Decimal, float]]): + def __init__( + self, + precision: Optional[int] = None, + scale: Optional[int] = None, + asdecimal: bool = True, + **kw: Any, + ): if isinstance(self, (REAL, DOUBLE)) and ( (precision is None and scale is not None) or (precision is not None and scale is None) @@ -46,18 +65,18 @@ def __init__(self, precision=None, scale=None, asdecimal=True, **kw): super().__init__(precision=precision, asdecimal=asdecimal, **kw) self.scale = scale - def __repr__(self): + def __repr__(self) -> str: return util.generic_repr( self, to_inspect=[_FloatType, _NumericType, sqltypes.Float] ) class _IntegerType(_NumericType, sqltypes.Integer): - def __init__(self, display_width=None, **kw): + def __init__(self, display_width: Optional[int] = None, **kw: Any): self.display_width = display_width super().__init__(**kw) - def __repr__(self): + def __repr__(self) -> str: return util.generic_repr( self, to_inspect=[_IntegerType, _NumericType, sqltypes.Integer] ) @@ -68,13 +87,13 @@ class _StringType(sqltypes.String): def __init__( self, - charset=None, - collation=None, - ascii=False, # noqa - binary=False, - unicode=False, - national=False, - **kw, + charset: Optional[str] = None, + collation: Optional[str] = None, + ascii: bool = False, # noqa + binary: bool = False, + unicode: bool = False, + national: bool = False, + **kw: Any, ): self.charset = charset @@ -87,25 +106,33 @@ def __init__( self.national = national super().__init__(**kw) - def __repr__(self): + def __repr__(self) -> str: return util.generic_repr( self, to_inspect=[_StringType, sqltypes.String] ) -class _MatchType(sqltypes.Float, sqltypes.MatchType): - def __init__(self, **kw): +class _MatchType( + sqltypes.Float[Union[decimal.Decimal, float]], sqltypes.MatchType +): + def __init__(self, **kw: Any): # TODO: float arguments? - sqltypes.Float.__init__(self) + sqltypes.Float.__init__(self) # type: ignore[arg-type] sqltypes.MatchType.__init__(self) -class NUMERIC(_NumericType, sqltypes.NUMERIC): +class NUMERIC(_NumericType, sqltypes.NUMERIC[Union[decimal.Decimal, float]]): """MySQL NUMERIC type.""" __visit_name__ = "NUMERIC" - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + def __init__( + self, + precision: Optional[int] = None, + scale: Optional[int] = None, + asdecimal: bool = True, + **kw: Any, + ): """Construct a NUMERIC. :param precision: Total digits in this number. If scale and precision @@ -126,12 +153,18 @@ def __init__(self, precision=None, scale=None, asdecimal=True, **kw): ) -class DECIMAL(_NumericType, sqltypes.DECIMAL): +class DECIMAL(_NumericType, sqltypes.DECIMAL[Union[decimal.Decimal, float]]): """MySQL DECIMAL type.""" __visit_name__ = "DECIMAL" - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + def __init__( + self, + precision: Optional[int] = None, + scale: Optional[int] = None, + asdecimal: bool = True, + **kw: Any, + ): """Construct a DECIMAL. :param precision: Total digits in this number. 
If scale and precision @@ -152,12 +185,18 @@ def __init__(self, precision=None, scale=None, asdecimal=True, **kw): ) -class DOUBLE(_FloatType, sqltypes.DOUBLE): +class DOUBLE(_FloatType, sqltypes.DOUBLE[Union[decimal.Decimal, float]]): """MySQL DOUBLE type.""" __visit_name__ = "DOUBLE" - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + def __init__( + self, + precision: Optional[int] = None, + scale: Optional[int] = None, + asdecimal: bool = True, + **kw: Any, + ): """Construct a DOUBLE. .. note:: @@ -186,12 +225,18 @@ def __init__(self, precision=None, scale=None, asdecimal=True, **kw): ) -class REAL(_FloatType, sqltypes.REAL): +class REAL(_FloatType, sqltypes.REAL[Union[decimal.Decimal, float]]): """MySQL REAL type.""" __visit_name__ = "REAL" - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): + def __init__( + self, + precision: Optional[int] = None, + scale: Optional[int] = None, + asdecimal: bool = True, + **kw: Any, + ): """Construct a REAL. .. note:: @@ -220,12 +265,18 @@ def __init__(self, precision=None, scale=None, asdecimal=True, **kw): ) -class FLOAT(_FloatType, sqltypes.FLOAT): +class FLOAT(_FloatType, sqltypes.FLOAT[Union[decimal.Decimal, float]]): """MySQL FLOAT type.""" __visit_name__ = "FLOAT" - def __init__(self, precision=None, scale=None, asdecimal=False, **kw): + def __init__( + self, + precision: Optional[int] = None, + scale: Optional[int] = None, + asdecimal: bool = False, + **kw: Any, + ): """Construct a FLOAT. :param precision: Total digits in this number. If scale and precision @@ -245,7 +296,9 @@ def __init__(self, precision=None, scale=None, asdecimal=False, **kw): precision=precision, scale=scale, asdecimal=asdecimal, **kw ) - def bind_processor(self, dialect): + def bind_processor( + self, dialect: Dialect + ) -> Optional[_BindProcessorType[Union[decimal.Decimal, float]]]: return None @@ -254,7 +307,7 @@ class INTEGER(_IntegerType, sqltypes.INTEGER): __visit_name__ = "INTEGER" - def __init__(self, display_width=None, **kw): + def __init__(self, display_width: Optional[int] = None, **kw: Any): """Construct an INTEGER. :param display_width: Optional, maximum display width for this number. @@ -275,7 +328,7 @@ class BIGINT(_IntegerType, sqltypes.BIGINT): __visit_name__ = "BIGINT" - def __init__(self, display_width=None, **kw): + def __init__(self, display_width: Optional[int] = None, **kw: Any): """Construct a BIGINTEGER. :param display_width: Optional, maximum display width for this number. @@ -296,7 +349,7 @@ class MEDIUMINT(_IntegerType): __visit_name__ = "MEDIUMINT" - def __init__(self, display_width=None, **kw): + def __init__(self, display_width: Optional[int] = None, **kw: Any): """Construct a MEDIUMINTEGER :param display_width: Optional, maximum display width for this number. @@ -317,7 +370,7 @@ class TINYINT(_IntegerType): __visit_name__ = "TINYINT" - def __init__(self, display_width=None, **kw): + def __init__(self, display_width: Optional[int] = None, **kw: Any): """Construct a TINYINT. :param display_width: Optional, maximum display width for this number. @@ -338,7 +391,7 @@ class SMALLINT(_IntegerType, sqltypes.SMALLINT): __visit_name__ = "SMALLINT" - def __init__(self, display_width=None, **kw): + def __init__(self, display_width: Optional[int] = None, **kw: Any): """Construct a SMALLINTEGER. :param display_width: Optional, maximum display width for this number. 
@@ -354,7 +407,7 @@ def __init__(self, display_width=None, **kw): super().__init__(display_width=display_width, **kw) -class BIT(sqltypes.TypeEngine): +class BIT(sqltypes.TypeEngine[Any]): """MySQL BIT type. This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater @@ -365,7 +418,7 @@ class BIT(sqltypes.TypeEngine): __visit_name__ = "BIT" - def __init__(self, length=None): + def __init__(self, length: Optional[int] = None): """Construct a BIT. :param length: Optional, number of bits. @@ -373,19 +426,19 @@ def __init__(self, length=None): """ self.length = length - def result_processor(self, dialect, coltype): + def result_processor( + self, dialect: MySQLDialect, coltype: object # type: ignore[override] + ) -> Optional[_ResultProcessorType[Any]]: """Convert a MySQL's 64 bit, variable length binary string to a long.""" if dialect.supports_native_bit: return None - def process(value): + def process(value: Optional[Iterable[int]]) -> Optional[int]: if value is not None: v = 0 for i in value: - if not isinstance(i, int): - i = ord(i) # convert byte to int on Python 2 v = v << 8 | i return v return value @@ -398,7 +451,7 @@ class TIME(sqltypes.TIME): __visit_name__ = "TIME" - def __init__(self, timezone=False, fsp=None): + def __init__(self, timezone: bool = False, fsp: Optional[int] = None): """Construct a MySQL TIME type. :param timezone: not used by the MySQL dialect. @@ -417,10 +470,12 @@ def __init__(self, timezone=False, fsp=None): super().__init__(timezone=timezone) self.fsp = fsp - def result_processor(self, dialect, coltype): + def result_processor( + self, dialect: Dialect, coltype: object + ) -> _ResultProcessorType[datetime.time]: time = datetime.time - def process(value): + def process(value: Any) -> Optional[datetime.time]: # convert from a timedelta value if value is not None: microseconds = value.microseconds @@ -443,7 +498,7 @@ class TIMESTAMP(sqltypes.TIMESTAMP): __visit_name__ = "TIMESTAMP" - def __init__(self, timezone=False, fsp=None): + def __init__(self, timezone: bool = False, fsp: Optional[int] = None): """Construct a MySQL TIMESTAMP type. :param timezone: not used by the MySQL dialect. @@ -468,7 +523,7 @@ class DATETIME(sqltypes.DATETIME): __visit_name__ = "DATETIME" - def __init__(self, timezone=False, fsp=None): + def __init__(self, timezone: bool = False, fsp: Optional[int] = None): """Construct a MySQL DATETIME type. :param timezone: not used by the MySQL dialect. @@ -488,12 +543,12 @@ def __init__(self, timezone=False, fsp=None): self.fsp = fsp -class YEAR(sqltypes.TypeEngine): +class YEAR(sqltypes.TypeEngine[Any]): """MySQL YEAR type, for single byte storage of years 1901-2155.""" __visit_name__ = "YEAR" - def __init__(self, display_width=None): + def __init__(self, display_width: Optional[int] = None): self.display_width = display_width @@ -502,7 +557,7 @@ class TEXT(_StringType, sqltypes.TEXT): __visit_name__ = "TEXT" - def __init__(self, length=None, **kw): + def __init__(self, length: Optional[int] = None, **kw: Any): """Construct a TEXT. :param length: Optional, if provided the server may optimize storage @@ -538,7 +593,7 @@ class TINYTEXT(_StringType): __visit_name__ = "TINYTEXT" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any): """Construct a TINYTEXT. :param charset: Optional, a column-level character set for this string @@ -571,7 +626,7 @@ class MEDIUMTEXT(_StringType): __visit_name__ = "MEDIUMTEXT" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any): """Construct a MEDIUMTEXT. 
:param charset: Optional, a column-level character set for this string @@ -603,7 +658,7 @@ class LONGTEXT(_StringType): __visit_name__ = "LONGTEXT" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any): """Construct a LONGTEXT. :param charset: Optional, a column-level character set for this string @@ -635,7 +690,7 @@ class VARCHAR(_StringType, sqltypes.VARCHAR): __visit_name__ = "VARCHAR" - def __init__(self, length=None, **kwargs): + def __init__(self, length: Optional[int] = None, **kwargs: Any) -> None: """Construct a VARCHAR. :param charset: Optional, a column-level character set for this string @@ -667,7 +722,7 @@ class CHAR(_StringType, sqltypes.CHAR): __visit_name__ = "CHAR" - def __init__(self, length=None, **kwargs): + def __init__(self, length: Optional[int] = None, **kwargs: Any): """Construct a CHAR. :param length: Maximum data length, in characters. @@ -683,7 +738,7 @@ def __init__(self, length=None, **kwargs): super().__init__(length=length, **kwargs) @classmethod - def _adapt_string_for_cast(cls, type_): + def _adapt_string_for_cast(cls, type_: sqltypes.String) -> sqltypes.CHAR: # copy the given string type into a CHAR # for the purposes of rendering a CAST expression type_ = sqltypes.to_instance(type_) @@ -712,7 +767,7 @@ class NVARCHAR(_StringType, sqltypes.NVARCHAR): __visit_name__ = "NVARCHAR" - def __init__(self, length=None, **kwargs): + def __init__(self, length: Optional[int] = None, **kwargs: Any): """Construct an NVARCHAR. :param length: Maximum data length, in characters. @@ -738,7 +793,7 @@ class NCHAR(_StringType, sqltypes.NCHAR): __visit_name__ = "NCHAR" - def __init__(self, length=None, **kwargs): + def __init__(self, length: Optional[int] = None, **kwargs: Any): """Construct an NCHAR. :param length: Maximum data length, in characters. 
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 69c6dc1b623..15f758e4c2c 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -83,6 +83,7 @@ from .interfaces import _ParamStyle from .interfaces import ConnectArgsType from .interfaces import DBAPIConnection + from .interfaces import DBAPIModule from .interfaces import IsolationLevel from .row import Row from .url import URL @@ -428,7 +429,7 @@ def insert_executemany_returning_sort_by_parameter_order(self): delete_executemany_returning = False @util.memoized_property - def loaded_dbapi(self) -> ModuleType: + def loaded_dbapi(self) -> DBAPIModule: if self.dbapi is None: raise exc.InvalidRequestError( f"Dialect {self} does not have a Python DBAPI established " @@ -554,7 +555,7 @@ def initialize(self, connection: Connection) -> None: % (self.label_length, self.max_identifier_length) ) - def on_connect(self) -> Optional[Callable[[Any], Any]]: + def on_connect(self) -> Optional[Callable[[Any], None]]: # inherits the docstring from interfaces.Dialect.on_connect return None @@ -947,7 +948,7 @@ def do_execute_no_params(self, cursor, statement, context=None): def is_disconnect( self, - e: Exception, + e: DBAPIModule.Error, connection: Union[ pool.PoolProxiedConnection, interfaces.DBAPIConnection, None ], @@ -1052,7 +1053,7 @@ def denormalize_name(self, name): name = name_upper return name - def get_driver_connection(self, connection): + def get_driver_connection(self, connection: DBAPIConnection) -> Any: return connection def _overrides_default(self, method): diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 1823c97fc31..fd99afafd09 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -10,7 +10,6 @@ from __future__ import annotations from enum import Enum -from types import ModuleType from typing import Any from typing import Awaitable from typing import Callable @@ -34,7 +33,7 @@ from .. import util from ..event import EventTarget from ..pool import Pool -from ..pool import PoolProxiedConnection +from ..pool import PoolProxiedConnection as PoolProxiedConnection from ..sql.compiler import Compiled as Compiled from ..sql.compiler import Compiled # noqa from ..sql.compiler import TypeCompiler as TypeCompiler @@ -51,6 +50,7 @@ from .base import Engine from .cursor import CursorResult from .url import URL + from ..connectors.asyncio import AsyncIODBAPIConnection from ..event import _ListenerFnType from ..event import dispatcher from ..exc import StatementError @@ -70,6 +70,7 @@ from ..sql.sqltypes import Integer from ..sql.type_api import _TypeMemoDict from ..sql.type_api import TypeEngine + from ..util.langhelpers import generic_fn_descriptor ConnectArgsType = Tuple[Sequence[str], MutableMapping[str, Any]] @@ -106,6 +107,22 @@ class ExecuteStyle(Enum): """ +class DBAPIModule(Protocol): + class Error(Exception): + def __getattr__(self, key: str) -> Any: ... + + class OperationalError(Error): + pass + + class InterfaceError(Error): + pass + + class IntegrityError(Error): + pass + + def __getattr__(self, key: str) -> Any: ... + + class DBAPIConnection(Protocol): """protocol representing a :pep:`249` database connection. @@ -126,7 +143,9 @@ def cursor(self, *args: Any, **kwargs: Any) -> DBAPICursor: ... def rollback(self) -> None: ... - autocommit: bool + def __getattr__(self, key: str) -> Any: ... + + def __setattr__(self, key: str, value: Any) -> None: ... 
class DBAPIType(Protocol): @@ -658,7 +677,7 @@ class Dialect(EventTarget): dialect_description: str - dbapi: Optional[ModuleType] + dbapi: Optional[DBAPIModule] """A reference to the DBAPI module object itself. SQLAlchemy dialects import DBAPI modules using the classmethod @@ -682,7 +701,7 @@ class Dialect(EventTarget): """ @util.non_memoized_property - def loaded_dbapi(self) -> ModuleType: + def loaded_dbapi(self) -> DBAPIModule: """same as .dbapi, but is never None; will raise an error if no DBAPI was set up. @@ -786,7 +805,7 @@ def loaded_dbapi(self) -> ModuleType: """The maximum length of constraint names if different from ``max_identifier_length``.""" - supports_server_side_cursors: bool + supports_server_side_cursors: Union[generic_fn_descriptor[bool], bool] """indicates if the dialect supports server side cursors""" server_side_cursors: bool @@ -1239,7 +1258,7 @@ def create_connect_args(self, url): raise NotImplementedError() @classmethod - def import_dbapi(cls) -> ModuleType: + def import_dbapi(cls) -> DBAPIModule: """Import the DBAPI module that is used by this dialect. The Python module object returned here will be assigned as an @@ -2209,7 +2228,7 @@ def do_execute_no_params( def is_disconnect( self, - e: Exception, + e: DBAPIModule.Error, connection: Optional[Union[PoolProxiedConnection, DBAPIConnection]], cursor: Optional[DBAPICursor], ) -> bool: @@ -2313,7 +2332,7 @@ def do_on_connect(connection): """ return self.on_connect() - def on_connect(self) -> Optional[Callable[[Any], Any]]: + def on_connect(self) -> Optional[Callable[[Any], None]]: """return a callable which sets up a newly created DBAPI connection. The callable should accept a single argument "conn" which is the @@ -3370,7 +3389,7 @@ class AdaptedConnection: __slots__ = ("_connection",) - _connection: Any + _connection: AsyncIODBAPIConnection @property def driver_connection(self) -> Any: diff --git a/lib/sqlalchemy/pool/base.py b/lib/sqlalchemy/pool/base.py index b070bff197d..fe78506df9e 100644 --- a/lib/sqlalchemy/pool/base.py +++ b/lib/sqlalchemy/pool/base.py @@ -1079,6 +1079,8 @@ def cursor(self, *args: Any, **kwargs: Any) -> DBAPICursor: ... def rollback(self) -> None: ... + def __getattr__(self, key: str) -> Any: ... + @property def is_valid(self) -> bool: """Return True if this :class:`.PoolProxiedConnection` still refers diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index f171256d4a1..d782b019624 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -6862,7 +6862,7 @@ def _prepared_index_name( else: schema_name = None - index_name = self.preparer.format_index(index) + index_name: str = self.preparer.format_index(index) if schema_name: index_name = schema_name + "." 
+ index_name diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index 69287d6f215..3b2463a634b 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -432,6 +432,8 @@ class _CreateDropBase(ExecutableDDLElement, Generic[_SI]): """ + element: _SI + def __init__(self, element: _SI) -> None: self.element = self.target = element self._ddl_if = getattr(element, "_ddl_if", None) diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 88cb2529d88..37fea947afa 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -80,6 +80,7 @@ from ..util.typing import ParamSpec from ..util.typing import Self + if typing.TYPE_CHECKING: from ._typing import _ByArgument from ._typing import _ColumnExpressionArgument @@ -117,6 +118,7 @@ from ..engine.interfaces import SchemaTranslateMapType from ..engine.result import Result + _NUMERIC = Union[float, Decimal] _NUMBER = Union[float, int, Decimal] @@ -2124,8 +2126,8 @@ def _negate_in_binary(self, negated_op, original_op): else: return self - def _with_binary_element_type(self, type_): - c = ClauseElement._clone(self) + def _with_binary_element_type(self, type_: TypeEngine[Any]) -> Self: + c: Self = ClauseElement._clone(self) c.type = type_ return c diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index cd63e82339e..02ed4fa6bbb 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -789,7 +789,7 @@ def __init__( self.type = sqltypes.BOOLEANTYPE self.negate = None self._is_implicitly_boolean = True - self.modifiers = {} + self.modifiers = util.immutabledict({}) @property def left_expr(self) -> ColumnElement[Any]: diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index 5e68da263a2..63d7c495683 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -611,7 +611,10 @@ class Float(Numeric[_N]): __visit_name__ = "float" - scale = None + if not TYPE_CHECKING: + # this is not in 2.1 branch, not clear if needed for 2.0 + # implementation + scale = None @overload def __init__( diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 6e2dfef6659..df3118b6d56 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -12,7 +12,6 @@ from __future__ import annotations from enum import Enum -from types import ModuleType import typing from typing import Any from typing import Callable @@ -58,6 +57,7 @@ from .sqltypes import NUMERICTYPE as NUMERICTYPE # noqa: F401 from .sqltypes import STRINGTYPE as STRINGTYPE # noqa: F401 from .sqltypes import TABLEVALUE as TABLEVALUE # noqa: F401 + from ..engine.interfaces import DBAPIModule from ..engine.interfaces import Dialect from ..util.typing import GenericProtocol @@ -614,7 +614,7 @@ def compare_values(self, x: Any, y: Any) -> bool: return x == y # type: ignore[no-any-return] - def get_dbapi_type(self, dbapi: ModuleType) -> Optional[Any]: + def get_dbapi_type(self, dbapi: DBAPIModule) -> Optional[Any]: """Return the corresponding type object from the underlying DB-API, if any. @@ -2270,7 +2270,7 @@ def copy(self, **kw: Any) -> Self: instance.__dict__.update(self.__dict__) return instance - def get_dbapi_type(self, dbapi: ModuleType) -> Optional[Any]: + def get_dbapi_type(self, dbapi: DBAPIModule) -> Optional[Any]: """Return the DBAPI type object represented by this :class:`.TypeDecorator`. 
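Not part of the patch series: a minimal sketch of the idea behind the ``DBAPIModule`` protocol introduced in ``engine/interfaces.py`` above, namely that a ``Protocol`` carrying an ``Error`` attribute plus a permissive ``__getattr__`` lets attribute access on an otherwise untyped :pep:`249` driver module pass type checking. The names ``_DBAPIModuleLike`` and ``extract_error_code`` are illustrative only, ``sqlite3`` merely stands in for a real driver module, and whether a given type checker accepts a module object as a protocol implementer depends on the checker::

    from __future__ import annotations

    from typing import Any, Protocol


    class _DBAPIModuleLike(Protocol):
        """Illustrative stand-in for the DBAPIModule protocol shown above."""

        class Error(Exception):
            """base exception attribute expected on the driver module"""

        def __getattr__(self, key: str) -> Any: ...


    def extract_error_code(dbapi: _DBAPIModuleLike, exc: Exception) -> int:
        # attribute access such as dbapi.Error is statically typed here,
        # even though the concrete driver module ships no annotations
        if isinstance(exc, dbapi.Error):
            return getattr(exc, "errno", -1)
        return -1


    if __name__ == "__main__":
        import sqlite3

        try:
            sqlite3.connect(":memory:").execute("select * from missing_table")
        except sqlite3.Error as err:
            # sqlite3 is treated as structurally satisfying the protocol
            print(extract_error_code(sqlite3, err))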
From 4c6c1a6e87fe18c7800ecfa042af49da24fac114 Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Tue, 20 May 2025 10:26:14 -0400 Subject: [PATCH 531/544] Use pg_index's indnatts when indnkeyatts is not available Using NULL when this column is not available does not work with old PostgreSQL (tested on version 9.6, as reported in #12600). Instead, use `indnatts` which should be equal to what `indnkeyatts` would be as there is no "included attributes" in the index on these old versions (but only "key columns"). From https://www.postgresql.org/docs/17/catalog-pg-index.html: * `indnatts`, "The total number of columns in the index [...]; this number includes both key and included attributes" * `indnkeyatts`, "The number of key columns in the index, not counting any included columns [...]" Fixes #12600. Closes: #12611 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12611 Pull-request-sha: 8ff48a6225ec58fdfa84aec75d487238281b1ac1 Change-Id: Idcadcd7db545bc1f73d85b29347c8ba388b1b41d (cherry picked from commit 9071811de76dea558f932215870e4a5513b30362) --- doc/build/changelog/unreleased_20/12600.rst | 7 +++++++ lib/sqlalchemy/dialects/postgresql/base.py | 14 ++++---------- 2 files changed, 11 insertions(+), 10 deletions(-) create mode 100644 doc/build/changelog/unreleased_20/12600.rst diff --git a/doc/build/changelog/unreleased_20/12600.rst b/doc/build/changelog/unreleased_20/12600.rst new file mode 100644 index 00000000000..d544a225d3a --- /dev/null +++ b/doc/build/changelog/unreleased_20/12600.rst @@ -0,0 +1,7 @@ +.. change:: + :tags: bug, postgresql, reflection + :tickets: 12600 + + Fixed regression caused by :ticket:`10665` where the newly modified + constraint reflection query would fail on older versions of PostgreSQL + such as version 9.6. Pull request courtesy Denis Laxalde. 
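Not part of the patch: a small sketch of the reflection call that exercises the query adjusted below, assuming a reachable PostgreSQL server at the (hypothetical) URL shown. On PostgreSQL 9.6 the covering-index branch is simply never taken, because without INCLUDE columns ``indnatts`` equals the number of key columns that ``indnkeyatts`` would report on version 11 and later::

    from sqlalchemy import create_engine, inspect

    # hypothetical DSN; point this at a real server to try it
    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    insp = inspect(engine)
    for idx in insp.get_indexes("some_table", schema="public"):
        # each reflected index is a dict with at least "name" and
        # "column_names"; on PostgreSQL 11+ a covering index may additionally
        # report its INCLUDE columns via dialect-specific options
        print(idx["name"], idx["column_names"])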
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 9d4257cf0e4..69b78bb138a 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -4104,7 +4104,7 @@ def _constraint_query(self): if self.server_version_info >= (11, 0): indnkeyatts = pg_catalog.pg_index.c.indnkeyatts else: - indnkeyatts = sql.null().label("indnkeyatts") + indnkeyatts = pg_catalog.pg_index.c.indnatts.label("indnkeyatts") if self.server_version_info >= (15,): indnullsnotdistinct = pg_catalog.pg_index.c.indnullsnotdistinct @@ -4224,10 +4224,7 @@ def _reflect_constraint( # See note in get_multi_indexes all_cols = row["cols"] indnkeyatts = row["indnkeyatts"] - if ( - indnkeyatts is not None - and len(all_cols) > indnkeyatts - ): + if len(all_cols) > indnkeyatts: inc_cols = all_cols[indnkeyatts:] cst_cols = all_cols[:indnkeyatts] else: @@ -4579,7 +4576,7 @@ def _index_query(self): if self.server_version_info >= (11, 0): indnkeyatts = pg_catalog.pg_index.c.indnkeyatts else: - indnkeyatts = sql.null().label("indnkeyatts") + indnkeyatts = pg_catalog.pg_index.c.indnatts.label("indnkeyatts") if self.server_version_info >= (15,): nulls_not_distinct = pg_catalog.pg_index.c.indnullsnotdistinct @@ -4689,10 +4686,7 @@ def get_multi_indexes( # "The number of key columns in the index, not counting any # included columns, which are merely stored and do not # participate in the index semantics" - if ( - indnkeyatts is not None - and len(all_elements) > indnkeyatts - ): + if len(all_elements) > indnkeyatts: # this is a "covering index" which has INCLUDE columns # as well as regular index columns inc_cols = all_elements[indnkeyatts:] From 4c25e3d0a294460eaab8e7aecdf178f93356dbd9 Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Wed, 21 May 2025 03:23:12 -0400 Subject: [PATCH 532/544] Add missing requires in the tests for older postgresql version Follow up commit 39bb17442ce6ac9a3dde5e2b72376b77ffce5e28. 
Closes: #12612 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12612 Pull-request-sha: 894276ff232ba328cc235ecf04e84067db204c3d Change-Id: Ib8d47f11e34d6bb40d9a88d5f411c2d5fee70823 (cherry picked from commit 6154aa1b50391aa2a0e69303d8a3b5c2a17dc67a) --- test/dialect/postgresql/test_query.py | 2 +- test/dialect/postgresql/test_reflection.py | 3 +++ test/dialect/postgresql/test_types.py | 6 +++++- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/test/dialect/postgresql/test_query.py b/test/dialect/postgresql/test_query.py index c55cd0a5d7c..fc68e08ed4d 100644 --- a/test/dialect/postgresql/test_query.py +++ b/test/dialect/postgresql/test_query.py @@ -1007,7 +1007,7 @@ def test_expression_positional(self, connection): (func.to_tsquery,), (func.plainto_tsquery,), (func.phraseto_tsquery,), - (func.websearch_to_tsquery,), + (func.websearch_to_tsquery, testing.skip_if("postgresql < 11")), argnames="to_ts_func", ) @testing.variation("use_regconfig", [True, False, "literal"]) diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index ebe751b5b34..f8030691744 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -910,6 +910,9 @@ def test_reflected_primary_key_order(self, metadata, connection): subject = Table("subject", meta2, autoload_with=connection) eq_(subject.primary_key.columns.keys(), ["p2", "p1"]) + @testing.skip_if( + "postgresql < 15.0", "on delete with column list not supported" + ) def test_reflected_foreign_key_ondelete_column_list( self, metadata, connection ): diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index 795a897699b..0df48f6fd12 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -3548,7 +3548,11 @@ def test_reflection(self, special_types_table, connection): (postgresql.INET, "127.0.0.1"), (postgresql.CIDR, "192.168.100.128/25"), (postgresql.MACADDR, "08:00:2b:01:02:03"), - (postgresql.MACADDR8, "08:00:2b:01:02:03:04:05"), + ( + postgresql.MACADDR8, + "08:00:2b:01:02:03:04:05", + testing.skip_if("postgresql < 10"), + ), argnames="column_type, value", id_="na", ) From fee8c0c0840dc2ec5da0fc39ad4efc504a9122b8 Mon Sep 17 00:00:00 2001 From: krave1986 Date: Sat, 24 May 2025 04:23:00 +0800 Subject: [PATCH 533/544] docs: Clarify that relationship() first parameter is positional (#12621) (cherry picked from commit 18ee6a762ce2ab00671bcce60d6baf1b31291e71) --- doc/build/orm/basic_relationships.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/orm/basic_relationships.rst b/doc/build/orm/basic_relationships.rst index a1bdb0525c3..b4a3ed2b5f5 100644 --- a/doc/build/orm/basic_relationships.rst +++ b/doc/build/orm/basic_relationships.rst @@ -1018,7 +1018,7 @@ within any of these string expressions:: In an example like the above, the string passed to :class:`_orm.Mapped` can be disambiguated from a specific class argument by passing the class -location string directly to :paramref:`_orm.relationship.argument` as well. +location string directly to the first positional parameter (:paramref:`_orm.relationship.argument`) as well. 
Below illustrates a typing-only import for ``Child``, combined with a runtime specifier for the target class that will search for the correct name within the :class:`_orm.registry`:: From 2eafc001ec6239847b4edde04451044242170ff7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois-Michel=20L=27Heureux?= Date: Fri, 23 May 2025 16:23:53 -0400 Subject: [PATCH 534/544] Doc: Update connection / reconnecting_engine (#12617) (cherry picked from commit 4cac1c6002f805879188c21fb4c75b7406d743f3) --- doc/build/faq/connections.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/build/faq/connections.rst b/doc/build/faq/connections.rst index 1f3bf1ba140..3177d7ea926 100644 --- a/doc/build/faq/connections.rst +++ b/doc/build/faq/connections.rst @@ -258,7 +258,9 @@ statement executions:: fn(cursor_obj, statement, context=context, *arg) except engine.dialect.dbapi.Error as raw_dbapi_err: connection = context.root_connection - if engine.dialect.is_disconnect(raw_dbapi_err, connection, cursor_obj): + if engine.dialect.is_disconnect( + raw_dbapi_err, connection.connection.dbapi_connection, cursor_obj + ): engine.logger.error( "disconnection error, attempt %d/%d", retry + 1, From 482e34537f837f8c2bae9c6eef908235860fff18 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Tue, 20 May 2025 22:15:06 +0200 Subject: [PATCH 535/544] update black to 25.1.0 to align it with alembic Change-Id: I2ac332237f18bbc44155eadee35c64f62adc2867 (cherry picked from commit 76e49d06ceed2121023defe2ff727acea0f1a0e5) --- .pre-commit-config.yaml | 6 +++--- examples/dogpile_caching/helloworld.py | 4 +--- examples/dynamic_dict/__init__.py | 2 +- examples/nested_sets/__init__.py | 2 +- lib/sqlalchemy/engine/base.py | 4 +--- lib/sqlalchemy/engine/strategies.py | 5 +---- lib/sqlalchemy/event/api.py | 4 +--- lib/sqlalchemy/ext/asyncio/base.py | 2 +- lib/sqlalchemy/ext/asyncio/engine.py | 2 +- lib/sqlalchemy/ext/mypy/names.py | 2 +- lib/sqlalchemy/orm/base.py | 8 +++----- lib/sqlalchemy/orm/context.py | 4 ++-- lib/sqlalchemy/orm/decl_base.py | 1 + lib/sqlalchemy/orm/dependency.py | 4 +--- lib/sqlalchemy/orm/events.py | 6 ++---- lib/sqlalchemy/orm/path_registry.py | 4 +--- lib/sqlalchemy/orm/state_changes.py | 4 +--- lib/sqlalchemy/orm/strategies.py | 2 +- lib/sqlalchemy/pool/base.py | 4 +--- lib/sqlalchemy/pool/impl.py | 4 +--- lib/sqlalchemy/schema.py | 4 +--- lib/sqlalchemy/sql/_typing.py | 4 ++-- lib/sqlalchemy/sql/base.py | 6 ++---- lib/sqlalchemy/sql/default_comparator.py | 3 +-- lib/sqlalchemy/sql/expression.py | 5 +---- lib/sqlalchemy/sql/naming.py | 5 +---- lib/sqlalchemy/sql/type_api.py | 4 +--- lib/sqlalchemy/sql/util.py | 4 +--- lib/sqlalchemy/sql/visitors.py | 5 +---- lib/sqlalchemy/types.py | 4 +--- test/ext/test_horizontal_shard.py | 2 +- test/ext/test_orderinglist.py | 2 +- test/orm/inheritance/test_assorted_poly.py | 2 +- test/typing/plain_files/orm/relationship.py | 4 +--- test/typing/plain_files/orm/trad_relationship_uselist.py | 5 +---- tox.ini | 4 ++-- 36 files changed, 45 insertions(+), 92 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 35e10ee29d2..c7d225e1ae0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/python/black - rev: 24.10.0 + rev: 25.1.0 hooks: - id: black @@ -12,7 +12,7 @@ repos: - id: zimports - repo: https://github.com/pycqa/flake8 - rev: 6.1.0 + rev: 7.2.0 hooks: - id: flake8 additional_dependencies: @@ -37,4 +37,4 @@ 
repos: types: [rst] exclude: README.* additional_dependencies: - - black==24.10.0 + - black==25.1.0 diff --git a/examples/dogpile_caching/helloworld.py b/examples/dogpile_caching/helloworld.py index 01934c59fab..df1c2a318ef 100644 --- a/examples/dogpile_caching/helloworld.py +++ b/examples/dogpile_caching/helloworld.py @@ -1,6 +1,4 @@ -"""Illustrate how to load some data, and cache the results. - -""" +"""Illustrate how to load some data, and cache the results.""" from sqlalchemy import select from .caching_query import FromCache diff --git a/examples/dynamic_dict/__init__.py b/examples/dynamic_dict/__init__.py index ed31df062fb..c1d52d3c430 100644 --- a/examples/dynamic_dict/__init__.py +++ b/examples/dynamic_dict/__init__.py @@ -1,4 +1,4 @@ -""" Illustrates how to place a dictionary-like facade on top of a +"""Illustrates how to place a dictionary-like facade on top of a "dynamic" relation, so that dictionary operations (assuming simple string keys) can operate upon a large collection without loading the full collection at once. diff --git a/examples/nested_sets/__init__.py b/examples/nested_sets/__init__.py index 5fdfbcedc08..cacab411b9a 100644 --- a/examples/nested_sets/__init__.py +++ b/examples/nested_sets/__init__.py @@ -1,4 +1,4 @@ -""" Illustrates a rudimentary way to implement the "nested sets" +"""Illustrates a rudimentary way to implement the "nested sets" pattern for hierarchical data using the SQLAlchemy ORM. .. autosource:: diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 4292ed6d100..ad0e4b62435 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -4,9 +4,7 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Defines :class:`_engine.Connection` and :class:`_engine.Engine`. - -""" +"""Defines :class:`_engine.Connection` and :class:`_engine.Engine`.""" from __future__ import annotations import contextlib diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py index 5dd7bca9a49..b4b8077ba05 100644 --- a/lib/sqlalchemy/engine/strategies.py +++ b/lib/sqlalchemy/engine/strategies.py @@ -5,10 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Deprecated mock engine strategy used by Alembic. - - -""" +"""Deprecated mock engine strategy used by Alembic.""" from __future__ import annotations diff --git a/lib/sqlalchemy/event/api.py b/lib/sqlalchemy/event/api.py index b6ec8f6d32b..01dd4bdd1bf 100644 --- a/lib/sqlalchemy/event/api.py +++ b/lib/sqlalchemy/event/api.py @@ -5,9 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Public API functions for the event system. - -""" +"""Public API functions for the event system.""" from __future__ import annotations from typing import Any diff --git a/lib/sqlalchemy/ext/asyncio/base.py b/lib/sqlalchemy/ext/asyncio/base.py index ce2c439f160..72a617f4e22 100644 --- a/lib/sqlalchemy/ext/asyncio/base.py +++ b/lib/sqlalchemy/ext/asyncio/base.py @@ -215,7 +215,7 @@ async def __aexit__( def asyncstartablecontext( - func: Callable[..., AsyncIterator[_T_co]] + func: Callable[..., AsyncIterator[_T_co]], ) -> Callable[..., GeneratorStartableContext[_T_co]]: """@asyncstartablecontext decorator. 
diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index 65c019954c2..d4ecbdac986 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -1431,7 +1431,7 @@ def _get_sync_engine_or_connection( def _get_sync_engine_or_connection( - async_engine: Union[AsyncEngine, AsyncConnection] + async_engine: Union[AsyncEngine, AsyncConnection], ) -> Union[Engine, Connection]: if isinstance(async_engine, AsyncConnection): return async_engine._proxied diff --git a/lib/sqlalchemy/ext/mypy/names.py b/lib/sqlalchemy/ext/mypy/names.py index 319786288fd..1eaef775953 100644 --- a/lib/sqlalchemy/ext/mypy/names.py +++ b/lib/sqlalchemy/ext/mypy/names.py @@ -297,7 +297,7 @@ def type_id_for_callee(callee: Expression) -> Optional[int]: def type_id_for_named_node( - node: Union[NameExpr, MemberExpr, SymbolNode] + node: Union[NameExpr, MemberExpr, SymbolNode], ) -> Optional[int]: type_id, fullnames = _lookup.get(node.name, (None, None)) diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py index ae0ba1029d1..b9f8d32be96 100644 --- a/lib/sqlalchemy/orm/base.py +++ b/lib/sqlalchemy/orm/base.py @@ -5,9 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Constants and rudimental functions used throughout the ORM. - -""" +"""Constants and rudimental functions used throughout the ORM.""" from __future__ import annotations @@ -435,7 +433,7 @@ def _inspect_mapped_object(instance: _T) -> Optional[InstanceState[_T]]: def _class_to_mapper( - class_or_mapper: Union[Mapper[_T], Type[_T]] + class_or_mapper: Union[Mapper[_T], Type[_T]], ) -> Mapper[_T]: # can't get mypy to see an overload for this insp = inspection.inspect(class_or_mapper, False) @@ -447,7 +445,7 @@ def _class_to_mapper( def _mapper_or_none( - entity: Union[Type[_T], _InternalEntityType[_T]] + entity: Union[Type[_T], _InternalEntityType[_T]], ) -> Optional[Mapper[_T]]: """Return the :class:`_orm.Mapper` for the given class or None if the class is not mapped. diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index d5ed61de53f..30b05948a51 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -2525,7 +2525,7 @@ def _column_descriptions( def _legacy_filter_by_entity_zero( - query_or_augmented_select: Union[Query[Any], Select[Any]] + query_or_augmented_select: Union[Query[Any], Select[Any]], ) -> Optional[_InternalEntityType[Any]]: self = query_or_augmented_select if self._setup_joins: @@ -2540,7 +2540,7 @@ def _legacy_filter_by_entity_zero( def _entity_from_pre_ent_zero( - query_or_augmented_select: Union[Query[Any], Select[Any]] + query_or_augmented_select: Union[Query[Any], Select[Any]], ) -> Optional[_InternalEntityType[Any]]: self = query_or_augmented_select if not self._raw_columns: diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index 1176b504186..f09f4369f1b 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -103,6 +103,7 @@ def __call__(self, **kw: Any) -> _O: ... class _DeclMappedClassProtocol(MappedClassProtocol[_O], Protocol): "Internal more detailed version of ``MappedClassProtocol``." 
+ metadata: MetaData __tablename__: str __mapper_args__: _MapperKwArgs diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py index b055240a353..a8cafdd0b7a 100644 --- a/lib/sqlalchemy/orm/dependency.py +++ b/lib/sqlalchemy/orm/dependency.py @@ -7,9 +7,7 @@ # mypy: ignore-errors -"""Relationship dependencies. - -""" +"""Relationship dependencies.""" from __future__ import annotations diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index f161760e6da..5af78fc6b76 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -5,9 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""ORM event interfaces. - -""" +"""ORM event interfaces.""" from __future__ import annotations from typing import Any @@ -1602,7 +1600,7 @@ def my_before_commit(session): _dispatch_target = Session def _lifecycle_event( # type: ignore [misc] - fn: Callable[[SessionEvents, Session, Any], None] + fn: Callable[[SessionEvents, Session, Any], None], ) -> Callable[[SessionEvents, Session, Any], None]: _sessionevents_lifecycle_event_names.add(fn.__name__) return fn diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py index 388e46098d6..bb03e53d2b1 100644 --- a/lib/sqlalchemy/orm/path_registry.py +++ b/lib/sqlalchemy/orm/path_registry.py @@ -4,9 +4,7 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Path tracking utilities, representing mapper graph traversals. - -""" +"""Path tracking utilities, representing mapper graph traversals.""" from __future__ import annotations diff --git a/lib/sqlalchemy/orm/state_changes.py b/lib/sqlalchemy/orm/state_changes.py index 10e417e85d1..a79874e1c7a 100644 --- a/lib/sqlalchemy/orm/state_changes.py +++ b/lib/sqlalchemy/orm/state_changes.py @@ -5,9 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""State tracking utilities used by :class:`_orm.Session`. - -""" +"""State tracking utilities used by :class:`_orm.Session`.""" from __future__ import annotations diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index d9eaa2b388e..8ac34e2943b 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -8,7 +8,7 @@ """sqlalchemy.orm.interfaces.LoaderStrategy - implementations, and related MapperOptions.""" +implementations, and related MapperOptions.""" from __future__ import annotations diff --git a/lib/sqlalchemy/pool/base.py b/lib/sqlalchemy/pool/base.py index fe78506df9e..ed4d7c115ab 100644 --- a/lib/sqlalchemy/pool/base.py +++ b/lib/sqlalchemy/pool/base.py @@ -6,9 +6,7 @@ # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Base constructs for connection pools. - -""" +"""Base constructs for connection pools.""" from __future__ import annotations diff --git a/lib/sqlalchemy/pool/impl.py b/lib/sqlalchemy/pool/impl.py index f2b951d8e8d..cc597e1e62e 100644 --- a/lib/sqlalchemy/pool/impl.py +++ b/lib/sqlalchemy/pool/impl.py @@ -6,9 +6,7 @@ # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Pool implementation classes. 
- -""" +"""Pool implementation classes.""" from __future__ import annotations import threading diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py index 16f7ec37b3c..56b90ec99e8 100644 --- a/lib/sqlalchemy/schema.py +++ b/lib/sqlalchemy/schema.py @@ -5,9 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Compatibility namespace for sqlalchemy.sql.schema and related. - -""" +"""Compatibility namespace for sqlalchemy.sql.schema and related.""" from __future__ import annotations diff --git a/lib/sqlalchemy/sql/_typing.py b/lib/sqlalchemy/sql/_typing.py index 8fe86f63748..8e3c66e553f 100644 --- a/lib/sqlalchemy/sql/_typing.py +++ b/lib/sqlalchemy/sql/_typing.py @@ -337,11 +337,11 @@ def is_table_value_type( def is_selectable(t: Any) -> TypeGuard[Selectable]: ... def is_select_base( - t: Union[Executable, ReturnsRows] + t: Union[Executable, ReturnsRows], ) -> TypeGuard[SelectBase]: ... def is_select_statement( - t: Union[Executable, ReturnsRows] + t: Union[Executable, ReturnsRows], ) -> TypeGuard[Select[Any]]: ... def is_table(t: FromClause) -> TypeGuard[TableClause]: ... diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 102fddd9447..e27296b5332 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -6,9 +6,7 @@ # the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: allow-untyped-defs, allow-untyped-calls -"""Foundational utilities common to many sql modules. - -""" +"""Foundational utilities common to many sql modules.""" from __future__ import annotations @@ -2154,7 +2152,7 @@ def __hash__(self): # type: ignore[override] def _entity_namespace( - entity: Union[_HasEntityNamespace, ExternallyTraversible] + entity: Union[_HasEntityNamespace, ExternallyTraversible], ) -> _EntityNamespace: """Return the nearest .entity_namespace for the given entity. diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py index 7fa5dafe9ce..62c1be452e1 100644 --- a/lib/sqlalchemy/sql/default_comparator.py +++ b/lib/sqlalchemy/sql/default_comparator.py @@ -5,8 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Default implementation of SQL comparison operations. -""" +"""Default implementation of SQL comparison operations.""" from __future__ import annotations diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py index f8ac3a9ecad..dc7dee13b12 100644 --- a/lib/sqlalchemy/sql/expression.py +++ b/lib/sqlalchemy/sql/expression.py @@ -5,10 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Defines the public namespace for SQL expression constructs. - - -""" +"""Defines the public namespace for SQL expression constructs.""" from __future__ import annotations diff --git a/lib/sqlalchemy/sql/naming.py b/lib/sqlalchemy/sql/naming.py index 58203e4b9a1..ce68acf15b9 100644 --- a/lib/sqlalchemy/sql/naming.py +++ b/lib/sqlalchemy/sql/naming.py @@ -6,10 +6,7 @@ # the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: allow-untyped-defs, allow-untyped-calls -"""Establish constraint and index naming conventions. 
- - -""" +"""Establish constraint and index naming conventions.""" from __future__ import annotations diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index df3118b6d56..1e08ece5357 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -5,9 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Base types API. - -""" +"""Base types API.""" from __future__ import annotations diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index 06ca1532887..9fc4e65d9b4 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -6,9 +6,7 @@ # the MIT License: https://www.opensource.org/licenses/mit-license.php # mypy: allow-untyped-defs, allow-untyped-calls -"""High level utilities which build upon other modules here. - -""" +"""High level utilities which build upon other modules here.""" from __future__ import annotations from collections import deque diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py index e758350adf8..27642851676 100644 --- a/lib/sqlalchemy/sql/visitors.py +++ b/lib/sqlalchemy/sql/visitors.py @@ -5,10 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Visitor/traversal interface and library functions. - - -""" +"""Visitor/traversal interface and library functions.""" from __future__ import annotations diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py index c2b1ab1945f..bb2c2e11de3 100644 --- a/lib/sqlalchemy/types.py +++ b/lib/sqlalchemy/types.py @@ -5,9 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php -"""Compatibility namespace for sqlalchemy.sql.types. 
- -""" +"""Compatibility namespace for sqlalchemy.sql.types.""" from __future__ import annotations diff --git a/test/ext/test_horizontal_shard.py b/test/ext/test_horizontal_shard.py index 3ff49fc82fe..4d579fa0c1d 100644 --- a/test/ext/test_horizontal_shard.py +++ b/test/ext/test_horizontal_shard.py @@ -51,7 +51,7 @@ class ShardTest: @classmethod def define_tables(cls, metadata): - global db1, db2, db3, db4, weather_locations, weather_reports + global weather_locations cls.tables.ids = ids = Table( "ids", metadata, Column("nextid", Integer, nullable=False) diff --git a/test/ext/test_orderinglist.py b/test/ext/test_orderinglist.py index 90c7f385789..98e2a8207f9 100644 --- a/test/ext/test_orderinglist.py +++ b/test/ext/test_orderinglist.py @@ -70,7 +70,7 @@ def _setup(self, test_collection_class): """Build a relationship situation using the given test_collection_class factory""" - global metadata, slides_table, bullets_table, Slide, Bullet + global slides_table, bullets_table, Slide, Bullet slides_table = Table( "test_Slides", diff --git a/test/orm/inheritance/test_assorted_poly.py b/test/orm/inheritance/test_assorted_poly.py index ab06dbaea3d..9100970d440 100644 --- a/test/orm/inheritance/test_assorted_poly.py +++ b/test/orm/inheritance/test_assorted_poly.py @@ -819,7 +819,7 @@ class RelationshipTest6(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - global people, managers, data + global people, managers people = Table( "people", metadata, diff --git a/test/typing/plain_files/orm/relationship.py b/test/typing/plain_files/orm/relationship.py index 44090ad53b4..a972e23b83e 100644 --- a/test/typing/plain_files/orm/relationship.py +++ b/test/typing/plain_files/orm/relationship.py @@ -1,6 +1,4 @@ -"""this suite experiments with other kinds of relationship syntaxes. - -""" +"""this suite experiments with other kinds of relationship syntaxes.""" from __future__ import annotations diff --git a/test/typing/plain_files/orm/trad_relationship_uselist.py b/test/typing/plain_files/orm/trad_relationship_uselist.py index 9282181f01b..e15fe709341 100644 --- a/test/typing/plain_files/orm/trad_relationship_uselist.py +++ b/test/typing/plain_files/orm/trad_relationship_uselist.py @@ -1,7 +1,4 @@ -"""traditional relationship patterns with explicit uselist. 
- - -""" +"""traditional relationship patterns with explicit uselist.""" import typing from typing import cast diff --git a/tox.ini b/tox.ini index 200c6b6bf0e..6b3e970902b 100644 --- a/tox.ini +++ b/tox.ini @@ -239,7 +239,7 @@ extras= {[greenletextras]extras} deps= - flake8==6.1.0 + flake8==7.2.0 flake8-import-order flake8-builtins flake8-future-annotations>=0.0.5 @@ -251,7 +251,7 @@ deps= # in case it requires a version pin pydocstyle pygments - black==24.10.0 + black==25.1.0 slotscheck>=0.17.0 # required by generate_tuple_map_overloads From 7ff2e7070a4fb021de83eec0eb8ae05de02d6fbb Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Fri, 23 May 2025 23:10:43 +0200 Subject: [PATCH 536/544] fix missing quotes from cast call in mysqlconnector module This fixes an issue introduced by 51a7678db2f0fcb1552afa40333640bc7fbb6dac in I37bd98049ff1a64d58e9490b0e5e2ea764dd1f73 Change-Id: Id738c04ee4dc8c2b12d9ab0fc71a4e1a6c5bc209 (cherry picked from commit 1070889f263be89e0e47bdbb9f7113e98ead192b) --- lib/sqlalchemy/dialects/mysql/base.py | 4 ++-- lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index def897edf9c..be49a7e7623 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -3690,7 +3690,7 @@ def _fetch_setting( if not row: return None else: - return cast("Optional[str]", row[fetch_col]) + return cast(Optional[str], row[fetch_col]) def _detect_charset(self, connection: Connection) -> str: raise NotImplementedError() @@ -3803,7 +3803,7 @@ def _show_create_table( row = self._compat_first(rp, charset=charset) if not row: raise exc.NoSuchTableError(full_name) - return cast("str", row[1]).strip() + return cast(str, row[1]).strip() @overload def _describe_table( diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index ad2e4856e1d..91d1058e9c4 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -162,7 +162,7 @@ class MySQLDialect_mysqlconnector(MySQLDialect): @classmethod def import_dbapi(cls) -> DBAPIModule: - return cast(DBAPIModule, __import__("mysql.connector").connector) + return cast("DBAPIModule", __import__("mysql.connector").connector) def do_ping(self, dbapi_connection: DBAPIConnection) -> bool: dbapi_connection.ping(False) From 06f0f5f42c0cc3c136971b0a14ce740d2fb87dd7 Mon Sep 17 00:00:00 2001 From: Denis Laxalde Date: Wed, 28 May 2025 15:37:36 -0400 Subject: [PATCH 537/544] Reflect index's column operator class on PostgreSQL Fill the `postgresql_ops` key of PostgreSQL's `dialect_options` returned by get_multi_indexes() with a mapping from column names to the operator class, if it's not the default for respective data type. As we need to join on ``pg_catalog.pg_opclass``, the table definition is added to ``postgresql.pg_catalog``. Fixes #8664. 
Closes: #12504 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12504 Pull-request-sha: 8fdf93e1b27c371f52990d5fda8b2fdf79ec23eb Change-Id: I8789c1e9d15f8cc9a7205f492ec730570f19bbcc (cherry picked from commit 0642541c6371d19c8d28ff0bdaf6ab3822715a6d) --- doc/build/changelog/unreleased_20/8664.rst | 12 +++++ lib/sqlalchemy/dialects/postgresql/base.py | 41 +++++++++++++++- .../dialects/postgresql/pg_catalog.py | 14 ++++++ test/dialect/postgresql/test_reflection.py | 49 +++++++++++++++++++ 4 files changed, 115 insertions(+), 1 deletion(-) create mode 100644 doc/build/changelog/unreleased_20/8664.rst diff --git a/doc/build/changelog/unreleased_20/8664.rst b/doc/build/changelog/unreleased_20/8664.rst new file mode 100644 index 00000000000..8a17e439720 --- /dev/null +++ b/doc/build/changelog/unreleased_20/8664.rst @@ -0,0 +1,12 @@ +.. change:: + :tags: usecase, postgresql + :tickets: 8664 + + Added ``postgresql_ops`` key to the ``dialect_options`` entry in reflected + dictionary. This maps names of columns used in the index to respective + operator class, if distinct from the default one for column's data type. + Pull request courtesy Denis Laxalde. + + .. seealso:: + + :ref:`postgresql_operator_classes` diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 69b78bb138a..52f4721da9d 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -4513,6 +4513,9 @@ def _index_query(self): pg_catalog.pg_index.c.indexrelid, pg_catalog.pg_index.c.indrelid, sql.func.unnest(pg_catalog.pg_index.c.indkey).label("attnum"), + sql.func.unnest(pg_catalog.pg_index.c.indclass).label( + "att_opclass" + ), sql.func.generate_subscripts( pg_catalog.pg_index.c.indkey, 1 ).label("ord"), @@ -4544,6 +4547,8 @@ def _index_query(self): else_=pg_catalog.pg_attribute.c.attname.cast(TEXT), ).label("element"), (idx_sq.c.attnum == 0).label("is_expr"), + pg_catalog.pg_opclass.c.opcname, + pg_catalog.pg_opclass.c.opcdefault, ) .select_from(idx_sq) .outerjoin( @@ -4554,6 +4559,10 @@ def _index_query(self): pg_catalog.pg_attribute.c.attrelid == idx_sq.c.indrelid, ), ) + .outerjoin( + pg_catalog.pg_opclass, + pg_catalog.pg_opclass.c.oid == idx_sq.c.att_opclass, + ) .where(idx_sq.c.indrelid.in_(bindparam("oids"))) .subquery("idx_attr") ) @@ -4568,6 +4577,12 @@ def _index_query(self): sql.func.array_agg( aggregate_order_by(attr_sq.c.is_expr, attr_sq.c.ord) ).label("elements_is_expr"), + sql.func.array_agg( + aggregate_order_by(attr_sq.c.opcname, attr_sq.c.ord) + ).label("elements_opclass"), + sql.func.array_agg( + aggregate_order_by(attr_sq.c.opcdefault, attr_sq.c.ord) + ).label("elements_opdefault"), ) .group_by(attr_sq.c.indexrelid) .subquery("idx_cols") @@ -4610,6 +4625,8 @@ def _index_query(self): nulls_not_distinct, cols_sq.c.elements, cols_sq.c.elements_is_expr, + cols_sq.c.elements_opclass, + cols_sq.c.elements_opdefault, ) .select_from(pg_catalog.pg_index) .where( @@ -4682,6 +4699,8 @@ def get_multi_indexes( all_elements = row["elements"] all_elements_is_expr = row["elements_is_expr"] + all_elements_opclass = row["elements_opclass"] + all_elements_opdefault = row["elements_opdefault"] indnkeyatts = row["indnkeyatts"] # "The number of key columns in the index, not counting any # included columns, which are merely stored and do not @@ -4701,10 +4720,18 @@ def get_multi_indexes( not is_expr for is_expr in all_elements_is_expr[indnkeyatts:] ) + idx_elements_opclass = all_elements_opclass[ + :indnkeyatts + ] + 
idx_elements_opdefault = all_elements_opdefault[ + :indnkeyatts + ] else: idx_elements = all_elements idx_elements_is_expr = all_elements_is_expr inc_cols = [] + idx_elements_opclass = all_elements_opclass + idx_elements_opdefault = all_elements_opdefault index = {"name": index_name, "unique": row["indisunique"]} if any(idx_elements_is_expr): @@ -4718,6 +4745,19 @@ def get_multi_indexes( else: index["column_names"] = idx_elements + dialect_options = {} + + if not all(idx_elements_opdefault): + dialect_options["postgresql_ops"] = { + name: opclass + for name, opclass, is_default in zip( + idx_elements, + idx_elements_opclass, + idx_elements_opdefault, + ) + if not is_default + } + sorting = {} for col_index, col_flags in enumerate(row["indoption"]): col_sorting = () @@ -4737,7 +4777,6 @@ def get_multi_indexes( if row["has_constraint"]: index["duplicates_constraint"] = index_name - dialect_options = {} if row["reloptions"]: dialect_options["postgresql_with"] = dict( [ diff --git a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py index 4841056cf9d..9625ccf3347 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg_catalog.py +++ b/lib/sqlalchemy/dialects/postgresql/pg_catalog.py @@ -310,3 +310,17 @@ def process(value: Any) -> Optional[list[int]]: Column("collicurules", Text, info={"server_version": (16,)}), Column("collversion", Text, info={"server_version": (10,)}), ) + +pg_opclass = Table( + "pg_opclass", + pg_catalog_meta, + Column("oid", OID, info={"server_version": (9, 3)}), + Column("opcmethod", NAME), + Column("opcname", NAME), + Column("opsnamespace", OID), + Column("opsowner", OID), + Column("opcfamily", OID), + Column("opcintype", OID), + Column("opcdefault", Boolean), + Column("opckeytype", OID), +) diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index f8030691744..5dd8e00070d 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -27,6 +27,7 @@ from sqlalchemy.dialects.postgresql import base as postgresql from sqlalchemy.dialects.postgresql import DOMAIN from sqlalchemy.dialects.postgresql import ExcludeConstraint +from sqlalchemy.dialects.postgresql import INET from sqlalchemy.dialects.postgresql import INTEGER from sqlalchemy.dialects.postgresql import INTERVAL from sqlalchemy.dialects.postgresql import pg_catalog @@ -1724,6 +1725,54 @@ def test_index_reflection_with_access_method(self, metadata, connection): "gin", ) + def test_index_reflection_with_operator_class(self, metadata, connection): + """reflect indexes with operator class on columns""" + + Table( + "t", + metadata, + Column("id", Integer, nullable=False), + Column("name", String), + Column("alias", String), + Column("addr1", INET), + Column("addr2", INET), + ) + metadata.create_all(connection) + + # 'name' and 'addr1' use a non-default operator, 'addr2' uses the + # default one, and 'alias' uses no operator. 
+ connection.exec_driver_sql( + "CREATE INDEX ix_t ON t USING btree" + " (name text_pattern_ops, alias, addr1 cidr_ops, addr2 inet_ops)" + ) + + ind = inspect(connection).get_indexes("t", None) + expected = [ + { + "unique": False, + "column_names": ["name", "alias", "addr1", "addr2"], + "name": "ix_t", + "dialect_options": { + "postgresql_ops": { + "addr1": "cidr_ops", + "name": "text_pattern_ops", + }, + }, + } + ] + if connection.dialect.server_version_info >= (11, 0): + expected[0]["include_columns"] = [] + expected[0]["dialect_options"]["postgresql_include"] = [] + eq_(ind, expected) + + m = MetaData() + t1 = Table("t", m, autoload_with=connection) + r_ind = list(t1.indexes)[0] + eq_( + r_ind.dialect_options["postgresql"]["ops"], + {"name": "text_pattern_ops", "addr1": "cidr_ops"}, + ) + @testing.skip_if("postgresql < 15.0", "nullsnotdistinct not supported") def test_nullsnotdistinct(self, metadata, connection): Table( From 46a6bfec6611c1352827063a092dfe2810ae8302 Mon Sep 17 00:00:00 2001 From: Federico Caselli Date: Fri, 30 May 2025 22:53:59 +0200 Subject: [PATCH 538/544] Fix type errors surfaced by mypy 1.16 Change-Id: I50bbd760577ff7c865c81153041e82bba068e5d8 (cherry picked from commit 7088f7962bcdf497892da85aff16022d27662eb4) --- lib/sqlalchemy/dialects/mysql/aiomysql.py | 2 +- lib/sqlalchemy/dialects/mysql/asyncmy.py | 2 +- .../dialects/mysql/mysqlconnector.py | 4 ++-- lib/sqlalchemy/dialects/mysql/mysqldb.py | 2 +- lib/sqlalchemy/dialects/mysql/pymysql.py | 4 ++-- lib/sqlalchemy/dialects/postgresql/array.py | 2 +- lib/sqlalchemy/dialects/postgresql/ranges.py | 4 ++-- lib/sqlalchemy/engine/cursor.py | 6 +++--- lib/sqlalchemy/engine/default.py | 5 +++-- lib/sqlalchemy/ext/mutable.py | 1 + lib/sqlalchemy/orm/attributes.py | 4 ++-- lib/sqlalchemy/orm/decl_base.py | 3 +-- lib/sqlalchemy/orm/mapper.py | 20 +++++++++++-------- lib/sqlalchemy/orm/properties.py | 2 +- lib/sqlalchemy/orm/relationships.py | 3 +-- lib/sqlalchemy/orm/util.py | 2 +- lib/sqlalchemy/orm/writeonly.py | 8 ++------ lib/sqlalchemy/pool/impl.py | 8 ++++---- lib/sqlalchemy/sql/coercions.py | 2 +- lib/sqlalchemy/sql/compiler.py | 4 ++-- lib/sqlalchemy/sql/ddl.py | 2 +- lib/sqlalchemy/sql/elements.py | 11 +++++----- lib/sqlalchemy/sql/lambdas.py | 20 ++++++++++--------- lib/sqlalchemy/sql/schema.py | 4 ++-- lib/sqlalchemy/sql/sqltypes.py | 8 ++++---- .../plain_files/orm/mapped_covariant.py | 5 ++++- 26 files changed, 72 insertions(+), 66 deletions(-) diff --git a/lib/sqlalchemy/dialects/mysql/aiomysql.py b/lib/sqlalchemy/dialects/mysql/aiomysql.py index 314c78adee8..e2ac70b0294 100644 --- a/lib/sqlalchemy/dialects/mysql/aiomysql.py +++ b/lib/sqlalchemy/dialects/mysql/aiomysql.py @@ -184,7 +184,7 @@ class MySQLDialect_aiomysql(MySQLDialect_pymysql): driver = "aiomysql" supports_statement_cache = True - supports_server_side_cursors = True # type: ignore[assignment] + supports_server_side_cursors = True _sscursor = AsyncAdapt_aiomysql_ss_cursor is_async = True diff --git a/lib/sqlalchemy/dialects/mysql/asyncmy.py b/lib/sqlalchemy/dialects/mysql/asyncmy.py index 32a45c0d35d..750735e8f1e 100644 --- a/lib/sqlalchemy/dialects/mysql/asyncmy.py +++ b/lib/sqlalchemy/dialects/mysql/asyncmy.py @@ -167,7 +167,7 @@ class MySQLDialect_asyncmy(MySQLDialect_pymysql): driver = "asyncmy" supports_statement_cache = True - supports_server_side_cursors = True # type: ignore[assignment] + supports_server_side_cursors = True _sscursor = AsyncAdapt_asyncmy_ss_cursor is_async = True diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py 
b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index 91d1058e9c4..a830cb5afef 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -116,13 +116,13 @@ def _escape_identifier(self, value: str) -> str: return value -class MySQLIdentifierPreparer_mysqlconnector( # type:ignore[misc] +class MySQLIdentifierPreparer_mysqlconnector( IdentifierPreparerCommon_mysqlconnector, MySQLIdentifierPreparer ): pass -class MariaDBIdentifierPreparer_mysqlconnector( # type:ignore[misc] +class MariaDBIdentifierPreparer_mysqlconnector( IdentifierPreparerCommon_mysqlconnector, MariaDBIdentifierPreparer ): pass diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py index 52adaefab76..de4ae61c047 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py @@ -154,7 +154,7 @@ def _parse_dbapi_version(self, version: str) -> Tuple[int, ...]: return (0, 0, 0) @util.langhelpers.memoized_property - def supports_server_side_cursors(self) -> bool: # type: ignore[override] + def supports_server_side_cursors(self) -> bool: try: cursors = __import__("MySQLdb.cursors").cursors self._sscursor = cursors.SSCursor diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py index 26fd9b038bd..48b7994a82a 100644 --- a/lib/sqlalchemy/dialects/mysql/pymysql.py +++ b/lib/sqlalchemy/dialects/mysql/pymysql.py @@ -76,7 +76,7 @@ class MySQLDialect_pymysql(MySQLDialect_mysqldb): description_encoding = None @langhelpers.memoized_property - def supports_server_side_cursors(self) -> bool: # type: ignore[override] + def supports_server_side_cursors(self) -> bool: try: cursors = __import__("pymysql.cursors").cursors self._sscursor = cursors.SSCursor @@ -116,7 +116,7 @@ def _send_false_to_ping(self) -> bool: not insp.defaults or insp.defaults[0] is not False ) - def do_ping(self, dbapi_connection: DBAPIConnection) -> Literal[True]: # type: ignore # noqa: E501 + def do_ping(self, dbapi_connection: DBAPIConnection) -> Literal[True]: if self._send_false_to_ping: dbapi_connection.ping(False) else: diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index 9e094973c34..96f6dc21a2d 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -398,7 +398,7 @@ def overlap(self, other: typing_Any) -> ColumnElement[bool]: def _against_native_enum(self) -> bool: return ( isinstance(self.item_type, sqltypes.Enum) - and self.item_type.native_enum # type: ignore[attr-defined] + and self.item_type.native_enum ) def literal_processor( diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py index 93253570c1b..0ce4ea29137 100644 --- a/lib/sqlalchemy/dialects/postgresql/ranges.py +++ b/lib/sqlalchemy/dialects/postgresql/ranges.py @@ -271,9 +271,9 @@ def _compare_edges( value2 += step value2_inc = False - if value1 < value2: # type: ignore + if value1 < value2: return -1 - elif value1 > value2: # type: ignore + elif value1 > value2: return 1 elif only_values: return 0 diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index 54e9784e0c4..8e2348efab5 100644 --- a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ -1358,15 +1358,15 @@ def _reduce(self, keys): self._we_dont_return_rows() @property - def _keymap(self): + def _keymap(self): # type: ignore[override] self._we_dont_return_rows() 
@property - def _key_to_index(self): + def _key_to_index(self): # type: ignore[override] self._we_dont_return_rows() @property - def _processors(self): + def _processors(self): # type: ignore[override] self._we_dont_return_rows() @property diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 15f758e4c2c..57759f79cfc 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -478,7 +478,7 @@ def _type_memos(self): return weakref.WeakKeyDictionary() @property - def dialect_description(self): + def dialect_description(self): # type: ignore[override] return self.name + "+" + self.driver @property @@ -1627,7 +1627,7 @@ def _get_cache_stats(self) -> str: return "unknown" @property - def executemany(self): + def executemany(self): # type: ignore[override] return self.execute_style in ( ExecuteStyle.EXECUTEMANY, ExecuteStyle.INSERTMANYVALUES, @@ -1841,6 +1841,7 @@ def _setup_result_proxy(self): if self._rowcount is None and exec_opt.get("preserve_rowcount", False): self._rowcount = self.cursor.rowcount + yp: Optional[Union[int, bool]] if self.is_crud or self.is_text: result = self._setup_dml_or_text_result() yp = False diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index ed618c3819d..3d568fc9892 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -524,6 +524,7 @@ def load(state: InstanceState[_O], *args: Any) -> None: if val is not None: if coerce: val = cls.coerce(key, val) + assert val is not None state.dict[key] = val val._parents[state] = key diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index 283cbc60484..3c4f3164514 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -630,11 +630,11 @@ def __init__( self._doc = self.__doc__ = doc @property - def _parententity(self): + def _parententity(self): # type: ignore[override] return inspection.inspect(self.class_, raiseerr=False) @property - def parent(self): + def parent(self): # type: ignore[override] return inspection.inspect(self.class_, raiseerr=False) _is_internal_proxy = True diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index f09f4369f1b..d0a78764cc8 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -2017,8 +2017,7 @@ class _DeferredMapperConfig(_ClassScanMapperConfig): def _early_mapping(self, mapper_kw: _MapperKwArgs) -> None: pass - # mypy disallows plain property override of variable - @property # type: ignore + @property def cls(self) -> Type[Any]: return self._cls() # type: ignore diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index 5844854f9d0..ae7f8f24fc4 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -1056,7 +1056,7 @@ def entity(self): """ - primary_key: Tuple[Column[Any], ...] + primary_key: Tuple[ColumnElement[Any], ...] 
"""An iterable containing the collection of :class:`_schema.Column` objects which comprise the 'primary key' of the mapped table, from the @@ -2554,7 +2554,7 @@ def _mappers_from_spec( if spec == "*": mappers = list(self.self_and_descendants) elif spec: - mapper_set = set() + mapper_set: Set[Mapper[Any]] = set() for m in util.to_list(spec): m = _class_to_mapper(m) if not m.isa(self): @@ -3428,9 +3428,11 @@ def primary_base_mapper(self) -> Mapper[Any]: return self.class_manager.mapper.base_mapper def _result_has_identity_key(self, result, adapter=None): - pk_cols: Sequence[ColumnClause[Any]] = self.primary_key - if adapter: - pk_cols = [adapter.columns[c] for c in pk_cols] + pk_cols: Sequence[ColumnElement[Any]] + if adapter is not None: + pk_cols = [adapter.columns[c] for c in self.primary_key] + else: + pk_cols = self.primary_key rk = result.keys() for col in pk_cols: if col not in rk: @@ -3455,9 +3457,11 @@ def identity_key_from_row( for the "row" argument """ - pk_cols: Sequence[ColumnClause[Any]] = self.primary_key - if adapter: - pk_cols = [adapter.columns[c] for c in pk_cols] + pk_cols: Sequence[ColumnElement[Any]] + if adapter is not None: + pk_cols = [adapter.columns[c] for c in self.primary_key] + else: + pk_cols = self.primary_key mapping: RowMapping if hasattr(row, "_mapping"): diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index 75ad5b1ca0e..164ae009b25 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -234,7 +234,7 @@ def _memoized_attr__renders_in_subqueries(self) -> bool: return self.strategy._have_default_expression # type: ignore return ("deferred", True) not in self.strategy_key or ( - self not in self.parent._readonly_props # type: ignore + self not in self.parent._readonly_props ) @util.preload_module("sqlalchemy.orm.state", "sqlalchemy.orm.strategies") diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index eae00338f10..15b63d1b4b4 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -486,8 +486,7 @@ def __init__( else: self._overlaps = () - # mypy ignoring the @property setter - self.cascade = cascade # type: ignore + self.cascade = cascade self.back_populates = back_populates diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 874c4f53b15..ca607af1be4 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -1566,7 +1566,7 @@ class Bundle( _propagate_attrs: _PropagateAttrsType = util.immutabledict() - proxy_set = util.EMPTY_SET # type: ignore + proxy_set = util.EMPTY_SET exprs: List[_ColumnsClauseElement] diff --git a/lib/sqlalchemy/orm/writeonly.py b/lib/sqlalchemy/orm/writeonly.py index ac034a09e0a..fe9c8e96e89 100644 --- a/lib/sqlalchemy/orm/writeonly.py +++ b/lib/sqlalchemy/orm/writeonly.py @@ -236,15 +236,11 @@ def get_collection( return DynamicCollectionAdapter(data) # type: ignore[return-value] @util.memoized_property - def _append_token( # type:ignore[override] - self, - ) -> attributes.AttributeEventToken: + def _append_token(self) -> attributes.AttributeEventToken: return attributes.AttributeEventToken(self, attributes.OP_APPEND) @util.memoized_property - def _remove_token( # type:ignore[override] - self, - ) -> attributes.AttributeEventToken: + def _remove_token(self) -> attributes.AttributeEventToken: return attributes.AttributeEventToken(self, attributes.OP_REMOVE) def fire_append_event( diff --git a/lib/sqlalchemy/pool/impl.py b/lib/sqlalchemy/pool/impl.py index 
cc597e1e62e..f3d53ddb84d 100644 --- a/lib/sqlalchemy/pool/impl.py +++ b/lib/sqlalchemy/pool/impl.py @@ -60,7 +60,7 @@ class QueuePool(Pool): """ - _is_asyncio = False # type: ignore[assignment] + _is_asyncio = False _queue_class: Type[sqla_queue.QueueCommon[ConnectionPoolEntry]] = ( sqla_queue.Queue @@ -269,7 +269,7 @@ class AsyncAdaptedQueuePool(QueuePool): """ - _is_asyncio = True # type: ignore[assignment] + _is_asyncio = True _queue_class: Type[sqla_queue.QueueCommon[ConnectionPoolEntry]] = ( sqla_queue.AsyncAdaptedQueue ) @@ -278,7 +278,7 @@ class AsyncAdaptedQueuePool(QueuePool): class FallbackAsyncAdaptedQueuePool(AsyncAdaptedQueuePool): - _queue_class = sqla_queue.FallbackAsyncAdaptedQueue + _queue_class = sqla_queue.FallbackAsyncAdaptedQueue # type: ignore[assignment] # noqa: E501 class NullPool(Pool): @@ -356,7 +356,7 @@ class SingletonThreadPool(Pool): """ - _is_asyncio = False # type: ignore[assignment] + _is_asyncio = False def __init__( self, diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index e174833fbdc..ac0393a6056 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -843,7 +843,7 @@ def _warn_for_implicit_coercion(self, elem): ) @util.preload_module("sqlalchemy.sql.elements") - def _literal_coercion(self, element, *, expr, operator, **kw): + def _literal_coercion(self, element, *, expr, operator, **kw): # type: ignore[override] # noqa: E501 if util.is_non_string_iterable(element): non_literal_expressions: Dict[ Optional[_ColumnExpressionArgument[Any]], diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index d782b019624..2353fa39e40 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -4230,7 +4230,7 @@ def visit_cte( if self.preparer._requires_quotes(cte_name): cte_name = self.preparer.quote(cte_name) text += self.get_render_as_alias_suffix(cte_name) - return text + return text # type: ignore[no-any-return] else: return self.preparer.format_alias(cte, cte_name) @@ -6341,7 +6341,7 @@ def visit_update( self.stack.pop(-1) - return text + return text # type: ignore[no-any-return] def delete_extra_from_clause( self, delete_stmt, from_table, extra_froms, from_hints, **kw diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index 3b2463a634b..70a83cb8a73 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -439,7 +439,7 @@ def __init__(self, element: _SI) -> None: self._ddl_if = getattr(element, "_ddl_if", None) @property - def stringify_dialect(self): + def stringify_dialect(self): # type: ignore[override] assert not isinstance(self.element, str) return self.element.create_drop_stringify_dialect diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 37fea947afa..2d9ee575620 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -461,7 +461,7 @@ def _with_binary_element_type(self, type_): return self @property - def _constructor(self): + def _constructor(self): # type: ignore[override] """return the 'constructor' for this ClauseElement. 
This is for the purposes for creating a new object of @@ -694,6 +694,7 @@ def _compile_w_cache( else: elem_cache_key = None + extracted_params: Optional[Sequence[BindParameter[Any]]] if elem_cache_key is not None: if TYPE_CHECKING: assert compiled_cache is not None @@ -2297,7 +2298,7 @@ def _select_iterable(self) -> _SelectIterable: _allow_label_resolve = False @property - def _is_star(self): + def _is_star(self): # type: ignore[override] return self.text == "*" def __init__(self, text: str): @@ -4789,11 +4790,11 @@ def _apply_to_inner( return self @property - def primary_key(self): + def primary_key(self): # type: ignore[override] return self.element.primary_key @property - def foreign_keys(self): + def foreign_keys(self): # type: ignore[override] return self.element.foreign_keys def _copy_internals( @@ -4926,7 +4927,7 @@ class is usable by itself in those cases where behavioral requirements _is_multiparam_column = False @property - def _is_star(self): + def _is_star(self): # type: ignore[override] return self.is_literal and self.name == "*" def __init__( diff --git a/lib/sqlalchemy/sql/lambdas.py b/lib/sqlalchemy/sql/lambdas.py index ce755c1f832..21c69fed5af 100644 --- a/lib/sqlalchemy/sql/lambdas.py +++ b/lib/sqlalchemy/sql/lambdas.py @@ -300,7 +300,9 @@ def _retrieve_tracker_rec(self, fn, apply_propagate_attrs, opts): while lambda_element is not None: rec = lambda_element._rec if rec.bindparam_trackers: - tracker_instrumented_fn = rec.tracker_instrumented_fn + tracker_instrumented_fn = ( + rec.tracker_instrumented_fn # type:ignore [union-attr] # noqa: E501 + ) for tracker in rec.bindparam_trackers: tracker( lambda_element.fn, @@ -602,7 +604,7 @@ def _proxied(self) -> Any: return self._rec_expected_expr @property - def _with_options(self): + def _with_options(self): # type: ignore[override] return self._proxied._with_options @property @@ -610,7 +612,7 @@ def _effective_plugin_target(self): return self._proxied._effective_plugin_target @property - def _execution_options(self): + def _execution_options(self): # type: ignore[override] return self._proxied._execution_options @property @@ -618,27 +620,27 @@ def _all_selected_columns(self): return self._proxied._all_selected_columns @property - def is_select(self): + def is_select(self): # type: ignore[override] return self._proxied.is_select @property - def is_update(self): + def is_update(self): # type: ignore[override] return self._proxied.is_update @property - def is_insert(self): + def is_insert(self): # type: ignore[override] return self._proxied.is_insert @property - def is_text(self): + def is_text(self): # type: ignore[override] return self._proxied.is_text @property - def is_delete(self): + def is_delete(self): # type: ignore[override] return self._proxied.is_delete @property - def is_dml(self): + def is_dml(self): # type: ignore[override] return self._proxied.is_dml def spoil(self) -> NullLambdaStatement: diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 68f31de9386..db0678a7378 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -476,7 +476,7 @@ def _new(cls, *args: Any, **kw: Any) -> Any: table.dispatch.before_parent_attach(table, metadata) metadata._add_table(name, schema, table) try: - table.__init__(name, metadata, *args, _no_init=False, **kw) + table.__init__(name, metadata, *args, _no_init=False, **kw) # type: ignore[misc] # noqa: E501 table.dispatch.after_parent_attach(table, metadata) return table except Exception: @@ -2242,7 +2242,7 @@ def 
_onupdate_description_tuple(self) -> _DefaultDescriptionTuple: return _DefaultDescriptionTuple._from_column_default(self.onupdate) @util.memoized_property - def _gen_static_annotations_cache_key(self) -> bool: # type: ignore + def _gen_static_annotations_cache_key(self) -> bool: """special attribute used by cache key gen, if true, we will use a static cache key for the annotations dictionary, else we will generate a new cache key for annotations each time. diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index 63d7c495683..fc278678b2e 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -1645,14 +1645,14 @@ def _setup_for_values( ) @property - def sort_key_function(self): + def sort_key_function(self): # type: ignore[override] if self._sort_key_function is NO_ARG: return self._db_value_for_elem else: return self._sort_key_function @property - def native(self): + def native(self): # type: ignore[override] return self.native_enum def _db_value_for_elem(self, elem): @@ -2755,7 +2755,7 @@ def _binary_w_type(self, typ, method_name): def python_type(self): return dict - @property # type: ignore # mypy property bug + @property def should_evaluate_none(self): """Alias of :attr:`_types.JSON.none_as_null`""" return not self.none_as_null @@ -3702,7 +3702,7 @@ def python_type(self): return _python_UUID if self.as_uuid else str @property - def native(self): + def native(self): # type: ignore[override] return self.native_uuid def coerce_compared_value(self, op, value): diff --git a/test/typing/plain_files/orm/mapped_covariant.py b/test/typing/plain_files/orm/mapped_covariant.py index 0b65073fde6..9eca6e9593f 100644 --- a/test/typing/plain_files/orm/mapped_covariant.py +++ b/test/typing/plain_files/orm/mapped_covariant.py @@ -21,7 +21,10 @@ class ParentProtocol(Protocol): - name: Mapped[str] + # Read-only for simplicity, mutable protocol members are complicated, + # see https://mypy.readthedocs.io/en/latest/common_issues.html#covariant-subtyping-of-mutable-protocol-members-is-rejected + @property + def name(self) -> Mapped[str]: ... 
class ChildProtocol(Protocol): From 233a16871e9b2cda169956f04941b0f41904ec05 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 9 May 2025 11:50:26 -0400 Subject: [PATCH 539/544] add future mode tests for MappedAsDataclass; more py314b1 regressions for py314b2 all issues should be resolved py314: yes Change-Id: I498a1f623aeb5eb664289236e01e35d8a3dec99f (cherry picked from commit 68f33ac943f07aaa1ade85d86e5113d5bec4ce65) --- lib/sqlalchemy/testing/exclusions.py | 4 +- test/orm/declarative/test_dc_transforms.py | 13 + .../test_dc_transforms_future_anno_sync.py | 2212 +++++++++++++++++ test/typing/test_overloads.py | 10 +- tools/sync_test_files.py | 17 +- tox.ini | 2 +- 6 files changed, 2247 insertions(+), 11 deletions(-) create mode 100644 test/orm/declarative/test_dc_transforms_future_anno_sync.py diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py index 8ff9b644384..d28e9d85e0c 100644 --- a/lib/sqlalchemy/testing/exclusions.py +++ b/lib/sqlalchemy/testing/exclusions.py @@ -392,8 +392,8 @@ def open(): # noqa return skip_if(BooleanPredicate(False, "mark as execute")) -def closed(): - return skip_if(BooleanPredicate(True, "marked as skip")) +def closed(reason="marked as skip"): + return skip_if(BooleanPredicate(True, reason)) def fails(reason=None): diff --git a/test/orm/declarative/test_dc_transforms.py b/test/orm/declarative/test_dc_transforms.py index 52c4dae51a5..53a9366c3a7 100644 --- a/test/orm/declarative/test_dc_transforms.py +++ b/test/orm/declarative/test_dc_transforms.py @@ -159,6 +159,8 @@ class B(dc_decl_base): a3 = A("data") eq_(repr(a3), "some_module.A(id=None, data='data', x=None, bs=[])") + # TODO: get this test to work with future anno mode as well + # anno only: @testing.exclusions.closed("doesn't work for future annotations mode yet") # noqa: E501 def test_generic_class(self): """further test for #8665""" @@ -303,6 +305,8 @@ class B: a3 = A("data") eq_(repr(a3), "some_module.A(id=None, data='data', x=None, bs=[])") + # TODO: get this test to work with future anno mode as well + # anno only: @testing.exclusions.closed("doesn't work for future annotations mode yet") # noqa: E501 @testing.variation("dc_type", ["decorator", "superclass"]) def test_dataclass_fn(self, dc_type: Variation): annotations = {} @@ -377,6 +381,9 @@ def test_combine_args_from_pep593(self, decl_base: Type[DeclarativeBase]): dataclass defaults """ + + # anno only: global intpk, str30, s_str30, user_fk + intpk = Annotated[int, mapped_column(primary_key=True)] str30 = Annotated[ str, mapped_column(String(30), insert_default=func.foo()) @@ -1172,6 +1179,8 @@ class Child(Mixin): c1 = Child() eq_regex(repr(c1), r".*\.Child\(a=10, b=7, c=9\)") + # TODO: get this test to work with future anno mode as well + # anno only: @testing.exclusions.closed("doesn't work for future annotations mode yet") # noqa: E501 def test_abstract_is_dc(self): collected_annotations = {} @@ -1193,6 +1202,8 @@ class Child(Mixin): eq_(collected_annotations, {Mixin: {"b": int}, Child: {"c": int}}) eq_regex(repr(Child(6, 7)), r".*\.Child\(b=6, c=7\)") + # TODO: get this test to work with future anno mode as well + # anno only: @testing.exclusions.closed("doesn't work for future annotations mode yet") # noqa: E501 @testing.variation("check_annotations", [True, False]) def test_abstract_is_dc_w_mapped(self, check_annotations): if check_annotations: @@ -1256,6 +1267,8 @@ class Child(Mixin, Parent): eq_regex(repr(Child(a=5, b=6, c=7)), r".*\.Child\(c=7\)") + # TODO: get this test to work with future anno 
mode as well + # anno only: @testing.exclusions.closed("doesn't work for future annotations mode yet") # noqa: E501 @testing.variation( "dataclass_scope", ["on_base", "on_mixin", "on_base_class", "on_sub_class"], diff --git a/test/orm/declarative/test_dc_transforms_future_anno_sync.py b/test/orm/declarative/test_dc_transforms_future_anno_sync.py new file mode 100644 index 00000000000..8701990526f --- /dev/null +++ b/test/orm/declarative/test_dc_transforms_future_anno_sync.py @@ -0,0 +1,2212 @@ +"""This file is automatically generated from the file +'test/orm/declarative/test_dc_transforms.py' +by the 'tools/sync_test_files.py' script. + +Do not edit manually, any change will be lost. +""" # noqa: E501 + +from __future__ import annotations + +import contextlib +import dataclasses +from dataclasses import InitVar +import functools +import inspect as pyinspect +from itertools import product +from typing import Any +from typing import ClassVar +from typing import Dict +from typing import Generic +from typing import List +from typing import Optional +from typing import Set +from typing import Type +from typing import TypeVar +from unittest import mock + +from typing_extensions import Annotated + +from sqlalchemy import BigInteger +from sqlalchemy import Column +from sqlalchemy import exc +from sqlalchemy import ForeignKey +from sqlalchemy import func +from sqlalchemy import inspect +from sqlalchemy import Integer +from sqlalchemy import JSON +from sqlalchemy import select +from sqlalchemy import String +from sqlalchemy import Table +from sqlalchemy import testing +from sqlalchemy.ext.associationproxy import association_proxy +from sqlalchemy.orm import column_property +from sqlalchemy.orm import composite +from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import declared_attr +from sqlalchemy.orm import deferred +from sqlalchemy.orm import interfaces +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column +from sqlalchemy.orm import MappedAsDataclass +from sqlalchemy.orm import MappedColumn +from sqlalchemy.orm import query_expression +from sqlalchemy.orm import registry +from sqlalchemy.orm import registry as _RegistryType +from sqlalchemy.orm import relationship +from sqlalchemy.orm import Session +from sqlalchemy.orm import synonym +from sqlalchemy.sql.base import _NoArg +from sqlalchemy.testing import AssertsCompiledSQL +from sqlalchemy.testing import eq_ +from sqlalchemy.testing import eq_regex +from sqlalchemy.testing import expect_deprecated +from sqlalchemy.testing import expect_raises +from sqlalchemy.testing import expect_raises_message +from sqlalchemy.testing import fixtures +from sqlalchemy.testing import is_ +from sqlalchemy.testing import is_false +from sqlalchemy.testing import is_true +from sqlalchemy.testing import ne_ +from sqlalchemy.testing import Variation +from sqlalchemy.util import compat + + +def _dataclass_mixin_warning(clsname, attrnames): + return testing.expect_deprecated( + rf"When transforming .* to a dataclass, attribute\(s\) " + rf"{attrnames} originates from superclass .*{clsname}" + ) + + +class DCTransformsTest(AssertsCompiledSQL, fixtures.TestBase): + @testing.fixture(params=["(MAD, DB)", "(DB, MAD)"]) + def dc_decl_base(self, request, metadata): + _md = metadata + + if request.param == "(MAD, DB)": + + class Base(MappedAsDataclass, DeclarativeBase): + _mad_before = True + metadata = _md + type_annotation_map = { + str: String().with_variant(String(50), "mysql", "mariadb") + } + + else: + # test #8665 by reversing the 
order of the classes + class Base(DeclarativeBase, MappedAsDataclass): + _mad_before = False + metadata = _md + type_annotation_map = { + str: String().with_variant(String(50), "mysql", "mariadb") + } + + yield Base + Base.registry.dispose() + + def test_basic_constructor_repr_base_cls( + self, dc_decl_base: Type[MappedAsDataclass] + ): + class A(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] + + x: Mapped[Optional[int]] = mapped_column(default=None) + + bs: Mapped[List["B"]] = relationship( # noqa: F821 + default_factory=list + ) + + class B(dc_decl_base): + __tablename__ = "b" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] + a_id: Mapped[Optional[int]] = mapped_column( + ForeignKey("a.id"), init=False + ) + x: Mapped[Optional[int]] = mapped_column(default=None) + + A.__qualname__ = "some_module.A" + B.__qualname__ = "some_module.B" + + eq_( + pyinspect.getfullargspec(A.__init__), + pyinspect.FullArgSpec( + args=["self", "data", "x", "bs"], + varargs=None, + varkw=None, + defaults=(None, mock.ANY), + kwonlyargs=[], + kwonlydefaults=None, + annotations={}, + ), + ) + eq_( + pyinspect.getfullargspec(B.__init__), + pyinspect.FullArgSpec( + args=["self", "data", "x"], + varargs=None, + varkw=None, + defaults=(None,), + kwonlyargs=[], + kwonlydefaults=None, + annotations={}, + ), + ) + + a2 = A("10", x=5, bs=[B("data1"), B("data2", x=12)]) + eq_( + repr(a2), + "some_module.A(id=None, data='10', x=5, " + "bs=[some_module.B(id=None, data='data1', a_id=None, x=None), " + "some_module.B(id=None, data='data2', a_id=None, x=12)])", + ) + + a3 = A("data") + eq_(repr(a3), "some_module.A(id=None, data='data', x=None, bs=[])") + + # TODO: get this test to work with future anno mode as well + @testing.exclusions.closed( + "doesn't work for future annotations mode yet" + ) # noqa: E501 + def test_generic_class(self): + """further test for #8665""" + + T_Value = TypeVar("T_Value") + + class SomeBaseClass(DeclarativeBase): + pass + + class GenericSetting( + MappedAsDataclass, SomeBaseClass, Generic[T_Value] + ): + __tablename__ = "xx" + + id: Mapped[int] = mapped_column( + Integer, primary_key=True, init=False + ) + + key: Mapped[str] = mapped_column(String, init=True) + + value: Mapped[T_Value] = mapped_column( + JSON, init=True, default_factory=lambda: {} + ) + + new_instance: GenericSetting[Dict[str, Any]] = ( # noqa: F841 + GenericSetting(key="x", value={"foo": "bar"}) + ) + + def test_no_anno_doesnt_go_into_dc( + self, dc_decl_base: Type[MappedAsDataclass] + ): + class User(dc_decl_base): + __tablename__: ClassVar[Optional[str]] = "user" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + username: Mapped[str] + password: Mapped[str] + addresses: Mapped[List["Address"]] = relationship( # noqa: F821 + default_factory=list + ) + + class Address(dc_decl_base): + __tablename__: ClassVar[Optional[str]] = "address" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + + # should not be in the dataclass constructor + user_id = mapped_column(ForeignKey(User.id)) + + email_address: Mapped[str] + + a1 = Address("email@address") + eq_(a1.email_address, "email@address") + + def test_warn_on_non_dc_mixin(self): + class _BaseMixin: + create_user: Mapped[int] = mapped_column() + update_user: Mapped[Optional[int]] = mapped_column( + default=None, init=False + ) + + class Base(DeclarativeBase, MappedAsDataclass, _BaseMixin): + pass + + class SubMixin: + foo: Mapped[str] + bar: 
Mapped[str] = mapped_column() + + with _dataclass_mixin_warning( + "_BaseMixin", "'create_user', 'update_user'" + ), _dataclass_mixin_warning("SubMixin", "'foo', 'bar'"): + + class User(SubMixin, Base): + __tablename__ = "sys_user" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + username: Mapped[str] = mapped_column(String) + password: Mapped[str] = mapped_column(String) + + def test_basic_constructor_repr_cls_decorator( + self, registry: _RegistryType + ): + @registry.mapped_as_dataclass() + class A: + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] + + x: Mapped[Optional[int]] = mapped_column(default=None) + + bs: Mapped[List["B"]] = relationship( # noqa: F821 + default_factory=list + ) + + @registry.mapped_as_dataclass() + class B: + __tablename__ = "b" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + a_id = mapped_column(ForeignKey("a.id"), init=False) + data: Mapped[str] + x: Mapped[Optional[int]] = mapped_column(default=None) + + A.__qualname__ = "some_module.A" + B.__qualname__ = "some_module.B" + + eq_( + pyinspect.getfullargspec(A.__init__), + pyinspect.FullArgSpec( + args=["self", "data", "x", "bs"], + varargs=None, + varkw=None, + defaults=(None, mock.ANY), + kwonlyargs=[], + kwonlydefaults=None, + annotations={}, + ), + ) + eq_( + pyinspect.getfullargspec(B.__init__), + pyinspect.FullArgSpec( + args=["self", "data", "x"], + varargs=None, + varkw=None, + defaults=(None,), + kwonlyargs=[], + kwonlydefaults=None, + annotations={}, + ), + ) + + a2 = A("10", x=5, bs=[B("data1"), B("data2", x=12)]) + + # note a_id isn't included because it wasn't annotated + eq_( + repr(a2), + "some_module.A(id=None, data='10', x=5, " + "bs=[some_module.B(id=None, data='data1', x=None), " + "some_module.B(id=None, data='data2', x=12)])", + ) + + a3 = A("data") + eq_(repr(a3), "some_module.A(id=None, data='data', x=None, bs=[])") + + # TODO: get this test to work with future anno mode as well + @testing.exclusions.closed( + "doesn't work for future annotations mode yet" + ) # noqa: E501 + @testing.variation("dc_type", ["decorator", "superclass"]) + def test_dataclass_fn(self, dc_type: Variation): + annotations = {} + + def dc_callable(kls, **kw) -> Type[Any]: + annotations[kls] = kls.__annotations__ + return dataclasses.dataclass(kls, **kw) # type: ignore + + if dc_type.decorator: + reg = registry() + + @reg.mapped_as_dataclass(dataclass_callable=dc_callable) + class MappedClass: + __tablename__ = "mapped_class" + + id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[str] + + eq_(annotations, {MappedClass: {"id": int, "name": str}}) + + elif dc_type.superclass: + + class Base(DeclarativeBase): + pass + + class Mixin(MappedAsDataclass, dataclass_callable=dc_callable): + id: Mapped[int] = mapped_column(primary_key=True) + + class MappedClass(Mixin, Base): + __tablename__ = "mapped_class" + name: Mapped[str] + + eq_( + annotations, + {Mixin: {"id": int}, MappedClass: {"id": int, "name": str}}, + ) + else: + dc_type.fail() + + def test_default_fn(self, dc_decl_base: Type[MappedAsDataclass]): + class A(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] = mapped_column(default="d1") + data2: Mapped[str] = mapped_column(default_factory=lambda: "d2") + + a1 = A() + eq_(a1.data, "d1") + eq_(a1.data2, "d2") + + def test_default_factory_vs_collection_class( + self, dc_decl_base: Type[MappedAsDataclass] + ): + # this is currently the 
error raised by dataclasses. We can instead + # do this validation ourselves, but overall I don't know that we + # can hit every validation and rule that's in dataclasses + with expect_raises_message( + ValueError, "cannot specify both default and default_factory" + ): + + class A(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] = mapped_column( + default="d1", default_factory=lambda: "d2" + ) + + def test_combine_args_from_pep593(self, decl_base: Type[DeclarativeBase]): + """test that we can set up column-level defaults separate from + dataclass defaults + + """ + + global intpk, str30, s_str30, user_fk + + intpk = Annotated[int, mapped_column(primary_key=True)] + str30 = Annotated[ + str, mapped_column(String(30), insert_default=func.foo()) + ] + s_str30 = Annotated[ + str, + mapped_column(String(30), server_default="some server default"), + ] + user_fk = Annotated[int, mapped_column(ForeignKey("user_account.id"))] + + class User(MappedAsDataclass, decl_base): + __tablename__ = "user_account" + + # we need this case for dataclasses that can't derive things + # from Annotated yet at the typing level + id: Mapped[intpk] = mapped_column(init=False) + name_none: Mapped[Optional[str30]] = mapped_column(default=None) + name: Mapped[str30] = mapped_column(default="hi") + name2: Mapped[s_str30] = mapped_column(default="there") + addresses: Mapped[List["Address"]] = relationship( # noqa: F821 + back_populates="user", default_factory=list + ) + + class Address(MappedAsDataclass, decl_base): + __tablename__ = "address" + + id: Mapped[intpk] = mapped_column(init=False) + email_address: Mapped[str] + user_id: Mapped[user_fk] = mapped_column(init=False) + user: Mapped[Optional["User"]] = relationship( + back_populates="addresses", default=None + ) + + is_true(User.__table__.c.id.primary_key) + is_true(User.__table__.c.name_none.default.arg.compare(func.foo())) + is_true(User.__table__.c.name.default.arg.compare(func.foo())) + eq_(User.__table__.c.name2.server_default.arg, "some server default") + + is_true(Address.__table__.c.user_id.references(User.__table__.c.id)) + u1 = User() + eq_(u1.name_none, None) + eq_(u1.name, "hi") + eq_(u1.name2, "there") + + def test_inheritance(self, dc_decl_base: Type[MappedAsDataclass]): + class Person(dc_decl_base): + __tablename__ = "person" + person_id: Mapped[int] = mapped_column( + primary_key=True, init=False + ) + name: Mapped[str] + type: Mapped[str] = mapped_column(init=False) + + __mapper_args__ = {"polymorphic_on": type} + + class Engineer(Person): + __tablename__ = "engineer" + + person_id: Mapped[int] = mapped_column( + ForeignKey("person.person_id"), primary_key=True, init=False + ) + + status: Mapped[str] = mapped_column(String(30)) + engineer_name: Mapped[str] + primary_language: Mapped[str] + __mapper_args__ = {"polymorphic_identity": "engineer"} + + e1 = Engineer("nm", "st", "en", "pl") + eq_(e1.name, "nm") + eq_(e1.status, "st") + eq_(e1.engineer_name, "en") + eq_(e1.primary_language, "pl") + + def test_non_mapped_fields_wo_mapped_or_dc( + self, dc_decl_base: Type[MappedAsDataclass] + ): + class A(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: str + ctrl_one: str = dataclasses.field() + some_field: int = dataclasses.field(default=5) + + a1 = A("data", "ctrl_one", 5) + eq_( + dataclasses.asdict(a1), + { + "ctrl_one": "ctrl_one", + "data": "data", + "id": None, + "some_field": 5, + }, + ) + + def 
test_non_mapped_fields_wo_mapped_or_dc_w_inherits( + self, dc_decl_base: Type[MappedAsDataclass] + ): + class A(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: str + ctrl_one: str = dataclasses.field() + some_field: int = dataclasses.field(default=5) + + class B(A): + b_data: Mapped[str] = mapped_column(default="bd") + + # ensure we didnt break dataclasses contract of removing Field + # issue #8880 + eq_(A.__dict__["some_field"], 5) + assert "ctrl_one" not in A.__dict__ + + b1 = B(data="data", ctrl_one="ctrl_one", some_field=5, b_data="x") + eq_( + dataclasses.asdict(b1), + { + "ctrl_one": "ctrl_one", + "data": "data", + "id": None, + "some_field": 5, + "b_data": "x", + }, + ) + + def test_init_var(self, dc_decl_base: Type[MappedAsDataclass]): + class User(dc_decl_base): + __tablename__ = "user_account" + + id: Mapped[int] = mapped_column(init=False, primary_key=True) + name: Mapped[str] + + password: InitVar[str] + repeat_password: InitVar[str] + + password_hash: Mapped[str] = mapped_column( + init=False, nullable=False + ) + + def __post_init__(self, password: str, repeat_password: str): + if password != repeat_password: + raise ValueError("passwords do not match") + + self.password_hash = f"some hash... {password}" + + u1 = User(name="u1", password="p1", repeat_password="p1") + eq_(u1.password_hash, "some hash... p1") + self.assert_compile( + select(User), + "SELECT user_account.id, user_account.name, " + "user_account.password_hash FROM user_account", + ) + + def test_integrated_dc(self, dc_decl_base: Type[MappedAsDataclass]): + """We will be telling users "this is a dataclass that is also + mapped". Therefore, they will want *any* kind of attribute to do what + it would normally do in a dataclass, including normal types without any + field and explicit use of dataclasses.field(). additionally, we'd like + ``Mapped`` to mean "persist this attribute". So the absence of + ``Mapped`` should also mean something too. 
+ + """ + + class A(dc_decl_base): + __tablename__ = "a" + + ctrl_one: str = dataclasses.field() + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] + some_field: int = dataclasses.field(default=5) + + some_none_field: Optional[str] = dataclasses.field(default=None) + + some_other_int_field: int = 10 + + # some field is part of the constructor + a1 = A("ctrlone", "datafield") + eq_( + dataclasses.asdict(a1), + { + "ctrl_one": "ctrlone", + "data": "datafield", + "id": None, + "some_field": 5, + "some_none_field": None, + "some_other_int_field": 10, + }, + ) + + a2 = A( + "ctrlone", + "datafield", + some_field=7, + some_other_int_field=12, + some_none_field="x", + ) + eq_( + dataclasses.asdict(a2), + { + "ctrl_one": "ctrlone", + "data": "datafield", + "id": None, + "some_field": 7, + "some_none_field": "x", + "some_other_int_field": 12, + }, + ) + + # only Mapped[] is mapped + self.assert_compile(select(A), "SELECT a.id, a.data FROM a") + eq_( + pyinspect.getfullargspec(A.__init__), + pyinspect.FullArgSpec( + args=[ + "self", + "ctrl_one", + "data", + "some_field", + "some_none_field", + "some_other_int_field", + ], + varargs=None, + varkw=None, + defaults=(5, None, 10), + kwonlyargs=[], + kwonlydefaults=None, + annotations={}, + ), + ) + + def test_dc_on_top_of_non_dc(self, decl_base: Type[DeclarativeBase]): + class Person(decl_base): + __tablename__ = "person" + person_id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[str] + type: Mapped[str] = mapped_column() + + __mapper_args__ = {"polymorphic_on": type} + + class Engineer(MappedAsDataclass, Person): + __tablename__ = "engineer" + + person_id: Mapped[int] = mapped_column( + ForeignKey("person.person_id"), primary_key=True, init=False + ) + + status: Mapped[str] = mapped_column(String(30)) + engineer_name: Mapped[str] + primary_language: Mapped[str] + __mapper_args__ = {"polymorphic_identity": "engineer"} + + e1 = Engineer("st", "en", "pl") + eq_(e1.status, "st") + eq_(e1.engineer_name, "en") + eq_(e1.primary_language, "pl") + + eq_( + pyinspect.getfullargspec(Person.__init__), + # the boring **kw __init__ + pyinspect.FullArgSpec( + args=["self"], + varargs=None, + varkw="kwargs", + defaults=None, + kwonlyargs=[], + kwonlydefaults=None, + annotations={}, + ), + ) + + eq_( + pyinspect.getfullargspec(Engineer.__init__), + # the exciting dataclasses __init__ + pyinspect.FullArgSpec( + args=["self", "status", "engineer_name", "primary_language"], + varargs=None, + varkw=None, + defaults=None, + kwonlyargs=[], + kwonlydefaults=None, + annotations={}, + ), + ) + + def test_compare(self, dc_decl_base: Type[MappedAsDataclass]): + class A(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, compare=False) + data: Mapped[str] + + a1 = A(id=0, data="foo") + a2 = A(id=1, data="foo") + eq_(a1, a2) + + @testing.requires.python310 + def test_kw_only_attribute(self, dc_decl_base: Type[MappedAsDataclass]): + class A(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True) + data: Mapped[str] = mapped_column(kw_only=True) + + fas = pyinspect.getfullargspec(A.__init__) + eq_(fas.args, ["self", "id"]) + eq_(fas.kwonlyargs, ["data"]) + + @testing.combinations(True, False, argnames="unsafe_hash") + def test_hash_attribute( + self, dc_decl_base: Type[MappedAsDataclass], unsafe_hash + ): + class A(dc_decl_base, unsafe_hash=unsafe_hash): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, hash=False) + data: Mapped[str] 
= mapped_column(hash=True) + + a = A(id=1, data="x") + if not unsafe_hash or not dc_decl_base._mad_before: + with expect_raises(TypeError): + a_hash1 = hash(a) + else: + a_hash1 = hash(a) + a.id = 41 + eq_(hash(a), a_hash1) + a.data = "y" + ne_(hash(a), a_hash1) + + @testing.requires.python310 + def test_kw_only_dataclass_constant( + self, dc_decl_base: Type[MappedAsDataclass] + ): + class Mixin(MappedAsDataclass): + a: Mapped[int] = mapped_column(primary_key=True) + b: Mapped[int] = mapped_column(default=1) + + class Child(Mixin, dc_decl_base): + __tablename__ = "child" + + _: dataclasses.KW_ONLY + c: Mapped[int] + + c1 = Child(1, c=5) + eq_(c1, Child(a=1, b=1, c=5)) + + def test_mapped_column_overrides(self, dc_decl_base): + """test #8688""" + + class TriggeringMixin(MappedAsDataclass): + mixin_value: Mapped[int] = mapped_column(BigInteger) + + class NonTriggeringMixin(MappedAsDataclass): + mixin_value: Mapped[int] + + class Foo(dc_decl_base, TriggeringMixin): + __tablename__ = "foo" + id: Mapped[int] = mapped_column(primary_key=True, init=False) + foo_value: Mapped[float] = mapped_column(default=78) + + class Bar(dc_decl_base, NonTriggeringMixin): + __tablename__ = "bar" + id: Mapped[int] = mapped_column(primary_key=True, init=False) + bar_value: Mapped[float] = mapped_column(default=78) + + f1 = Foo(mixin_value=5) + eq_(f1.foo_value, 78) + + b1 = Bar(mixin_value=5) + eq_(b1.bar_value, 78) + + def test_mixing_MappedAsDataclass_with_decorator_raises(self, registry): + """test #9211""" + + class Mixin(MappedAsDataclass): + id: Mapped[int] = mapped_column(primary_key=True, init=False) + + with expect_raises_message( + exc.InvalidRequestError, + "Class .*Foo.* is already a dataclass; ensure that " + "base classes / decorator styles of establishing dataclasses " + "are not being mixed. ", + ): + + @registry.mapped_as_dataclass + class Foo(Mixin): + bar_value: Mapped[float] = mapped_column(default=78) + + def test_MappedAsDataclass_table_provided(self, registry): + """test #11973""" + + with expect_raises_message( + exc.InvalidRequestError, + "Class .*Foo.* already defines a '__table__'. 
" + "ORM Annotated Dataclasses do not support a pre-existing " + "'__table__' element", + ): + + @registry.mapped_as_dataclass + class Foo: + __table__ = Table("foo", registry.metadata) + foo: Mapped[float] + + def test_dataclass_exception_wrapped(self, dc_decl_base): + with expect_raises_message( + exc.InvalidRequestError, + r"Python dataclasses error encountered when creating dataclass " + r"for \'Foo\': .*Please refer to Python dataclasses.*", + ) as ec: + + class Foo(dc_decl_base): + id: Mapped[int] = mapped_column(primary_key=True, init=False) + foo_value: Mapped[float] = mapped_column(default=78) + foo_no_value: Mapped[float] = mapped_column() + __tablename__ = "foo" + + is_true(isinstance(ec.error.__cause__, TypeError)) + + def test_dataclass_default(self, dc_decl_base): + """test for #9879""" + + def c10(): + return 10 + + def c20(): + return 20 + + class A(dc_decl_base): + __tablename__ = "a" + id: Mapped[int] = mapped_column(primary_key=True) + def_init: Mapped[int] = mapped_column(default=42) + call_init: Mapped[int] = mapped_column(default_factory=c10) + def_no_init: Mapped[int] = mapped_column(default=13, init=False) + call_no_init: Mapped[int] = mapped_column( + default_factory=c20, init=False + ) + + a = A(id=100) + eq_(a.def_init, 42) + eq_(a.call_init, 10) + eq_(a.def_no_init, 13) + eq_(a.call_no_init, 20) + + fields = {f.name: f for f in dataclasses.fields(A)} + eq_(fields["def_init"].default, 42) + eq_(fields["call_init"].default_factory, c10) + eq_(fields["def_no_init"].default, dataclasses.MISSING) + ne_(fields["def_no_init"].default_factory, dataclasses.MISSING) + eq_(fields["call_no_init"].default_factory, c20) + + def test_dataclass_default_callable(self, dc_decl_base): + """test for #9936""" + + def cd(): + return 42 + + with expect_deprecated( + "Callable object passed to the ``default`` parameter for " + "attribute 'value' in a ORM-mapped Dataclasses context is " + "ambiguous, and this use will raise an error in a future " + "release. If this callable is intended to produce Core level ", + "Callable object passed to the ``default`` parameter for " + "attribute 'no_init' in a ORM-mapped Dataclasses context is " + "ambiguous, and this use will raise an error in a future " + "release. 
If this callable is intended to produce Core level ", + ): + + class A(dc_decl_base): + __tablename__ = "a" + id: Mapped[int] = mapped_column(primary_key=True) + value: Mapped[int] = mapped_column(default=cd) + no_init: Mapped[int] = mapped_column(default=cd, init=False) + + a = A(id=100) + is_false("no_init" in a.__dict__) + eq_(a.value, cd) + eq_(a.no_init, None) + + fields = {f.name: f for f in dataclasses.fields(A)} + eq_(fields["value"].default, cd) + eq_(fields["no_init"].default, cd) + + +class RelationshipDefaultFactoryTest(fixtures.TestBase): + def test_list(self, dc_decl_base: Type[MappedAsDataclass]): + class A(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + + bs: Mapped[List["B"]] = relationship( # noqa: F821 + default_factory=lambda: [B(data="hi")] + ) + + class B(dc_decl_base): + __tablename__ = "b" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + a_id = mapped_column(ForeignKey("a.id"), init=False) + data: Mapped[str] + + a1 = A() + eq_(a1.bs[0].data, "hi") + + def test_set(self, dc_decl_base: Type[MappedAsDataclass]): + class A(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + + bs: Mapped[Set["B"]] = relationship( # noqa: F821 + default_factory=lambda: {B(data="hi")} + ) + + class B(dc_decl_base, unsafe_hash=True): + __tablename__ = "b" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + a_id = mapped_column(ForeignKey("a.id"), init=False) + data: Mapped[str] + + a1 = A() + eq_(a1.bs.pop().data, "hi") + + def test_oh_no_mismatch(self, dc_decl_base: Type[MappedAsDataclass]): + class A(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + + bs: Mapped[Set["B"]] = relationship( # noqa: F821 + default_factory=lambda: [B(data="hi")] + ) + + class B(dc_decl_base, unsafe_hash=True): + __tablename__ = "b" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + a_id = mapped_column(ForeignKey("a.id"), init=False) + data: Mapped[str] + + # old school collection mismatch error FTW + with expect_raises_message( + TypeError, "Incompatible collection type: list is not set-like" + ): + A() + + def test_one_to_one_example(self, dc_decl_base: Type[MappedAsDataclass]): + """test example in the relationship docs will derive uselist=False + correctly""" + + class Parent(dc_decl_base): + __tablename__ = "parent" + + id: Mapped[int] = mapped_column(init=False, primary_key=True) + child: Mapped["Child"] = relationship( # noqa: F821 + back_populates="parent", default=None + ) + + class Child(dc_decl_base): + __tablename__ = "child" + + id: Mapped[int] = mapped_column(init=False, primary_key=True) + parent_id: Mapped[int] = mapped_column( + ForeignKey("parent.id"), init=False + ) + parent: Mapped["Parent"] = relationship( + back_populates="child", default=None + ) + + c1 = Child() + p1 = Parent(child=c1) + is_(p1.child, c1) + is_(c1.parent, p1) + + p2 = Parent() + is_(p2.child, None) + + def test_replace_operation_works_w_history_etc( + self, registry: _RegistryType + ): + @registry.mapped_as_dataclass + class A: + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] + + x: Mapped[Optional[int]] = mapped_column(default=None) + + bs: Mapped[List["B"]] = relationship( # noqa: F821 + default_factory=list + ) + + @registry.mapped_as_dataclass + class B: + __tablename__ = "b" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + 
a_id = mapped_column(ForeignKey("a.id"), init=False) + data: Mapped[str] + x: Mapped[Optional[int]] = mapped_column(default=None) + + registry.metadata.create_all(testing.db) + + with Session(testing.db) as sess: + a1 = A("data", 10, [B("b1"), B("b2", x=5), B("b3")]) + sess.add(a1) + sess.commit() + + a2 = dataclasses.replace(a1, x=12, bs=[B("b4")]) + + assert a1 in sess + assert not sess.is_modified(a1, include_collections=True) + assert a2 not in sess + eq_(inspect(a2).attrs.x.history, ([12], (), ())) + sess.add(a2) + sess.commit() + + eq_(sess.scalars(select(A.x).order_by(A.id)).all(), [10, 12]) + eq_( + sess.scalars(select(B.data).order_by(B.id)).all(), + ["b1", "b2", "b3", "b4"], + ) + + def test_post_init(self, registry: _RegistryType): + @registry.mapped_as_dataclass + class A: + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] = mapped_column(init=False) + + def __post_init__(self): + self.data = "some data" + + a1 = A() + eq_(a1.data, "some data") + + def test_no_field_args_w_new_style(self, registry: _RegistryType): + with expect_raises_message( + exc.InvalidRequestError, + "SQLAlchemy mapped dataclasses can't consume mapping information", + ): + + @registry.mapped_as_dataclass() + class A: + __tablename__ = "a" + __sa_dataclass_metadata_key__ = "sa" + + account_id: int = dataclasses.field( + init=False, + metadata={"sa": Column(Integer, primary_key=True)}, + ) + + def test_no_field_args_w_new_style_two(self, registry: _RegistryType): + @dataclasses.dataclass + class Base: + pass + + with expect_raises_message( + exc.InvalidRequestError, + "SQLAlchemy mapped dataclasses can't consume mapping information", + ): + + @registry.mapped_as_dataclass() + class A(Base): + __tablename__ = "a" + __sa_dataclass_metadata_key__ = "sa" + + account_id: int = dataclasses.field( + init=False, + metadata={"sa": Column(Integer, primary_key=True)}, + ) + + +class DataclassesForNonMappedClassesTest(fixtures.TestBase): + """test for cases added in #9179""" + + def test_base_is_dc(self): + class Parent(MappedAsDataclass, DeclarativeBase): + a: int + + class Child(Parent): + __tablename__ = "child" + b: Mapped[int] = mapped_column(primary_key=True) + + eq_regex(repr(Child(5, 6)), r".*\.Child\(a=5, b=6\)") + + def test_base_is_dc_plus_options(self): + class Parent(MappedAsDataclass, DeclarativeBase, unsafe_hash=True): + a: int + + class Child(Parent, repr=False): + __tablename__ = "child" + b: Mapped[int] = mapped_column(primary_key=True) + + c1 = Child(5, 6) + eq_(hash(c1), hash(Child(5, 6))) + + # still reprs, because base has a repr, but b not included + eq_regex(repr(c1), r".*\.Child\(a=5\)") + + def test_base_is_dc_init_var(self): + class Parent(MappedAsDataclass, DeclarativeBase): + a: InitVar[int] + + class Child(Parent): + __tablename__ = "child" + b: Mapped[int] = mapped_column(primary_key=True) + + c1 = Child(a=5, b=6) + eq_regex(repr(c1), r".*\.Child\(b=6\)") + + def test_base_is_dc_field(self): + class Parent(MappedAsDataclass, DeclarativeBase): + a: int = dataclasses.field(default=10) + + class Child(Parent): + __tablename__ = "child" + b: Mapped[int] = mapped_column(primary_key=True, default=7) + + c1 = Child(a=5, b=6) + eq_regex(repr(c1), r".*\.Child\(a=5, b=6\)") + + c1 = Child(b=6) + eq_regex(repr(c1), r".*\.Child\(a=10, b=6\)") + + c1 = Child() + eq_regex(repr(c1), r".*\.Child\(a=10, b=7\)") + + def test_abstract_and_base_is_dc(self): + class Parent(MappedAsDataclass, DeclarativeBase): + a: int + + class Mixin(Parent): + 
__abstract__ = True + b: int + + class Child(Mixin): + __tablename__ = "child" + c: Mapped[int] = mapped_column(primary_key=True) + + eq_regex(repr(Child(5, 6, 7)), r".*\.Child\(a=5, b=6, c=7\)") + + def test_abstract_and_base_is_dc_plus_options(self): + class Parent(MappedAsDataclass, DeclarativeBase): + a: int + + class Mixin(Parent, unsafe_hash=True): + __abstract__ = True + b: int + + class Child(Mixin, repr=False): + __tablename__ = "child" + c: Mapped[int] = mapped_column(primary_key=True) + + eq_(hash(Child(5, 6, 7)), hash(Child(5, 6, 7))) + + eq_regex(repr(Child(5, 6, 7)), r".*\.Child\(a=5, b=6\)") + + def test_abstract_and_base_is_dc_init_var(self): + class Parent(MappedAsDataclass, DeclarativeBase): + a: InitVar[int] + + class Mixin(Parent): + __abstract__ = True + b: InitVar[int] + + class Child(Mixin): + __tablename__ = "child" + c: Mapped[int] = mapped_column(primary_key=True) + + c1 = Child(a=5, b=6, c=7) + eq_regex(repr(c1), r".*\.Child\(c=7\)") + + def test_abstract_and_base_is_dc_field(self): + class Parent(MappedAsDataclass, DeclarativeBase): + a: int = dataclasses.field(default=10) + + class Mixin(Parent): + __abstract__ = True + b: int = dataclasses.field(default=7) + + class Child(Mixin): + __tablename__ = "child" + c: Mapped[int] = mapped_column(primary_key=True, default=9) + + c1 = Child(b=6, c=7) + eq_regex(repr(c1), r".*\.Child\(a=10, b=6, c=7\)") + + c1 = Child() + eq_regex(repr(c1), r".*\.Child\(a=10, b=7, c=9\)") + + # TODO: get this test to work with future anno mode as well + @testing.exclusions.closed( + "doesn't work for future annotations mode yet" + ) # noqa: E501 + def test_abstract_is_dc(self): + collected_annotations = {} + + def check_args(cls, **kw): + collected_annotations[cls] = cls.__annotations__ + return dataclasses.dataclass(cls, **kw) + + class Parent(DeclarativeBase): + a: int + + class Mixin(MappedAsDataclass, Parent, dataclass_callable=check_args): + __abstract__ = True + b: int + + class Child(Mixin): + __tablename__ = "child" + c: Mapped[int] = mapped_column(primary_key=True) + + eq_(collected_annotations, {Mixin: {"b": int}, Child: {"c": int}}) + eq_regex(repr(Child(6, 7)), r".*\.Child\(b=6, c=7\)") + + # TODO: get this test to work with future anno mode as well + @testing.exclusions.closed( + "doesn't work for future annotations mode yet" + ) # noqa: E501 + @testing.variation("check_annotations", [True, False]) + def test_abstract_is_dc_w_mapped(self, check_annotations): + if check_annotations: + collected_annotations = {} + + def check_args(cls, **kw): + collected_annotations[cls] = cls.__annotations__ + return dataclasses.dataclass(cls, **kw) + + class_kw = {"dataclass_callable": check_args} + else: + class_kw = {} + + class Parent(DeclarativeBase): + a: int + + class Mixin(MappedAsDataclass, Parent, **class_kw): + __abstract__ = True + b: Mapped[int] = mapped_column() + + class Child(Mixin): + __tablename__ = "child" + c: Mapped[int] = mapped_column(primary_key=True) + + if check_annotations: + # note: current dataclasses process adds Field() object to Child + # based on attributes which include those from Mixin. This means + # the annotations of Child are also augmented while we do + # dataclasses collection. 
+ eq_( + collected_annotations, + {Mixin: {"b": int}, Child: {"b": int, "c": int}}, + ) + eq_regex(repr(Child(6, 7)), r".*\.Child\(b=6, c=7\)") + + def test_mixin_and_base_is_dc(self): + class Parent(MappedAsDataclass, DeclarativeBase): + a: int + + @dataclasses.dataclass + class Mixin: + b: int + + class Child(Mixin, Parent): + __tablename__ = "child" + c: Mapped[int] = mapped_column(primary_key=True) + + eq_regex(repr(Child(5, 6, 7)), r".*\.Child\(a=5, b=6, c=7\)") + + def test_mixin_and_base_is_dc_init_var(self): + class Parent(MappedAsDataclass, DeclarativeBase): + a: InitVar[int] + + @dataclasses.dataclass + class Mixin: + b: InitVar[int] + + class Child(Mixin, Parent): + __tablename__ = "child" + c: Mapped[int] = mapped_column(primary_key=True) + + eq_regex(repr(Child(a=5, b=6, c=7)), r".*\.Child\(c=7\)") + + # TODO: get this test to work with future anno mode as well + @testing.exclusions.closed( + "doesn't work for future annotations mode yet" + ) # noqa: E501 + @testing.variation( + "dataclass_scope", + ["on_base", "on_mixin", "on_base_class", "on_sub_class"], + ) + @testing.variation( + "test_alternative_callable", + [True, False], + ) + def test_mixin_w_inheritance( + self, dataclass_scope, test_alternative_callable + ): + """test #9226""" + + expected_annotations = {} + + if test_alternative_callable: + collected_annotations = {} + + def check_args(cls, **kw): + collected_annotations[cls] = getattr( + cls, "__annotations__", {} + ) + return dataclasses.dataclass(cls, **kw) + + klass_kw = {"dataclass_callable": check_args} + else: + klass_kw = {} + + if dataclass_scope.on_base: + + class Base(MappedAsDataclass, DeclarativeBase, **klass_kw): + pass + + expected_annotations[Base] = {} + else: + + class Base(DeclarativeBase): + pass + + if dataclass_scope.on_mixin: + + class Mixin(MappedAsDataclass, **klass_kw): + @declared_attr.directive + @classmethod + def __tablename__(cls) -> str: + return cls.__name__.lower() + + @declared_attr.directive + @classmethod + def __mapper_args__(cls) -> Dict[str, Any]: + return { + "polymorphic_identity": cls.__name__, + "polymorphic_on": "polymorphic_type", + } + + @declared_attr + @classmethod + def polymorphic_type(cls) -> Mapped[str]: + return mapped_column( + String, + insert_default=cls.__name__, + init=False, + ) + + expected_annotations[Mixin] = {} + + non_dc_mixin = contextlib.nullcontext + + else: + + class Mixin: + @declared_attr.directive + @classmethod + def __tablename__(cls) -> str: + return cls.__name__.lower() + + @declared_attr.directive + @classmethod + def __mapper_args__(cls) -> Dict[str, Any]: + return { + "polymorphic_identity": cls.__name__, + "polymorphic_on": "polymorphic_type", + } + + if dataclass_scope.on_base or dataclass_scope.on_base_class: + + @declared_attr + @classmethod + def polymorphic_type(cls) -> Mapped[str]: + return mapped_column( + String, + insert_default=cls.__name__, + init=False, + ) + + else: + + @declared_attr + @classmethod + def polymorphic_type(cls) -> Mapped[str]: + return mapped_column( + String, + insert_default=cls.__name__, + ) + + non_dc_mixin = functools.partial( + _dataclass_mixin_warning, "Mixin", "'polymorphic_type'" + ) + + if dataclass_scope.on_base_class: + with non_dc_mixin(): + + class Book(Mixin, MappedAsDataclass, Base, **klass_kw): + id: Mapped[int] = mapped_column( + Integer, + primary_key=True, + init=False, + ) + + else: + if dataclass_scope.on_base: + local_non_dc_mixin = non_dc_mixin + else: + local_non_dc_mixin = contextlib.nullcontext + + with local_non_dc_mixin(): + + 
class Book(Mixin, Base): + if not dataclass_scope.on_sub_class: + id: Mapped[int] = mapped_column( # noqa: A001 + Integer, primary_key=True, init=False + ) + else: + id: Mapped[int] = mapped_column( # noqa: A001 + Integer, + primary_key=True, + ) + + if MappedAsDataclass in Book.__mro__: + expected_annotations[Book] = {"id": int, "polymorphic_type": str} + + if dataclass_scope.on_sub_class: + with non_dc_mixin(): + + class Novel(MappedAsDataclass, Book, **klass_kw): + id: Mapped[int] = mapped_column( # noqa: A001 + ForeignKey("book.id"), + primary_key=True, + init=False, + ) + description: Mapped[Optional[str]] + + else: + with non_dc_mixin(): + + class Novel(Book): + id: Mapped[int] = mapped_column( + ForeignKey("book.id"), + primary_key=True, + init=False, + ) + description: Mapped[Optional[str]] + + expected_annotations[Novel] = {"id": int, "description": Optional[str]} + + if test_alternative_callable: + eq_(collected_annotations, expected_annotations) + + n1 = Novel("the description") + eq_(n1.description, "the description") + + +class DataclassArgsTest(fixtures.TestBase): + dc_arg_names = ("init", "repr", "eq", "order", "unsafe_hash") + if compat.py310: + dc_arg_names += ("match_args", "kw_only") + + @testing.fixture(params=product(dc_arg_names, (True, False))) + def dc_argument_fixture(self, request: Any, registry: _RegistryType): + name, use_defaults = request.param + + args = {n: n == name for n in self.dc_arg_names} + if args["order"]: + args["eq"] = True + if use_defaults: + default = { + "init": True, + "repr": True, + "eq": True, + "order": False, + "unsafe_hash": False, + } + if compat.py310: + default |= {"match_args": True, "kw_only": False} + to_apply = {k: v for k, v in args.items() if v} + effective = {**default, **to_apply} + return to_apply, effective + else: + return args, args + + @testing.fixture(params=["mapped_column", "synonym", "deferred"]) + def mapped_expr_constructor(self, request): + name = request.param + + if name == "mapped_column": + yield mapped_column(default=7, init=True) + elif name == "synonym": + yield synonym("some_int", default=7, init=True) + elif name == "deferred": + yield deferred(Column(Integer), default=7, init=True) + + def test_attrs_rejected_if_not_a_dc( + self, mapped_expr_constructor, decl_base: Type[DeclarativeBase] + ): + if isinstance(mapped_expr_constructor, MappedColumn): + unwanted_args = "'init'" + else: + unwanted_args = "'default', 'init'" + with expect_raises_message( + exc.ArgumentError, + r"Attribute 'x' on class .*A.* includes dataclasses " + r"argument\(s\): " + rf"{unwanted_args} but class does not specify SQLAlchemy native " + "dataclass configuration", + ): + + class A(decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True) + + x: Mapped[int] = mapped_expr_constructor + + def _assert_cls(self, cls, dc_arguments): + if dc_arguments["init"]: + + def create(data, x): + if dc_arguments.get("kw_only"): + return cls(data=data, x=x) + else: + return cls(data, x) + + else: + + def create(data, x): + a1 = cls() + a1.data = data + a1.x = x + return a1 + + for n in self.dc_arg_names: + if dc_arguments[n]: + getattr(self, f"_assert_{n}")(cls, create, dc_arguments) + else: + getattr(self, f"_assert_not_{n}")(cls, create, dc_arguments) + + if dc_arguments["init"]: + a1 = cls(data="some data") + eq_(a1.x, 7) + + a1 = create("some data", 15) + some_int = a1.some_int + eq_( + dataclasses.asdict(a1), + {"data": "some data", "id": None, "some_int": some_int, "x": 15}, + ) + eq_(dataclasses.astuple(a1), 
(None, "some data", some_int, 15)) + + def _assert_unsafe_hash(self, cls, create, dc_arguments): + a1 = create("d1", 5) + hash(a1) + + def _assert_not_unsafe_hash(self, cls, create, dc_arguments): + a1 = create("d1", 5) + + if dc_arguments["eq"]: + with expect_raises(TypeError): + hash(a1) + else: + hash(a1) + + def _assert_eq(self, cls, create, dc_arguments): + a1 = create("d1", 5) + a2 = create("d2", 10) + a3 = create("d1", 5) + + eq_(a1, a3) + ne_(a1, a2) + + def _assert_not_eq(self, cls, create, dc_arguments): + a1 = create("d1", 5) + a2 = create("d2", 10) + a3 = create("d1", 5) + + eq_(a1, a1) + ne_(a1, a3) + ne_(a1, a2) + + def _assert_order(self, cls, create, dc_arguments): + is_false(create("g", 10) < create("b", 7)) + + is_true(create("g", 10) > create("b", 7)) + + is_false(create("g", 10) <= create("b", 7)) + + is_true(create("g", 10) >= create("b", 7)) + + eq_( + list(sorted([create("g", 10), create("g", 5), create("b", 7)])), + [ + create("b", 7), + create("g", 5), + create("g", 10), + ], + ) + + def _assert_not_order(self, cls, create, dc_arguments): + with expect_raises(TypeError): + create("g", 10) < create("b", 7) + + with expect_raises(TypeError): + create("g", 10) > create("b", 7) + + with expect_raises(TypeError): + create("g", 10) <= create("b", 7) + + with expect_raises(TypeError): + create("g", 10) >= create("b", 7) + + def _assert_repr(self, cls, create, dc_arguments): + assert "__repr__" in cls.__dict__ + a1 = create("some data", 12) + eq_regex(repr(a1), r".*A\(id=None, data='some data', x=12\)") + + def _assert_not_repr(self, cls, create, dc_arguments): + assert "__repr__" not in cls.__dict__ + + # if a superclass has __repr__, then we still get repr. + # so can't test this + # a1 = create("some data", 12) + # eq_regex(repr(a1), r"<.*A object at 0x.*>") + + def _assert_init(self, cls, create, dc_arguments): + if not dc_arguments.get("kw_only", False): + a1 = cls("some data", 5) + + eq_(a1.data, "some data") + eq_(a1.x, 5) + + a2 = cls(data="some data", x=5) + eq_(a2.data, "some data") + eq_(a2.x, 5) + + a3 = cls(data="some data") + eq_(a3.data, "some data") + eq_(a3.x, 7) + + def _assert_not_init(self, cls, create, dc_arguments): + with expect_raises(TypeError): + cls("Some data", 5) + + # we run real "dataclasses" on the class. so with init=False, it + # doesn't touch what was there, and the SQLA default constructor + # gets put on. 
+ a1 = cls(data="some data") + eq_(a1.data, "some data") + eq_(a1.x, None) + + a1 = cls() + eq_(a1.data, None) + + # no constructor, it sets None for x...ok + eq_(a1.x, None) + + def _assert_match_args(self, cls, create, dc_arguments): + if not dc_arguments["kw_only"]: + is_true(len(cls.__match_args__) > 0) + + def _assert_not_match_args(self, cls, create, dc_arguments): + is_false(hasattr(cls, "__match_args__")) + + def _assert_kw_only(self, cls, create, dc_arguments): + if dc_arguments["init"]: + fas = pyinspect.getfullargspec(cls.__init__) + eq_(fas.args, ["self"]) + eq_( + len(fas.kwonlyargs), + len(pyinspect.signature(cls.__init__).parameters) - 1, + ) + + def _assert_not_kw_only(self, cls, create, dc_arguments): + if dc_arguments["init"]: + fas = pyinspect.getfullargspec(cls.__init__) + eq_( + len(fas.args), + len(pyinspect.signature(cls.__init__).parameters), + ) + eq_(fas.kwonlyargs, []) + + def test_dc_arguments_decorator( + self, + dc_argument_fixture, + mapped_expr_constructor, + registry: _RegistryType, + ): + @registry.mapped_as_dataclass(**dc_argument_fixture[0]) + class A: + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] + + some_int: Mapped[int] = mapped_column(init=False, repr=False) + + x: Mapped[Optional[int]] = mapped_expr_constructor + + self._assert_cls(A, dc_argument_fixture[1]) + + def test_dc_arguments_base( + self, + dc_argument_fixture, + mapped_expr_constructor, + registry: _RegistryType, + ): + reg = registry + + class Base( + MappedAsDataclass, DeclarativeBase, **dc_argument_fixture[0] + ): + registry = reg + + class A(Base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] + + some_int: Mapped[int] = mapped_column(init=False, repr=False) + + x: Mapped[Optional[int]] = mapped_expr_constructor + + self._assert_cls(A, dc_argument_fixture[1]) + + def test_dc_arguments_perclass( + self, + dc_argument_fixture, + mapped_expr_constructor, + decl_base: Type[DeclarativeBase], + ): + class A(MappedAsDataclass, decl_base, **dc_argument_fixture[0]): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] + + some_int: Mapped[int] = mapped_column(init=False, repr=False) + + x: Mapped[Optional[int]] = mapped_expr_constructor + + self._assert_cls(A, dc_argument_fixture[1]) + + def test_dc_arguments_override_base(self, registry: _RegistryType): + reg = registry + + class Base(MappedAsDataclass, DeclarativeBase, init=False, order=True): + registry = reg + + class A(Base, init=True, repr=False): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] + + some_int: Mapped[int] = mapped_column(init=False, repr=False) + + x: Mapped[Optional[int]] = mapped_column(default=7) + + effective = { + "init": True, + "repr": False, + "eq": True, + "order": True, + "unsafe_hash": False, + } + if compat.py310: + effective |= {"match_args": True, "kw_only": False} + self._assert_cls(A, effective) + + def test_dc_base_unsupported_argument(self, registry: _RegistryType): + reg = registry + with expect_raises(TypeError): + + class Base(MappedAsDataclass, DeclarativeBase, slots=True): + registry = reg + + class Base2(MappedAsDataclass, DeclarativeBase, order=True): + registry = reg + + with expect_raises(TypeError): + + class A(Base2, slots=False): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + + def 
test_dc_decorator_unsupported_argument(self, registry: _RegistryType): + reg = registry + with expect_raises(TypeError): + + @registry.mapped_as_dataclass(slots=True) + class Base(DeclarativeBase): + registry = reg + + class Base2(MappedAsDataclass, DeclarativeBase, order=True): + registry = reg + + with expect_raises(TypeError): + + @registry.mapped_as_dataclass(slots=True) + class A(Base2): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + + def test_dc_raise_for_slots( + self, + registry: _RegistryType, + decl_base: Type[DeclarativeBase], + ): + reg = registry + with expect_raises_message( + exc.ArgumentError, + r"Dataclass argument\(s\) 'slots', 'unknown' are not accepted", + ): + + class A(MappedAsDataclass, decl_base): + __tablename__ = "a" + _sa_apply_dc_transforms = {"slots": True, "unknown": 5} + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + + with expect_raises_message( + exc.ArgumentError, + r"Dataclass argument\(s\) 'slots' are not accepted", + ): + + class Base(MappedAsDataclass, DeclarativeBase, order=True): + registry = reg + _sa_apply_dc_transforms = {"slots": True} + + with expect_raises_message( + exc.ArgumentError, + r"Dataclass argument\(s\) 'slots', 'unknown' are not accepted", + ): + + @reg.mapped + class C: + __tablename__ = "a" + _sa_apply_dc_transforms = {"slots": True, "unknown": 5} + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + + @testing.variation("use_arguments", [True, False]) + @testing.combinations( + mapped_column, + lambda **kw: synonym("some_int", **kw), + lambda **kw: deferred(Column(Integer), **kw), + lambda **kw: composite("foo", **kw), + lambda **kw: relationship("Foo", **kw), + lambda **kw: association_proxy("foo", "bar", **kw), + argnames="construct", + ) + def test_attribute_options(self, use_arguments, construct): + if use_arguments: + kw = { + "init": False, + "repr": False, + "default": False, + "default_factory": list, + "compare": True, + "kw_only": False, + "hash": False, + } + exp = interfaces._AttributeOptions( + False, False, False, list, True, False, False + ) + else: + kw = {} + exp = interfaces._DEFAULT_ATTRIBUTE_OPTIONS + + prop = construct(**kw) + eq_(prop._attribute_options, exp) + + @testing.variation("use_arguments", [True, False]) + @testing.combinations( + lambda **kw: column_property(Column(Integer), **kw), + lambda **kw: query_expression(**kw), + argnames="construct", + ) + def test_ro_attribute_options(self, use_arguments, construct): + if use_arguments: + kw = { + "repr": False, + "compare": True, + } + exp = interfaces._AttributeOptions( + False, + False, + _NoArg.NO_ARG, + _NoArg.NO_ARG, + True, + _NoArg.NO_ARG, + _NoArg.NO_ARG, + ) + else: + kw = {} + exp = interfaces._DEFAULT_READONLY_ATTRIBUTE_OPTIONS + + prop = construct(**kw) + eq_(prop._attribute_options, exp) + + +class MixinColumnTest(fixtures.TestBase, testing.AssertsCompiledSQL): + """tests for #8718""" + + __dialect__ = "default" + + @testing.fixture + def model(self): + def go(use_mixin, use_inherits, mad_setup, dataclass_kw): + if use_mixin: + if mad_setup == "dc, mad": + + class BaseEntity( + DeclarativeBase, MappedAsDataclass, **dataclass_kw + ): + pass + + elif mad_setup == "mad, dc": + + class BaseEntity( + MappedAsDataclass, DeclarativeBase, **dataclass_kw + ): + pass + + elif mad_setup == "subclass": + + class BaseEntity(DeclarativeBase): + pass + + class IdMixin(MappedAsDataclass): + id: Mapped[int] = mapped_column( + primary_key=True, init=False + ) + + if mad_setup == 
"subclass": + + class A( + IdMixin, MappedAsDataclass, BaseEntity, **dataclass_kw + ): + __mapper_args__ = { + "polymorphic_on": "type", + "polymorphic_identity": "a", + } + + __tablename__ = "a" + type: Mapped[str] = mapped_column(String, init=False) + data: Mapped[str] = mapped_column(String, init=False) + + else: + + class A(IdMixin, BaseEntity): + __mapper_args__ = { + "polymorphic_on": "type", + "polymorphic_identity": "a", + } + + __tablename__ = "a" + type: Mapped[str] = mapped_column(String, init=False) + data: Mapped[str] = mapped_column(String, init=False) + + else: + if mad_setup == "dc, mad": + + class BaseEntity( + DeclarativeBase, MappedAsDataclass, **dataclass_kw + ): + id: Mapped[int] = mapped_column( + primary_key=True, init=False + ) + + elif mad_setup == "mad, dc": + + class BaseEntity( + MappedAsDataclass, DeclarativeBase, **dataclass_kw + ): + id: Mapped[int] = mapped_column( + primary_key=True, init=False + ) + + elif mad_setup == "subclass": + + class BaseEntity(MappedAsDataclass, DeclarativeBase): + id: Mapped[int] = mapped_column( + primary_key=True, init=False + ) + + if mad_setup == "subclass": + + class A(BaseEntity, **dataclass_kw): + __mapper_args__ = { + "polymorphic_on": "type", + "polymorphic_identity": "a", + } + + __tablename__ = "a" + type: Mapped[str] = mapped_column(String, init=False) + data: Mapped[str] = mapped_column(String, init=False) + + else: + + class A(BaseEntity): + __mapper_args__ = { + "polymorphic_on": "type", + "polymorphic_identity": "a", + } + + __tablename__ = "a" + type: Mapped[str] = mapped_column(String, init=False) + data: Mapped[str] = mapped_column(String, init=False) + + if use_inherits: + + class B(A): + __mapper_args__ = { + "polymorphic_identity": "b", + } + b_data: Mapped[str] = mapped_column(String, init=False) + + return B + else: + return A + + yield go + + @testing.combinations("inherits", "plain", argnames="use_inherits") + @testing.combinations("mixin", "base", argnames="use_mixin") + @testing.combinations( + "mad, dc", "dc, mad", "subclass", argnames="mad_setup" + ) + def test_mapping(self, model, use_inherits, use_mixin, mad_setup): + target_cls = model( + use_inherits=use_inherits == "inherits", + use_mixin=use_mixin == "mixin", + mad_setup=mad_setup, + dataclass_kw={}, + ) + + obj = target_cls() + assert "id" not in obj.__dict__ + + +class CompositeTest(fixtures.TestBase, testing.AssertsCompiledSQL): + __dialect__ = "default" + + def test_composite_setup(self, dc_decl_base: Type[MappedAsDataclass]): + @dataclasses.dataclass + class Point: + x: int + y: int + + class Edge(dc_decl_base): + __tablename__ = "edge" + id: Mapped[int] = mapped_column(primary_key=True, init=False) + graph_id: Mapped[int] = mapped_column( + ForeignKey("graph.id"), init=False + ) + + start: Mapped[Point] = composite( + Point, mapped_column("x1"), mapped_column("y1"), default=None + ) + + end: Mapped[Point] = composite( + Point, mapped_column("x2"), mapped_column("y2"), default=None + ) + + class Graph(dc_decl_base): + __tablename__ = "graph" + id: Mapped[int] = mapped_column(primary_key=True, init=False) + + edges: Mapped[List[Edge]] = relationship() + + Point.__qualname__ = "mymodel.Point" + Edge.__qualname__ = "mymodel.Edge" + Graph.__qualname__ = "mymodel.Graph" + g = Graph( + edges=[ + Edge(start=Point(1, 2), end=Point(3, 4)), + Edge(start=Point(7, 8), end=Point(5, 6)), + ] + ) + eq_( + repr(g), + "mymodel.Graph(id=None, edges=[mymodel.Edge(id=None, " + "graph_id=None, start=mymodel.Point(x=1, y=2), " + "end=mymodel.Point(x=3, y=4)), " 
+ "mymodel.Edge(id=None, graph_id=None, " + "start=mymodel.Point(x=7, y=8), end=mymodel.Point(x=5, y=6))])", + ) + + def test_named_setup(self, dc_decl_base: Type[MappedAsDataclass]): + @dataclasses.dataclass + class Address: + street: str + state: str + zip_: str + + class User(dc_decl_base): + __tablename__ = "user" + + id: Mapped[int] = mapped_column( + primary_key=True, init=False, repr=False + ) + name: Mapped[str] = mapped_column() + + address: Mapped[Address] = composite( + Address, + mapped_column(), + mapped_column(), + mapped_column("zip"), + default=None, + ) + + Address.__qualname__ = "mymodule.Address" + User.__qualname__ = "mymodule.User" + u = User( + name="user 1", + address=Address("123 anywhere street", "NY", "12345"), + ) + u2 = User("u2") + eq_( + repr(u), + "mymodule.User(name='user 1', " + "address=mymodule.Address(street='123 anywhere street', " + "state='NY', zip_='12345'))", + ) + eq_(repr(u2), "mymodule.User(name='u2', address=None)") + + +class ReadOnlyAttrTest(fixtures.TestBase, testing.AssertsCompiledSQL): + """tests related to #9628""" + + __dialect__ = "default" + + @testing.combinations( + (query_expression,), (column_property,), argnames="construct" + ) + def test_default_behavior( + self, dc_decl_base: Type[MappedAsDataclass], construct + ): + class MyClass(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] = mapped_column() + + const: Mapped[str] = construct(data + "asdf") + + m1 = MyClass(data="foo") + eq_(m1, MyClass(data="foo")) + ne_(m1, MyClass(data="bar")) + + eq_regex( + repr(m1), + r".*MyClass\(id=None, data='foo', const=None\)", + ) + + @testing.combinations( + (query_expression,), (column_property,), argnames="construct" + ) + def test_no_repr_behavior( + self, dc_decl_base: Type[MappedAsDataclass], construct + ): + class MyClass(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] = mapped_column() + + const: Mapped[str] = construct(data + "asdf", repr=False) + + m1 = MyClass(data="foo") + + eq_regex( + repr(m1), + r".*MyClass\(id=None, data='foo'\)", + ) + + @testing.combinations( + (query_expression,), (column_property,), argnames="construct" + ) + def test_enable_compare( + self, dc_decl_base: Type[MappedAsDataclass], construct + ): + class MyClass(dc_decl_base): + __tablename__ = "a" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + data: Mapped[str] = mapped_column() + + const: Mapped[str] = construct(data + "asdf", compare=True) + + m1 = MyClass(data="foo") + eq_(m1, MyClass(data="foo")) + ne_(m1, MyClass(data="bar")) + + m2 = MyClass(data="foo") + m2.const = "some const" + ne_(m2, MyClass(data="foo")) + m3 = MyClass(data="foo") + m3.const = "some const" + eq_(m2, m3) diff --git a/test/typing/test_overloads.py b/test/typing/test_overloads.py index 2a99915748e..e58b78211b1 100644 --- a/test/typing/test_overloads.py +++ b/test/typing/test_overloads.py @@ -79,12 +79,10 @@ def test_methods(self, class_, expected): @testing.combinations( (CoreExecuteOptionsParameter, core_execution_options), - # https://github.com/python/cpython/issues/133701 - ( - OrmExecuteOptionsParameter, - orm_execution_options, - testing.requires.fail_python314b1, - ), + # note: this failed on python 3.14.0b1 + # due to https://github.com/python/cpython/issues/133701. 
+ # something to keep in mind in case it breaks again + (OrmExecuteOptionsParameter, orm_execution_options), ) def test_typed_dicts(self, typ, expected): # we currently expect these to be union types with first entry diff --git a/tools/sync_test_files.py b/tools/sync_test_files.py index f855cd12c2d..4c825c2d7fb 100644 --- a/tools/sync_test_files.py +++ b/tools/sync_test_files.py @@ -6,6 +6,7 @@ from __future__ import annotations from pathlib import Path +from tempfile import NamedTemporaryFile from typing import Any from typing import Iterable @@ -34,7 +35,15 @@ def run_operation( source_data = Path(source).read_text().replace(remove_str, "") dest_data = header.format(source=source, this_file=this_file) + source_data - cmd.write_output_file_from_text(dest_data, dest) + with NamedTemporaryFile( + mode="w", + delete=False, + suffix=".py", + ) as buf: + buf.write(dest_data) + + cmd.run_black(buf.name) + cmd.write_output_file_from_tempfile(buf.name, dest) def main(file: str, cmd: code_writer_cmd) -> None: @@ -51,7 +60,11 @@ def main(file: str, cmd: code_writer_cmd) -> None: "typed_annotation": { "source": "test/orm/declarative/test_typed_mapping.py", "dest": "test/orm/declarative/test_tm_future_annotations_sync.py", - } + }, + "dc_typed_annotation": { + "source": "test/orm/declarative/test_dc_transforms.py", + "dest": "test/orm/declarative/test_dc_transforms_future_anno_sync.py", + }, } if __name__ == "__main__": diff --git a/tox.ini b/tox.ini index 6b3e970902b..f776b2a4b63 100644 --- a/tox.ini +++ b/tox.ini @@ -31,7 +31,7 @@ extras= # this can be limited to specific python versions IF there is no # greenlet available for the most recent python. otherwise # keep this present in all cases - py{38,39,310,311,312,313}: {[greenletextras]extras} + py{38,39,310,311,312,313,314}: {[greenletextras]extras} postgresql: postgresql postgresql: postgresql_pg8000 From e18f5da532e7be90f8d260d79361be5df32ec459 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 3 Jun 2025 14:28:19 -0400 Subject: [PATCH 540/544] add python 3.14 to run-test If I'm reading correctly at https://github.com/actions/python-versions , there are plenty of python 3.14 versions available, so this should "work". 
Still not sure about wheel building so leaving that separate

Change-Id: Idd1ce0db124b700091f5499d6a7d087f6e31777e
(cherry picked from commit 9128189eaacf05a8479b27ef5b2e77f27f2f5ec3)
---
 .github/workflows/run-on-pr.yaml | 2 +-
 .github/workflows/run-test.yaml  | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/run-on-pr.yaml b/.github/workflows/run-on-pr.yaml
index 0d1313bf39c..889da8499f3 100644
--- a/.github/workflows/run-on-pr.yaml
+++ b/.github/workflows/run-on-pr.yaml
@@ -25,7 +25,7 @@ jobs:
         os:
           - "ubuntu-22.04"
         python-version:
-          - "3.12"
+          - "3.13"
         build-type:
           - "cext"
           - "nocext"
diff --git a/.github/workflows/run-test.yaml b/.github/workflows/run-test.yaml
index 6c93ef1b4f7..303f0b7aecb 100644
--- a/.github/workflows/run-test.yaml
+++ b/.github/workflows/run-test.yaml
@@ -39,6 +39,7 @@ jobs:
           - "3.11"
           - "3.12"
           - "3.13"
+          - "3.14"
           - "pypy-3.10"
         build-type:
           - "cext"

From 5ebc3e095b4d3c16b28337fe87cab6ed5d766551 Mon Sep 17 00:00:00 2001
From: Mike Bayer
Date: Tue, 3 Jun 2025 17:15:54 -0400
Subject: [PATCH 541/544] give up on running py 3.14 in github actions

not worth it

this is a good learning case for why we use jenkins

Change-Id: If70b0029545c70c0b5a9e1c203c853164caef874
(cherry picked from commit af2895a1d767a5357ccfeec9b57568cd6a6e0846)
---
 .github/workflows/run-test.yaml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/run-test.yaml b/.github/workflows/run-test.yaml
index 303f0b7aecb..6c93ef1b4f7 100644
--- a/.github/workflows/run-test.yaml
+++ b/.github/workflows/run-test.yaml
@@ -39,7 +39,6 @@ jobs:
           - "3.11"
           - "3.12"
           - "3.13"
-          - "3.14"
           - "pypy-3.10"
         build-type:
           - "cext"

From fb319825dc0a0b4ef1c3aead13c179b0f97ca6a2 Mon Sep 17 00:00:00 2001
From: Mike Bayer
Date: Thu, 5 Jun 2025 08:58:49 -0400
Subject: [PATCH 542/544] hardcode now(), current_timestamp() into the MySQL
 regex

Fixed yet another regression caused by the DEFAULT rendering changes in
2.0.40 :ticket:`12425`, similar to :ticket:`12488`, this time where using
a CURRENT_TIMESTAMP function with a fractional seconds portion inside a
textual default value would also fail to be recognized as a
non-parenthesized server default.

There's no way to do this other than to start hardcoding a list of MySQL
functions that demand that parentheses are not added around them; I can
think of no other heuristic that will work here.  Suggestions welcome

Fixes: #12648
Change-Id: I75d274b56306089929b369ecfb23604e9d6fa9dd
(cherry picked from commit 8e9f789f1aa0309005e8b7725643b32802e7d214)
---
 doc/build/changelog/unreleased_20/12648.rst | 11 +++++++
 lib/sqlalchemy/dialects/mysql/base.py       |  5 ++++
 test/dialect/mysql/test_compiler.py         | 32 +++++++++++++++++++--
 test/dialect/mysql/test_query.py            | 11 +++++++
 4 files changed, 57 insertions(+), 2 deletions(-)
 create mode 100644 doc/build/changelog/unreleased_20/12648.rst

diff --git a/doc/build/changelog/unreleased_20/12648.rst b/doc/build/changelog/unreleased_20/12648.rst
new file mode 100644
index 00000000000..4abe0e395d6
--- /dev/null
+++ b/doc/build/changelog/unreleased_20/12648.rst
@@ -0,0 +1,11 @@
+.. change::
+    :tags: bug, mysql
+    :tickets: 12648
+
+    Fixed yet another regression caused by the DEFAULT rendering changes in
+    2.0.40 :ticket:`12425`, similar to :ticket:`12488`, this time where using a
+    CURRENT_TIMESTAMP function with a fractional seconds portion inside a
+    textual default value would also fail to be recognized as a
+    non-parenthesized server default.
+ + diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index be49a7e7623..f398fe8a04c 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -2067,6 +2067,11 @@ def get_column_specification( self.dialect._support_default_function and not re.match(r"^\s*[\'\"\(]", default) and not re.search(r"ON +UPDATE", default, re.I) + and not re.match( + r"\bnow\(\d+\)|\bcurrent_timestamp\(\d+\)", + default, + re.I, + ) and re.match(r".*\W.*", default) ): colspec.append(f"DEFAULT ({default})") diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py index 4a799234d93..4364872bafe 100644 --- a/test/dialect/mysql/test_compiler.py +++ b/test/dialect/mysql/test_compiler.py @@ -453,6 +453,26 @@ def test_create_server_default_with_function_using( DateTime, server_default=text("now() ON UPDATE now()"), ), + Column( + "updated4", + DateTime, + server_default=text("now(3)"), + ), + Column( + "updated5", + DateTime, + server_default=text("nOW(3)"), + ), + Column( + "updated6", + DateTime, + server_default=text("notnow(1)"), + ), + Column( + "updated7", + DateTime, + server_default=text("CURRENT_TIMESTAMP(3)"), + ), ) eq_(dialect._support_default_function, has_brackets) @@ -467,7 +487,11 @@ def test_create_server_default_with_function_using( "data JSON DEFAULT (json_object()), " "updated1 DATETIME DEFAULT now() on update now(), " "updated2 DATETIME DEFAULT now() On UpDate now(), " - "updated3 DATETIME DEFAULT now() ON UPDATE now())", + "updated3 DATETIME DEFAULT now() ON UPDATE now(), " + "updated4 DATETIME DEFAULT now(3), " + "updated5 DATETIME DEFAULT nOW(3), " + "updated6 DATETIME DEFAULT (notnow(1)), " + "updated7 DATETIME DEFAULT CURRENT_TIMESTAMP(3))", dialect=dialect, ) else: @@ -480,7 +504,11 @@ def test_create_server_default_with_function_using( "data JSON DEFAULT json_object(), " "updated1 DATETIME DEFAULT now() on update now(), " "updated2 DATETIME DEFAULT now() On UpDate now(), " - "updated3 DATETIME DEFAULT now() ON UPDATE now())", + "updated3 DATETIME DEFAULT now() ON UPDATE now(), " + "updated4 DATETIME DEFAULT now(3), " + "updated5 DATETIME DEFAULT nOW(3), " + "updated6 DATETIME DEFAULT notnow(1), " + "updated7 DATETIME DEFAULT CURRENT_TIMESTAMP(3))", dialect=dialect, ) diff --git a/test/dialect/mysql/test_query.py b/test/dialect/mysql/test_query.py index a5111931312..890c9edbf9d 100644 --- a/test/dialect/mysql/test_query.py +++ b/test/dialect/mysql/test_query.py @@ -21,6 +21,7 @@ from sqlalchemy import testing from sqlalchemy import text from sqlalchemy import true +from sqlalchemy.dialects.mysql import TIMESTAMP from sqlalchemy.testing import assert_raises from sqlalchemy.testing import combinations from sqlalchemy.testing import eq_ @@ -85,6 +86,16 @@ class ServerDefaultCreateTest(fixtures.TestBase): DateTime, text("now() ON UPDATE now()"), ), + ( + TIMESTAMP(fsp=3), + text("now(3)"), + testing.requires.mysql_fsp, + ), + ( + TIMESTAMP(fsp=3), + text("CURRENT_TIMESTAMP(3)"), + testing.requires.mysql_fsp, + ), argnames="datatype, default", ) def test_create_server_defaults( From 0518e7564e3cd8ab7ba426d6b50bc5d83e9b50e3 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 7 Jun 2025 09:01:14 -0400 Subject: [PATCH 543/544] update docs for "copy column" warning these docs failed to mention we're talking about ORM flush References: #12650 Change-Id: I3a1655ba99e98021327c90d5cd0c0f8258f4ddc6 (cherry picked from commit 39142af868c0bd98e6ce59c009e62a597a2452f2) --- 
 doc/build/orm/join_conditions.rst | 20 +++++++++++++-------
 1 file changed, 13 insertions(+), 7 deletions(-)

diff --git a/doc/build/orm/join_conditions.rst b/doc/build/orm/join_conditions.rst
index 1a26d94a8b7..6422a74875d 100644
--- a/doc/build/orm/join_conditions.rst
+++ b/doc/build/orm/join_conditions.rst
@@ -424,13 +424,19 @@ What this refers to originates from the fact that ``Article.magazine_id``
 is the subject of two different foreign key constraints; it refers to
 ``Magazine.id`` directly as a source column, but also refers to
 ``Writer.magazine_id`` as a source column in the context of the
-composite key to ``Writer``.  If we associate an ``Article`` with a
-particular ``Magazine``, but then associate the ``Article`` with a
-``Writer`` that's associated with a *different* ``Magazine``, the ORM
-will overwrite ``Article.magazine_id`` non-deterministically, silently
-changing which magazine to which we refer; it may
-also attempt to place NULL into this column if we de-associate a
-``Writer`` from an ``Article``.  The warning lets us know this is the case.
+composite key to ``Writer``.
+
+When objects are added to an ORM :class:`.Session` using :meth:`.Session.add`,
+the ORM :term:`flush` process takes on the task of reconciling object
+references that correspond to :func:`_orm.relationship` configurations and
+delivering this state to the database using INSERT/UPDATE/DELETE statements. In
+this specific example, if we associate an ``Article`` with a particular
+``Magazine``, but then associate the ``Article`` with a ``Writer`` that's
+associated with a *different* ``Magazine``, this flush process will overwrite
+``Article.magazine_id`` non-deterministically, silently changing which magazine
+to which we refer; it may also attempt to place NULL into this column if we
+de-associate a ``Writer`` from an ``Article``. The warning lets us know that
+this scenario may occur during ORM flush sequences.
 
 To solve this, we need to break out the behavior of ``Article`` to
 include all three of the following features:

From 6703d87c5943440380d5c1d5bfab3fccb7278095 Mon Sep 17 00:00:00 2001
From: krave1986
Date: Sun, 8 Jun 2025 04:03:10 +0800
Subject: [PATCH 544/544] Fix missing data type in Article.writer_id mapping
 example (#12649)

(cherry picked from commit f2eda87a6b7f1534851da2d0370bd034d1791bfc)
---
 doc/build/orm/join_conditions.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/build/orm/join_conditions.rst b/doc/build/orm/join_conditions.rst
index 6422a74875d..8a220c9d8a1 100644
--- a/doc/build/orm/join_conditions.rst
+++ b/doc/build/orm/join_conditions.rst
@@ -389,7 +389,7 @@ for both; then to make ``Article`` refer to ``Writer`` as well,
 
         article_id = mapped_column(Integer)
         magazine_id = mapped_column(ForeignKey("magazine.id"))
-        writer_id = mapped_column(Integer)
+        writer_id = mapped_column(Integer)

         magazine = relationship("Magazine")
         writer = relationship("Writer")
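
For reference, the ``Magazine`` / ``Writer`` / ``Article`` mapping that these
two ``join_conditions.rst`` patches discuss looks roughly like the sketch
below. This is a hypothetical reconstruction for illustration only: the class
and column names follow the surrounding documentation example, but the primary
key and composite ``ForeignKeyConstraint`` layout shown here is an assumption,
since the full example does not appear in these diffs::

    from sqlalchemy import ForeignKey, ForeignKeyConstraint
    from sqlalchemy import PrimaryKeyConstraint
    from sqlalchemy.orm import DeclarativeBase, Mapped
    from sqlalchemy.orm import mapped_column, relationship


    class Base(DeclarativeBase):
        pass


    class Magazine(Base):
        __tablename__ = "magazine"

        id: Mapped[int] = mapped_column(primary_key=True)


    class Writer(Base):
        __tablename__ = "writer"

        id: Mapped[int] = mapped_column(primary_key=True)
        magazine_id: Mapped[int] = mapped_column(
            ForeignKey("magazine.id"), primary_key=True
        )
        magazine: Mapped[Magazine] = relationship()


    class Article(Base):
        __tablename__ = "article"
        __table_args__ = (
            PrimaryKeyConstraint("article_id", "magazine_id"),
            # magazine_id participates in two constraints: the direct FK to
            # magazine.id on the column itself, plus this composite FK that
            # points at Writer's two-column primary key.
            ForeignKeyConstraint(
                ["magazine_id", "writer_id"],
                ["writer.magazine_id", "writer.id"],
            ),
        )

        article_id: Mapped[int] = mapped_column()
        magazine_id: Mapped[int] = mapped_column(ForeignKey("magazine.id"))
        writer_id: Mapped[int] = mapped_column()

        # with both relationships configured as plain relationship(), mapper
        # configuration emits the "relationship will copy column" warning the
        # revised paragraph explains, since an ORM flush could write
        # magazine_id from either Article.magazine or Article.writer.
        magazine: Mapped[Magazine] = relationship()
        writer: Mapped[Writer] = relationship()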